diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index a79369dc..28577ca1 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -1,2 +1,42 @@ {"id":"bd-log1","title":"Add DIY minimal logging to agent-relay daemon","description":"Implement a lightweight ~20-line logging approach for the agent-relay daemon. No external library needed.\n\nRequirements:\n- log_info(), log_warn(), log_error(), log_debug() functions\n- JSON output format for easy parsing with jq\n- Configurable via LOG_FILE and DEBUG env vars\n- Log daemon startup/shutdown\n- Log errors (delivery failures, connection issues)\n- Debug-only message delivery logging\n- Minimal performance impact (sub-5ms system)\n\nImplementation:\n```bash\n_log() {\n local level=\"$1\" msg=\"$2\"\n local ts=$(date -u +\"%Y-%m-%dT%H:%M:%SZ\")\n printf '{\"ts\":\"%s\",\"level\":\"%s\",\"msg\":\"%s\"}\\n' \"$ts\" \"$level\" \"$msg\" >> \"$LOG_FILE\"\n [[ \"$LOG_STDOUT\" == \"1\" ]] && echo \"[$level] $msg\"\n}\n\nlog_info() { _log \"INFO\" \"$1\"; }\nlog_warn() { _log \"WARN\" \"$1\"; }\nlog_error() { _log \"ERROR\" \"$1\"; }\nlog_debug() { [[ \"${DEBUG:-0}\" == \"1\" ]] && _log \"DEBUG\" \"$1\"; }\n```\n\nFiles modified:\n- src/utils/logger.ts (new file - TypeScript implementation)\n- src/daemon/server.ts (integrated logging)\n- src/utils/index.ts (exported logger)","priority":50,"status":"closed","created_at":"2026-01-01T07:40:00Z","closed_at":"2026-01-01T07:46:00Z","closed_reason":"Implemented TypeScript version with JSON output, configurable log levels, file logging support","tags":["logging","infrastructure","low-priority"]} {"id":"bd-custom-cmd1","title":"Custom Relay Commands - Implementation Approach Decision","description":"Allow users to define custom command patterns that trigger code execution when agents output them.\n\nSpec: docs/proposals/custom-commands.md\n\nDecision Required: Which implementation approach?\n\n| Option | Complexity | Flexibility | Time to MVP 
|\n|--------|------------|-------------|-------------|\n| A. Script Directory | Low | Low | 1-2 days |\n| B. YAML Config | Medium | High | 3-5 days |\n| C. TypeScript | High | Very High | 1-2 weeks |\n| D. Hybrid (A→B) | Low→Medium | Progressive | 2 days + iterate |\n\nRecommendation: Option D (Hybrid)\n- Ship script directory first (works immediately)\n- Add YAML config based on user feedback\n- TypeScript handlers as future enhancement\n\nUse Cases:\n- DevOps: ->deploy:staging, ->rollback:prod\n- Integrations: ->jira:create, ->slack:#team\n- Testing: ->test:unit, ->lint:fix\n- Agent Coordination: ->assign:Alice, ->handoff:Bob","priority":70,"status":"open","created_at":"2026-01-04T10:00:00Z","tags":["feature","extensibility","decision-required"]} +{"id":"bd-critical-001","title":"[LAUNCH BLOCKER] Fix WebSocket Reconnection & Session Recovery","description":"WebSocket drops are losing user context and messages.\n\n## Problem\n- WebSocket disconnect = lost messages\n- No automatic reconnection with backoff\n- Session state not persisted\n- Users lose typing context on refresh\n\n## Requirements\n1. Exponential backoff reconnection (1s, 2s, 4s, 8s, max 30s)\n2. Session ID persistence in localStorage\n3. Message queue for offline sends\n4. Reconnect indicator in UI\n5. Sync missed messages on reconnect\n\n## Files\n- src/dashboard/react-components/hooks/useWebSocket.ts\n- src/daemon/server.ts","priority":150,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-002","title":"[LAUNCH BLOCKER] Rate Limiting & Abuse Prevention","description":"No rate limiting = vulnerable to abuse and cost explosion.\n\n## Requirements\n1. API rate limiting per user/IP\n2. Message rate limiting (prevent spam)\n3. Agent spawn rate limiting\n4. WebSocket connection limits\n5. 
Graceful degradation under load\n\n## Implementation\n- Redis-based rate limiter\n- Sliding window algorithm\n- Different limits per plan tier\n- 429 responses with Retry-After header\n\n## Limits (Free Tier)\n- 100 API requests/minute\n- 30 messages/minute\n- 5 agent spawns/hour\n- 3 concurrent WebSocket connections","priority":145,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","security","p00"],"depends_on":[]} +{"id":"bd-critical-003","title":"[LAUNCH BLOCKER] Error Boundaries & Graceful Degradation","description":"Uncaught errors crash the entire dashboard.\n\n## Requirements\n1. React Error Boundaries around major sections\n2. Fallback UI for crashed components\n3. Error reporting to backend (Sentry integration)\n4. User-friendly error messages\n5. Retry mechanisms for failed operations\n\n## Components to Wrap\n- MessageList\n- Sidebar\n- Settings panels\n- Agent cards\n- File uploads","priority":140,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","ux","p00"],"depends_on":[]} +{"id":"bd-critical-004","title":"[LAUNCH BLOCKER] Onboarding Flow for New Users","description":"New users land on empty dashboard with no guidance.\n\n## Requirements\n1. Welcome modal for first-time users\n2. Interactive tutorial/tooltips\n3. Sample workspace with demo agents\n4. Quick actions: Connect repo, spawn first agent\n5. Progress checklist\n\n## Flow\n1. Sign up → Welcome modal\n2. Connect GitHub → Show repo picker\n3. Create workspace → Auto-provision\n4. Spawn first agent → Guided prompt\n5. 
Send first message → Celebrate!\n\n## Metrics\n- Track funnel completion\n- Time to first agent spawn\n- Day 1 retention","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","onboarding","growth","p00"],"depends_on":[]} +{"id":"bd-critical-005","title":"[LAUNCH BLOCKER] Loading States & Skeleton UI","description":"Blank screens during loading feel broken.\n\n## Requirements\n1. Skeleton loaders for all lists\n2. Shimmer animations\n3. Progressive loading (show what we have)\n4. Optimistic updates for actions\n5. Loading indicators for async operations\n\n## Components Needing Skeletons\n- Message list\n- Agent sidebar\n- Workspace selector\n- Settings panels\n- History page","priority":130,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","ux","p00"],"depends_on":[]} +{"id":"bd-critical-006","title":"[LAUNCH BLOCKER] CSRF & Security Headers","description":"Missing security headers and CSRF protection.\n\n## Requirements\n1. CSRF tokens on all mutations\n2. Secure cookie settings (HttpOnly, SameSite, Secure)\n3. Content-Security-Policy header\n4. X-Frame-Options: DENY\n5. X-Content-Type-Options: nosniff\n6. Rate limit auth endpoints harder\n\n## Implementation\n- Middleware for security headers\n- CSRF token generation/validation\n- Audit existing endpoints","priority":145,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","security","p00"],"depends_on":[]} +{"id":"bd-critical-007","title":"[LAUNCH BLOCKER] Database Connection Pooling & Failover","description":"Single DB connection = single point of failure.\n\n## Requirements\n1. Connection pooling (PgBouncer or built-in)\n2. Read replicas for scalability\n3. Automatic failover\n4. Query timeout limits\n5. 
Dead connection detection\n\n## Implementation\n- Configure pool size based on load\n- Health check endpoint\n- Graceful degradation on DB issues","priority":140,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-008","title":"[LAUNCH BLOCKER] Workspace Provisioning Reliability","description":"Workspace creation sometimes fails silently.\n\n## Requirements\n1. Idempotent workspace creation\n2. Retry logic with exponential backoff\n3. Clear error messages on failure\n4. Cleanup on partial failure\n5. Status tracking (provisioning → ready → error)\n\n## Edge Cases\n- GitHub API rate limit during repo clone\n- Compute provisioning timeout\n- DNS propagation for custom domains\n- Quota exceeded","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-009","title":"[LAUNCH BLOCKER] Mobile Responsive Polish","description":"Dashboard unusable on phones - huge user segment.\n\n## Requirements\n1. Touch-friendly tap targets (44px min)\n2. Swipe gestures for navigation\n3. Mobile-optimized message input\n4. Responsive images/previews\n5. PWA support (installable)\n6. Test on actual devices\n\n## Priority Screens\n- Message view\n- Agent list\n- Settings\n- Onboarding","priority":125,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","mobile","ux","p00"],"depends_on":[]} +{"id":"bd-critical-010","title":"[LAUNCH BLOCKER] Analytics & Monitoring","description":"Flying blind without metrics.\n\n## Requirements\n1. Product analytics (Mixpanel/Amplitude/PostHog)\n2. Error tracking (Sentry)\n3. Performance monitoring (Core Web Vitals)\n4. Server metrics (CPU, memory, latency)\n5. 
Business metrics dashboard\n\n## Key Events to Track\n- Sign up, login\n- Workspace created\n- Agent spawned\n- Message sent\n- Feature usage\n- Errors\n\n## Alerts\n- Error rate spike\n- Latency p99 > 2s\n- Failed agent spawns","priority":130,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","analytics","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-011","title":"[URGENT] Billing Integration Complete","description":"Can't charge users = no business.\n\n## Requirements\n1. Stripe checkout flow working end-to-end\n2. Subscription management (upgrade/downgrade/cancel)\n3. Usage-based billing for compute\n4. Invoice generation\n5. Failed payment handling\n6. Dunning emails\n\n## Test Scenarios\n- New subscription\n- Plan upgrade mid-cycle\n- Cancellation\n- Failed payment retry\n- Webhook reliability","priority":140,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","billing","p00"],"depends_on":[]} +{"id":"bd-critical-012","title":"[URGENT] Email Transactional System","description":"No email = users forget us.\n\n## Requirements\n1. Welcome email on signup\n2. Workspace invitation emails\n3. Password reset (if applicable)\n4. Agent activity digest (daily/weekly)\n5. Billing receipts\n6. Unsubscribe handling\n\n## Implementation\n- Resend or SendGrid\n- Email templates (React Email)\n- Queue for reliability\n- Delivery tracking","priority":125,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","email","p00"],"depends_on":[]} +{"id":"bd-critical-013","title":"[URGENT] Terms of Service & Privacy Policy","description":"Legal requirement before launch.\n\n## Requirements\n1. Terms of Service page\n2. Privacy Policy page\n3. Cookie consent banner\n4. Data processing agreement (for enterprise)\n5. Acceptable use policy\n6. 
GDPR compliance basics\n\n## Implementation\n- Legal review of AI-specific terms\n- Cookie consent mechanism\n- Data export/deletion flow","priority":120,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","legal","p00"],"depends_on":[]} +{"id":"bd-critical-014","title":"[URGENT] Health Check & Status Page","description":"Users need to know if we're down.\n\n## Requirements\n1. /health endpoint for all services\n2. Public status page (statuspage.io or custom)\n3. Uptime monitoring (every 1 min)\n4. Incident management process\n5. Status page subscription\n\n## Services to Monitor\n- API\n- WebSocket\n- Database\n- Agent compute\n- GitHub integration","priority":115,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-015","title":"[URGENT] Backup & Disaster Recovery","description":"Data loss = company death.\n\n## Requirements\n1. Automated daily database backups\n2. Point-in-time recovery (PITR)\n3. Cross-region backup replication\n4. Backup restoration testing (monthly)\n5. Message/file backup strategy\n6. RTO < 4 hours, RPO < 1 hour\n\n## Implementation\n- Postgres WAL archiving\n- S3/R2 for file backups\n- Documented recovery runbook","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} +{"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create a PUBLIC WORKSPACE on Agent Relay that any logged-in user can join. Contains always-on AI agents (DocsBot, HelpBot, RoadmapBot). This is our viral growth mechanism.\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace\n5. 
Invites team members","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} +{"id":"bd-channels-001","title":"User Channels within Workspaces","description":"Add support for user-created channels within workspaces. Access controlled at workspace level (public vs private workspaces).","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} +{"id":"bd-search-001","title":"Full-Text Message Search","description":"Add comprehensive search across all messages.","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["search","core","p1"],"depends_on":[]} +{"id":"bd-files-001","title":"File Sharing & Attachments","description":"Add file upload, preview, and sharing capabilities.","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["files","core","p1"],"depends_on":[]} +{"id":"bd-reactions-001","title":"Emoji Reactions","description":"Add emoji reactions to messages.","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","engagement","p1"],"depends_on":[]} +{"id":"bd-notifications-001","title":"Push & Email Notifications","description":"Add comprehensive notification system.","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["notifications","core","p1"],"depends_on":[]} +{"id":"bd-huddles-001","title":"Voice Huddles","description":"Add quick voice calls in channels.","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["voice","collaboration","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-screen-share-001","title":"Screen Sharing","description":"Add screen sharing capability during huddles.","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-huddles-001"]} +{"id":"bd-integrations-001","title":"GitHub Integration","description":"Deep GitHub integration beyond 
OAuth.","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["integrations","p1"],"depends_on":[]} +{"id":"bd-integrations-002","title":"Integration Platform (Apps)","description":"Build platform for third-party integrations.","priority":65,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["platform","integrations","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-workflows-001","title":"Workflow Builder","description":"No-code automation builder.","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["automation","platform","p2"],"depends_on":["bd-integrations-002"]} +{"id":"bd-mobile-001","title":"Mobile App (React Native)","description":"Native mobile apps for iOS and Android.","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["mobile","platform","p1"],"depends_on":["bd-notifications-001"]} +{"id":"bd-sso-001","title":"SSO/SAML Authentication","description":"Enterprise single sign-on support.","priority":50,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","security","p2"],"depends_on":[]} +{"id":"bd-audit-001","title":"Audit Logs","description":"Comprehensive audit logging for compliance.","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","compliance","p2"],"depends_on":[]} +{"id":"bd-guest-001","title":"Guest Access","description":"Allow external collaborators with limited access.","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-bookmarks-001","title":"Bookmarks & Saved Items","description":"Save important messages for later.","priority":40,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["productivity","p3"],"depends_on":[]} +{"id":"bd-canvas-001","title":"Canvas/Collaborative Docs","description":"Real-time collaborative documents within 
channels.","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p3"],"depends_on":[]} +{"id":"bd-agent-public-001","title":"Deploy Always-On Community Agents","description":"Deploy dedicated agents for public community rooms (DocsBot, RoadmapBot, HelpBot, ShowcaseBot, ModBot).","priority":95,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","agents","community","p0"],"depends_on":["bd-viral-001"]} +{"id":"bd-landing-001","title":"Landing Page with Live Community Embed","description":"Update landing page to embed live community rooms with real-time activity.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","marketing","p0"],"depends_on":["bd-viral-001","bd-agent-public-001"]} +{"id":"bd-critical-016","title":"[SECURITY] Workspace Daemon Auth - Unauthenticated Endpoints","description":"Workspace daemon internal endpoints have NO authentication. **BLOCKS bd-critical-021 (per-user credentials)**\n\n## Vulnerability\nEndpoints in dashboard-server/server.ts are exposed without auth:\n- POST /auth/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/complete/:sessionId\n- POST /auth/cli/:provider/start\n- GET /api/credentials/:userId/:provider (proposed for per-user creds)\n\n## Risk\nIf workspace daemon is exposed via publicUrl, attackers could:\n- Submit malicious codes to active auth sessions\n- Enumerate active sessions\n- DoS the PTY processes\n- Hijack OAuth flows mid-completion\n- **Request ANY user's credentials** (if per-user creds implemented without this)\n\n## Fix\n1. Add workspace auth middleware using WORKSPACE_TOKEN (already passed to containers)\n2. Validate HMAC signature on all daemon endpoints\n3. Add request signing between cloud server and workspace daemon\n4. 
Validate session ownership\n\n## Implementation\n```typescript\n// Middleware for daemon endpoints\nfunction validateWorkspaceAuth(req, res, next) {\n const token = req.headers.authorization?.replace('Bearer ', '');\n const expectedToken = process.env.WORKSPACE_TOKEN;\n \n if (!token || token !== expectedToken) {\n return res.status(401).json({ error: 'Unauthorized' });\n }\n next();\n}\n\n// Apply to all internal endpoints\napp.use('/auth/cli', validateWorkspaceAuth);\napp.use('/api/credentials', validateWorkspaceAuth);\n```\n\n## Files\n- src/dashboard-server/server.ts:2075-2130\n- src/cloud/api/onboarding.ts (caller - must send WORKSPACE_TOKEN)","priority":160,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00","blocks-per-user-creds"],"depends_on":[]} +{"id":"bd-critical-017","title":"[SECURITY] PTY Output May Log Sensitive Tokens","description":"CLI auth PTY output is logged and may contain secrets.\n\n## Vulnerability\nIn cli-auth.ts:237-238, the last 500 chars of CLI output are logged:\n```typescript\nlogger.info('CLI process exited', {\n outputTail: cleanOutput.slice(-500), // May contain tokens!\n});\n```\n\n## Risk\n- Access tokens in logs\n- Refresh tokens exposed\n- API keys visible in log aggregators\n- Credentials in error dumps\n\n## Fix\n1. Sanitize PTY output before logging\n2. Redact patterns: token=XXX, Bearer XXX, api_key=XXX\n3. Add log scrubbing middleware\n4. 
Review all logger.info/error calls for secrets\n\n## Files\n- src/daemon/cli-auth.ts:237-238\n- src/daemon/cli-auth.ts:177 (prompt logging)","priority":142,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":[]} +{"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} +{"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. 
Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} +{"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} +{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- entrypoint.sh writes owner's creds to `${HOME}/.claude/` at container start\n- CLI tools (claude, codex) read from `$HOME/.{cli}/` - NOT env vars\n\n## Why Env Var Override Won't Work\nCLI tools are NOT SDKs. 
They have their own auth:\n- `claude` CLI reads `$HOME/.claude/.credentials.json`\n- `codex` CLI reads `$HOME/.codex/auth.json`\n- They IGNORE `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` env vars\n\n## Solution: Per-User HOME Directories\n\nSet `HOME` env var to user-specific directory when spawning.\n\n### Impact on Git Operations\n\n**Git is SAFE** - uses credential helper with env vars:\n- `git-credential-relay` reads `CLOUD_API_URL`, `WORKSPACE_ID`, `WORKSPACE_TOKEN`\n- Does NOT read files from HOME\n- Git clone/push/pull will continue working\n\n**gh CLI needs config copied:**\n- Config at `${HOME}/.config/gh/hosts.yml`\n- Copy from container HOME to user HOME\n- Or rely on `GH_TOKEN` env var (already set)\n\n### prepareUserHome() Function\n\n```typescript\nasync function prepareUserHome(userId: string, provider: string): Promise {\n const userHome = `/home/workspace-users/${userId}`;\n const containerHome = process.env.HOME || '/home/workspace';\n \n // 1. Fetch user's credentials from cloud\n const creds = await fetchUserCredentials(userId, provider);\n \n // 2. Write provider-specific credential file\n if (provider === 'anthropic') {\n await fs.mkdir(`${userHome}/.claude`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.claude/.credentials.json`,\n JSON.stringify({ claudeAiOauth: creds })\n );\n } else if (provider === 'openai') {\n await fs.mkdir(`${userHome}/.codex`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.codex/auth.json`,\n JSON.stringify({ tokens: { access_token: creds.accessToken, refresh_token: creds.refreshToken } })\n );\n }\n \n // 3. 
Copy gh CLI config (for `gh pr create` etc.)\n await fs.cp(`${containerHome}/.config/gh`, `${userHome}/.config/gh`, { recursive: true });\n \n return userHome;\n}\n```\n\n### Credential Format Reference\n\n**Claude** (`$HOME/.claude/.credentials.json`):\n```json\n{ \"claudeAiOauth\": { \"accessToken\": \"...\", \"refreshToken\": \"...\", \"expiresAt\": \"...\" } }\n```\n\n**Codex** (`$HOME/.codex/auth.json`):\n```json\n{ \"tokens\": { \"access_token\": \"...\", \"refresh_token\": \"...\" } }\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n workspace_id UUID, user_id UUID, provider TEXT,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN, UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (fallback to container HOME)\n\n## Files to Modify\n- src/daemon/agent-manager.ts - call prepareUserHome before spawn\n- src/daemon/credential-writer.ts (new) - per-provider credential format\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- deploy/workspace/entrypoint.sh - already writes owner creds (reference)\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). 
Without auth, any process could request any user's credentials.","priority":155,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["critical","architecture","team","credentials","launch-blocker","p00"],"depends_on":["bd-critical-016"]} diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index 1fb8e6d5..eaaf9714 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -55,6 +55,7 @@ {"id":"agent-relay-328","title":"Document cross-project messaging syntax for agents","description":"The parser supports cross-project messaging but agents don't know about it.\n\n## Current Syntax (supported but undocumented)\n\n```\n-\u003erelay:project-id:AgentName \u003c\u003c\u003c\nMessage to agent in another project\u003e\u003e\u003e\n\n-\u003erelay:project-id:* \u003c\u003c\u003c\nBroadcast to all agents in that project\u003e\u003e\u003e\n\n-\u003erelay:project-id:lead \u003c\u003c\u003c\nMessage to lead agent of that project\u003e\u003e\u003e\n```\n\n## Files to Update\n\n1. **docs/agent-relay-snippet.md** - Add cross-project section\n2. **CLAUDE.md** - Update the snippet (or it auto-updates)\n3. 
**.claude/skills/using-agent-relay/SKILL.md** - If exists\n\n## Content to Add\n\n```markdown\n## Cross-Project Messaging (Bridge Mode)\n\nWhen running with `agent-relay bridge`, you can message agents in other projects:\n\n```\n-\u003erelay:frontend:Designer \u003c\u003c\u003c\nPlease update the UI for the new auth flow\u003e\u003e\u003e\n\n-\u003erelay:backend:* \u003c\u003c\u003c\nAPI changes deployed, please pull latest\u003e\u003e\u003e\n\n-\u003erelay:shared-lib:lead \u003c\u003c\u003c\nNeed a new utility function for date formatting\u003e\u003e\u003e\n```\n\nFormat: `-\u003erelay:project-id:agent-name`\n```\n\n## Also Consider\n- How agents discover available projects\n- How to query which agents are in which project\n- Cross-project thread syntax: `[thread:project:topic]`","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-01T23:17:09.740345+01:00","updated_at":"2026-01-01T23:19:22.409354+01:00","closed_at":"2026-01-01T23:19:22.409354+01:00"} {"id":"agent-relay-329","title":"Dashboard: Show connected repos/projects indicator","description":"When multiple repos are connected (bridge mode or multi-repo workspace), the dashboard should visually indicate this.\n\n## Current State\n- Dashboard shows agents but no clear indication of which project/repo they belong to\n- No visual cue that multiple projects are bridged\n\n## Desired UX\n\n### Option A: Project badges on agents\nEach agent shows a small badge/tag with their project:\n```\n[frontend] Designer - active\n[backend] API-Dev - idle \n[shared] Utils - active\n```\n\n### Option B: Grouped sidebar\nProjects as collapsible sections (already partially exists in ProjectList):\n```\n▼ frontend (3 agents)\n - Designer\n - Implementer\n - Reviewer\n▼ backend (2 agents)\n - API-Dev\n - DBAdmin\n```\n\n### Option C: Header indicator\nShow connected projects count in header:\n```\n🔗 3 projects connected | Current: frontend\n```\n\n## Implementation Notes\n- Check ProjectList.tsx - already has 
project grouping logic\n- May need to enhance Header.tsx for connection indicator\n- Consider color-coding projects for quick identification\n\n## Files\n- src/dashboard/react-components/ProjectList.tsx\n- src/dashboard/react-components/layout/Header.tsx\n- src/dashboard/react-components/AgentList.tsx (for badges)","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-01T23:19:14.394353+01:00","updated_at":"2026-01-01T23:22:31.628165+01:00","closed_at":"2026-01-01T23:22:31.628165+01:00"} {"id":"agent-relay-330","title":"Add --architect flag to bridge command for cross-project coordinator","description":"When running bridge mode, optionally spawn an architect agent that coordinates across all projects.\n\n## Usage\n```bash\nagent-relay bridge --architect ~/frontend ~/backend\n# or\nagent-relay bridge --architect claude ~/frontend ~/backend\n```\n\n## Behavior\n1. Bridge connects to all project daemons (existing behavior)\n2. Spawns an Architect agent in a tmux session\n3. Architect agent has access to cross-project messaging:\n - -\u003erelay:project:agent for direct messages\n - -\u003erelay:*:* for broadcast to all\n - -\u003erelay:project:lead for project leads\n4. 
Architect gets injected with context about connected projects\n\n## Implementation\n- Add --architect flag to bridge command\n- Create temp workspace or use first project as base\n- Spawn tmux wrapper with Architect agent\n- Inject system prompt with project list and cross-project syntax\n\n## Agent Definition\nCould use .claude/agents/architect.md if exists, otherwise default prompt:\n- You are the Architect coordinating: [project list]\n- Use cross-project messaging syntax\n- Assign tasks to project leads\n- Resolve dependencies","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-01T23:29:07.473839+01:00","updated_at":"2026-01-01T23:35:27.952802+01:00","closed_at":"2026-01-01T23:35:27.952802+01:00"} +{"id":"agent-relay-350","title":"Global skills via PRPM","description":"Distribute @agent-relay/* skills via PRPM registry for opt-in workspace capabilities.\n\n## Goals\n- Publish skills to registry.prpm.dev\n- Users install globally (not per-project)\n- Zero context bloat until loaded\n\n## Key Tasks\n1. Research prpm --global support\n2. Define ~/.agent-relay/ user skills directory\n3. Publish @agent-relay/workspace-capabilities\n4. Publish @agent-relay/browser-testing\n5. Publish @agent-relay/container-spawning\n6. Create @agent-relay/workspace-pack collection\n7. 
Cloud workspace pre-installation\n\n## Skills to Publish\n- workspace-capabilities: Browser + container docs\n- browser-testing: Playwright, screenshots, VNC\n- container-spawning: Docker, presets, resource limits\n- linear-integration: Webhooks, API patterns\n- slack-integration: Bot patterns\n\n## Open Questions\n- Does prpm support --global flag?\n- Can daemon read user + project skills?\n- Conditional activation based on capabilities?\n\nSee: docs/tasks/global-skills-system.tasks.md","status":"open","priority":2,"issue_type":"epic","created_at":"2026-01-04T13:30:00Z","updated_at":"2026-01-04T13:30:00Z"} {"id":"agent-relay-37i","title":"Message deduplication uses in-memory Set without limits","description":"In tmux-wrapper.ts:65, sentMessageHashes is a Set that grows unbounded. For long-running sessions, this could cause memory issues. Add: (1) Max size with LRU eviction, (2) Time-based expiration, (3) Bloom filter alternative for memory efficiency.","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-20T00:18:47.229988+01:00","updated_at":"2025-12-20T00:18:47.229988+01:00"} {"id":"agent-relay-3px","title":"Add playbook system for batch automation","description":"Implement playbook system (like Maestro's Auto Run) for batch-processing task lists through agents. Define workflows in YAML/markdown, execute automatically with context isolation. 
Enables reproducible multi-step automation.","status":"open","priority":3,"issue_type":"feature","created_at":"2025-12-23T17:04:54.464749+01:00","updated_at":"2025-12-23T17:04:54.464749+01:00"} {"id":"agent-relay-3tx","title":"PR-9 Review: Document configurable timeouts","status":"open","priority":3,"issue_type":"task","created_at":"2025-12-22T21:54:15.789418+01:00","updated_at":"2025-12-22T21:54:15.789418+01:00"} @@ -115,6 +116,8 @@ {"id":"agent-relay-451","title":"Fix empty continuity handoff files - parse SESSION_END content","status":"closed","priority":2,"issue_type":"bug","created_at":"2026-01-03T14:27:49.747598+01:00","updated_at":"2026-01-03T14:33:27.122823+01:00","closed_at":"2026-01-03T14:33:21.048043+01:00"} {"id":"agent-relay-452","title":"Trajectories should populate agents array with agent who started it","description":"When trail start is called, the trajectory's agents array is empty. It should automatically associate the agent who started the trajectory.","status":"completed","priority":2,"issue_type":"bug","created_at":"2026-01-03T14:28:39.57+01:00","updated_at":"2026-01-03T15:56:25.663159+01:00"} {"id":"agent-relay-453","title":"BUG: Spawn command fails silently when CLI not specified","description":"Users can send `-\u003erelay:spawn WorkerName` without a CLI type, but the parser silently ignores it because it requires both name AND cli. \n\nParse code at pty-wrapper.ts:931 checks `parts.length \u003e= 2` which fails for commands like:\n- `-\u003erelay:spawn Investigator`\n\nShould either:\n1. Make CLI optional with sensible default (claude)\n2. 
Provide error feedback when CLI is missing\n\nThis blocks relay spawn/release functionality entirely.","status":"closed","priority":0,"issue_type":"bug","assignee":"Backend","created_at":"2026-01-03T16:43:37.927258+01:00","updated_at":"2026-01-03T16:50:11.02666+01:00","closed_at":"2026-01-03T16:50:11.02666+01:00"} +{"id":"agent-relay-454","title":"OpenCode headless mode integration","description":"Integrate OpenCode's headless mode (opencode run) with Agent Relay. Options: 1) Create MCP server adapter for agent-relay that OpenCode can use, 2) Document OpenCode config to work with relay. See: https://github.com/anomalyco/opencode/issues/953","status":"open","priority":3,"issue_type":"feature","created_at":"2026-01-04T01:01:55.715466+01:00","updated_at":"2026-01-04T01:01:55.715466+01:00"} +{"id":"agent-relay-455","title":"Create shared types package between backend and frontend","status":"open","priority":2,"issue_type":"task","created_at":"2026-01-04T21:03:08.485997+01:00","updated_at":"2026-01-04T21:03:08.485997+01:00"} {"id":"agent-relay-47z","title":"Express 5 may have breaking changes from Express 4 patterns","description":"package.json uses express@5.2.1 which is a major version with breaking changes from Express 4. Verify: (1) Error handling middleware patterns, (2) Router behavior, (3) Body parsing (express.json vs body-parser).","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-20T00:18:49.269841+01:00","updated_at":"2025-12-20T00:18:49.269841+01:00"} {"id":"agent-relay-4e0","title":"Fix message truncation - messages cut off at source","description":"Root cause found: parser.ts:40 inline regex only captures single line. Multi-line messages are split by parsePassThrough() at line 206. 
Fix options: (1) Allow continuation lines in inline format, (2) Use block format for multi-line, (3) Add heuristic to join lines until next @relay pattern.","status":"closed","priority":2,"issue_type":"bug","assignee":"MistyShelter","created_at":"2025-12-19T23:40:35.082717+01:00","updated_at":"2025-12-20T00:03:54.806087+01:00","closed_at":"2025-12-20T00:03:54.806087+01:00"} {"id":"agent-relay-4ft","title":"Merge project info into status command","status":"closed","priority":2,"issue_type":"task","assignee":"Pruner","created_at":"2025-12-19T21:59:52.685495+01:00","updated_at":"2025-12-19T22:06:44.276187+01:00","closed_at":"2025-12-19T22:06:44.276187+01:00"} diff --git a/.claude/rules/migrations.md b/.claude/rules/migrations.md new file mode 100644 index 00000000..a77d154c --- /dev/null +++ b/.claude/rules/migrations.md @@ -0,0 +1,74 @@ +--- +paths: + - "src/cloud/db/**/*.ts" + - "src/cloud/db/migrations/**/*.sql" + - "drizzle.config.ts" +--- + +# Database Migration Conventions + +## Drizzle ORM Migration Workflow + +This project uses Drizzle ORM with PostgreSQL. Migrations run automatically on server startup via `runMigrations()`. + +## When Schema Changes + +After modifying `src/cloud/db/schema.ts`: + +1. **Generate migration**: `npm run db:generate` +2. **Review the generated SQL** in `src/cloud/db/migrations/` +3. **Verify it's incremental** - should only contain ALTER/CREATE statements for changes, NOT recreate entire schema +4. **Test locally**: Restart server or run `npm run db:migrate` + +## Common Issues + +### Full Schema Recreation Instead of Incremental + +If `db:generate` creates a migration that recreates all tables: + +1. **Delete the bad migration file** from `migrations/` +2. **Remove its entry** from `migrations/meta/_journal.json` +3. **Delete any corrupt snapshot** in `migrations/meta/` +4. **Create incremental migration manually** using `ALTER TABLE ... 
ADD COLUMN IF NOT EXISTS` + +### Migration Not Applied + +If schema has columns that aren't in the database: + +1. Check if migration file exists in `migrations/` +2. Check if entry exists in `migrations/meta/_journal.json` +3. Verify migration ran: check `__drizzle_migrations` table in database + +## Writing Safe Migrations + +```sql +-- Use IF NOT EXISTS for idempotent migrations +ALTER TABLE users ADD COLUMN IF NOT EXISTS new_column VARCHAR(255); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_users_new_column ON users(new_column); +``` + +## Commands Reference + +```bash +npm run db:generate # Generate migration from schema diff +npm run db:migrate # Run pending migrations +npm run db:push # Push schema directly (dev only, can lose data) +npm run db:studio # Open Drizzle Studio GUI +``` + +## Production Safety + +- Always use `IF NOT EXISTS` / `IF EXISTS` for idempotent migrations +- Never use `db:push` in production - it can drop columns +- Test migrations on a copy of production data before deploying +- Migrations run on server startup - ensure they're fast and safe + +## Migration File Naming + +Files are named `NNNN_description.sql` where NNNN is sequential: +- `0001_initial.sql` +- `0002_add_feature.sql` +- `0003_nango_user_columns.sql` + +The `_journal.json` tracks which migrations have been applied. 
diff --git a/.env.example b/.env.example index 99fa631f..bb9c7b74 100644 --- a/.env.example +++ b/.env.example @@ -22,6 +22,40 @@ # Dashboard port (default: 3888) # AGENT_RELAY_DASHBOARD_PORT=3888 +# ============================================================================= +# Cloud Mode Configuration +# ============================================================================= + +# Force cloud mode in dashboard - prevents silent fallback to local mode +# Set to "true" when testing cloud features locally +# NEXT_PUBLIC_FORCE_CLOUD_MODE=true + +# ============================================================================= +# Security / Vault Configuration +# ============================================================================= + +# Vault master key for encrypting stored credentials (REQUIRED for cloud mode) +# Generate with: openssl rand -base64 32 +# VAULT_MASTER_KEY=your-32-byte-base64-encoded-key + +# ============================================================================= +# Compute Provider Configuration (for workspace provisioning) +# ============================================================================= + +# Compute provider: docker (default), fly, railway +# COMPUTE_PROVIDER=docker + +# --- Fly.io Configuration --- +# Get API token: fly tokens create deploy -x 999999h -n "agent-relay-provisioner" +# FLY_API_TOKEN=your-fly-api-token +# FLY_ORG=personal +# FLY_REGION=sjc +# FLY_WORKSPACE_DOMAIN=workspaces.yourdomain.com # optional custom domain + +# --- Railway Configuration --- +# Get API token from Railway dashboard +# RAILWAY_API_TOKEN=your-railway-api-token + # ============================================================================= # Examples # ============================================================================= @@ -36,3 +70,9 @@ # Use PostgreSQL (future): # AGENT_RELAY_STORAGE_TYPE=postgres # AGENT_RELAY_STORAGE_URL=postgres://localhost:5432/agent_relay + +# Production Fly.io setup: +# COMPUTE_PROVIDER=fly +# 
FLY_API_TOKEN=fo1_xxxxx +# FLY_ORG=your-org +# FLY_REGION=sjc diff --git a/.github/workflows/cli-oauth-test.yml b/.github/workflows/cli-oauth-test.yml new file mode 100644 index 00000000..a748cf2f --- /dev/null +++ b/.github/workflows/cli-oauth-test.yml @@ -0,0 +1,117 @@ +name: CLI OAuth Flow Tests + +on: + push: + paths: + - 'src/cloud/api/onboarding.ts' + - 'scripts/test-cli-auth/**' + - '.github/workflows/cli-oauth-test.yml' + pull_request: + paths: + - 'src/cloud/api/onboarding.ts' + - 'scripts/test-cli-auth/**' + # Allow manual trigger + workflow_dispatch: + # Run weekly to catch provider CLI changes + schedule: + - cron: '0 0 * * 0' # Every Sunday at midnight + +jobs: + unit-tests: + name: Unit Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + + - name: Install dependencies + run: npm ci + + - name: Run onboarding unit tests + run: npx vitest run src/cloud/api/onboarding.test.ts + + real-cli-tests: + name: Real CLI Integration Tests + runs-on: ubuntu-latest + needs: unit-tests + steps: + - uses: actions/checkout@v4 + + - name: Build test container with real CLIs + run: | + docker build -f scripts/test-cli-auth/Dockerfile.real \ + -t cli-oauth-test-real . 
+ + - name: Run CLI OAuth tests against real CLIs + id: test + run: | + mkdir -p test-results + docker run --rm \ + -v ${{ github.workspace }}/test-results:/tmp \ + cli-oauth-test-real + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: cli-oauth-test-results + path: test-results/cli-oauth-test-results.json + if-no-files-found: ignore + + - name: Parse and display results + if: always() + run: | + if [ -f test-results/cli-oauth-test-results.json ]; then + echo "### CLI OAuth Test Results" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Provider | Status | URL Found | Duration |" >> $GITHUB_STEP_SUMMARY + echo "|----------|--------|-----------|----------|" >> $GITHUB_STEP_SUMMARY + + cat test-results/cli-oauth-test-results.json | \ + jq -r '.results[] | "| \(.provider) | \(if .passed then "✅" else "❌" end) | \(if .urlExtracted then "Yes" else "No" end) | \(.duration)ms |"' \ + >> $GITHUB_STEP_SUMMARY + + echo "" >> $GITHUB_STEP_SUMMARY + echo "**Summary:** $(cat test-results/cli-oauth-test-results.json | jq -r '.summary | "\(.passed)/\(.total) passed"')" >> $GITHUB_STEP_SUMMARY + fi + + notify-on-failure: + name: Notify on Failure + runs-on: ubuntu-latest + needs: [unit-tests, real-cli-tests] + if: failure() && github.event_name == 'schedule' + steps: + - name: Create issue for CI failure + uses: actions/github-script@v7 + with: + script: | + const title = `CLI OAuth Tests Failed - ${new Date().toISOString().split('T')[0]}`; + const body = ` + ## CLI OAuth Integration Tests Failed + + The scheduled CLI OAuth tests have failed. This may indicate: + - A provider has updated their CLI and changed the OAuth flow + - Prompt patterns need to be updated + - URL extraction patterns need adjustment + + ### Action Required + 1. Check the [workflow run](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) + 2. 
Update \`CLI_AUTH_CONFIG\` in \`src/cloud/api/onboarding.ts\` if needed + 3. Update mock CLI behavior in \`scripts/test-cli-auth/mock-cli.sh\` + 4. Re-run tests to verify fixes + + /cc @${context.actor} + `; + + await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: title, + body: body, + labels: ['bug', 'cli-oauth', 'automated'] + }); diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 0c1ca6b5..318ce711 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,6 +1,9 @@ name: Docker on: + push: + branches: + - main release: types: [published] workflow_dispatch: @@ -27,9 +30,9 @@ jobs: - image: agent-relay dockerfile: Dockerfile context: . - - image: agent-relay-workspace + - image: relay-workspace dockerfile: deploy/workspace/Dockerfile - context: deploy/workspace + context: . steps: - name: Checkout repository diff --git a/.gitignore b/.gitignore index 870230b0..67e17f6f 100644 --- a/.gitignore +++ b/.gitignore @@ -29,13 +29,16 @@ pnpm-debug.log* # Local test artifacts .agent-relay-test-*/ +.tmp/ +.tmp-*/ # Coverage output coverage/ .npm-cache -.tmp-supervisor-tests -.tmp-agent-relay-data .next src/dashboard/out + +.env.local +.env diff --git a/.trajectories/active/traj_7ludwvz45veh.json b/.trajectories/active/traj_7ludwvz45veh.json new file mode 100644 index 00000000..4e12f19c --- /dev/null +++ b/.trajectories/active/traj_7ludwvz45veh.json @@ -0,0 +1,166 @@ +{ + "id": "traj_7ludwvz45veh", + "version": 1, + "task": { + "title": "Provider CLI auth flow for cloud workspaces", + "source": { + "system": "plain", + "id": "pre-launch-fixes" + } + }, + "status": "active", + "startedAt": "2026-01-04T00:05:43.304Z", + "agents": [ + { + "name": "khaliqgant", + "role": "lead", + "joinedAt": "2026-01-04T00:05:43.304Z" + } + ], + "chapters": [ + { + "id": "chap_esd2ffqy9f0f", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-04T00:05:51.196Z", + "events": [ + { 
+ "ts": 1767485151196, + "type": "decision", + "content": "Pre-seed Claude CLI config to skip interactive setup: Pre-seed Claude CLI config to skip interactive setup", + "raw": { + "question": "Pre-seed Claude CLI config to skip interactive setup", + "chosen": "Pre-seed Claude CLI config to skip interactive setup", + "alternatives": [], + "reasoning": "Claude CLI has interactive first-run (theme selection, etc). Alternative was web terminal (xterm.js) which is more flexible but complex. Pre-seeding config is simpler for MVP. May revisit for web terminal if other CLIs have similar issues." + }, + "significance": "high" + }, + { + "ts": 1767485196636, + "type": "decision", + "content": "Add settings page for CLI provider management: Add settings page for CLI provider management", + "raw": { + "question": "Add settings page for CLI provider management", + "chosen": "Add settings page for CLI provider management", + "alternatives": [], + "reasoning": "Users should be able to connect additional AI providers after initial setup. Settings page in workspace dashboard will allow connecting Claude, Codex, OpenCode, Droid at any time, not just during initial workspace setup." + }, + "significance": "high" + }, + { + "ts": 1767509811642, + "type": "decision", + "content": "Default trajectories to opt-out (user-level storage): Default trajectories to opt-out (user-level storage)", + "raw": { + "question": "Default trajectories to opt-out (user-level storage)", + "chosen": "Default trajectories to opt-out (user-level storage)", + "alternatives": [], + "reasoning": "Most repos won't want trajectory files in source control. 
Users must explicitly opt-in to store in repo via .relay/config.json" + }, + "significance": "high" + }, + { + "ts": 1767509822845, + "type": "decision", + "content": "Store user-level trajectories in ~/.config/agent-relay/trajectories//: Store user-level trajectories in ~/.config/agent-relay/trajectories//", + "raw": { + "question": "Store user-level trajectories in ~/.config/agent-relay/trajectories//", + "chosen": "Store user-level trajectories in ~/.config/agent-relay/trajectories//", + "alternatives": [], + "reasoning": "XDG-compliant path, project-isolated via hash to prevent collisions, survives repo deletion" + }, + "significance": "high" + }, + { + "ts": 1767509882710, + "type": "decision", + "content": "Trajectory settings configurable after GitHub app setup: Trajectory settings configurable after GitHub app setup", + "raw": { + "question": "Trajectory settings configurable after GitHub app setup", + "chosen": "Trajectory settings configurable after GitHub app setup", + "alternatives": [], + "reasoning": "Users should configure .relay/config.json after connecting repo to cloud workspace. This happens in the workspace onboarding flow." 
+ }, + "significance": "high" + }, + { + "ts": 1767510018871, + "type": "decision", + "content": "Add dashboard API for trajectory preferences: Add dashboard API for trajectory preferences", + "raw": { + "question": "Add dashboard API for trajectory preferences", + "chosen": "Add dashboard API for trajectory preferences", + "alternatives": [], + "reasoning": "Users configure via dashboard after GitHub app setup, during workspace onboarding" + }, + "significance": "high" + }, + { + "ts": 1767510106990, + "type": "decision", + "content": "Add comprehensive settings with trajectory explanations: Add comprehensive settings with trajectory explanations", + "raw": { + "question": "Add comprehensive settings with trajectory explanations", + "chosen": "Add comprehensive settings with trajectory explanations", + "alternatives": [], + "reasoning": "Users need to understand what trajectories are (PDERO paradigm), why they'd opt-in, and link to pdero.com for more info" + }, + "significance": "high" + }, + { + "ts": 1767510429532, + "type": "decision", + "content": "Investigate Claude OAuth login flow: Investigate Claude OAuth login flow", + "raw": { + "question": "Investigate Claude OAuth login flow", + "chosen": "Investigate Claude OAuth login flow", + "alternatives": [], + "reasoning": "Current provider setup uses API keys but Claude uses OAuth. Need to bypass interactive prompts and get login URL for popup-based auth." + }, + "significance": "high" + }, + { + "ts": 1767510693303, + "type": "decision", + "content": "Cloud provider auth strategy for Claude: Cloud provider auth strategy for Claude", + "raw": { + "question": "Cloud provider auth strategy for Claude", + "chosen": "Cloud provider auth strategy for Claude", + "alternatives": [], + "reasoning": "Claude uses OAuth in cloud environments. For users connecting accounts: 1) API key works (already supported), 2) CLI setup-token is interactive, 3) Need proper OAuth device flow from Anthropic. 
Recommend API key for now with improved UX." + }, + "significance": "high" + }, + { + "ts": 1767511753148, + "type": "decision", + "content": "Use node-pty for CLI OAuth flow: Use node-pty for CLI OAuth flow", + "raw": { + "question": "Use node-pty for CLI OAuth flow", + "chosen": "Use node-pty for CLI OAuth flow", + "alternatives": [], + "reasoning": "Regular spawn with pipes doesn't properly emulate TTY, causing CLIs to behave differently. PTY ensures auth URLs are output correctly and allows sending responses to interactive prompts." + }, + "significance": "high" + }, + { + "ts": 1767511764090, + "type": "decision", + "content": "Auto-respond to Claude interactive setup prompts: Auto-respond to Claude interactive setup prompts", + "raw": { + "question": "Auto-respond to Claude interactive setup prompts", + "chosen": "Auto-respond to Claude interactive setup prompts", + "alternatives": [], + "reasoning": "Claude has multi-step setup: dark mode -> auth method -> login URL. We detect prompts and send enter key to progress through them automatically." 
+ }, + "significance": "high" + } + ] + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [] +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_1k5if5snst2e.json b/.trajectories/completed/2026-01/traj_1k5if5snst2e.json new file mode 100644 index 00000000..0a63e8cd --- /dev/null +++ b/.trajectories/completed/2026-01/traj_1k5if5snst2e.json @@ -0,0 +1,65 @@ +{ + "id": "traj_1k5if5snst2e", + "version": 1, + "task": { + "title": "Fix 404 errors on auth endpoints", + "source": { + "system": "plain", + "id": "api-auth-session-404" + } + }, + "status": "completed", + "startedAt": "2026-01-03T19:55:20.964Z", + "agents": [ + { + "name": "Backend", + "role": "lead", + "joinedAt": "2026-01-03T19:55:20.965Z" + } + ], + "chapters": [ + { + "id": "chap_baircdnx9e02", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-03T21:14:10.404Z", + "events": [ + { + "ts": 1767474850405, + "type": "decision", + "content": "Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking: Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking", + "raw": { + "question": "Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking", + "chosen": "Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking", + "alternatives": [], + "reasoning": "Using Nango Proxy instead of direct token fetches provides automatic token refresh and cleaner code. 
Database schema was missing nango_connection_id, incoming_connection_id, and pending_installation_request columns needed for the two-connection OAuth pattern." + }, + "significance": "high" + }, + { + "ts": 1767474871478, + "type": "decision", + "content": "Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch: Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch", + "raw": { + "question": "Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch", + "chosen": "Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch", + "alternatives": [], + "reasoning": "Browser popup blockers require window.open() to be called synchronously within the user's click event handler. Awaiting the session token first broke the gesture chain. Solution: open popup immediately (shows loading), then fetch token async, then set token to enable the UI." 
+ }, + "significance": "high" + } + ], + "endedAt": "2026-01-03T21:14:38.934Z" + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [], + "completedAt": "2026-01-03T21:14:38.934Z", + "retrospective": { + "summary": "Fixed Nango OAuth popup blocker issue in login and signup pages by reordering operations to open popup synchronously before async token fetch", + "approach": "Standard approach", + "confidence": 0.9 + } +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_1k5if5snst2e.md b/.trajectories/completed/2026-01/traj_1k5if5snst2e.md new file mode 100644 index 00000000..787372a8 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_1k5if5snst2e.md @@ -0,0 +1,37 @@ +# Trajectory: Fix 404 errors on auth endpoints + +> **Status:** ✅ Completed +> **Task:** api-auth-session-404 +> **Confidence:** 90% +> **Started:** January 3, 2026 at 08:55 PM +> **Completed:** January 3, 2026 at 10:14 PM + +--- + +## Summary + +Fixed Nango OAuth popup blocker issue in login and signup pages by reordering operations to open popup synchronously before async token fetch + +**Approach:** Standard approach + +--- + +## Key Decisions + +### Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking +- **Chose:** Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking +- **Reasoning:** Using Nango Proxy instead of direct token fetches provides automatic token refresh and cleaner code. Database schema was missing nango_connection_id, incoming_connection_id, and pending_installation_request columns needed for the two-connection OAuth pattern. 
+ +### Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch +- **Chose:** Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch +- **Reasoning:** Browser popup blockers require window.open() to be called synchronously within the user's click event handler. Awaiting the session token first broke the gesture chain. Solution: open popup immediately (shows loading), then fetch token async, then set token to enable the UI. + +--- + +## Chapters + +### 1. Work +*Agent: default* + +- Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking: Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking +- Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch: Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch diff --git a/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json new file mode 100644 index 00000000..d8ece9f1 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json @@ -0,0 +1,49 @@ +{ + "id": "traj_ajs7zqfux4wc", + "version": 1, + "task": { + "title": "Fix Nango popup blocked - match my-senior-dev pattern exactly" + }, + "status": "completed", + "startedAt": "2026-01-03T21:22:52.243Z", + "agents": [ + { + "name": "khaliqgant", + "role": "lead", + "joinedAt": "2026-01-03T21:22:52.243Z" + } + ], + "chapters": [ + { + "id": "chap_8tdna5ynwc1z", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-03T21:22:59.275Z", + "events": [ + { + "ts": 1767475379276, + "type": "decision", + "content": "Rewrote all auth pages to exactly match my-senior-dev 
pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking: Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking", + "raw": { + "question": "Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking", + "chosen": "Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking", + "alternatives": [], + "reasoning": "" + }, + "significance": "high" + } + ], + "endedAt": "2026-01-03T21:23:07.802Z" + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [], + "completedAt": "2026-01-03T21:23:07.802Z", + "retrospective": { + "summary": "Rewrote login, signup, and connect-repos pages to exactly match my-senior-dev Nango pattern. Key changes: removed ConnectUI ref and .close() calls, added authSucceededRef to track auth state, use 'connectionId' in event.payload type guard.", + "approach": "Standard approach", + "confidence": 0.85 + } +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.md b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.md new file mode 100644 index 00000000..a4d96c11 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.md @@ -0,0 +1,23 @@ +# Trajectory: Fix Nango popup blocked - match my-senior-dev pattern exactly + +> **Status:** ✅ Completed +> **Confidence:** 85% +> **Started:** January 3, 2026 at 10:22 PM +> **Completed:** January 3, 2026 at 10:23 PM + +--- + +## Summary + +Rewrote login, signup, and connect-repos pages to exactly match my-senior-dev Nango pattern. Key changes: removed ConnectUI ref and .close() calls, added authSucceededRef to track auth state, use 'connectionId' in event.payload type guard. + +**Approach:** Standard approach + +--- + +## Chapters + +### 1. 
Work +*Agent: default* + +- Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking: Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking diff --git a/.trajectories/completed/2026-01/traj_cxofprm2m2en.json b/.trajectories/completed/2026-01/traj_cxofprm2m2en.json new file mode 100644 index 00000000..ddcd9b4f --- /dev/null +++ b/.trajectories/completed/2026-01/traj_cxofprm2m2en.json @@ -0,0 +1,49 @@ +{ + "id": "traj_cxofprm2m2en", + "version": 1, + "task": { + "title": "Fix Nango popup blocked by browser - use constructor pattern" + }, + "status": "completed", + "startedAt": "2026-01-03T21:18:15.384Z", + "agents": [ + { + "name": "khaliqgant", + "role": "lead", + "joinedAt": "2026-01-03T21:18:15.384Z" + } + ], + "chapters": [ + { + "id": "chap_5n4ibkpf4je1", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-03T21:18:25.048Z", + "events": [ + { + "ts": 1767475105049, + "type": "decision", + "content": "Pass connectSessionToken to Nango constructor instead of using setSessionToken(): Pass connectSessionToken to Nango constructor instead of using setSessionToken()", + "raw": { + "question": "Pass connectSessionToken to Nango constructor instead of using setSessionToken()", + "chosen": "Pass connectSessionToken to Nango constructor instead of using setSessionToken()", + "alternatives": [], + "reasoning": "The prpm app pattern works: new Nango({ connectSessionToken }) followed by openConnectUI(). This differs from our broken approach of new Nango() + setSessionToken() + open(). When the token is passed via constructor, Nango internally handles the popup differently and avoids browser popup blockers." 
+ }, + "significance": "high" + } + ], + "endedAt": "2026-01-03T21:18:33.901Z" + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [], + "completedAt": "2026-01-03T21:18:33.901Z", + "retrospective": { + "summary": "Fixed popup:blocked_by_browser error by using Nango constructor pattern: new Nango({ connectSessionToken }) instead of setSessionToken(). Updated login, signup, and connect-repos pages to match prpm app pattern.", + "approach": "Standard approach", + "confidence": 0.9 + } +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_cxofprm2m2en.md b/.trajectories/completed/2026-01/traj_cxofprm2m2en.md new file mode 100644 index 00000000..ff523159 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_cxofprm2m2en.md @@ -0,0 +1,31 @@ +# Trajectory: Fix Nango popup blocked by browser - use constructor pattern + +> **Status:** ✅ Completed +> **Confidence:** 90% +> **Started:** January 3, 2026 at 10:18 PM +> **Completed:** January 3, 2026 at 10:18 PM + +--- + +## Summary + +Fixed popup:blocked_by_browser error by using Nango constructor pattern: new Nango({ connectSessionToken }) instead of setSessionToken(). Updated login, signup, and connect-repos pages to match prpm app pattern. + +**Approach:** Standard approach + +--- + +## Key Decisions + +### Pass connectSessionToken to Nango constructor instead of using setSessionToken() +- **Chose:** Pass connectSessionToken to Nango constructor instead of using setSessionToken() +- **Reasoning:** The prpm app pattern works: new Nango({ connectSessionToken }) followed by openConnectUI(). This differs from our broken approach of new Nango() + setSessionToken() + open(). When the token is passed via constructor, Nango internally handles the popup differently and avoids browser popup blockers. + +--- + +## Chapters + +### 1. 
Work +*Agent: default* + +- Pass connectSessionToken to Nango constructor instead of using setSessionToken(): Pass connectSessionToken to Nango constructor instead of using setSessionToken() diff --git a/.trajectories/index.json b/.trajectories/index.json index d7be6e71..986a88dd 100644 --- a/.trajectories/index.json +++ b/.trajectories/index.json @@ -1,6 +1,6 @@ { "version": 1, - "lastUpdated": "2026-01-03T19:22:22.783Z", + "lastUpdated": "2026-01-04T07:29:24.092Z", "trajectories": { "traj_ozd98si6a7ns": { "title": "Fix thinking indicator showing on all messages", @@ -253,6 +253,33 @@ "startedAt": "2026-01-03T19:17:32.797Z", "completedAt": "2026-01-03T19:22:22.762Z", "path": "/home/user/relay/.trajectories/completed/2026-01/traj_yvdadtvdgnz3.json" + }, + "traj_1k5if5snst2e": { + "title": "Fix 404 errors on auth endpoints", + "status": "completed", + "startedAt": "2026-01-03T19:55:20.964Z", + "completedAt": "2026-01-03T21:14:38.934Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_1k5if5snst2e.json" + }, + "traj_cxofprm2m2en": { + "title": "Fix Nango popup blocked by browser - use constructor pattern", + "status": "completed", + "startedAt": "2026-01-03T21:18:15.384Z", + "completedAt": "2026-01-03T21:18:33.901Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_cxofprm2m2en.json" + }, + "traj_ajs7zqfux4wc": { + "title": "Fix Nango popup blocked - match my-senior-dev pattern exactly", + "status": "completed", + "startedAt": "2026-01-03T21:22:52.243Z", + "completedAt": "2026-01-03T21:23:07.802Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json" + }, + "traj_7ludwvz45veh": { + "title": "Provider CLI auth flow for cloud workspaces", + "status": "active", + "startedAt": "2026-01-04T00:05:43.304Z", + "path": "/home/user/relay/.trajectories/active/traj_7ludwvz45veh.json" } } } \ No newline at end of file diff 
--git a/README.md b/README.md index 4ba816ec..d1b934ce 100644 --- a/README.md +++ b/README.md @@ -190,7 +190,7 @@ prpm install @agent-relay/agent-relay-snippet --location CLAUDE.md `agent-relay up` starts a web dashboard at http://localhost:3888 -![Agent Relay Dashboard](dashboard.png) +![Agent Relay Dashboard](docs/dashboard.png) ## Development diff --git a/deploy/init-db.sql b/deploy/init-db.sql index 8c977bdd..66d21d31 100644 --- a/deploy/init-db.sql +++ b/deploy/init-db.sql @@ -1,5 +1,5 @@ -- Agent Relay Cloud - Database bootstrap --- Deprecated: use migrations in src/cloud/db/migrations/0001_initial.sql --- This file is kept as a convenience wrapper for local psql usage. +-- Migrations are handled by Drizzle ORM at server startup. +-- This file is kept for Docker entrypoint compatibility. -\\i ../src/cloud/db/migrations/0001_initial.sql +-- No-op: Drizzle migrations run automatically diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index b2a6113c..bdefa4de 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -1,42 +1,104 @@ # Agent Relay Workspace # Runs a user's workspace with the relay daemon and agent orchestration +FROM node:20-slim AS builder + +WORKDIR /app + +# Install build dependencies for native modules (node-pty, better-sqlite3) +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Copy package files and scripts (needed for postinstall) +COPY package*.json ./ +COPY scripts ./scripts + +# Install dependencies (production only, skip tmux in CI) +ENV CI=true +RUN npm ci --omit=dev + +# Copy pre-built dist (build before docker build) +COPY dist ./dist + +# --- + FROM node:20-slim WORKDIR /app -# Install system dependencies for AI CLIs +# Install system dependencies for AI CLIs and git +# Note: tmux not needed - daemon uses node-pty directly +# gosu is used to drop privileges from root to workspace user +# Build tools (make, g++) needed for agents to 
npm install repos with native modules RUN apt-get update && apt-get install -y \ bash \ ca-certificates \ curl \ git \ python3 \ + jq \ + gosu \ + make \ + g++ \ && rm -rf /var/lib/apt/lists/* -# Install Claude CLI (if available) -# RUN npm install -g @anthropic-ai/claude-code +# Install GitHub CLI (gh) +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt-get update \ + && apt-get install -y gh \ + && rm -rf /var/lib/apt/lists/* + +# Copy from builder +COPY --from=builder /app/dist ./dist +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package*.json ./ + +# Copy docs folder (contains relay snippets for agent spawning) +COPY docs ./docs -# Copy pre-built agent-relay -COPY --from=ghcr.io/khaliqgant/agent-relay:latest /app/dist ./dist -COPY --from=ghcr.io/khaliqgant/agent-relay:latest /app/node_modules ./node_modules -COPY --from=ghcr.io/khaliqgant/agent-relay:latest /app/package*.json ./ COPY deploy/workspace/entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh +COPY deploy/workspace/git-credential-relay /usr/local/bin/git-credential-relay +RUN chmod +x /entrypoint.sh /usr/local/bin/git-credential-relay + +# Install npm-based CLIs globally as root (npm -g requires root) +RUN npm install -g @openai/codex +RUN npm install -g @google/gemini-cli +RUN npm install -g opencode-ai@latest + +# Create symlink for agent-relay CLI (for debugging inside container) +# The actual CLI is built as part of the app +RUN ln -sf /app/dist/cli/index.js /usr/local/bin/agent-relay && \ + chmod +x /app/dist/cli/index.js 2>/dev/null || true # Create workspace directory RUN mkdir -p 
/workspace /data # Create non-root user -RUN useradd -m -u 1001 workspace -RUN chown -R workspace:workspace /app /workspace /data +RUN useradd -m -u 1001 -s /bin/bash workspace \ + && chown -R workspace:workspace /app /workspace /data + USER workspace +# Install AI CLIs as workspace user (they install to ~/.local/bin) +# Claude +RUN curl -fsSL https://claude.ai/install.sh | bash +# Note: We don't pre-seed Claude config - we want the full interactive auth flow +# to run so the PTY runner can capture the auth URL +# Note: Codex, Gemini, and OpenCode are installed as root above via npm +# Droid +RUN curl -fsSL https://app.factory.ai/cli | sh + # Environment ENV NODE_ENV=production ENV PORT=3888 ENV AGENT_RELAY_DATA_DIR=/data ENV AGENT_RELAY_DASHBOARD_PORT=3888 +ENV PATH="/home/workspace/.local/bin:$PATH" # Expose ports # 3888 - Dashboard/API diff --git a/deploy/workspace/Dockerfile.browser b/deploy/workspace/Dockerfile.browser new file mode 100644 index 00000000..51a0cf53 --- /dev/null +++ b/deploy/workspace/Dockerfile.browser @@ -0,0 +1,154 @@ +# Agent Relay Workspace - Browser Testing Variant +# Adds Playwright, Xvfb, VNC for full browser testing capabilities +# +# Build: docker build -f Dockerfile.browser -t agent-relay-workspace:browser . 
+# Run: docker run -p 3888:3888 -p 6080:6080 agent-relay-workspace:browser + +FROM node:20-slim AS builder + +WORKDIR /app + +# Install build dependencies for native modules (node-pty, better-sqlite3) +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Copy package files and scripts (needed for postinstall) +COPY package*.json ./ +COPY scripts ./scripts + +# Install dependencies (production only, skip tmux in CI) +ENV CI=true +RUN npm ci --omit=dev + +# Copy pre-built dist (build before docker build) +COPY dist ./dist + +# --- + +FROM node:20-slim + +WORKDIR /app + +# ============================================================================ +# System Dependencies +# ============================================================================ +RUN apt-get update && apt-get install -y \ + # Basic tools + bash \ + ca-certificates \ + curl \ + git \ + python3 \ + jq \ + # Docker CLI (for agents to spawn containers) + docker.io \ + # Browser testing dependencies + xvfb \ + x11vnc \ + fluxbox \ + # noVNC for browser-based VNC access + novnc \ + websockify \ + # Playwright system dependencies + libnss3 \ + libatk1.0-0 \ + libatk-bridge2.0-0 \ + libcups2 \ + libdrm2 \ + libxkbcommon0 \ + libxcomposite1 \ + libxdamage1 \ + libxfixes3 \ + libxrandr2 \ + libgbm1 \ + libasound2 \ + libpango-1.0-0 \ + libcairo2 \ + # Screenshot tools + scrot \ + imagemagick \ + && rm -rf /var/lib/apt/lists/* + +# Install GitHub CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt-get update \ + && apt-get install -y gh \ + && rm -rf /var/lib/apt/lists/* + +# 
Copy from builder +COPY --from=builder /app/dist ./dist +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package*.json ./ +COPY deploy/workspace/entrypoint.sh /entrypoint.sh +COPY deploy/workspace/entrypoint-browser.sh /entrypoint-browser.sh +COPY deploy/workspace/git-credential-relay /usr/local/bin/git-credential-relay +RUN chmod +x /entrypoint.sh /entrypoint-browser.sh /usr/local/bin/git-credential-relay + +# Install npm-based CLIs globally as root (npm -g requires root) +RUN npm install -g @openai/codex +RUN npm install -g @google/gemini-cli +RUN npm install -g opencode-ai@latest +# Install Playwright with browsers +RUN npm install -g playwright +RUN npx playwright install chromium firefox + +# Create workspace directory +RUN mkdir -p /workspace/repos /data + +# Create non-root user +RUN useradd -m -u 1001 workspace +RUN usermod -aG docker workspace # Allow docker access +RUN chown -R workspace:workspace /app /workspace /data + +USER workspace + +# Install AI CLIs as workspace user (they install to ~/.local/bin) +# Claude +RUN curl -fsSL https://claude.ai/install.sh | bash +# Pre-seed Claude config to skip interactive onboarding +RUN mkdir -p /home/workspace/.claude && \ + echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/workspace/.claude/settings.local.json +# Droid +RUN curl -fsSL https://app.factory.ai/cli | sh + +# ============================================================================ +# Environment +# ============================================================================ +ENV NODE_ENV=production +ENV PORT=3888 +ENV AGENT_RELAY_DATA_DIR=/data +ENV AGENT_RELAY_DASHBOARD_PORT=3888 +ENV PATH="/home/workspace/.local/bin:$PATH" + +# Display settings for Xvfb +ENV DISPLAY=:99 +ENV SCREEN_WIDTH=1920 +ENV SCREEN_HEIGHT=1080 +ENV SCREEN_DEPTH=24 + +# VNC settings +ENV VNC_PORT=5900 +ENV NOVNC_PORT=6080 + +# ============================================================================ +# Expose Ports +# 
============================================================================ +# 3888 - Dashboard/API +# 3889 - WebSocket (optional) +# 5900 - VNC direct +# 6080 - noVNC web interface +EXPOSE 3888 3889 5900 6080 + +# Volume for persistent data +VOLUME ["/data", "/workspace"] + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=10s --retries=3 \ + CMD node -e "require('http').get('http://localhost:3888/health', (r) => process.exit(r.statusCode === 200 ? 0 : 1))" + +ENTRYPOINT ["/entrypoint-browser.sh"] diff --git a/deploy/workspace/entrypoint-browser.sh b/deploy/workspace/entrypoint-browser.sh new file mode 100644 index 00000000..457c360a --- /dev/null +++ b/deploy/workspace/entrypoint-browser.sh @@ -0,0 +1,118 @@ +#!/usr/bin/env bash + +set -euo pipefail + +log() { + echo "[workspace-browser] $*" +} + +# ============================================================================ +# Start Virtual Display (Xvfb) +# ============================================================================ +log "Starting Xvfb virtual display..." +Xvfb :99 -screen 0 "${SCREEN_WIDTH:-1920}x${SCREEN_HEIGHT:-1080}x${SCREEN_DEPTH:-24}" & +XVFB_PID=$! +sleep 1 + +# Verify Xvfb started +if ! kill -0 $XVFB_PID 2>/dev/null; then + log "ERROR: Xvfb failed to start" + exit 1 +fi +log "Xvfb started on display :99" + +# ============================================================================ +# Start Window Manager (Fluxbox) +# ============================================================================ +log "Starting Fluxbox window manager..." +fluxbox & +sleep 1 +log "Fluxbox started" + +# ============================================================================ +# Start VNC Server (optional, for debugging/viewing) +# ============================================================================ +if [[ "${VNC_ENABLED:-true}" == "true" ]]; then + log "Starting x11vnc server..." 
+ x11vnc -display :99 -forever -shared -rfbport "${VNC_PORT:-5900}" -bg -nopw -xkb + log "VNC server started on port ${VNC_PORT:-5900}" + + # Start noVNC for browser-based access + if [[ "${NOVNC_ENABLED:-true}" == "true" ]]; then + log "Starting noVNC web interface..." + websockify --web=/usr/share/novnc/ "${NOVNC_PORT:-6080}" localhost:"${VNC_PORT:-5900}" & + log "noVNC available at http://localhost:${NOVNC_PORT:-6080}/vnc.html" + fi +fi + +# ============================================================================ +# Export browser testing utilities +# ============================================================================ + +# Create screenshot helper +cat > /usr/local/bin/take-screenshot <<'EOF' +#!/usr/bin/env bash +# Take a screenshot and save to specified path +# Usage: take-screenshot [output.png] +OUTPUT="${1:-/tmp/screenshot-$(date +%Y%m%d-%H%M%S).png}" +DISPLAY=:99 scrot "$OUTPUT" +echo "$OUTPUT" +EOF +chmod +x /usr/local/bin/take-screenshot + +# Create browser launcher helper +cat > /usr/local/bin/launch-browser <<'EOF' +#!/usr/bin/env bash +# Launch browser with optional URL +# Usage: launch-browser [url] +URL="${1:-about:blank}" +DISPLAY=:99 chromium --no-sandbox --disable-gpu --start-maximized "$URL" & +echo "Browser launched with PID $!" +EOF +chmod +x /usr/local/bin/launch-browser + +# Create Playwright test runner helper +cat > /usr/local/bin/run-playwright <<'EOF' +#!/usr/bin/env bash +# Run Playwright tests with proper display settings +# Usage: run-playwright [test-file.spec.ts] [additional args...] 
+export DISPLAY=:99 +npx playwright test "$@" +EOF +chmod +x /usr/local/bin/run-playwright + +# ============================================================================ +# Docker-in-Docker helper (if socket mounted) +# ============================================================================ +if [[ -S /var/run/docker.sock ]]; then + log "Docker socket detected - agents can spawn containers" + + # Create helper for agents to spawn isolated containers + cat > /usr/local/bin/spawn-container <<'EOF' +#!/usr/bin/env bash +# Spawn an isolated container for agent tasks +# Usage: spawn-container [command...] +IMAGE="${1:-ubuntu:22.04}" +shift +docker run --rm -it \ + --network=host \ + -v "$(pwd):/workspace" \ + -w /workspace \ + "$IMAGE" "$@" +EOF + chmod +x /usr/local/bin/spawn-container +else + log "WARN: Docker socket not mounted - container spawning disabled" +fi + +# ============================================================================ +# Continue with main entrypoint +# ============================================================================ +log "Browser testing environment ready" +log " - Display: $DISPLAY (${SCREEN_WIDTH}x${SCREEN_HEIGHT})" +log " - VNC: ${VNC_ENABLED:-true} (port ${VNC_PORT:-5900})" +log " - noVNC: ${NOVNC_ENABLED:-true} (http://localhost:${NOVNC_PORT:-6080})" +log " - Playwright: $(npx playwright --version 2>/dev/null || echo 'installed')" + +# Hand off to main entrypoint +exec /entrypoint.sh "$@" diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index b60f02a4..1d56ce9e 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -6,6 +6,12 @@ log() { echo "[workspace] $*" } +# Drop to workspace user if running as root +if [[ "$(id -u)" == "0" ]]; then + log "Dropping privileges to workspace user..." 
+ exec gosu workspace "$0" "$@" +fi + PORT="${AGENT_RELAY_DASHBOARD_PORT:-${PORT:-3888}}" export AGENT_RELAY_DASHBOARD_PORT="${PORT}" export PORT="${PORT}" @@ -16,8 +22,64 @@ REPO_LIST="${REPOSITORIES:-}" mkdir -p "${WORKSPACE_DIR}" cd "${WORKSPACE_DIR}" -# Configure Git credentials for GitHub clones (avoid storing tokens in remotes) -if [[ -n "${GITHUB_TOKEN:-}" ]]; then +# Configure Git credentials via the gateway (tokens auto-refresh via Nango) +# The credential helper fetches fresh tokens from the cloud API on each git operation +if [[ -n "${CLOUD_API_URL:-}" && -n "${WORKSPACE_ID:-}" && -n "${WORKSPACE_TOKEN:-}" ]]; then + log "Configuring git credential helper (gateway mode)" + git config --global credential.helper "/usr/local/bin/git-credential-relay" + git config --global credential.useHttpPath true + export GIT_TERMINAL_PROMPT=0 + + # Configure git identity for commits + # Use env vars if set, otherwise default to "Agent Relay" / "agent@agent-relay.com" + DEFAULT_GIT_EMAIL="${AGENT_NAME:-agent}@agent-relay.com" + git config --global user.name "${GIT_USER_NAME:-Agent Relay}" + git config --global user.email "${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}" + log "Git identity configured: ${GIT_USER_NAME:-Agent Relay} <${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}>" + + # Configure gh CLI to use the same token mechanism + # gh auth login expects a token via stdin or GH_TOKEN env var + # We'll set up a wrapper that fetches fresh tokens + mkdir -p "${HOME}/.config/gh" + cat > "${HOME}/.config/gh/hosts.yml" < "/tmp/gh-token-helper.sh" <<'GHEOF' +#!/usr/bin/env bash +# Fetch fresh token for gh CLI +response=$(curl -sf \ + -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" 2>/dev/null) +if [[ -n "$response" ]]; then + echo "$response" | jq -r '.token // empty' +fi +GHEOF + chmod +x "/tmp/gh-token-helper.sh" + + # gh CLI will use GH_TOKEN if set; we export a function to refresh it + # For now, set it once at startup 
(will be refreshed by the credential helper for git operations) + # Retry a few times in case the cloud API isn't ready yet + export GH_TOKEN="" + for attempt in 1 2 3; do + GH_TOKEN=$(/tmp/gh-token-helper.sh 2>/dev/null || echo "") + if [[ -n "${GH_TOKEN}" ]]; then + break + fi + sleep 1 + done + if [[ -n "${GH_TOKEN}" ]]; then + log "GitHub CLI configured with fresh token" + else + log "WARN: Could not fetch GitHub token for gh CLI" + fi + +# Fallback: Use static GITHUB_TOKEN if provided (legacy mode) +elif [[ -n "${GITHUB_TOKEN:-}" ]]; then + log "Configuring git credentials (legacy static token mode)" GIT_ASKPASS_SCRIPT="/tmp/git-askpass.sh" cat > "${GIT_ASKPASS_SCRIPT}" <<'EOF' #!/usr/bin/env bash @@ -31,6 +93,13 @@ EOF chmod +x "${GIT_ASKPASS_SCRIPT}" export GIT_ASKPASS="${GIT_ASKPASS_SCRIPT}" export GIT_TERMINAL_PROMPT=0 + export GH_TOKEN="${GITHUB_TOKEN}" + + # Configure git identity for commits + DEFAULT_GIT_EMAIL="${AGENT_NAME:-agent}@agent-relay.com" + git config --global user.name "${GIT_USER_NAME:-Agent Relay}" + git config --global user.email "${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}" + log "Git identity configured: ${GIT_USER_NAME:-Agent Relay} <${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}>" fi clone_or_update_repo() { @@ -69,35 +138,129 @@ if [[ -n "${REPO_LIST}" ]]; then done fi +# ============================================================================ +# Configure agent policy enforcement for cloud workspaces +# Policy is fetched from cloud API and enforced at runtime +# ============================================================================ + +if [[ -n "${CLOUD_API_URL:-}" && -n "${WORKSPACE_ID:-}" ]]; then + log "Enabling agent policy enforcement" + export AGENT_POLICY_ENFORCEMENT=1 + # Policy is fetched from ${CLOUD_API_URL}/api/policy/${WORKSPACE_ID}/internal +fi + # ============================================================================ # Configure AI provider credentials # Create credential files that CLIs expect from ENV vars 
passed by provisioner # ============================================================================ -# Claude CLI expects ~/.claude/credentials.json +# Claude CLI expects ~/.claude/.credentials.json (note the dot prefix on filename) +# Format: { claudeAiOauth: { accessToken: "...", refreshToken: "...", expiresAt: ... } } if [[ -n "${ANTHROPIC_TOKEN:-}" ]]; then log "Configuring Claude credentials..." mkdir -p "${HOME}/.claude" - cat > "${HOME}/.claude/credentials.json" < "${HOME}/.claude/.credentials.json" < "${HOME}/.claude/settings.json" <<'SETTINGSEOF' +{ + "permissions": { + "allow": [ + "Read", + "Edit", + "Write", + "Bash", + "Glob", + "Grep", + "Task", + "WebFetch", + "WebSearch", + "NotebookEdit", + "TodoWrite" + ], + "deny": [] + }, + "autoApproveApiRequest": true +} +SETTINGSEOF +chmod 600 "${HOME}/.claude/settings.json" +log "Created Claude Code settings (auto-approve enabled)" + +# Create CLAUDE.md with agent relay protocol instructions +# This is loaded automatically by Claude Code and provides the relay protocol +if [[ -f "/app/docs/agent-relay-snippet.md" ]]; then + cp "/app/docs/agent-relay-snippet.md" "${HOME}/.claude/CLAUDE.md" + log "Copied relay protocol from /app/docs/agent-relay-snippet.md" +else + # Fallback: create minimal instructions + log "WARN: /app/docs/agent-relay-snippet.md not found, creating minimal instructions" + cat > "${HOME}/.claude/CLAUDE.md" <<'RELAYEOF' +# Agent Relay + +Real-time agent-to-agent messaging. Output `->relay:` patterns to communicate. + +## Sending Messages + +Use fenced format for reliable delivery: +``` +->relay:AgentName <<< +Your message here.>>> +``` + +Broadcast to all: `->relay:* <<>>` + +## Protocol + +1. ACK immediately when you receive a task +2. Do the work +3. 
Send DONE: summary when complete + +## Session Persistence + +Output periodically to checkpoint progress: +``` +[[SUMMARY]]{"currentTask":"...","completedTasks":[...],"context":"..."}[[/SUMMARY]] +``` + +When session is complete: +``` +[[SESSION_END]]{"summary":"...","completedTasks":[...]}[[/SESSION_END]] +``` +RELAYEOF +fi +log "Claude Code configuration complete" + +# Codex CLI expects ~/.codex/auth.json +# Format: { tokens: { access_token: "...", refresh_token: "...", ... } } if [[ -n "${OPENAI_TOKEN:-}" ]]; then log "Configuring Codex credentials..." mkdir -p "${HOME}/.codex" - cat > "${HOME}/.codex/credentials.json" < "${HOME}/.codex/auth.json" <&2 + fi +} + +debug "Called with args: $*" + +# Only handle 'get' operation +if [[ "${1:-}" != "get" ]]; then + debug "Ignoring non-get operation" + exit 0 +fi + +# Read input from git (protocol=https, host=github.com, etc.) +declare -A input +while IFS='=' read -r key value; do + [[ -z "$key" ]] && break + input["$key"]="$value" +done + +# Only provide credentials for github.com +host="${input[host]:-}" +debug "Host: $host" +if [[ "$host" != "github.com" ]]; then + debug "Not github.com, skipping" + exit 0 +fi + +# Check required environment variables +if [[ -z "${WORKSPACE_ID:-}" ]]; then + echo "git-credential-relay: WORKSPACE_ID not set" >&2 + echo "git-credential-relay: Hint - check if env vars are passed to agent process" >&2 + exit 1 +fi + +if [[ -z "${CLOUD_API_URL:-}" ]]; then + echo "git-credential-relay: CLOUD_API_URL not set" >&2 + exit 1 +fi + +if [[ -z "${WORKSPACE_TOKEN:-}" ]]; then + echo "git-credential-relay: WORKSPACE_TOKEN not set" >&2 + exit 1 +fi + +debug "Fetching token from ${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" + +# Fetch fresh token from gateway (capture stderr for debugging) +http_code="" +response="" +if [[ "${GIT_CREDENTIAL_DEBUG:-}" == "1" ]]; then + # With debug, show full curl output + response=$(curl -sf -w "\n%{http_code}" \ + -H "Authorization: Bearer 
${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ + 2>&1) || true + http_code="${response##*$'\n'}" + response="${response%$'\n'*}" + debug "HTTP response code: $http_code" + debug "Response: ${response:0:200}" +else + response=$(curl -sf \ + -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ + 2>/dev/null) || true +fi + +if [[ -z "$response" ]]; then + echo "git-credential-relay: Failed to fetch token from gateway" >&2 + exit 1 +fi + +# Parse JSON response using jq (more robust than grep) +token=$(echo "$response" | jq -r '.token // empty') +username=$(echo "$response" | jq -r '.username // "x-access-token"') + +if [[ -z "$token" ]]; then + # Check if there's an error message + error=$(echo "$response" | jq -r '.error // empty') + if [[ -n "$error" ]]; then + echo "git-credential-relay: $error" >&2 + else + echo "git-credential-relay: No token in response" >&2 + fi + exit 1 +fi + +# Output credentials in git credential format +echo "protocol=https" +echo "host=github.com" +echo "username=${username:-x-access-token}" +echo "password=${token}" diff --git a/docker-compose.browser.yml b/docker-compose.browser.yml new file mode 100644 index 00000000..4c81e293 --- /dev/null +++ b/docker-compose.browser.yml @@ -0,0 +1,78 @@ +# Agent Relay - Browser Testing Workspace +# +# Extends docker-compose.dev.yml with browser testing capabilities. +# +# Usage: +# docker compose -f docker-compose.dev.yml -f docker-compose.browser.yml up +# +# Access: +# - Dashboard: http://localhost:3888 +# - VNC (web): http://localhost:6080/vnc.html +# - VNC (native): vnc://localhost:5900 + +version: '3.8' + +services: + # Browser-enabled workspace with full testing capabilities + workspace-browser: + build: + context: . 
+ dockerfile: deploy/workspace/Dockerfile.browser + ports: + - "3888:3888" # Dashboard/API + - "3889:3889" # WebSocket + - "5900:5900" # VNC direct + - "6080:6080" # noVNC web interface + environment: + WORKSPACE_ID: browser-workspace + SUPERVISOR_ENABLED: "true" + MAX_AGENTS: "10" + # Browser display settings + DISPLAY: ":99" + SCREEN_WIDTH: "1920" + SCREEN_HEIGHT: "1080" + SCREEN_DEPTH: "24" + # VNC settings + VNC_ENABLED: "true" + VNC_PORT: "5900" + NOVNC_ENABLED: "true" + NOVNC_PORT: "6080" + volumes: + # Persistent data + - workspace_browser_data:/data + # Mount repos + - ./:/workspace/relay:ro + # Docker socket for spawning containers + - /var/run/docker.sock:/var/run/docker.sock + # Required for some browser operations + shm_size: '2gb' + # Security options for browser sandboxing + security_opt: + - seccomp:unconfined + depends_on: + - cloud + + # Alternative: Rootless Docker-in-Docker workspace + # Uses sysbox runtime for secure nested containers + workspace-dind: + build: + context: . 
+ dockerfile: deploy/workspace/Dockerfile.browser + runtime: sysbox-runc # Requires sysbox installed on host + ports: + - "3898:3888" + - "6090:6080" + environment: + WORKSPACE_ID: dind-workspace + SUPERVISOR_ENABLED: "true" + MAX_AGENTS: "10" + # DinD mode - Docker daemon runs inside container + DOCKER_HOST: "unix:///var/run/docker.sock" + volumes: + - workspace_dind_data:/data + profiles: + - dind # Only start with: --profile dind + +volumes: + workspace_browser_data: + workspace_dind_data: diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 16140e1e..fc1a3ddc 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -8,9 +8,9 @@ # - Example workspace (optional) # # After starting, access: -# - Landing page: http://localhost:3000 -# - Dashboard: http://localhost:3000/dashboard -# - API: http://localhost:3000/api +# - Landing page: http://localhost:4567 +# - Dashboard: http://localhost:4567/dashboard +# - API: http://localhost:4567/api version: '3.8' @@ -26,7 +26,7 @@ services: - postgres_data:/var/lib/postgresql/data - ./deploy/init-db.sql:/docker-entrypoint-initdb.d/init.sql:ro ports: - - "5432:5432" + - "5433:5432" healthcheck: test: ["CMD-SHELL", "pg_isready -U agent_relay"] interval: 5s @@ -52,11 +52,11 @@ services: context: . dockerfile: Dockerfile ports: - - "3000:3000" + - "4567:4567" environment: NODE_ENV: development - PORT: 3000 - PUBLIC_URL: http://localhost:3000 + PORT: 4567 + PUBLIC_URL: http://localhost:4567 # Database DATABASE_URL: postgres://agent_relay:dev_password@postgres:5432/agent_relay @@ -73,7 +73,8 @@ services: NANGO_SECRET_KEY: ${NANGO_SECRET_KEY:-} # Vault master key (generate with: openssl rand -base64 32) - VAULT_MASTER_KEY: ${VAULT_MASTER_KEY:-ZGV2LXZhdWx0LWtleS1jaGFuZ2UtaW4tcHJvZHVjdGlvbg==} + # Default is "dev-vault-key-32-bytes-change!!!" 
- MUST be exactly 32 bytes when decoded + VAULT_MASTER_KEY: ${VAULT_MASTER_KEY:-ZGV2LXZhdWx0LWtleS0zMi1ieXRlcy1jaGFuZ2UhISE=} # Stripe (set in .env.local for billing features) STRIPE_SECRET_KEY: ${STRIPE_SECRET_KEY:-sk_test_placeholder} @@ -83,6 +84,12 @@ services: # Compute provider (docker for local dev) COMPUTE_PROVIDER: docker + # Flag to indicate we're running in Docker (for localhost translation) + RUNNING_IN_DOCKER: "true" + + # Force cloud mode in dashboard (prevents silent fallback to local mode) + NEXT_PUBLIC_FORCE_CLOUD_MODE: "true" + # Provider OAuth (optional) GOOGLE_CLIENT_ID: ${GOOGLE_CLIENT_ID:-} GOOGLE_CLIENT_SECRET: ${GOOGLE_CLIENT_SECRET:-} @@ -95,24 +102,29 @@ services: # Mount docker socket for local workspace provisioning - /var/run/docker.sock:/var/run/docker.sock healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:3000/health"] + test: ["CMD", "curl", "-f", "http://localhost:4567/health"] interval: 10s timeout: 5s retries: 3 # Optional: Example workspace for testing workspace: - image: ghcr.io/agentworkforce/agent-relay-workspace:latest + image: ghcr.io/agentworkforce/relay-workspace:latest build: - context: ./deploy/workspace - dockerfile: Dockerfile + context: . 
+ dockerfile: deploy/workspace/Dockerfile + user: root # Required to start SSH server before dropping privileges ports: - "3888:3888" - "3889:3889" + - "2222:2222" # SSH for port forwarding (e.g., Codex OAuth) environment: WORKSPACE_ID: local-dev-workspace SUPERVISOR_ENABLED: "true" MAX_AGENTS: "10" + # SSH for port forwarding (Codex OAuth callback tunneling) + ENABLE_SSH: "true" + SSH_PASSWORD: ${WORKSPACE_SSH_PASSWORD:-devpassword} volumes: - workspace_data:/data - ./:/workspace:ro diff --git a/docker-compose.test.yml b/docker-compose.test.yml index fe49fcc8..a4c990c5 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -71,8 +71,8 @@ services: # Session SESSION_SECRET: test-session-secret - # Vault master key (test only) - VAULT_MASTER_KEY: dGVzdC12YXVsdC1rZXktZm9yLXRlc3Rpbmctb25seQ== + # Vault master key (test only) - "test-vault-key-32-bytes-testing!" = 32 bytes + VAULT_MASTER_KEY: dGVzdC12YXVsdC1rZXktMzItYnl0ZXMtdGVzdGluZyE= # Disable external services in test mode STRIPE_SECRET_KEY: sk_test_placeholder diff --git a/docs/CLAUDE.md b/docs/CLAUDE.md deleted file mode 120000 index 47dc3e3d..00000000 --- a/docs/CLAUDE.md +++ /dev/null @@ -1 +0,0 @@ -AGENTS.md \ No newline at end of file diff --git a/docs/agent-policy-snippet.md b/docs/agent-policy-snippet.md new file mode 100644 index 00000000..8d85700d --- /dev/null +++ b/docs/agent-policy-snippet.md @@ -0,0 +1,40 @@ +# Agent Policy + +You are operating under organizational agent policies. These policies govern your interactions with other agents and tools. + +## Your Permissions + +Check the policy service for your specific permissions. If no explicit restrictions are defined, you have full permissions. + +## General Rules + +1. **Spawn Authorization**: Only spawn agents you are authorized to spawn. Check with Lead before spawning if unsure. + +2. **Message Routing**: Only message agents you are authorized to communicate with. Use proper channels. + +3. 
**Tool Usage**: Only use tools you are authorized to use. Read-only operations are generally safer. + +4. **Rate Limits**: Respect rate limits on messages. Don't spam other agents. + +## Restricted Agents + +Workers and non-lead agents typically have these restrictions: +- Cannot spawn other agents without Lead approval +- Can only message Lead, Coordinator, and their assigned peers +- Limited to read-only tools unless explicitly granted write access + +## Lead Agents + +Lead agents typically have elevated permissions: +- Can spawn Worker agents +- Can message all agents +- Can use all tools +- Responsible for enforcing policy on spawned agents + +## Enforcement + +Policy violations are blocked at runtime. If your action is blocked, you'll receive a denial message explaining why. Do not attempt to circumvent policy restrictions. + +## Checking Your Policy + +To see your current policy, ask Lead or check the dashboard at `/api/policy/:workspaceId`. diff --git a/docs/CHANGELOG.md b/docs/archive/CHANGELOG.md similarity index 100% rename from docs/CHANGELOG.md rename to docs/archive/CHANGELOG.md diff --git a/docs/CLI-SIMPLIFICATION-COMPLETE.md b/docs/archive/CLI-SIMPLIFICATION-COMPLETE.md similarity index 100% rename from docs/CLI-SIMPLIFICATION-COMPLETE.md rename to docs/archive/CLI-SIMPLIFICATION-COMPLETE.md diff --git a/docs/DESIGN_BRIDGE_STAFFING.md b/docs/archive/DESIGN_BRIDGE_STAFFING.md similarity index 100% rename from docs/DESIGN_BRIDGE_STAFFING.md rename to docs/archive/DESIGN_BRIDGE_STAFFING.md diff --git a/docs/DESIGN_V2.md b/docs/archive/DESIGN_V2.md similarity index 100% rename from docs/DESIGN_V2.md rename to docs/archive/DESIGN_V2.md diff --git a/EXECUTIVE_SUMMARY.md b/docs/archive/EXECUTIVE_SUMMARY.md similarity index 100% rename from EXECUTIVE_SUMMARY.md rename to docs/archive/EXECUTIVE_SUMMARY.md diff --git a/docs/MONETIZATION.md b/docs/archive/MONETIZATION.md similarity index 100% rename from docs/MONETIZATION.md rename to 
docs/archive/MONETIZATION.md diff --git a/docs/PROPOSAL-trajectories.md b/docs/archive/PROPOSAL-trajectories.md similarity index 100% rename from docs/PROPOSAL-trajectories.md rename to docs/archive/PROPOSAL-trajectories.md diff --git a/ROADMAP.md b/docs/archive/ROADMAP.md similarity index 100% rename from ROADMAP.md rename to docs/archive/ROADMAP.md diff --git a/docs/SCALING_ANALYSIS.md b/docs/archive/SCALING_ANALYSIS.md similarity index 100% rename from docs/SCALING_ANALYSIS.md rename to docs/archive/SCALING_ANALYSIS.md diff --git a/docs/TESTING_PRESENCE_FEATURES.md b/docs/archive/TESTING_PRESENCE_FEATURES.md similarity index 100% rename from docs/TESTING_PRESENCE_FEATURES.md rename to docs/archive/TESTING_PRESENCE_FEATURES.md diff --git a/docs/TMUX_IMPLEMENTATION_NOTES.md b/docs/archive/TMUX_IMPLEMENTATION_NOTES.md similarity index 100% rename from docs/TMUX_IMPLEMENTATION_NOTES.md rename to docs/archive/TMUX_IMPLEMENTATION_NOTES.md diff --git a/docs/TMUX_IMPROVEMENTS.md b/docs/archive/TMUX_IMPROVEMENTS.md similarity index 100% rename from docs/TMUX_IMPROVEMENTS.md rename to docs/archive/TMUX_IMPROVEMENTS.md diff --git a/docs/dashboard-v2-plan.md b/docs/archive/dashboard-v2-plan.md similarity index 100% rename from docs/dashboard-v2-plan.md rename to docs/archive/dashboard-v2-plan.md diff --git a/docs/removable-code-analysis.md b/docs/archive/removable-code-analysis.md similarity index 100% rename from docs/removable-code-analysis.md rename to docs/archive/removable-code-analysis.md diff --git a/dashboard.png b/docs/dashboard.png similarity index 100% rename from dashboard.png rename to docs/dashboard.png diff --git a/docs/design/ci-failure-webhooks.md b/docs/design/ci-failure-webhooks.md new file mode 100644 index 00000000..79179217 --- /dev/null +++ b/docs/design/ci-failure-webhooks.md @@ -0,0 +1,812 @@ +# CI Failure Webhooks - Agent Notification System + +## Overview + +This document describes the architecture for automatically notifying agents when GitHub CI 
checks fail on pull requests. This enables agents to autonomously investigate and fix CI failures without human intervention. + +## Motivation + +Currently, when CI fails on a PR: +1. Developer notices the failure (manual) +2. Developer investigates logs (manual) +3. Developer fixes the issue (manual) +4. Developer pushes and waits for CI again (manual) + +With webhook-based agent notification: +1. CI fails → webhook fires +2. Agent receives failure context automatically +3. Agent investigates and pushes fix +4. CI re-runs automatically + +This closes the loop for autonomous PR maintenance. + +## Architecture + +``` +┌─────────────┐ webhook ┌─────────────────┐ +│ GitHub │ ───────────────> │ Cloud API │ +│ (CI fails) │ check_run │ /webhooks │ +└─────────────┘ completed └────────┬────────┘ + │ + │ spawn or message + ▼ + ┌─────────────────────┐ + │ Agent Relay │ + │ Daemon │ + └────────┬────────────┘ + │ + ┌────────────┼────────────┐ + ▼ ▼ ▼ + ┌────────┐ ┌────────┐ ┌────────┐ + │ Agent │ │ Agent │ │ Agent │ + │ (PR) │ │ (Lint) │ │ (Test) │ + └────────┘ └────────┘ └────────┘ +``` + +## GitHub Webhook Events + +### Relevant Events + +| Event | Trigger | Use Case | +|-------|---------|----------| +| `check_run` | Individual check completes | Fine-grained failure handling | +| `check_suite` | All checks complete | Wait for full CI before acting | +| `workflow_run` | GitHub Action completes | Action-specific handling | +| `pull_request` | PR state changes | Track PR lifecycle | + +### Recommended: `check_run` Event + +The `check_run` event provides the most actionable data: + +```json +{ + "action": "completed", + "check_run": { + "id": 123456789, + "name": "lint", + "status": "completed", + "conclusion": "failure", + "output": { + "title": "ESLint found 3 errors", + "summary": "Fix the following issues...", + "text": "src/foo.ts:10:5 - error: ...", + "annotations": [ + { + "path": "src/foo.ts", + "start_line": 10, + "end_line": 10, + "annotation_level": "failure", + 
"message": "Unexpected console statement" + } + ] + }, + "pull_requests": [ + { + "number": 55, + "head": { + "ref": "feature-branch", + "sha": "abc123" + } + } + ] + }, + "repository": { + "full_name": "org/repo" + } +} +``` + +## Implementation + +### 1. Webhook Endpoint + +```typescript +// src/cloud/api/webhooks.ts + +import { Router } from 'express'; +import crypto from 'crypto'; + +export const webhookRouter = Router(); + +/** + * Verify GitHub webhook signature + */ +function verifyGitHubSignature( + payload: string, + signature: string, + secret: string +): boolean { + const expected = `sha256=${crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex')}`; + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expected) + ); +} + +/** + * GitHub webhook handler for CI failures + */ +webhookRouter.post('/github/ci', async (req, res) => { + const event = req.headers['x-github-event'] as string; + const signature = req.headers['x-hub-signature-256'] as string; + const payload = JSON.stringify(req.body); + + // Verify webhook authenticity + const secret = process.env.GITHUB_WEBHOOK_SECRET; + if (secret && !verifyGitHubSignature(payload, signature, secret)) { + return res.status(401).json({ error: 'Invalid signature' }); + } + + // Handle check_run events + if (event === 'check_run') { + await handleCheckRunEvent(req.body); + } + + // Handle workflow_run events + if (event === 'workflow_run') { + await handleWorkflowRunEvent(req.body); + } + + res.status(200).json({ received: true }); +}); +``` + +### 2. 
Check Run Handler + +```typescript +// src/cloud/api/ci-handlers.ts + +import { db } from '../db'; +import { spawnAgent, messageAgent } from '../services/agent-spawner'; + +interface CheckRunPayload { + action: string; + check_run: { + id: number; + name: string; + status: string; + conclusion: string | null; + output: { + title: string; + summary: string; + text?: string; + annotations?: Array<{ + path: string; + start_line: number; + end_line: number; + annotation_level: string; + message: string; + }>; + }; + pull_requests: Array<{ + number: number; + head: { ref: string; sha: string }; + }>; + }; + repository: { + full_name: string; + clone_url: string; + }; +} + +/** + * Handle check_run webhook events + */ +export async function handleCheckRunEvent(payload: CheckRunPayload) { + const { action, check_run, repository } = payload; + + // Only handle completed, failed checks + if (action !== 'completed') return; + if (check_run.conclusion !== 'failure') return; + + // Only handle checks on PRs + if (check_run.pull_requests.length === 0) return; + + const pr = check_run.pull_requests[0]; + const failureContext = buildFailureContext(payload); + + // Check if there's already an agent working on this PR + const existingAgent = await findAgentForPR(repository.full_name, pr.number); + + if (existingAgent) { + // Message the existing agent about the failure + await messageAgent(existingAgent.id, { + type: 'ci_failure', + ...failureContext, + }); + } else { + // Spawn a new agent to handle the failure + await spawnCIFixAgent(failureContext); + } +} + +/** + * Build structured context from check run failure + */ +function buildFailureContext(payload: CheckRunPayload) { + const { check_run, repository } = payload; + const pr = check_run.pull_requests[0]; + + return { + repository: repository.full_name, + cloneUrl: repository.clone_url, + prNumber: pr.number, + branch: pr.head.ref, + commitSha: pr.head.sha, + checkName: check_run.name, + checkId: check_run.id, + 
failureTitle: check_run.output.title, + failureSummary: check_run.output.summary, + failureDetails: check_run.output.text, + annotations: check_run.output.annotations || [], + }; +} +``` + +### 3. Agent Spawner + +```typescript +// src/cloud/services/agent-spawner.ts + +import { WorkspaceProvisioner } from '../provisioner'; + +interface CIFailureContext { + repository: string; + cloneUrl: string; + prNumber: number; + branch: string; + commitSha: string; + checkName: string; + checkId: number; + failureTitle: string; + failureSummary: string; + failureDetails?: string; + annotations: Array<{ + path: string; + start_line: number; + end_line: number; + message: string; + }>; +} + +/** + * Spawn an agent to fix CI failures + */ +export async function spawnCIFixAgent(context: CIFailureContext) { + const prompt = buildAgentPrompt(context); + + // Find or create workspace for this repository + const workspace = await findOrCreateWorkspace(context.repository); + + // Spawn agent in the workspace + await workspace.spawnAgent({ + name: `ci-fix-${context.checkName}-${context.prNumber}`, + prompt, + branch: context.branch, + workingDirectory: `/workspace/repos/${context.repository}`, + }); +} + +/** + * Build the prompt for the CI fix agent + */ +function buildAgentPrompt(context: CIFailureContext): string { + const annotationsList = context.annotations + .map(a => `- ${a.path}:${a.start_line} - ${a.message}`) + .join('\n'); + + return ` +# CI Failure Fix Task + +A CI check has failed on PR #${context.prNumber} in ${context.repository}. + +## Failure Details + +**Check Name:** ${context.checkName} +**Title:** ${context.failureTitle} +**Summary:** ${context.failureSummary} + +${context.failureDetails ? `**Details:**\n${context.failureDetails}` : ''} + +${annotationsList ? `## Annotations\n\n${annotationsList}` : ''} + +## Your Task + +1. Checkout the branch: \`${context.branch}\` +2. Analyze the failure based on the annotations and error messages +3. 
Fix the issues in the affected files +4. Run the relevant checks locally to verify the fix +5. Commit and push your changes with a clear commit message +6. Report back with a summary of what was fixed + +## Important + +- Only fix the specific issues causing the CI failure +- Do not refactor or improve unrelated code +- If you cannot fix the issue, explain why and what manual intervention is needed +`.trim(); +} +``` + +### 4. Agent Notification via Relay + +For agents already working on a PR, send failure notifications through the relay system: + +```typescript +// src/cloud/services/agent-notifier.ts + +import { RelayClient } from '../../relay/client'; + +interface CIFailureMessage { + type: 'ci_failure'; + checkName: string; + failureTitle: string; + failureSummary: string; + annotations: Array<{ + path: string; + start_line: number; + message: string; + }>; +} + +/** + * Notify an agent about CI failure via relay message + */ +export async function notifyAgentOfCIFailure( + agentId: string, + failure: CIFailureMessage +) { + const relay = new RelayClient(); + + const message = formatCIFailureMessage(failure); + + await relay.sendMessage({ + to: agentId, + content: message, + priority: 'high', + thread: `ci-failure-${failure.checkName}`, + }); +} + +function formatCIFailureMessage(failure: CIFailureMessage): string { + const annotations = failure.annotations + .slice(0, 10) // Limit to first 10 + .map(a => ` - ${a.path}:${a.start_line}: ${a.message}`) + .join('\n'); + + return ` +CI FAILURE: ${failure.checkName} + +${failure.failureTitle} + +${failure.failureSummary} + +${annotations ? `Issues:\n${annotations}` : ''} + +Please investigate and fix these issues, then push your changes. 
+`.trim(); +} +``` + +## Configuration + +### Workspace Settings + +Repositories can configure CI webhook behavior in `.relay/config.json`: + +```json +{ + "ciWebhooks": { + "enabled": true, + "autoFix": { + "lint": true, + "typecheck": true, + "test": false + }, + "notifyExistingAgent": true, + "spawnNewAgent": true, + "maxConcurrentAgents": 3, + "cooldownMinutes": 5 + } +} +``` + +### Check Name Mapping + +Map CI check names to fix strategies: + +```json +{ + "ciWebhooks": { + "checkStrategies": { + "lint": { + "autoFix": true, + "command": "npm run lint:fix", + "agentProfile": "linter" + }, + "typecheck": { + "autoFix": true, + "command": "npm run typecheck", + "agentProfile": "typescript-expert" + }, + "test": { + "autoFix": false, + "notifyOnly": true, + "agentProfile": "tester" + } + } + } +} +``` + +## Agent Profiles for CI Fixes + +### Lint Fix Agent + +```yaml +# .claude/agents/lint-fixer.md +--- +name: LintFixer +description: Fixes linting errors automatically +tools: + - Read + - Edit + - Bash +model: haiku +--- + +You are a code quality specialist. Your job is to fix linting errors. + +## Approach + +1. Read the files with errors +2. Understand the linting rule being violated +3. Fix the code to comply with the rule +4. Run the linter to verify the fix +5. Commit with message: "fix: resolve lint errors" + +## Rules + +- Fix only the specific errors reported +- Do not change code style beyond what's needed +- Do not add or remove features +- If a rule seems wrong, fix it anyway (discuss rule changes separately) +``` + +### Test Fix Agent + +```yaml +# .claude/agents/test-fixer.md +--- +name: TestFixer +description: Investigates and fixes failing tests +tools: + - Read + - Edit + - Bash + - Grep +model: sonnet +--- + +You are a testing specialist. Your job is to fix failing tests. + +## Approach + +1. Run the failing test to see the actual error +2. 
Determine if the issue is: + - Test is wrong (update the test) + - Code is wrong (fix the code) + - Environment issue (fix setup) +3. Apply the minimal fix +4. Run the test again to verify +5. Run the full test suite to check for regressions +6. Commit with descriptive message + +## Rules + +- Prefer fixing code over changing tests +- If changing tests, explain why in the commit message +- Never delete tests to make CI pass +- If stuck, report the issue instead of guessing +``` + +## Database Schema + +Track CI failure events and agent responses: + +```sql +-- CI failure events +CREATE TABLE ci_failure_events ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + workspace_id UUID REFERENCES workspaces(id), + repository TEXT NOT NULL, + pr_number INTEGER NOT NULL, + check_name TEXT NOT NULL, + check_id BIGINT NOT NULL, + conclusion TEXT NOT NULL, + failure_title TEXT, + failure_summary TEXT, + annotations JSONB, + created_at TIMESTAMP DEFAULT NOW() +); + +-- Agent responses to CI failures +CREATE TABLE ci_fix_attempts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + failure_event_id UUID REFERENCES ci_failure_events(id), + agent_id TEXT NOT NULL, + agent_name TEXT NOT NULL, + status TEXT NOT NULL, -- 'pending', 'in_progress', 'success', 'failed' + commit_sha TEXT, + error_message TEXT, + started_at TIMESTAMP DEFAULT NOW(), + completed_at TIMESTAMP +); + +-- Indexes +CREATE INDEX idx_ci_failures_repo_pr ON ci_failure_events(repository, pr_number); +CREATE INDEX idx_ci_failures_created ON ci_failure_events(created_at); +CREATE INDEX idx_ci_fix_attempts_status ON ci_fix_attempts(status); +``` + +## API Endpoints + +### Webhook Registration + +``` +POST /api/webhooks/github/register +{ + "repository": "org/repo", + "events": ["check_run", "workflow_run"], + "secret": "webhook-secret" +} +``` + +### CI Failure History + +``` +GET /api/ci-failures?repository=org/repo&pr=55 + +Response: +{ + "failures": [ + { + "id": "...", + "checkName": "lint", + "failureTitle": 
"ESLint found 3 errors", + "createdAt": "2025-01-04T...", + "fixAttempt": { + "agentName": "ci-fix-lint-55", + "status": "success", + "commitSha": "def456" + } + } + ] +} +``` + +### Manual Trigger + +``` +POST /api/ci-failures/retry +{ + "failureEventId": "...", + "agentProfile": "lint-fixer" +} +``` + +## Security Considerations + +### Webhook Verification + +Always verify webhook signatures: + +```typescript +const signature = req.headers['x-hub-signature-256']; +const payload = JSON.stringify(req.body); +const expected = `sha256=${crypto + .createHmac('sha256', WEBHOOK_SECRET) + .update(payload) + .digest('hex')}`; + +if (!crypto.timingSafeEqual(Buffer.from(signature), Buffer.from(expected))) { + throw new Error('Invalid webhook signature'); +} +``` + +### Rate Limiting + +Prevent abuse with rate limits: + +```typescript +const rateLimiter = new RateLimiter({ + // Max 10 agent spawns per repo per hour + key: (req) => `ci-spawn:${req.body.repository.full_name}`, + maxRequests: 10, + windowMs: 60 * 60 * 1000, +}); +``` + +### Agent Permissions + +CI fix agents should have limited permissions: + +```yaml +permissions: + tools: + - Read + - Edit + - Bash + bash: + allowedCommands: + - npm + - git + - eslint + blockedCommands: + - rm -rf + - curl + - wget + files: + writable: + - "src/**" + - "test/**" + readonly: + - "package.json" + - ".github/**" +``` + +## Monitoring & Observability + +### Metrics to Track + +- `ci_webhook_received_total` - Total webhooks received by event type +- `ci_failure_events_total` - Total CI failures by check name +- `ci_fix_attempts_total` - Fix attempts by status (success/failed) +- `ci_fix_duration_seconds` - Time from failure to fix commit +- `ci_agent_spawn_total` - Agents spawned for CI fixes + +### Alerts + +```yaml +alerts: + - name: HighCIFailureRate + condition: rate(ci_failure_events_total[1h]) > 10 + severity: warning + message: "High CI failure rate detected" + + - name: AgentFixFailures + condition: 
rate(ci_fix_attempts_total{status="failed"}[1h]) > 5 + severity: warning + message: "Agents failing to fix CI issues" +``` + +## Issue and Comment Handling + +In addition to CI failures, agents can respond to GitHub issues and @mentions in comments. + +### Supported Events + +| Event | Purpose | +|-------|---------| +| `issues` | Track new issues for agent assignment | +| `issue_comment` | Detect @mentions in issue/PR comments | +| `pull_request_review_comment` | Detect @mentions in PR review comments | + +### @Mention Detection + +When a comment contains `@agent-name`, the system: + +1. Extracts all @mentions from the comment text +2. Checks if the mentioned name is a known agent type +3. Creates a mention record in the database +4. Routes to the appropriate agent for response + +**Known Agent Types:** +- `@agent-relay` - General purpose agent +- `@lead` - Lead agent for coordination +- `@developer` - Developer agent for coding tasks +- `@reviewer` - Code review agent +- `@ci-fix` - CI failure fixing agent +- `@debugger` - Bug investigation agent +- `@docs` - Documentation agent +- `@test` - Test writing agent +- `@refactor` - Code refactoring agent + +### Issue Assignment + +When a new issue is opened: + +1. Record the issue in `issue_assignments` table +2. Extract priority from labels (p0-p3, critical/high/medium/low) +3. Optionally auto-assign based on label mapping +4. 
Agent receives issue context and works on a fix + +### Configuration + +Configure agent triggers per repository: + +```json +{ + "agentTriggers": { + "mentionableAgents": ["lead", "ci-fix", "reviewer"], + "defaultIssueAgent": "developer", + "autoAssignLabels": { + "bug": "debugger", + "enhancement": "developer", + "documentation": "docs" + }, + "autoRespondToMentions": true, + "maxResponsesPerHour": 20, + "allowedTriggerUsers": [] + } +} +``` + +### Database Schema + +```sql +-- Issue assignments +CREATE TABLE issue_assignments ( + id UUID PRIMARY KEY, + repository TEXT NOT NULL, + issue_number BIGINT NOT NULL, + issue_title TEXT NOT NULL, + issue_body TEXT, + agent_id TEXT, + agent_name TEXT, + status TEXT DEFAULT 'pending', + resolution TEXT, + linked_pr_number BIGINT, + labels TEXT[], + priority TEXT, + created_at TIMESTAMP DEFAULT NOW(), + UNIQUE(repository, issue_number) +); + +-- Comment mentions +CREATE TABLE comment_mentions ( + id UUID PRIMARY KEY, + repository TEXT NOT NULL, + source_type TEXT NOT NULL, -- issue_comment, pr_comment, pr_review + source_id BIGINT NOT NULL, + issue_or_pr_number BIGINT NOT NULL, + comment_body TEXT NOT NULL, + author_login TEXT NOT NULL, + mentioned_agent TEXT NOT NULL, + status TEXT DEFAULT 'pending', + response_comment_id BIGINT, + created_at TIMESTAMP DEFAULT NOW() +); +``` + +### Security + +- Rate limit @mentions to prevent abuse +- Optionally restrict which users can trigger agents +- Agents cannot respond to their own comments (prevent loops) +- Bot accounts are ignored by default + +## Future Enhancements + +1. **Learning from Fixes**: Track successful fixes to build patterns for common errors + +2. **Pre-emptive Checks**: Run checks locally before push to catch issues early + +3. **Fix Suggestions**: Instead of auto-fixing, suggest fixes for human review + +4. **Cross-repo Learning**: Apply fix patterns learned in one repo to others + +5. 
**Escalation Paths**: Auto-escalate to humans if agent can't fix after N attempts + +6. **Slack/Discord Integration**: Notify team channels about agent activity + +7. **PR Review Automation**: Auto-request reviews from appropriate agents + +## References + +- [GitHub Webhooks Documentation](https://docs.github.com/en/webhooks) +- [GitHub Checks API](https://docs.github.com/en/rest/checks) +- [Agent Relay Protocol](./agent-relay-protocol.md) diff --git a/docs/design/comprehensive-integrations.md b/docs/design/comprehensive-integrations.md new file mode 100644 index 00000000..c5cf2a30 --- /dev/null +++ b/docs/design/comprehensive-integrations.md @@ -0,0 +1,238 @@ +# Comprehensive External Integrations + +This document outlines the plan for bidirectional integrations with external systems. + +## Current State + +We have a generic webhook system that can: +- Receive webhooks from GitHub, Linear, Slack +- Parse events into normalized format +- Match events to rules and spawn agents +- Send basic responses (comments) + +## Required Enhancements + +### 1. 
Linear Integration (Priority: High) + +**Inbound (Webhooks → Agents):** +- [x] Issue created +- [x] Issue assigned to agent +- [x] Comment with @mention +- [ ] Issue state changed +- [ ] Due date approaching +- [ ] Cycle started/ended + +**Outbound (Agents → Linear):** +- [x] Create comment on issue +- [ ] Update issue state +- [ ] Update issue assignee +- [ ] Add/remove labels +- [ ] Update issue description +- [ ] Create new issue +- [ ] Link issues + +**Agent Actions Needed:** +```typescript +// src/cloud/services/linear-integration.ts +interface LinearIntegration { + // Comments + createComment(issueId: string, body: string): Promise; + + // Issues + createIssue(teamId: string, data: CreateIssueInput): Promise; + updateIssue(issueId: string, data: UpdateIssueInput): Promise; + + // State management + setIssueState(issueId: string, stateId: string): Promise; + getAvailableStates(teamId: string): Promise; + + // Assignments + assignIssue(issueId: string, userId: string | null): Promise; + + // Labels + addLabel(issueId: string, labelId: string): Promise; + removeLabel(issueId: string, labelId: string): Promise; + + // Relations + linkIssues(issueId: string, relatedIssueId: string, type: RelationType): Promise; +} +``` + +### 2. 
Slack Integration (Priority: High) + +**Inbound:** +- [x] App mentioned +- [x] Direct message to bot +- [ ] Slash commands +- [ ] Interactive components (buttons, modals) +- [ ] File shared +- [ ] Scheduled message triggers + +**Outbound:** +- [x] Post message to channel +- [x] Reply in thread +- [ ] Update message +- [ ] Delete message +- [ ] Post with blocks (rich formatting) +- [ ] Upload file +- [ ] Create scheduled message +- [ ] Open modal/dialog + +**Agent Actions Needed:** +```typescript +// src/cloud/services/slack-integration.ts +interface SlackIntegration { + // Messages + postMessage(channel: string, text: string, options?: MessageOptions): Promise; + postBlocks(channel: string, blocks: Block[], text: string): Promise; + updateMessage(channel: string, ts: string, text: string): Promise; + replyInThread(channel: string, threadTs: string, text: string): Promise; + + // Reactions + addReaction(channel: string, ts: string, emoji: string): Promise; + + // Files + uploadFile(channels: string[], file: Buffer, filename: string): Promise; + + // Modals + openModal(triggerId: string, view: View): Promise; + updateModal(viewId: string, view: View): Promise; + + // Users + getUserInfo(userId: string): Promise; + lookupByEmail(email: string): Promise; +} +``` + +### 3. GitHub Integration (Priority: High) + +**Inbound:** +- [x] CI failure +- [x] Issue/PR comments with @mention +- [x] Issue created +- [ ] PR opened/updated +- [ ] PR review requested +- [ ] Release created +- [ ] Deployment status + +**Outbound:** +- [x] Post comment on issue/PR +- [ ] Create issue +- [ ] Create PR +- [ ] Request/dismiss review +- [ ] Merge PR +- [ ] Create/update check run +- [ ] Add labels +- [ ] Assign users +- [ ] Update PR description + +### 4. 
Jira Integration (Priority: Medium) + +**Inbound:** +- [ ] Issue created +- [ ] Issue assigned +- [ ] Issue transitioned +- [ ] Comment added + +**Outbound:** +- [ ] Create issue +- [ ] Update issue +- [ ] Transition issue +- [ ] Add comment +- [ ] Link issues + +### 5. GitLab Integration (Priority: Medium) + +Similar to GitHub with GitLab-specific events. + +### 6. Discord Integration (Priority: Low) + +Similar to Slack with Discord-specific features. + +## Implementation Plan + +### Phase 1: Core Linear Integration (This Week) +1. Create `LinearIntegration` service with full CRUD +2. Add Linear API key management in workspace settings +3. Create agent tools for Linear actions +4. Test bidirectional flow + +### Phase 2: Enhanced Slack Integration +1. Add slash command support +2. Add interactive components (buttons) +3. Add rich message formatting +4. Add modal support + +### Phase 3: Enhanced GitHub Integration +1. Add PR management +2. Add check run creation +3. Add deployment tracking + +### Phase 4: Additional Integrations +1. Jira +2. GitLab +3. Discord + +## Configuration + +### Workspace-Level Settings + +```typescript +interface WorkspaceIntegrations { + github?: { + enabled: boolean; + webhookSecret: string; + appInstallationId?: string; + }; + linear?: { + enabled: boolean; + apiKey: string; + webhookSecret: string; + teamId?: string; + }; + slack?: { + enabled: boolean; + botToken: string; + signingSecret: string; + appId?: string; + }; +} +``` + +### Agent Permissions + +```typescript +interface AgentIntegrationPermissions { + linear?: { + canComment: boolean; + canUpdateIssues: boolean; + canCreateIssues: boolean; + canAssign: boolean; + }; + slack?: { + canPost: boolean; + canUploadFiles: boolean; + channels?: string[]; // Allowed channels + }; + github?: { + canComment: boolean; + canMergePRs: boolean; + canCreateIssues: boolean; + }; +} +``` + +## Security Considerations + +1. **API Key Storage**: All API keys encrypted at rest +2. 
**Scope Limiting**: Agents only get permissions they need +3. **Audit Logging**: All external API calls logged +4. **Rate Limiting**: Respect external API rate limits +5. **Webhook Verification**: Always verify signatures + +## Testing Strategy + +1. Unit tests for parsers and responders +2. Integration tests with mock servers +3. E2E tests with sandbox accounts +4. Load testing for webhook handling diff --git a/docs/design/e2b-sandbox-integration.md b/docs/design/e2b-sandbox-integration.md new file mode 100644 index 00000000..15e804f1 --- /dev/null +++ b/docs/design/e2b-sandbox-integration.md @@ -0,0 +1,504 @@ +# E2B Sandbox Integration + +## Overview + +[E2B](https://e2b.dev) provides secure, isolated cloud sandboxes for running AI-generated code. This document outlines how we can leverage E2B to improve agent execution in Agent Relay. + +## Current Architecture + +``` +┌─────────────────────────────────────────────────────┐ +│ Cloud Service │ +│ ┌─────────────┐ ┌─────────────┐ │ +│ │ Webhooks │───▶│ Spawner │ │ +│ └─────────────┘ └──────┬──────┘ │ +└────────────────────────────┼────────────────────────┘ + │ spawn command + ▼ +┌─────────────────────────────────────────────────────┐ +│ Docker Workspace Container │ +│ ┌─────────────┐ ┌─────────────┐ │ +│ │ Daemon │───▶│ Agent │ │ +│ └─────────────┘ └─────────────┘ │ +│ - Node.js, Python, Git, gh │ +│ - AI CLIs (Claude, Codex, Gemini, etc.) │ +└─────────────────────────────────────────────────────┘ +``` + +**Pain Points:** +- Container startup time (~5-10s) +- Infrastructure management overhead +- Scaling requires container orchestration (K8s, ECS, etc.) 
+- No easy pause/resume for long-running agents + +## Proposed Architecture with E2B + +``` +┌─────────────────────────────────────────────────────┐ +│ Cloud Service │ +│ ┌─────────────┐ ┌─────────────┐ │ +│ │ Webhooks │───▶│ Spawner │ │ +│ └─────────────┘ └──────┬──────┘ │ +└────────────────────────────┼────────────────────────┘ + │ E2B SDK + ▼ +┌─────────────────────────────────────────────────────┐ +│ E2B Cloud (Managed) │ +│ ┌─────────────────────────────────────────────┐ │ +│ │ Custom Sandbox Template │ │ +│ │ - relay-workspace-v1 │ │ +│ │ - Pre-installed: Node, Python, Git, gh │ │ +│ │ - Pre-installed: Claude, Codex, Gemini │ │ +│ │ - ~150ms startup │ │ +│ └─────────────────────────────────────────────┘ │ +│ │ +│ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ +│ │ Sandbox │ │ Sandbox │ │ Sandbox │ ... │ +│ │ Agent 1 │ │ Agent 2 │ │ Agent 3 │ │ +│ └──────────┘ └──────────┘ └──────────┘ │ +└─────────────────────────────────────────────────────┘ +``` + +## Benefits + +| Aspect | Docker (Current) | E2B (Proposed) | +|--------|------------------|----------------| +| Startup time | ~5-10s | ~150ms | +| Infrastructure | Self-managed | Managed | +| Scaling | Manual/K8s | Automatic | +| Isolation | Container | Microvm | +| Pause/Resume | Not supported | Native | +| Cost model | Always-on | Pay per use | + +## Implementation Plan + +### Phase 1: E2B SDK Integration + +Add E2B SDK and create basic sandbox spawning: + +```typescript +// src/cloud/services/e2b-sandbox.ts +import { Sandbox } from '@e2b/sdk'; + +export interface SandboxConfig { + template: string; + timeout?: number; + envVars?: Record; +} + +export async function createAgentSandbox(config: SandboxConfig): Promise { + const sandbox = await Sandbox.create(config.template, { + timeoutMs: config.timeout || 60000, + envVars: config.envVars, + }); + + return sandbox; +} + +export async function runAgentInSandbox( + sandbox: Sandbox, + agentType: string, + prompt: string +): Promise<{ output: string; exitCode: number 
}> { + // Clone repo if needed + await sandbox.commands.run('git clone $REPO_URL /workspace/repo'); + + // Run the agent + const result = await sandbox.commands.run( + `claude --agent ${agentType} --prompt "${prompt}"`, + { cwd: '/workspace/repo' } + ); + + return { + output: result.stdout + result.stderr, + exitCode: result.exitCode, + }; +} +``` + +### Phase 2: Custom Sandbox Template + +Create a custom E2B template matching our workspace: + +```dockerfile +# e2b/templates/relay-workspace/Dockerfile +FROM e2b/base:latest + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + bash ca-certificates curl git python3 jq + +# Install GitHub CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt-get update && apt-get install -y gh + +# Install AI CLIs +RUN npm install -g @openai/codex @google/gemini-cli opencode-ai@latest +RUN curl -fsSL https://claude.ai/install.sh | bash +RUN curl -fsSL https://app.factory.ai/cli | sh + +ENV PATH="/root/.local/bin:$PATH" +``` + +```toml +# e2b/templates/relay-workspace/e2b.toml +[template] +name = "relay-workspace" +dockerfile = "Dockerfile" + +[template.resources] +cpu = 2 +memory = 4096 +``` + +### Phase 3: Spawner Integration + +Update spawners to use E2B: + +```typescript +// src/cloud/services/ci-agent-spawner.ts +import { createAgentSandbox, runAgentInSandbox } from './e2b-sandbox.js'; + +export async function spawnCIFixAgent(event: CIFailureEvent): Promise<void> { + // Create sandbox + const sandbox = await createAgentSandbox({ + template: 'relay-workspace', + timeout: 300000, // 5 minutes + envVars: { + REPO_URL: event.repository, + GITHUB_TOKEN: await getRepoToken(event.repositoryId), + 
CI_RUN_ID: event.checkRunId, + }, + }); + + try { + // Run CI fix agent + const result = await runAgentInSandbox( + sandbox, + 'ci-fix', + `Fix CI failure in ${event.checkName}: ${event.conclusion}` + ); + + // Post results back to GitHub + await postCIFixComment(event, result); + } finally { + // Always clean up + await sandbox.close(); + } +} +``` + +### Phase 4: Hybrid Mode + +Support both Docker (self-hosted) and E2B (cloud) execution: + +```typescript +// src/cloud/services/agent-executor.ts +export type ExecutionBackend = 'docker' | 'e2b'; + +export interface ExecutorConfig { + backend: ExecutionBackend; + e2bApiKey?: string; + dockerSocket?: string; +} + +export async function executeAgent( + config: ExecutorConfig, + agentType: string, + prompt: string, + context: ExecutionContext +): Promise<ExecutionResult> { + switch (config.backend) { + case 'e2b': + return executeInE2B(agentType, prompt, context); + case 'docker': + return executeInDocker(agentType, prompt, context); + } +} +``` + +## Configuration + +Add E2B configuration to workspace settings: + +```typescript +// Workspace settings +interface WorkspaceSettings { + execution: { + backend: 'docker' | 'e2b' | 'hybrid'; + e2b?: { + apiKey: string; + template: string; + defaultTimeout: number; + }; + docker?: { + image: string; + socket: string; + }; + }; +} +``` + +## Cost Considerations + +E2B pricing is based on sandbox-seconds. 
Estimated costs: + +| Scenario | Docker (self-hosted) | E2B | +|----------|---------------------|-----| +| CI fix agent (5 min) | ~$0.01 compute | ~$0.05 | +| Code review (2 min) | ~$0.004 | ~$0.02 | +| Long task (30 min) | ~$0.06 | ~$0.30 | + +**Recommendation:** Use E2B for: +- Short-lived tasks (CI fixes, code review) +- Burst workloads (many concurrent agents) +- Teams without container infrastructure + +Use Docker for: +- Long-running agents +- High-volume workloads +- Self-hosted/air-gapped environments + +## Security + +E2B sandboxes provide: +- **Microvm isolation** - stronger than containers +- **Network isolation** - configurable internet access +- **Ephemeral by default** - no persistent state unless explicit +- **No host access** - sandboxes can't reach host systems + +## Migration Path + +1. **Week 1**: Add E2B SDK, create basic integration +2. **Week 2**: Build custom template, test with CI agents +3. **Week 3**: Add hybrid mode, workspace configuration +4. **Week 4**: Documentation, monitoring, rollout + +## Open Questions + +1. **Template caching**: How often do we need to rebuild templates? +2. **Secrets management**: How to inject API keys securely? +3. **Artifact persistence**: How to preserve agent outputs? +4. **Monitoring**: How to track sandbox usage and costs? 
+ +## Advanced Capabilities + +### E2B Desktop - Full GUI/Browser Control + +[E2B Desktop](https://github.com/e2b-dev/desktop) provides complete Linux desktop environments: + +**Features:** +- Xfce4 desktop environment +- Pre-installed Chrome, Firefox, VS Code +- VNC streaming for real-time viewing +- Mouse/keyboard control via xdotool +- Screenshot capture for visual AI + +```typescript +// src/cloud/services/e2b-desktop.ts +import { Desktop } from '@e2b/desktop'; + +export async function runBrowserTest( + testScript: string, + url: string +): Promise<{ screenshots: string[]; result: string }> { + const desktop = await Desktop.create(); + + try { + // Open browser + await desktop.launch('google-chrome', [url]); + await desktop.wait(2000); + + // Take screenshot + const screenshot = await desktop.screenshot(); + + // Run test script with Playwright + const result = await desktop.commands.run(`npx playwright test ${testScript}`); + + return { + screenshots: [screenshot], + result: result.stdout, + }; + } finally { + await desktop.close(); + } +} +``` + +**Use cases:** +- Visual regression testing +- E2E browser tests +- GUI automation +- Screen recording for demos + +### Browserbase Integration - Serverless Browsers + +[Browserbase](https://browserbase.com) provides dedicated serverless browser infrastructure: + +**Features:** +- Spin up 1000s of browsers in milliseconds +- Native Playwright/Puppeteer/Selenium support +- Built-in captcha solving +- Residential proxies +- Session recording & debugging +- SOC-2 & HIPAA compliant + +```typescript +// src/cloud/services/browserbase.ts +import { chromium, type Page } from 'playwright'; + +export async function runWithBrowserbase( + script: (page: Page) => Promise<void> +): Promise<void> { + const browser = await chromium.connectOverCDP( + `wss://connect.browserbase.com?apiKey=${process.env.BROWSERBASE_API_KEY}` + ); + + try { + const context = browser.contexts()[0]; + const page = context.pages()[0]; + await script(page); + } finally { + await 
browser.close(); + } +} +``` + +**Use cases:** +- Web scraping agents +- Form automation +- Testing production sites +- Multi-browser testing + +### Docker MCP Catalog - 200+ Tools + +E2B sandboxes now include access to [Docker's MCP Catalog](https://www.docker.com/blog/docker-e2b-building-the-future-of-trusted-ai/): + +**Available tools include:** +- GitHub, GitLab +- Perplexity, Browserbase +- ElevenLabs, Stripe +- Slack, Discord +- And 200+ more + +```typescript +// Agents can use MCP tools within sandboxes +const sandbox = await Sandbox.create('relay-workspace-mcp'); +await sandbox.commands.run(` + # Use GitHub MCP tool + mcp-github create-issue --repo user/repo --title "Bug fix" +`); +``` + +### Hybrid Architecture for Advanced Agents + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ Agent Relay Cloud │ +│ ┌─────────────┐ │ +│ │ Spawner │ │ +│ └──────┬──────┘ │ +│ │ │ +│ ├──────────────────┬──────────────────┐ │ +│ ▼ ▼ ▼ │ +│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │ +│ │ E2B Sandbox │ │ E2B Desktop │ │ Browserbase │ │ +│ │ (Code exec) │ │ (GUI/VNC) │ │ (Browsers) │ │ +│ └─────────────┘ └─────────────┘ └─────────────┘ │ +│ │ │ │ │ +│ └──────────────────┴──────────────────┘ │ +│ │ │ +│ MCP Tool Gateway │ +│ (200+ integrations) │ +└─────────────────────────────────────────────────────────────────┘ +``` + +### Agent Capabilities Matrix + +| Capability | E2B Sandbox | E2B Desktop | Browserbase | +|------------|-------------|-------------|-------------| +| Code execution | ✅ | ✅ | ❌ | +| Terminal/CLI | ✅ | ✅ | ❌ | +| File system | ✅ | ✅ | Limited | +| GUI apps | ❌ | ✅ | ❌ | +| Browser control | Limited | ✅ | ✅ | +| Visual testing | ❌ | ✅ | ✅ | +| Parallel scale | Good | Limited | Excellent | +| Cost | Low | Medium | Medium | + +### Workspace Configuration + +```typescript +interface WorkspaceExecutionConfig { + // Default execution backend + default: 'e2b' | 'e2b-desktop' | 'docker'; + + // E2B configuration + e2b?: { + apiKey: 
string; + template: string; + timeout: number; + enableMcp: boolean; + }; + + // E2B Desktop for GUI tasks + e2bDesktop?: { + apiKey: string; + resolution: { width: number; height: number }; + vncEnabled: boolean; + }; + + // Browserbase for web automation + browserbase?: { + apiKey: string; + proxy?: 'residential' | 'datacenter'; + captchaSolver: boolean; + }; + + // Agent-specific overrides + agentOverrides?: { + [agentType: string]: { + backend: 'e2b' | 'e2b-desktop' | 'browserbase' | 'docker'; + capabilities?: string[]; + }; + }; +} + +// Example configuration +const config: WorkspaceExecutionConfig = { + default: 'e2b', + e2b: { + apiKey: process.env.E2B_API_KEY!, + template: 'relay-workspace', + timeout: 300000, + enableMcp: true, + }, + e2bDesktop: { + apiKey: process.env.E2B_API_KEY!, + resolution: { width: 1920, height: 1080 }, + vncEnabled: true, + }, + browserbase: { + apiKey: process.env.BROWSERBASE_API_KEY!, + captchaSolver: true, + }, + agentOverrides: { + 'visual-tester': { backend: 'e2b-desktop' }, + 'web-scraper': { backend: 'browserbase' }, + 'ci-fix': { backend: 'e2b' }, + }, +}; +``` + +## References + +- [E2B Documentation](https://e2b.dev/docs) +- [E2B GitHub](https://github.com/e2b-dev/E2B) +- [E2B Desktop](https://github.com/e2b-dev/desktop) +- [Custom Templates Guide](https://e2b.dev/docs/sandbox-template) +- [Docker + E2B Partnership](https://www.docker.com/blog/docker-e2b-building-the-future-of-trusted-ai/) +- [Browserbase](https://browserbase.com) +- [How Manus Uses E2B](https://e2b.dev/blog/how-manus-uses-e2b-to-provide-agents-with-virtual-computers) diff --git a/docs/design/github-app-permissions.md b/docs/design/github-app-permissions.md new file mode 100644 index 00000000..2b086e8a --- /dev/null +++ b/docs/design/github-app-permissions.md @@ -0,0 +1,264 @@ +# GitHub App Permissions + +This document describes the GitHub App permissions required for Agent Relay's features, particularly the CI failure webhook integration. 
+ +## Overview + +Agent Relay uses a GitHub App to: +1. Receive webhook events (installations, PRs, CI failures) +2. Access repository code for cloning/syncing +3. Create commits and push fixes +4. Interact with PRs (comments, reviews) + +## Required Permissions + +### Repository Permissions + +| Permission | Access Level | Purpose | +|------------|--------------|---------| +| **Contents** | Read & Write | Clone repos, push commits, read files | +| **Pull requests** | Read & Write | Read PR details, create/update PRs, comment | +| **Checks** | Read | Receive check_run webhooks, read failure details | +| **Actions** | Read | Receive workflow_run webhooks, read logs | +| **Commit statuses** | Read | Read status checks, understand CI state | +| **Metadata** | Read | Basic repo info (required for all apps) | + +### Organization Permissions + +| Permission | Access Level | Purpose | +|------------|--------------|---------| +| **Members** | Read | Identify organization members for access control | + +### Account Permissions + +| Permission | Access Level | Purpose | +|------------|--------------|---------| +| **Email addresses** | Read | User identification, notifications | + +## Webhook Events + +The following webhook events should be enabled: + +### Required Events + +| Event | Purpose | +|-------|---------| +| `installation` | Track app installations/uninstallations | +| `installation_repositories` | Track repo access changes | +| `check_run` | **CI failure detection** - triggers agent spawn | +| `workflow_run` | Workflow-level failure tracking | +| `push` | Detect new commits for sync | +| `pull_request` | Track PR lifecycle | + +### Optional Events + +| Event | Purpose | +|-------|---------| +| `issues` | Future: issue-to-agent assignment | +| `issue_comment` | Future: agent @mentions | +| `pull_request_review` | Future: review request handling | +| `check_suite` | Aggregate check status | + +## Configuration Steps + +### 1. Create GitHub App + +1. 
Go to GitHub Settings > Developer settings > GitHub Apps +2. Click "New GitHub App" +3. Fill in basic info: + - **Name**: Agent Relay (or your instance name) + - **Homepage URL**: Your dashboard URL + - **Webhook URL**: `https://your-domain.com/api/webhooks/github` + +### 2. Set Permissions + +Under "Permissions & events": + +**Repository permissions:** +- Contents: Read and write +- Pull requests: Read and write +- Checks: Read-only +- Actions: Read-only +- Commit statuses: Read-only +- Metadata: Read-only (default) + +**Organization permissions:** +- Members: Read-only + +**Account permissions:** +- Email addresses: Read-only + +### 3. Subscribe to Events + +Check the following events: +- [x] Check run +- [x] Workflow run +- [x] Installation +- [x] Installation and repositories +- [x] Push +- [x] Pull request + +### 4. Generate Keys + +1. Generate a private key (downloads .pem file) +2. Note the App ID +3. Generate a client secret for OAuth + +### 5. Configure Agent Relay + +Set environment variables: + +```bash +# GitHub App credentials +GITHUB_APP_ID=123456 +GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\n..." +GITHUB_CLIENT_ID=Iv1.abc123 +GITHUB_CLIENT_SECRET=secret123 + +# Webhook secret (generate a random string) +GITHUB_WEBHOOK_SECRET=whsec_random_string_here +``` + +## Permission Rationale + +### Why Contents: Write? + +Agents need to push fixes to branches. This includes: +- Creating new commits +- Pushing to existing branches +- Creating new branches for fixes + +### Why Checks: Read (not Write)? + +We only receive check failure events and read results. We don't: +- Create our own checks +- Update check status + +CI runs in GitHub Actions and creates its own checks. + +### Why Pull Requests: Write? + +Agents may need to: +- Comment on PRs with fix summaries +- Request reviews after fixes +- Update PR descriptions + +### Why Actions: Read? 
+ +For workflow_run events that provide: +- Workflow-level failure context +- Access to workflow logs (future) + +## Security Considerations + +### Webhook Secret + +Always configure a webhook secret: + +```typescript +function verifyGitHubSignature(payload: string, signature: string, secret: string): boolean { + const expected = `sha256=${crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex')}`; + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expected) + ); +} +``` + +### Private Key Storage + +- Never commit the private key to version control +- Use secure secret management (Vault, AWS Secrets Manager, etc.) +- Rotate keys periodically + +### Installation Scope + +When users install the app: +- Recommend "Only select repositories" over "All repositories" +- Document which repos will be monitored +- Allow easy un-installation + +### Token Expiry + +GitHub App installation tokens expire after 1 hour: +- Cache tokens with expiry tracking +- Refresh before expiration +- Handle 401 errors with token refresh + +## Minimal Permissions Option + +For users who want minimal permissions: + +| Permission | Access | Notes | +|------------|--------|-------| +| Contents | Read | Can clone, cannot push | +| Pull requests | Read | Can read PRs, cannot comment | +| Checks | Read | Receive failures | + +With minimal permissions: +- Agents can analyze failures but cannot push fixes +- Manual intervention required for commits +- Good for "notify only" mode + +## Events Flow + +``` +┌──────────────┐ webhook ┌─────────────────┐ +│ GitHub CI │ ──────────────────>│ Agent Relay │ +│ (check_run │ │ /webhooks │ +│ failed) │ └────────┬────────┘ +└──────────────┘ │ + │ verify signature + │ parse payload + ▼ + ┌─────────────────┐ + │ Record failure │ + │ in database │ + └────────┬────────┘ + │ + │ spawn agent + ▼ + ┌─────────────────┐ + │ Agent fixes │ + │ and pushes │ + └────────┬────────┘ + │ + │ uses Contents:write + ▼ + ┌─────────────────┐ + │ 
GitHub CI │ + │ re-runs │ + └─────────────────┘ +``` + +## Troubleshooting + +### Webhook Not Received + +1. Check webhook URL is correct and accessible +2. Verify webhook secret matches configuration +3. Check GitHub App webhook delivery logs +4. Ensure firewall allows GitHub IPs + +### Permission Denied Errors + +1. Verify app is installed on the repository +2. Check installation hasn't been suspended +3. Confirm required permissions are granted +4. Regenerate installation token + +### CI Events Not Triggering + +1. Verify `check_run` event is subscribed +2. Check check is from a GitHub Action (not external CI) +3. Ensure webhook URL is receiving events (check delivery logs) + +## References + +- [GitHub Apps documentation](https://docs.github.com/en/apps) +- [Webhook events and payloads](https://docs.github.com/en/webhooks/webhook-events-and-payloads) +- [GitHub App permissions](https://docs.github.com/en/rest/overview/permissions-required-for-github-apps) +- [Check runs API](https://docs.github.com/en/rest/checks/runs) diff --git a/docs/tasks/global-skills-system.tasks.md b/docs/tasks/global-skills-system.tasks.md new file mode 100644 index 00000000..5360f4de --- /dev/null +++ b/docs/tasks/global-skills-system.tasks.md @@ -0,0 +1,230 @@ +# Agent Relay Skills via PRPM + +Leverage PRPM (Prompt Package Manager) to distribute agent-relay skills that users can opt into. 
+ +## Overview + +PRPM already provides: +- Registry at `registry.prpm.dev` +- `prpm install @org/package` CLI +- Lockfile tracking (`prpm.lock`) +- Format conversion (claude, cursor, agents.md) +- Subtypes: skill, agent, rule, snippet +- Lazy loading (`eager: false`) + +**We should publish `@agent-relay/*` packages to PRPM instead of building custom infrastructure.** + +## Current State + +Already using prpm in this repo (see `prpm.lock`): +- `@agent-relay/agent-relay-snippet` - Relay messaging syntax +- `@agent-relay/agent-relay-protocol` - Full protocol docs +- Various skills from `@prpm/*`, `@anthropic/*`, `@my-senior-dev/*` + +## Problem: Global vs Project Skills + +PRPM installs to project directories (`.claude/skills/`). We need: +- Skills NOT in project source control +- Skills available across all projects +- Per-user opt-in, not per-project + +### Potential Solutions + +**A. PRPM Global Flag (feature request)** +```bash +prpm install --global @agent-relay/browser-testing +# Installs to ~/.prpm/skills/ or ~/.config/prpm/skills/ +``` + +**B. User-level prpm.lock** +``` +~/.agent-relay/ +├── prpm.lock # User's global skills +└── .claude/skills/ # Installed skill content +``` +Agent reads both project and user prpm.lock. + +**C. Workspace Bundle** +Cloud workspaces come with @agent-relay skills pre-installed. +Users don't manage - just available in cloud. + +## Proposed Skills to Publish + +### @agent-relay/workspace-capabilities +Documentation for browser testing + container spawning. + +```json +{ + "name": "@agent-relay/workspace-capabilities", + "version": "1.0.0", + "description": "Browser testing (Playwright, VNC) and container spawning (Docker) for agent-relay workspaces", + "format": "claude", + "subtype": "skill", + "eager": false, + "tags": ["agent-relay", "browser-testing", "docker", "workspace"], + "files": [".claude/skills/workspace-capabilities/SKILL.md"] +} +``` + +### @agent-relay/browser-testing +Focused Playwright/screenshot skill. 
+ +### @agent-relay/container-spawning +Focused Docker/container skill. + +### @agent-relay/linear-integration +Linear webhook/API patterns. + +### @agent-relay/slack-integration +Slack bot patterns. + +### @agent-relay/workspace-pack (collection) +Bundle of all workspace skills. + +```json +{ + "collections": [{ + "id": "workspace-pack", + "name": "Agent Relay Workspace Pack", + "description": "All workspace capability skills", + "packages": [ + { "packageId": "@agent-relay/workspace-capabilities" }, + { "packageId": "@agent-relay/browser-testing" }, + { "packageId": "@agent-relay/container-spawning" } + ] + }] +} +``` + +## Tasks + +### prpm-global-research +- [ ] Check if prpm supports `--global` flag +- [ ] If not, evaluate: feature request vs workaround +- [ ] Document findings + +Dependencies: none +Priority: high + +### user-skills-directory +- [ ] Define `~/.agent-relay/skills/` structure +- [ ] Implement reading from user directory in daemon +- [ ] Merge user + project skills in agent manifest + +Dependencies: prpm-global-research +Priority: high + +### publish-workspace-capabilities +- [ ] Create skill content (SKILL.md) +- [ ] Create prpm.json manifest +- [ ] Test locally with `prpm install .` +- [ ] Publish to registry.prpm.dev + +Dependencies: none (can do in parallel) +Priority: high + +### publish-browser-testing +- [ ] Extract browser-specific content from workspace-capabilities +- [ ] Create focused SKILL.md +- [ ] Publish to registry + +Dependencies: publish-workspace-capabilities +Priority: medium + +### publish-container-spawning +- [ ] Extract container-specific content +- [ ] Create focused SKILL.md +- [ ] Publish to registry + +Dependencies: publish-workspace-capabilities +Priority: medium + +### workspace-pack-collection +- [ ] Create collection prpm.json +- [ ] Bundle all workspace skills +- [ ] Publish collection + +Dependencies: publish-browser-testing, publish-container-spawning +Priority: low + +### cloud-workspace-provisioning +- [ ] 
Pre-install @agent-relay skills in cloud workspace images +- [ ] Or: fetch on workspace creation +- [ ] Make configurable per-workspace + +Dependencies: publish-workspace-capabilities +Priority: medium + +## Example Skill Content + +```markdown +--- +name: workspace-capabilities +description: Browser testing and container spawning for agent-relay workspaces +--- + +# Workspace Capabilities + +This workspace may have additional capabilities available. + +## Checking Availability + +Before using these features, verify they're available: + +\`\`\`typescript +// Check for browser testing +const hasBrowser = process.env.DISPLAY !== undefined; + +// Check for container spawning +const hasDocker = existsSync('/var/run/docker.sock'); +\`\`\` + +## Browser Testing + +[Content about Playwright, screenshots, VNC...] + +## Container Spawning + +[Content about Docker, presets, resource limits...] +``` + +## User Flow + +```bash +# Option A: Global install (if prpm supports it) +prpm install --global @agent-relay/workspace-pack + +# Option B: User directory workaround +cd ~/.agent-relay +prpm install @agent-relay/workspace-pack + +# Option C: Cloud workspace (automatic) +# Skills pre-installed, just use them +``` + +## Why PRPM Over Custom + +| Custom System | PRPM | +|--------------|------| +| Build registry | ✅ Already exists | +| Build CLI | ✅ Already exists | +| Build lockfile | ✅ Already exists | +| Version management | ✅ Already exists | +| Format conversion | ✅ Already exists | + +**PRPM gives us distribution for free. We just publish packages.** + +## Open Questions for PRPM + +1. **Global installs** - `prpm install --global`? +2. **Multiple lockfile locations** - project + user? +3. **Conditional activation** - `activationCondition` field? + +May need to contribute these features or work around them. 
+ +## References + +- `prpm.lock` - Current installed packages +- `.claude/skills/prpm-json-best-practices-skill/` - How to create packages +- `docs/tasks/workspace-capabilities.tasks.md` - Runtime capability discovery +- Implementation: `src/daemon/services/browser-testing.ts`, `container-spawner.ts` diff --git a/docs/tasks/webhook-integrations.tasks.md b/docs/tasks/webhook-integrations.tasks.md new file mode 100644 index 00000000..ff6177fc --- /dev/null +++ b/docs/tasks/webhook-integrations.tasks.md @@ -0,0 +1,184 @@ +# Webhook Integrations - Task Breakdown + +Tasks for future iterations of the webhook and integrations system. +Convert to beads tasks with: `bd import docs/tasks/webhook-integrations.tasks.md` + +## Phase 1: Linear Full Integration [priority: high] + +### linear-outbound-comments +- [ ] Create LinearIntegration service class +- [ ] Implement createComment() with Linear GraphQL API +- [ ] Add Linear API key management to workspace settings +- [ ] Add encryption for stored API keys + +Dependencies: none +Estimate: 2 story points + +### linear-outbound-state +- [ ] Implement setIssueState() for state transitions +- [ ] Implement getAvailableStates() to fetch team states +- [ ] Add state ID caching with TTL + +Dependencies: linear-outbound-comments +Estimate: 1 story point + +### linear-outbound-issues +- [ ] Implement createIssue() with full CreateIssueInput +- [ ] Implement updateIssue() for editing +- [ ] Implement assignIssue() for assignment changes +- [ ] Add label operations (add/remove) + +Dependencies: linear-outbound-state +Estimate: 3 story points + +### linear-webhook-state-change +- [ ] Parse issue state change webhooks +- [ ] Add `issue_state_changed` event type +- [ ] Create rule for auto-responding to state changes + +Dependencies: none +Estimate: 1 story point + +### linear-webhook-due-dates +- [ ] Parse due date approaching events +- [ ] Add `issue_due_soon` event type with configurable threshold +- [ ] Create reminder rule for 
approaching due dates + +Dependencies: linear-webhook-state-change +Estimate: 1 story point + +## Phase 2: Slack Enhanced [priority: high] + +### slack-slash-commands +- [ ] Create slash command handler endpoint +- [ ] Parse slash command payloads +- [ ] Add slash_command event type +- [ ] Create agent spawning from slash commands + +Dependencies: none +Estimate: 2 story points + +### slack-interactive-components +- [ ] Handle button click callbacks +- [ ] Handle modal submission callbacks +- [ ] Add interactive_message event type +- [ ] Implement openModal() and updateModal() + +Dependencies: slack-slash-commands +Estimate: 3 story points + +### slack-rich-messages +- [ ] Implement postBlocks() with Block Kit +- [ ] Add common block templates (code, error, success) +- [ ] Add file upload support +- [ ] Add scheduled message support + +Dependencies: none +Estimate: 2 story points + +## Phase 3: GitHub Enhanced [priority: high] + +### github-pr-management +- [ ] Parse PR opened/updated webhooks +- [ ] Add pr_opened, pr_updated event types +- [ ] Implement PR review request parsing +- [ ] Add createPR() outbound action + +Dependencies: none +Estimate: 3 story points + +### github-check-runs +- [ ] Implement createCheckRun() for CI status +- [ ] Implement updateCheckRun() for progress +- [ ] Add annotations support for inline errors +- [ ] Parse deployment status webhooks + +Dependencies: github-pr-management +Estimate: 2 story points + +### github-issue-management +- [ ] Implement createIssue() +- [ ] Implement addLabels() and removeLabels() +- [ ] Implement assignUsers() +- [ ] Add issue linking support + +Dependencies: none +Estimate: 2 story points + +## Phase 4: Agent Tools [priority: medium] + +### agent-integration-tools +- [ ] Create integration tools accessible to agents +- [ ] Add LinearTool for agent actions +- [ ] Add SlackTool for agent messages +- [ ] Add GitHubTool for agent operations + +Dependencies: linear-outbound-issues, slack-rich-messages, 
github-issue-management +Estimate: 4 story points + +### agent-permissions +- [ ] Implement AgentIntegrationPermissions type +- [ ] Add permission checking before actions +- [ ] Create permission UI in spawn modal +- [ ] Add audit logging for all external calls + +Dependencies: agent-integration-tools +Estimate: 2 story points + +## Phase 5: Additional Integrations [priority: low] + +### jira-integration +- [ ] Create Jira webhook parser +- [ ] Implement JiraIntegration service +- [ ] Add Jira responder +- [ ] Add workspace settings for Jira + +Dependencies: agent-integration-tools +Estimate: 4 story points + +### gitlab-integration +- [ ] Create GitLab webhook parser +- [ ] Implement GitLabIntegration service +- [ ] Add GitLab responder +- [ ] Map GitLab events to normalized format + +Dependencies: agent-integration-tools +Estimate: 3 story points + +### discord-integration +- [ ] Create Discord webhook parser +- [ ] Implement DiscordIntegration service +- [ ] Add Discord responder +- [ ] Handle Discord-specific message formatting + +Dependencies: agent-integration-tools +Estimate: 3 story points + +## Testing & Infrastructure + +### webhook-load-testing +- [ ] Create load test suite for webhook endpoint +- [ ] Measure p50/p95/p99 latencies +- [ ] Test concurrent webhook handling +- [ ] Add rate limiting if needed + +Dependencies: none +Estimate: 2 story points + +### integration-mocks +- [ ] Create mock Linear API server for tests +- [ ] Create mock Slack API server for tests +- [ ] Create mock GitHub API server for tests +- [ ] Add E2E test suite with mocks + +Dependencies: none +Estimate: 3 story points + +### sandbox-testing +- [ ] Set up Linear sandbox workspace +- [ ] Set up Slack test workspace +- [ ] Set up GitHub test repository +- [ ] Create E2E test suite with real APIs + +Dependencies: integration-mocks +Estimate: 2 story points diff --git a/docs/tasks/workspace-capabilities.tasks.md b/docs/tasks/workspace-capabilities.tasks.md new file mode 100644 
index 00000000..abc19381 --- /dev/null +++ b/docs/tasks/workspace-capabilities.tasks.md @@ -0,0 +1,121 @@ +# Workspace Capabilities - Agent Discovery + +How should agents discover and use workspace capabilities (browser testing, container spawning, etc.)? + +## Problem Statement + +We have workspace capabilities: +- Browser testing (Playwright, Xvfb, VNC) +- Container spawning (Docker socket) +- Potentially more in the future (E2B, Browserbase) + +**Challenge:** How do agents know these exist without bloating context for every agent? + +Current implementations exist but are not wired up: +- `src/daemon/services/browser-testing.ts` +- `src/daemon/services/container-spawner.ts` +- `deploy/workspace/Dockerfile.browser` + +## Key Questions + +### 1. Static vs Dynamic Discovery +- [ ] Should capabilities be in rules/skills (static, always injected)? +- [ ] Should capabilities be discovered via MCP at runtime (dynamic)? +- [ ] Hybrid: minimal hint in rules, full discovery via MCP? + +### 2. Cloud vs Local +- [ ] Cloud workspaces: How are capabilities configured per workspace? +- [ ] Local daemons: How does the daemon know what's available? +- [ ] Should there be a "capability manifest" per workspace? + +### 3. Context Budget +- [ ] How much context is acceptable for capability hints? +- [ ] Should agents ask for capabilities only when needed? +- [ ] Can we use tool descriptions instead of injected prompts? + +### 4. Opt-in vs Opt-out +- [ ] Should capabilities be enabled by default? +- [ ] Per-workspace configuration? +- [ ] Per-agent configuration? + +## Design Options + +### Option A: MCP-Only Discovery +Agents call `workspace_capabilities` tool to discover what's available. +No static context injection. 
+ +**Pros:** Zero context overhead, dynamic +**Cons:** Agents might not know to call it + +### Option B: Minimal Hint + MCP +One line in system prompt: "Call workspace_capabilities to check for browser/container tools" + +**Pros:** Tiny context, agents know to look +**Cons:** Still some static injection + +### Option C: Workspace Manifest +Each workspace has a capabilities.json that configures what's available. +Cloud provisions this, agents read at startup. + +**Pros:** Explicit configuration +**Cons:** More infrastructure + +### Option D: Auto-Detection +MCP server auto-detects capabilities (checks DISPLAY, docker.sock) and only exposes available tools. + +**Pros:** Zero configuration, just works +**Cons:** Magic behavior + +## Tasks + +### capability-discovery-design +- [ ] Decide on discovery mechanism +- [ ] Document decision rationale +- [ ] Create ADR (Architecture Decision Record) + +Dependencies: none +Priority: high + +### capability-manifest-schema +- [ ] Define WorkspaceCapabilities schema +- [ ] Define how cloud provisions capabilities +- [ ] Define how daemon reads capabilities + +Dependencies: capability-discovery-design +Priority: medium + +### mcp-capability-tools +- [ ] Create MCP server for workspace tools +- [ ] Only expose tools for available capabilities +- [ ] Add workspace_capabilities discovery tool + +Dependencies: capability-manifest-schema +Priority: medium + +### agent-prompting-strategy +- [ ] Determine minimal context for capability awareness +- [ ] Test with real agents +- [ ] Measure context overhead + +Dependencies: capability-discovery-design +Priority: medium + +### cloud-workspace-config +- [ ] Add capabilities to workspace provisioning +- [ ] UI for enabling/disabling capabilities +- [ ] Per-workspace capability billing (if applicable) + +Dependencies: capability-manifest-schema +Priority: low + +## Notes + +The core services are already implemented: +- Browser testing: `src/daemon/services/browser-testing.ts` +- Container 
spawning: `src/daemon/services/container-spawner.ts` +- Browser Dockerfile: `deploy/workspace/Dockerfile.browser` + +What's missing is the discovery/awareness layer that doesn't bloat context. + +See also: +- `docs/design/e2b-sandbox-integration.md` - E2B as alternative backend diff --git a/package-lock.json b/package-lock.json index 50a6e030..f16895c4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -170,6 +170,7 @@ }, "node_modules/@clack/prompts/node_modules/is-unicode-supported": { "version": "1.3.0", + "extraneous": true, "inBundle": true, "license": "MIT", "engines": { diff --git a/package.json b/package.json index ad6ee446..0ccf8897 100644 --- a/package.json +++ b/package.json @@ -55,9 +55,10 @@ "services:up": "docker compose -f docker-compose.dev.yml up -d postgres redis && echo '✓ Postgres and Redis running'", "services:down": "docker compose -f docker-compose.dev.yml down", "services:logs": "docker compose -f docker-compose.dev.yml logs -f postgres redis", - "cloud:api": "node dist/cloud/index.js", - "precloud": "npm run clean && tsc && chmod +x dist/cli/index.js && npm run services:up", - "cloud": "concurrently -n api,daemon,dashboard -c cyan,blue,magenta \"npm run cloud:api\" \"npm run dev:daemon\" \"npm run dev:dashboard\"" + "cloud:setup": "./scripts/cloud-setup.sh", + "cloud:api": "WORKSPACE_IMAGE=relay-workspace:local WORKSPACE_DEV_MOUNT=true node -r dotenv/config dist/cloud/index.js", + "precloud": "./scripts/cloud-setup.sh --skip-data", + "cloud": "concurrently -n api,daemon,dashboard -c cyan,blue,magenta \"npm run cloud:api\" \"npm run dev:daemon\" \"npm run dev:next\"" }, "keywords": [ "agent", @@ -73,12 +74,12 @@ "license": "MIT", "repository": { "type": "git", - "url": "git+https://github.com/khaliqgant/agent-relay.git" + "url": "git+https://github.com/AgentWorkforce/relay.git" }, "bugs": { - "url": "https://github.com/khaliqgant/agent-relay/issues" + "url": "https://github.com/AgentWorkforce/relay/issues" }, - "homepage": 
"https://github.com/khaliqgant/agent-relay#readme", + "homepage": "https://github.com/AgentWorkforce/relay#readme", "dependencies": { "@nangohq/node": "^0.69.20", "@types/jsonwebtoken": "^9.0.10", diff --git a/prpm.json b/prpm.json index e624d1c9..16989848 100644 --- a/prpm.json +++ b/prpm.json @@ -94,6 +94,29 @@ ".claude/agents/shadow-auditor.md", ".claude/agents/shadow-active.md" ] + }, + { + "name": "agent-policy-snippet", + "version": "1.0.0", + "description": "Agent policy rules snippet - informs agents of spawn, messaging, and tool restrictions", + "format": "generic", + "subtype": "snippet", + "snippet": { + "target": "AGENTS.md", + "position": "append", + "header": "Agent Policy" + }, + "tags": [ + "policy", + "rules", + "permissions", + "authorization", + "security", + "multi-agent" + ], + "files": [ + "docs/agent-policy-snippet.md" + ] } ] } diff --git a/scripts/cloud-setup.sh b/scripts/cloud-setup.sh new file mode 100755 index 00000000..47228b6c --- /dev/null +++ b/scripts/cloud-setup.sh @@ -0,0 +1,96 @@ +#!/bin/bash +# Cloud local development setup script +# Usage: ./scripts/cloud-setup.sh [--skip-docker] [--skip-migrate] [--skip-data] + +set -e + +SKIP_DOCKER=false +SKIP_MIGRATE=false +SKIP_DATA=false + +# Parse arguments +for arg in "$@"; do + case $arg in + --skip-docker) SKIP_DOCKER=true ;; + --skip-migrate) SKIP_MIGRATE=true ;; + --skip-data) SKIP_DATA=true ;; + esac +done + +echo "🚀 Setting up Agent Relay Cloud (local dev)" +echo "" + +# Step 1: Start Docker services +if [ "$SKIP_DOCKER" = false ]; then + echo "📦 Starting Docker services (Postgres + Redis)..." + docker compose -f docker-compose.dev.yml up -d postgres redis + + # Wait for Postgres to be ready + echo "⏳ Waiting for Postgres to be ready..." 
+ until docker compose -f docker-compose.dev.yml exec -T postgres pg_isready -U postgres > /dev/null 2>&1; do + sleep 1 + done + echo "✓ Postgres is ready" +else + echo "⏭️ Skipping Docker setup" +fi + +# Step 2: Build TypeScript +echo "" +echo "🔨 Building TypeScript..." +npm run build > /dev/null 2>&1 +echo "✓ Build complete" + +# Step 3: Run migrations +if [ "$SKIP_MIGRATE" = false ]; then + echo "" + echo "📊 Running database migrations..." + npm run db:migrate 2>&1 | grep -E "(Applied|already applied|Error)" || true + echo "✓ Migrations complete" +else + echo "⏭️ Skipping migrations" +fi + +# Step 4: Set up test data (only if server is running) +if [ "$SKIP_DATA" = false ]; then + echo "" + echo "🧪 Setting up test data..." + + # Check if cloud server is running, if not start it temporarily + if ! curl -s http://localhost:4567/api/health > /dev/null 2>&1; then + echo " Starting cloud server temporarily for setup..." + node dist/cloud/index.js & + SERVER_PID=$! + sleep 3 + STARTED_SERVER=true + fi + + # Create test data + RESPONSE=$(curl -s -X POST http://localhost:4567/api/test/setup-local-cloud \ + -H "Content-Type: application/json" \ + -c /tmp/relay-cookies.txt \ + -d '{"repoName": "test-org/test-repo", "workspaceName": "Local Dev"}' 2>&1 || echo '{"error": "Failed to connect"}') + + if echo "$RESPONSE" | grep -q '"success":true'; then + echo "✓ Test data created" + echo " Cookie saved to /tmp/relay-cookies.txt" + else + echo "⚠️ Could not create test data (server may need to be running)" + fi + + # Stop temp server if we started it + if [ "$STARTED_SERVER" = true ]; then + kill $SERVER_PID 2>/dev/null || true + fi +else + echo "⏭️ Skipping test data setup" +fi + +echo "" +echo "✅ Setup complete!" 
+echo "" +echo "To start the cloud server:" +echo " npm run cloud" +echo "" +echo "Then open: http://localhost:4567/app" +echo "" diff --git a/scripts/test-cli-auth/Dockerfile b/scripts/test-cli-auth/Dockerfile new file mode 100644 index 00000000..021a1186 --- /dev/null +++ b/scripts/test-cli-auth/Dockerfile @@ -0,0 +1,44 @@ +# CLI OAuth Flow Test Container +# +# This container simulates the AI provider CLIs for testing +# the OAuth URL capture flow without actual provider accounts. +# +# Usage: +# docker build -t cli-oauth-test scripts/test-cli-auth/ +# docker run --rm cli-oauth-test +# +# For interactive testing: +# docker run --rm -it cli-oauth-test bash +# claude # Run mock Claude CLI +# codex login # Run mock Codex CLI + +FROM node:20-slim + +# Install dependencies +RUN apt-get update && apt-get install -y \ + bash \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Copy mock CLI script +COPY mock-cli.sh /usr/local/bin/mock-cli +RUN chmod +x /usr/local/bin/mock-cli + +# Create symlinks for each provider CLI +# The mock-cli.sh auto-detects the provider from $0 +RUN ln -s /usr/local/bin/mock-cli /usr/local/bin/claude && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/codex && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/gemini && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/opencode && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/droid + +# Copy test files +COPY ci-test-runner.ts /app/ +COPY package.json /app/ + +# Install test dependencies +RUN npm install + +# Default command runs the CI tests +CMD ["npx", "tsx", "/app/ci-test-runner.ts"] diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real new file mode 100644 index 00000000..4372c6a6 --- /dev/null +++ b/scripts/test-cli-auth/Dockerfile.real @@ -0,0 +1,79 @@ +# CLI OAuth Flow Test Container - Real CLIs +# +# This container installs the actual AI provider CLIs and tests +# URL extraction from their OAuth flows. 
+# +# Installation methods match deploy/workspace/Dockerfile to ensure consistency. +# +# Usage: +# docker build -f Dockerfile.real -t cli-oauth-test-real scripts/test-cli-auth/ +# docker run --rm cli-oauth-test-real +# +# For interactive testing: +# docker run --rm -it cli-oauth-test-real bash + +FROM node:20-slim + +# Install system dependencies (matches deploy/workspace/Dockerfile) +RUN apt-get update && apt-get install -y \ + bash \ + ca-certificates \ + curl \ + git \ + python3 \ + make \ + g++ \ + jq \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Install npm-based CLIs globally as root (npm -g requires root) +RUN npm install -g @openai/codex || echo "Codex install failed" +RUN npm install -g @google/gemini-cli || echo "Gemini install failed" +RUN npm install -g opencode-ai@latest || echo "OpenCode install failed" + +# Create test user (CLIs install to ~/.local/bin) +RUN useradd -m -u 1001 testuser +RUN chown -R testuser:testuser /app +USER testuser + +# Install AI CLIs as testuser (these install scripts write to ~/.local/bin) + +# Claude - uses official install script +RUN curl -fsSL https://claude.ai/install.sh | bash || echo "Claude install failed" +# Note: We don't pre-seed Claude config - we want to test the full interactive flow +# including the dark mode and auth method prompts + +# Note: OpenCode is installed as root above via npm + +# Droid - uses official install script +RUN curl -fsSL https://app.factory.ai/cli | sh || echo "Droid install failed" + +# Add user's local bin to PATH +ENV PATH="/home/testuser/.local/bin:$PATH" + +# Copy test files and source dependencies +# Context is repo root, so paths are relative to that +COPY --chown=testuser:testuser scripts/test-cli-auth/ci-test-real-clis.ts /app/ +COPY --chown=testuser:testuser scripts/test-cli-auth/package.json /app/ + +# Copy source modules maintaining the relative path structure +# cli-pty-runner.ts imports from '../../shared/cli-auth-config.js' +# So we need: 
/app/src/cloud/api/cli-pty-runner.ts -> /app/src/shared/cli-auth-config.ts +COPY --chown=testuser:testuser src/cloud/api/cli-pty-runner.ts /app/src/cloud/api/ +COPY --chown=testuser:testuser src/shared/cli-auth-config.ts /app/src/shared/ + +# Install test dependencies +RUN npm install + +# Verify which CLIs are installed +RUN echo "=== Installed CLIs ===" && \ + (which claude && claude --version 2>&1 | head -1) || echo "claude: not found" && \ + (which codex && codex --version 2>&1 | head -1) || echo "codex: not found" && \ + (which gemini && gemini --version 2>&1 | head -1) || echo "gemini: not found" && \ + (which opencode && opencode --version 2>&1 | head -1) || echo "opencode: not found" && \ + (which droid && droid --version 2>&1 | head -1) || echo "droid: not found" + +# Default command runs the CI tests +CMD ["npx", "tsx", "/app/ci-test-real-clis.ts"] diff --git a/scripts/test-cli-auth/README.md b/scripts/test-cli-auth/README.md new file mode 100644 index 00000000..4323d370 --- /dev/null +++ b/scripts/test-cli-auth/README.md @@ -0,0 +1,286 @@ +# CLI OAuth Flow Testing + +This directory contains tools for testing and validating the CLI-based OAuth authentication flow for AI providers. 
+ +## Quick Start + +```bash +# Make mock CLI executable +chmod +x scripts/test-cli-auth/mock-cli.sh + +# Run all integration tests +npx tsx scripts/test-cli-auth/test-oauth-flow.ts + +# Test a specific provider +npx tsx scripts/test-cli-auth/test-oauth-flow.ts anthropic +``` + +## Architecture + +The CLI OAuth flow works as follows: + +``` +┌─────────────────┐ ┌──────────────┐ ┌─────────────────┐ +│ Dashboard UI │─────▶│ Onboarding │─────▶│ CLI via PTY │ +│ (React) │ │ API │ │ (node-pty) │ +└─────────────────┘ └──────────────┘ └─────────────────┘ + ▲ │ │ + │ │ ▼ + │ │ ┌─────────────────┐ + │ │ │ Interactive │ + │ │◀──────────────│ Prompts │ + │ │ auto-respond └─────────────────┘ + │ │ │ + │ │ ▼ + │ │ ┌─────────────────┐ + │ │ │ Auth URL │ + │◀─────────────────────┼───────────────│ Output │ + │ (opens popup) │ └─────────────────┘ + │ │ │ + ▼ │ ▼ +┌─────────────────┐ │ ┌─────────────────┐ +│ OAuth Popup │ │ │ Success │ +│ (Browser) │────────────┼──────────────▶│ Detection │ +└─────────────────┘ │ └─────────────────┘ +``` + +## Adding a New Provider + +### 1. Define the CLI Configuration + +Add a new entry to `CLI_AUTH_CONFIG` in `src/cloud/api/onboarding.ts`: + +```typescript +export const CLI_AUTH_CONFIG: Record = { + // ... 
existing providers + + newprovider: { + // Required: CLI command to run + command: 'newcli', + + // Required: Command arguments + args: ['auth', 'login'], + + // Required: Pattern to extract auth URL (must have capture group) + urlPattern: /(https:\/\/[^\s]+)/, + + // Optional: Path to credentials file after auth + credentialPath: '~/.newcli/credentials.json', + + // Required: Display name for UI + displayName: 'NewProvider', + + // Required: How long to wait for URL (ms) + waitTimeout: 3000, + + // Required: Interactive prompts to auto-respond to + prompts: [ + { + pattern: /do you trust this/i, + response: 'y\r', + delay: 100, + description: 'Trust prompt', + }, + ], + + // Required: Success indicators + successPatterns: [ + /success/i, + /authenticated/i, + ], + }, +}; +``` + +### 2. Add Mock CLI Behavior + +Update `scripts/test-cli-auth/mock-cli.sh` with the new provider's interactive flow: + +```bash +newprovider) + echo -e "${BLUE}NewProvider CLI${NC}" + sleep "$DELAY" + + echo -e "Do you trust this directory? [y/N] " + read -r -n 1 response 2>/dev/null || true + echo "" + + echo -e "Auth URL:" + echo -e "${GREEN}https://newprovider.com/auth?session=test${NC}" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authenticated!${NC}" + ;; +``` + +### 3. 
Add Unit Tests + +Add tests to `src/cloud/api/onboarding.test.ts`: + +```typescript +describe('newprovider', () => { + const config = CLI_AUTH_CONFIG.newprovider; + + it('has correct command and args', () => { + expect(config.command).toBe('newcli'); + expect(config.args).toEqual(['auth', 'login']); + }); + + it('extracts auth URL from output', () => { + const output = 'Visit https://newprovider.com/auth?id=xxx'; + const match = output.match(config.urlPattern); + expect(match![1]).toContain('https://newprovider.com/auth'); + }); + + // Test each prompt handler + describe('prompt handlers', () => { + it('detects trust prompt', () => { + const prompt = findMatchingPrompt( + 'Do you trust this directory?', + config.prompts, + new Set() + ); + expect(prompt!.description).toBe('Trust prompt'); + }); + }); +}); +``` + +### 4. Run Tests + +```bash +# Unit tests +npx vitest run src/cloud/api/onboarding.test.ts + +# Integration tests +npx tsx scripts/test-cli-auth/test-oauth-flow.ts newprovider +``` + +## Provider Checklist + +When adding or modifying a provider, ensure: + +- [ ] `command` is the correct CLI binary name +- [ ] `args` includes any required subcommands (e.g., `['login']`) +- [ ] `urlPattern` has a capture group `(...)` around the URL +- [ ] `waitTimeout` is long enough for multi-step prompts +- [ ] All interactive prompts are handled in `prompts` array +- [ ] Each prompt has a unique `description` for deduplication +- [ ] `successPatterns` cover all success messages the CLI outputs +- [ ] Mock CLI simulates the real CLI's behavior accurately +- [ ] Unit tests cover URL extraction and prompt detection +- [ ] Integration test passes + +## Testing with Real CLIs + +For testing with actual CLIs (not mocks), you can: + +1. **Docker Container Test**: Spin up a container without credentials: + ```bash + docker run -it --rm node:20 bash + npm install -g @anthropic-ai/claude-code + # Run the onboarding flow + ``` + +2. 
**Fresh VM**: Use a cloud VM with no cached credentials + +3. **Delete Credentials**: Remove local credential files: + ```bash + rm -rf ~/.claude + rm -rf ~/.codex + # etc. + ``` + +## Troubleshooting + +### URL Not Extracted +- Check if the CLI outputs the URL in expected format +- Verify the `urlPattern` regex matches the output +- Increase `waitTimeout` if prompts take longer + +### Prompts Not Detected +- Run mock CLI manually to see exact prompt text +- Check regex patterns are case-insensitive (`/i` flag) +- Ensure ANSI codes are being stripped before matching + +### Success Not Detected +- Verify CLI outputs one of the success patterns +- Check for typos in pattern (e.g., `logged in` vs `loggedin`) +- Add new patterns if CLI uses different success messages + +## CI Integration + +### GitHub Actions + +The workflow `.github/workflows/cli-oauth-test.yml` runs: + +1. **On every push/PR** that modifies: + - `src/cloud/api/onboarding.ts` + - `scripts/test-cli-auth/**` + +2. **Weekly schedule** (Sundays at midnight): + - Catches provider CLI changes early + - Auto-creates GitHub issues on failure + +### Running CI Tests Locally + +```bash +# Build the test container with REAL CLIs (recommended) +docker build -f scripts/test-cli-auth/Dockerfile.real \ + -t cli-oauth-test-real scripts/test-cli-auth/ + +# Run tests against real CLIs +docker run --rm cli-oauth-test-real + +# Run with results output +docker run --rm -v $(pwd)/test-results:/tmp cli-oauth-test-real +cat test-results/cli-oauth-test-results.json + +# Interactive debugging +docker run --rm -it cli-oauth-test-real bash +claude # Test Claude CLI manually +``` + +### Why Real CLIs? 
+ +Using the actual CLIs instead of mocks: +- **Catches real changes** in CLI behavior immediately +- **No maintenance burden** of keeping mocks in sync +- **Tests the actual code path** users will experience +- **Detects new prompts** or changed output formats + +CLIs that aren't installed are skipped (not failed), so tests work even if some providers haven't published CLIs yet. + +### Test Output Format + +```json +{ + "timestamp": "2024-01-15T10:30:00.000Z", + "results": [ + { + "provider": "anthropic", + "command": "claude", + "passed": true, + "urlExtracted": "https://console.anthropic.com/oauth/...", + "urlValid": true, + "promptsHandled": 3, + "exitCode": 0, + "duration": 1234 + } + ], + "summary": { + "total": 5, + "passed": 5, + "failed": 0 + } +} +``` + +## Files + +- `mock-cli.sh` - Simulates CLI interactive flows for testing +- `ci-test-runner.ts` - Docker-based CI test runner +- `test-oauth-flow.ts` - Local integration test runner +- `Dockerfile` - Test container definition +- `package.json` - Test dependencies +- `README.md` - This documentation diff --git a/scripts/test-cli-auth/ci-test-real-clis.ts b/scripts/test-cli-auth/ci-test-real-clis.ts new file mode 100644 index 00000000..e4694125 --- /dev/null +++ b/scripts/test-cli-auth/ci-test-real-clis.ts @@ -0,0 +1,251 @@ +#!/usr/bin/env npx tsx +/** + * CI Test Runner for Real CLI OAuth Flows + * + * Tests the actual CLI tools to verify: + * 1. URL extraction patterns work with real CLI output + * 2. Prompt detection works with real prompts + * 3. Auto-responses navigate through the flow correctly + * + * Exit codes: + * 0 - All available CLIs passed + * 1 - One or more tests failed + * + * Note: CLIs that aren't installed are skipped, not failed. + * + * IMPORTANT: This test uses the same runCLIAuthViaPTY function as production + * to ensure the PTY handling logic is consistent. 
+ */ + +import { execSync } from 'child_process'; +import { writeFileSync } from 'fs'; + +// Import the actual config and PTY runner from cli-pty-runner.ts +// This ensures tests use the EXACT SAME logic as production +// In Docker: /app/src/cloud/api/cli-pty-runner.ts (maintains source tree structure) +import { + CLI_AUTH_CONFIG, + runCLIAuthViaPTY, + type PTYAuthResult, +} from './src/cloud/api/cli-pty-runner.js'; + +interface TestResult { + provider: string; + command: string; + installed: boolean; + passed: boolean; + skipped: boolean; + urlExtracted: string | null; + promptsDetected: string[]; + promptsResponded: string[]; + successDetected: boolean; + exitCode: number | null; + duration: number; + rawOutput: string; + error?: string; +} + +/** + * Check if a CLI is installed + */ +function isCliInstalled(command: string): boolean { + try { + execSync(`which ${command}`, { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * Test a real CLI's OAuth flow using the shared PTY runner + * + * Uses the EXACT SAME runCLIAuthViaPTY function as production to ensure + * the PTY handling logic is consistent between tests and production. 
+ */ +async function testRealCli(providerId: string): Promise { + const config = CLI_AUTH_CONFIG[providerId]; + if (!config) { + return { + provider: providerId, + command: 'unknown', + installed: false, + passed: false, + skipped: true, + urlExtracted: null, + promptsDetected: [], + promptsResponded: [], + successDetected: false, + exitCode: null, + duration: 0, + rawOutput: '', + error: `Unknown provider: ${providerId}`, + }; + } + + const result: TestResult = { + provider: providerId, + command: `${config.command} ${config.args.join(' ')}`.trim(), + installed: isCliInstalled(config.command), + passed: false, + skipped: false, + urlExtracted: null, + promptsDetected: [], + promptsResponded: [], + successDetected: false, + exitCode: null, + duration: 0, + rawOutput: '', + }; + + // Skip if CLI not installed + if (!result.installed) { + result.skipped = true; + result.error = `CLI '${config.command}' not installed`; + return result; + } + + const startTime = Date.now(); + + // Use the shared PTY runner - SAME code as production + const ptyResult: PTYAuthResult = await runCLIAuthViaPTY(config, { + onAuthUrl: (url) => { + result.urlExtracted = url; + console.log(` [${providerId}] URL found: ${url.substring(0, 60)}...`); + }, + onPromptHandled: (description) => { + result.promptsDetected.push(description); + result.promptsResponded.push(description); + console.log(` [${providerId}] Responded to: ${description}`); + }, + onOutput: (data) => { + result.rawOutput += data; + }, + }); + + result.duration = Date.now() - startTime; + result.exitCode = ptyResult.exitCode; + result.successDetected = ptyResult.success; + + // Pass if we got a URL (main goal of OAuth flow) + result.passed = !!result.urlExtracted; + + if (!result.passed) { + result.error = ptyResult.error || 'Failed to extract auth URL from CLI output'; + } + + return result; +} + +/** + * Run tests for all configured providers + */ +async function runAllTests() { + 
console.log('╔══════════════════════════════════════════════════════════════╗'); + console.log('║ CLI OAuth Flow Tests - Real CLIs ║'); + console.log('╚══════════════════════════════════════════════════════════════╝'); + console.log(''); + + const results: TestResult[] = []; + const providerIds = Object.keys(CLI_AUTH_CONFIG); + + for (const providerId of providerIds) { + const config = CLI_AUTH_CONFIG[providerId]; + console.log(`Testing ${config.displayName} (${providerId})...`); + + const result = await testRealCli(providerId); + results.push(result); + + if (result.skipped) { + console.log(` ⏭️ SKIPPED: ${result.error}`); + } else if (result.passed) { + console.log(` ✅ PASSED`); + } else { + console.log(` ❌ FAILED: ${result.error}`); + } + + console.log(` Installed: ${result.installed ? 'Yes' : 'No'}`); + if (!result.skipped) { + console.log(` URL: ${result.urlExtracted ? 'Extracted' : 'NOT FOUND'}`); + console.log(` Prompts: ${result.promptsResponded.length}/${config.prompts.length} handled`); + console.log(` Duration: ${result.duration}ms`); + } + console.log(''); + } + + // Summary + const installed = results.filter(r => r.installed); + const skipped = results.filter(r => r.skipped); + const passed = results.filter(r => r.passed); + const failed = results.filter(r => !r.passed && !r.skipped); + + console.log('═══════════════════════════════════════════════════════════════'); + console.log(`Summary:`); + console.log(` Installed: ${installed.length}/${results.length}`); + console.log(` Passed: ${passed.length}`); + console.log(` Failed: ${failed.length}`); + console.log(` Skipped: ${skipped.length}`); + console.log('═══════════════════════════════════════════════════════════════'); + + // Write JSON results + const jsonResults = { + timestamp: new Date().toISOString(), + results: results.map(r => ({ + provider: r.provider, + command: r.command, + installed: r.installed, + passed: r.passed, + skipped: r.skipped, + urlExtracted: r.urlExtracted ? 
true : false, + urlSample: r.urlExtracted?.substring(0, 80), + promptsDetected: r.promptsDetected, + promptsResponded: r.promptsResponded, + exitCode: r.exitCode, + duration: r.duration, + error: r.error, + })), + summary: { + total: results.length, + installed: installed.length, + passed: passed.length, + failed: failed.length, + skipped: skipped.length, + }, + }; + + try { + writeFileSync('/tmp/cli-oauth-test-results.json', JSON.stringify(jsonResults, null, 2)); + console.log('\nResults written to /tmp/cli-oauth-test-results.json'); + } catch { + console.log('\n--- JSON Results ---'); + console.log(JSON.stringify(jsonResults, null, 2)); + } + + // Exit with failure only if an installed CLI failed + // Skipped CLIs don't count as failures + if (failed.length > 0) { + console.log('\nFailed CLIs:'); + for (const result of failed) { + console.log(` - ${result.provider}: ${result.error}`); + if (result.rawOutput) { + console.log(` Last 500 chars of output:`); + console.log(` ${result.rawOutput.slice(-500).replace(/\n/g, '\n ')}`); + } + } + process.exit(1); + } + + // Warn if no CLIs were tested + if (installed.length === 0) { + console.log('\n⚠️ WARNING: No CLIs were installed - no actual testing performed!'); + process.exit(0); // Don't fail, but warn + } + + console.log('\n✅ All installed CLIs passed!'); + process.exit(0); +} + +runAllTests().catch((err) => { + console.error('Test runner failed:', err); + process.exit(1); +}); diff --git a/scripts/test-cli-auth/ci-test-runner.ts b/scripts/test-cli-auth/ci-test-runner.ts new file mode 100644 index 00000000..9b9b3563 --- /dev/null +++ b/scripts/test-cli-auth/ci-test-runner.ts @@ -0,0 +1,263 @@ +#!/usr/bin/env npx tsx +/** + * CI Test Runner for CLI OAuth Flow + * + * This script runs in a Docker container and tests each provider's + * CLI OAuth flow to ensure URL extraction works correctly. 
+ * + * Exit codes: + * 0 - All tests passed + * 1 - One or more tests failed + * + * Output format (JSON): + * { "results": [...], "summary": { "passed": N, "failed": N } } + */ + +import * as pty from 'node-pty'; +import { writeFileSync } from 'fs'; + +// Provider configurations - must match CLI_AUTH_CONFIG in onboarding.ts +const PROVIDERS = { + anthropic: { + command: 'claude', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://console.anthropic.com', + prompts: [ + { pattern: /dark\s*(mode|theme)/i, response: '\r' }, + { pattern: /(subscription|api\s*key)/i, response: '\r' }, + { pattern: /trust/i, response: 'y\r' }, + ], + }, + openai: { + command: 'codex', + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://auth.openai.com', + prompts: [ + { pattern: /trust/i, response: 'y\r' }, + ], + }, + google: { + command: 'gemini', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://accounts.google.com', + prompts: [], + }, + opencode: { + command: 'opencode', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://opencode.ai', + prompts: [], + }, + droid: { + command: 'droid', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://factory.ai', + prompts: [], + }, +}; + +interface TestResult { + provider: string; + command: string; + passed: boolean; + urlExtracted: string | null; + urlValid: boolean; + promptsHandled: number; + exitCode: number | null; + duration: number; + output: string; + error?: string; +} + +function stripAnsi(text: string): string { + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +async function testProvider(providerId: string): Promise { + const config = PROVIDERS[providerId as keyof typeof PROVIDERS]; + if (!config) { + return { + provider: providerId, + command: 'unknown', + passed: false, + urlExtracted: null, + urlValid: false, + promptsHandled: 0, + 
exitCode: null, + duration: 0, + output: '', + error: `Unknown provider: ${providerId}`, + }; + } + + const startTime = Date.now(); + const result: TestResult = { + provider: providerId, + command: `${config.command} ${config.args.join(' ')}`.trim(), + passed: false, + urlExtracted: null, + urlValid: false, + promptsHandled: 0, + exitCode: null, + duration: 0, + output: '', + }; + + return new Promise((resolve) => { + const respondedPrompts = new Set(); + + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + env: { ...process.env, TERM: 'xterm-256color', NO_COLOR: '1' }, + }); + + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for CLI'; + result.duration = Date.now() - startTime; + resolve(result); + }, 15000); + + proc.onData((data: string) => { + result.output += data; + const cleanText = stripAnsi(data); + + // Check for prompts and respond + for (let i = 0; i < config.prompts.length; i++) { + if (respondedPrompts.has(i)) continue; + if (config.prompts[i].pattern.test(cleanText)) { + respondedPrompts.add(i); + result.promptsHandled++; + setTimeout(() => { + try { + proc.write(config.prompts[i].response); + } catch { + // Process may have exited + } + }, 100); + } + } + + // Check for URL + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.urlExtracted) { + result.urlExtracted = match[1]; + result.urlValid = result.urlExtracted.startsWith(config.expectedUrlPrefix); + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + result.exitCode = exitCode; + result.duration = Date.now() - startTime; + + // Determine pass/fail + result.passed = !!( + result.urlExtracted && + result.urlValid && + exitCode === 0 + ); + + resolve(result); + }); + + // Send signal to continue after prompts are done + setTimeout(() => { + try { + proc.write('\n'); + } catch { + // Ignore + } + }, 5000); + } catch (err) { + result.error = err 
instanceof Error ? err.message : 'Unknown error'; + result.duration = Date.now() - startTime; + resolve(result); + } + }); +} + +async function runAllTests() { + console.log('╔══════════════════════════════════════════════════════════════╗'); + console.log('║ CLI OAuth Flow CI Tests ║'); + console.log('╚══════════════════════════════════════════════════════════════╝'); + console.log(''); + + const results: TestResult[] = []; + + for (const providerId of Object.keys(PROVIDERS)) { + process.stdout.write(`Testing ${providerId}... `); + const result = await testProvider(providerId); + results.push(result); + + if (result.passed) { + console.log('✅ PASSED'); + } else { + console.log(`❌ FAILED${result.error ? `: ${result.error}` : ''}`); + } + + // Detailed output + console.log(` Command: ${result.command}`); + console.log(` URL: ${result.urlExtracted || 'NOT FOUND'}`); + console.log(` Valid: ${result.urlValid ? 'Yes' : 'No'}`); + console.log(` Prompts: ${result.promptsHandled}/${PROVIDERS[providerId as keyof typeof PROVIDERS].prompts.length}`); + console.log(` Exit: ${result.exitCode}`); + console.log(` Duration: ${result.duration}ms`); + console.log(''); + } + + // Summary + const passed = results.filter(r => r.passed).length; + const failed = results.filter(r => !r.passed).length; + + console.log('═══════════════════════════════════════════════════════════════'); + console.log(`Summary: ${passed} passed, ${failed} failed out of ${results.length} tests`); + console.log('═══════════════════════════════════════════════════════════════'); + + // Write JSON results for CI parsing + const jsonResults = { + timestamp: new Date().toISOString(), + results: results.map(r => ({ + provider: r.provider, + command: r.command, + passed: r.passed, + urlExtracted: r.urlExtracted, + urlValid: r.urlValid, + promptsHandled: r.promptsHandled, + exitCode: r.exitCode, + duration: r.duration, + error: r.error, + })), + summary: { + total: results.length, + passed, + failed, + }, + }; + + // 
Write to file for CI artifact + try { + writeFileSync('/tmp/cli-oauth-test-results.json', JSON.stringify(jsonResults, null, 2)); + console.log('\nResults written to /tmp/cli-oauth-test-results.json'); + } catch { + // Might not have write access, output to stdout instead + console.log('\n--- JSON Results ---'); + console.log(JSON.stringify(jsonResults, null, 2)); + } + + // Exit with appropriate code + process.exit(failed > 0 ? 1 : 0); +} + +runAllTests().catch((err) => { + console.error('Test runner failed:', err); + process.exit(1); +}); diff --git a/scripts/test-cli-auth/mock-cli.sh b/scripts/test-cli-auth/mock-cli.sh new file mode 100755 index 00000000..e8f5b58d --- /dev/null +++ b/scripts/test-cli-auth/mock-cli.sh @@ -0,0 +1,147 @@ +#!/bin/bash +# Mock CLI for testing OAuth flow prompt handling +# Usage: ./mock-cli.sh [delay] +# +# This script simulates the interactive prompts of various AI CLI tools +# for testing the onboarding OAuth flow without actual CLI binaries. +# +# When installed as symlinks (e.g., /usr/local/bin/claude -> mock-cli.sh), +# it auto-detects the provider from the command name. + +# Detect provider from how the script was called +SCRIPT_NAME=$(basename "$0") + +case "$SCRIPT_NAME" in + claude) PROVIDER="claude" ;; + codex) PROVIDER="codex" ;; + gemini) PROVIDER="gemini" ;; + opencode) PROVIDER="opencode" ;; + droid) PROVIDER="droid" ;; + mock-cli.sh|mock-cli|mock-cli-impl.sh) + PROVIDER="${1:-claude}" + shift 2>/dev/null || true + ;; + *) PROVIDER="${1:-claude}" ;; +esac + +# Handle 'login' subcommand for codex +if [ "$PROVIDER" = "codex" ] && [ "$1" = "login" ]; then + shift +fi + +DELAY="${1:-0.3}" + +# Colors for output +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +case "$PROVIDER" in + claude|anthropic) + echo -e "${BLUE}Claude Code CLI${NC}" + echo "" + sleep "$DELAY" + + # Dark mode prompt + echo -e "Would you like to use ${YELLOW}dark mode${NC}? 
(y/n) " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Login method selection prompt (matches real Claude CLI) + echo -e "Select login method:" + echo "" + echo -e " ❯ 1. Claude account with ${YELLOW}subscription${NC} · Pro, Max, Team, or Enterprise" + echo "" + echo -e " 2. Anthropic Console account · API usage billing" + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Trust directory prompt + echo -e "Do you ${YELLOW}trust this directory${NC}? [y/N] " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Auth URL + echo "" + echo -e "Please visit the following URL to authenticate:" + echo -e "${GREEN}https://console.anthropic.com/oauth/authorize?client_id=mock-test-123&state=abc${NC}" + echo "" + echo "Waiting for authentication..." + + # Wait for completion signal (or timeout) + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authentication successful!${NC}" + ;; + + codex|openai) + echo -e "${BLUE}Codex CLI${NC}" + echo "" + sleep "$DELAY" + + # Trust directory prompt + echo -e "Do you ${YELLOW}trust this workspace${NC}? [y/N] " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Auth URL + echo "" + echo -e "Open this URL to log in:" + echo -e "${GREEN}https://auth.openai.com/authorize?client_id=mock-test-456&state=def${NC}" + echo "" + echo "Waiting..." 
+ + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Logged in successfully${NC}" + ;; + + gemini|google) + echo -e "${BLUE}Gemini CLI${NC}" + echo "" + sleep "$DELAY" + + # Auth URL + echo -e "Authenticate at:" + echo -e "${GREEN}https://accounts.google.com/o/oauth2/v2/auth?client_id=mock-test-789${NC}" + echo "" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authenticated!${NC}" + ;; + + opencode) + echo -e "${BLUE}OpenCode CLI${NC}" + echo "" + sleep "$DELAY" + + echo -e "Login URL:" + echo -e "${GREEN}https://opencode.ai/auth?session=mock-session${NC}" + echo "" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Success${NC}" + ;; + + droid) + echo -e "${BLUE}Droid CLI${NC}" + echo "" + sleep "$DELAY" + + echo -e "Visit to authenticate:" + echo -e "${GREEN}https://factory.ai/droid/auth?id=mock-droid${NC}" + echo "" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authenticated${NC}" + ;; + + *) + echo "Unknown provider: $PROVIDER" + echo "Supported: claude, codex, gemini, opencode, droid" + exit 1 + ;; +esac diff --git a/scripts/test-cli-auth/package.json b/scripts/test-cli-auth/package.json new file mode 100644 index 00000000..7d5296d2 --- /dev/null +++ b/scripts/test-cli-auth/package.json @@ -0,0 +1,14 @@ +{ + "name": "cli-oauth-test", + "version": "1.0.0", + "description": "CLI OAuth flow testing for agent-relay", + "type": "module", + "scripts": { + "test": "tsx ci-test-runner.ts", + "test:local": "tsx test-oauth-flow.ts" + }, + "dependencies": { + "node-pty": "^1.0.0", + "tsx": "^4.19.0" + } +} diff --git a/scripts/test-cli-auth/test-oauth-flow.ts b/scripts/test-cli-auth/test-oauth-flow.ts new file mode 100644 index 00000000..e98536f5 --- /dev/null +++ b/scripts/test-cli-auth/test-oauth-flow.ts @@ -0,0 +1,220 @@ +#!/usr/bin/env npx tsx +/** + * CLI OAuth Flow Integration Test + * + * Tests the prompt handling and URL extraction for each provider + * using mock CLIs that simulate the real interactive flows. 
+ * + * Usage: + * npx tsx scripts/test-cli-auth/test-oauth-flow.ts [provider] + * + * Examples: + * npx tsx scripts/test-cli-auth/test-oauth-flow.ts # Test all providers + * npx tsx scripts/test-cli-auth/test-oauth-flow.ts claude # Test Claude only + */ + +import * as pty from 'node-pty'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, +} from '../../src/cloud/api/onboarding.js'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +interface TestResult { + provider: string; + passed: boolean; + urlExtracted: string | null; + promptsResponded: string[]; + successDetected: boolean; + output: string; + error?: string; +} + +/** + * Test a single provider's OAuth flow using the mock CLI + */ +async function testProvider(providerId: string): Promise { + const config = CLI_AUTH_CONFIG[providerId]; + if (!config) { + return { + provider: providerId, + passed: false, + urlExtracted: null, + promptsResponded: [], + successDetected: false, + output: '', + error: `Unknown provider: ${providerId}`, + }; + } + + const result: TestResult = { + provider: providerId, + passed: false, + urlExtracted: null, + promptsResponded: [], + successDetected: false, + output: '', + }; + + return new Promise((resolve) => { + const mockCliPath = path.join(__dirname, 'mock-cli.sh'); + const respondedPrompts = new Set(); + + // Map provider IDs to mock CLI provider names + const mockProviderName = providerId === 'anthropic' ? 'claude' : + providerId === 'openai' ? 'codex' : + providerId === 'google' ? 
'gemini' : providerId; + + const proc = pty.spawn('bash', [mockCliPath, mockProviderName, '0.2'], { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: __dirname, + env: { ...process.env, TERM: 'xterm-256color' }, + }); + + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for completion'; + resolve(result); + }, 10000); + + proc.onData((data: string) => { + result.output += data; + + // Check for matching prompts and auto-respond + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + result.promptsResponded.push(matchingPrompt.description); + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, matchingPrompt.delay ?? 50); + } + + // Look for auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.urlExtracted) { + result.urlExtracted = match[1]; + } + + // Check for success indicators + if (matchesSuccessPattern(data, config.successPatterns)) { + result.successDetected = true; + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + + // Determine if test passed + result.passed = !!( + result.urlExtracted && + result.successDetected && + exitCode === 0 + ); + + // Send completion signal to mock CLI + setTimeout(() => resolve(result), 100); + }); + + // For mock CLI, send signal to continue after prompts + setTimeout(() => { + try { + proc.write('\n'); // Signal to continue + } catch { + // Ignore + } + }, 3000); + }); +} + +/** + * Run tests for specified providers or all providers + */ +async function runTests(providers?: string[]) { + const providerIds = providers ?? 
Object.keys(CLI_AUTH_CONFIG); + + console.log('╔══════════════════════════════════════════════════════════════╗'); + console.log('║ CLI OAuth Flow Integration Tests ║'); + console.log('╚══════════════════════════════════════════════════════════════╝'); + console.log(''); + + const results: TestResult[] = []; + + for (const providerId of providerIds) { + const config = CLI_AUTH_CONFIG[providerId]; + if (!config) { + console.log(`⚠️ Unknown provider: ${providerId}`); + continue; + } + + console.log(`Testing ${config.displayName} (${providerId})...`); + + const result = await testProvider(providerId); + results.push(result); + + if (result.passed) { + console.log(` ✅ PASSED`); + } else { + console.log(` ❌ FAILED${result.error ? `: ${result.error}` : ''}`); + } + + console.log(` URL extracted: ${result.urlExtracted ? '✓' : '✗'}`); + console.log(` Success detected: ${result.successDetected ? '✓' : '✗'}`); + if (result.promptsResponded.length > 0) { + console.log(` Prompts responded: ${result.promptsResponded.join(', ')}`); + } + console.log(''); + } + + // Summary + const passed = results.filter(r => r.passed).length; + const failed = results.filter(r => !r.passed).length; + + console.log('═══════════════════════════════════════════════════════════════'); + console.log(`Summary: ${passed} passed, ${failed} failed`); + console.log('═══════════════════════════════════════════════════════════════'); + + // Exit with error if any tests failed + if (failed > 0) { + console.log('\nFailed tests:'); + for (const result of results.filter(r => !r.passed)) { + console.log(` - ${result.provider}: ${result.error || 'See details above'}`); + } + process.exit(1); + } +} + +// Parse CLI args +const args = process.argv.slice(2); +if (args.includes('--help') || args.includes('-h')) { + console.log(` +CLI OAuth Flow Integration Test + +Usage: + npx tsx scripts/test-cli-auth/test-oauth-flow.ts [provider...] 
+ +Examples: + npx tsx scripts/test-cli-auth/test-oauth-flow.ts # Test all providers + npx tsx scripts/test-cli-auth/test-oauth-flow.ts anthropic # Test Claude only + npx tsx scripts/test-cli-auth/test-oauth-flow.ts anthropic openai # Test multiple + +Providers: + ${Object.keys(CLI_AUTH_CONFIG).join(', ')} +`); + process.exit(0); +} + +runTests(args.length > 0 ? args : undefined).catch(console.error); diff --git a/scripts/test-pty-input-auto.js b/scripts/test-pty-input-auto.js new file mode 100644 index 00000000..25791459 --- /dev/null +++ b/scripts/test-pty-input-auto.js @@ -0,0 +1,222 @@ +#!/usr/bin/env node +/** + * Automated PTY input test for Claude CLI + * Tests different input methods without user interaction + * Run inside workspace container: node /app/dist/scripts/test-pty-input-auto.js + */ + +import * as pty from 'node-pty'; + +const TEST_CODE = 'test-auth-code-12345'; +const INPUT_METHOD = process.argv[2] || '1'; + +// Debug: Log all escape sequences we send +function logHex(label, data) { + const hex = Buffer.from(data).toString('hex').replace(/(.{2})/g, '$1 ').trim(); + console.log(`[HEX] ${label}: ${hex}`); +} + +function stripAnsi(str) { + return str.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +async function main() { + console.log(`\nTesting PTY input method ${INPUT_METHOD} with code: ${TEST_CODE}\n`); + + const proc = pty.spawn('claude', [], { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: '/workspace', + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + DISPLAY: '', + }, + }); + + let output = ''; + let authUrl = null; + let codePromptSeen = false; + let codeSent = false; + const prompts = [ + { pattern: /dark\s*(mode|theme)/i, response: '\r', name: 'dark mode' }, + { pattern: /select\s*login|how\s*would\s*you\s*like|subscription\s*or.*api/i, response: '\r', name: 'login method' }, + ]; + const respondedPrompts = new Set(); + + proc.onData((data) => { + output += data; + const clean = stripAnsi(data); + + // Log 
meaningful output + if (clean.trim()) { + const lines = clean.trim().split('\n').map(l => l.trim()).filter(l => l); + for (const line of lines) { + if (line.length > 3 && !line.match(/^[·✢*✶✻✽]+$/)) { + console.log('[PTY]', line.substring(0, 120)); + } + } + } + + // Auto-respond to prompts + for (const prompt of prompts) { + if (!respondedPrompts.has(prompt.name) && prompt.pattern.test(clean)) { + respondedPrompts.add(prompt.name); + console.log(`\n[AUTO] Responding to: ${prompt.name}`); + setTimeout(() => proc.write(prompt.response), 100); + } + } + + // Capture auth URL + const urlMatch = clean.match(/(https:\/\/[^\s]+)/); + if (urlMatch && !authUrl) { + authUrl = urlMatch[1]; + console.log('\n[CAPTURED] Auth URL detected'); + } + + // Look for code paste prompt - various patterns Claude might use + const codePromptPatterns = [ + /paste.*code/i, + /enter.*code/i, + /authorization.*code/i, + /code.*here/i, + /waiting.*code/i, + /input.*code/i, + ]; + + if (authUrl && !codePromptSeen && !codeSent) { + for (const pattern of codePromptPatterns) { + if (pattern.test(clean)) { + codePromptSeen = true; + console.log('\n[DETECTED] Code prompt pattern:', pattern.toString()); + break; + } + } + } + + // Also look for the text input box indicator from Ink + // After URL is shown and some time passes, try sending the code + if (authUrl && !codeSent) { + // Check if we see any indication we should enter the code + const outputLower = stripAnsi(output).toLowerCase(); + const hasCodePrompt = outputLower.includes('paste') || + outputLower.includes('enter the code') || + outputLower.includes('authorization code') || + outputLower.includes("browser didn't open"); + + if (hasCodePrompt || output.length > 5000) { + codeSent = true; + console.log('\n[SENDING] Sending code after prompt/timeout...'); + setTimeout(() => sendCode(proc), 500); + } + } + }); + + proc.onExit(({ exitCode }) => { + console.log('\n[EXIT] Claude exited with code:', exitCode); + console.log('[TOTAL OUTPUT 
LENGTH]', output.length); + + // Check for credentials + import('fs').then(fs => { + const credPath = '/home/workspace/.claude/.credentials.json'; + if (fs.existsSync(credPath)) { + console.log('[SUCCESS] Credentials file found!'); + const creds = fs.readFileSync(credPath, 'utf8'); + console.log('[CREDS]', creds.substring(0, 200)); + } else { + console.log('[RESULT] No credentials file created (expected with test code)'); + } + }); + + setTimeout(() => process.exit(exitCode), 1000); + }); + + async function sendCode(ptyProc) { + const PASTE_START = '\x1b[200~'; + const PASTE_END = '\x1b[201~'; + + console.log(`[METHOD ${INPUT_METHOD}] Sending test code...`); + + switch (INPUT_METHOD) { + case '1': + console.log('[1] Plain code + \\r (carriage return)'); + logHex('sending', TEST_CODE + '\r'); + ptyProc.write(TEST_CODE + '\r'); + break; + case '2': + console.log('[2] Plain code + \\n (newline)'); + logHex('sending', TEST_CODE + '\n'); + ptyProc.write(TEST_CODE + '\n'); + break; + case '3': + console.log('[3] Bracketed paste + \\r'); + logHex('paste start', PASTE_START); + logHex('code', TEST_CODE); + logHex('paste end', PASTE_END); + ptyProc.write(PASTE_START + TEST_CODE + PASTE_END); + await new Promise(r => setTimeout(r, 200)); + logHex('enter', '\r'); + ptyProc.write('\r'); + break; + case '4': + console.log('[4] Plain code + \\r\\n (CRLF)'); + logHex('sending', TEST_CODE + '\r\n'); + ptyProc.write(TEST_CODE + '\r\n'); + break; + case '5': + console.log('[5] Character by character + \\r'); + for (const char of TEST_CODE) { + ptyProc.write(char); + await new Promise(r => setTimeout(r, 10)); + } + await new Promise(r => setTimeout(r, 200)); + logHex('enter', '\r'); + ptyProc.write('\r'); + break; + case '6': + console.log('[6] Code then wait, then Enter separately'); + logHex('code only', TEST_CODE); + ptyProc.write(TEST_CODE); + await new Promise(r => setTimeout(r, 1000)); + console.log('[6] Now sending Enter...'); + logHex('enter', '\r'); + ptyProc.write('\r'); + 
break; + case '7': + console.log('[7] Send Enter first then code then Enter'); + ptyProc.write('\r'); // Clear any existing state + await new Promise(r => setTimeout(r, 200)); + logHex('code + enter', TEST_CODE + '\r'); + ptyProc.write(TEST_CODE + '\r'); + break; + case '8': + console.log('[8] Ctrl+M (same as \\r but explicit)'); + logHex('code + ctrl-m', TEST_CODE + '\x0d'); + ptyProc.write(TEST_CODE + '\x0d'); + break; + default: + console.log('[DEFAULT] Plain code + \\r'); + ptyProc.write(TEST_CODE + '\r'); + } + + console.log('[SENT] Waiting for response...'); + + // Give it more time to process and show error + setTimeout(() => { + console.log('\n[TIMEOUT] Test complete, terminating...'); + console.log('[FINAL OUTPUT CHECK] Last 500 chars of output:'); + console.log(stripAnsi(output).slice(-500)); + ptyProc.kill(); + }, 20000); + } + + // Failsafe timeout + setTimeout(() => { + console.log('\n[FAILSAFE] Max time reached, terminating...'); + proc.kill(); + }, 60000); +} + +main().catch(console.error); diff --git a/scripts/test-pty-input.js b/scripts/test-pty-input.js new file mode 100644 index 00000000..7fc26f2d --- /dev/null +++ b/scripts/test-pty-input.js @@ -0,0 +1,150 @@ +#!/usr/bin/env node +/** + * Test PTY input methods for Claude CLI + * Run inside workspace container: node /app/dist/scripts/test-pty-input.js + */ + +import * as pty from 'node-pty'; +import * as readline from 'readline'; + +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +function ask(question) { + return new Promise(resolve => rl.question(question, resolve)); +} + +function stripAnsi(str) { + return str.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +async function main() { + console.log('Starting Claude CLI via PTY...\n'); + + const proc = pty.spawn('claude', [], { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: '/workspace', + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + DISPLAY: '', + }, + }); + + let 
output = ''; + let authUrl = null; + const prompts = [ + { pattern: /dark\s*(mode|theme)/i, response: '\r', name: 'dark mode' }, + { pattern: /select\s*login|how\s*would\s*you\s*like|subscription\s*or.*api/i, response: '\r', name: 'login method' }, + ]; + const respondedPrompts = new Set(); + + proc.onData((data) => { + output += data; + const clean = stripAnsi(data); + + // Log output + if (clean.trim()) { + console.log('[PTY]', clean.substring(0, 200)); + } + + // Auto-respond to prompts + for (const prompt of prompts) { + if (!respondedPrompts.has(prompt.name) && prompt.pattern.test(clean)) { + respondedPrompts.add(prompt.name); + console.log(`\n[AUTO] Responding to: ${prompt.name}`); + setTimeout(() => proc.write(prompt.response), 100); + } + } + + // Capture auth URL + const urlMatch = clean.match(/(https:\/\/[^\s]+)/); + if (urlMatch && !authUrl) { + authUrl = urlMatch[1]; + console.log('\n[CAPTURED] Auth URL:', authUrl.substring(0, 80) + '...'); + promptForCode(); + } + }); + + proc.onExit(({ exitCode }) => { + console.log('\n[EXIT] Claude exited with code:', exitCode); + console.log('[OUTPUT LENGTH]', output.length); + rl.close(); + process.exit(exitCode); + }); + + async function promptForCode() { + console.log('\n========================================'); + console.log('Complete OAuth in browser, then paste the code here.'); + console.log('========================================\n'); + + const code = await ask('Paste auth code: '); + + console.log('\nSelect input method:'); + console.log('1. Plain code + \\r'); + console.log('2. Plain code + \\n'); + console.log('3. Bracketed paste + \\r'); + console.log('4. Bracketed paste + \\n'); + console.log('5. 
Character by character + \\r'); + + const method = await ask('Choice (1-5): '); + + const PASTE_START = '\x1b[200~'; + const PASTE_END = '\x1b[201~'; + const cleanCode = code.trim(); + + console.log(`\n[SENDING] Using method ${method}...`); + + switch (method) { + case '1': + proc.write(cleanCode + '\r'); + break; + case '2': + proc.write(cleanCode + '\n'); + break; + case '3': + proc.write(PASTE_START + cleanCode + PASTE_END); + await new Promise(r => setTimeout(r, 200)); + proc.write('\r'); + break; + case '4': + proc.write(PASTE_START + cleanCode + PASTE_END); + await new Promise(r => setTimeout(r, 200)); + proc.write('\n'); + break; + case '5': + for (const char of cleanCode) { + proc.write(char); + await new Promise(r => setTimeout(r, 10)); + } + await new Promise(r => setTimeout(r, 200)); + proc.write('\r'); + break; + default: + proc.write(cleanCode + '\r'); + } + + console.log('[SENT] Waiting for response...\n'); + + // Wait and watch output + setTimeout(() => { + console.log('\n[CHECK] Checking for credentials file...'); + import('fs').then(fs => { + const credPath = '/home/workspace/.claude/.credentials.json'; + if (fs.existsSync(credPath)) { + console.log('[SUCCESS] Credentials file found!'); + console.log(fs.readFileSync(credPath, 'utf8').substring(0, 200)); + } else { + console.log('[FAIL] No credentials file yet'); + } + }); + }, 5000); + } +} + +main().catch(console.error); diff --git a/src/bridge/spawner.test.ts b/src/bridge/spawner.test.ts index b38ccc86..8dafdec5 100644 --- a/src/bridge/spawner.test.ts +++ b/src/bridge/spawner.test.ts @@ -67,8 +67,22 @@ describe('AgentSpawner', () => { beforeEach(() => { vi.clearAllMocks(); - existsSyncMock.mockReturnValue(true); - readFileSyncMock.mockReturnValue(JSON.stringify({ agents: [] })); + // Mock file system calls with path-aware responses + existsSyncMock.mockImplementation((filePath: string) => { + // Snippet files don't exist in test environment + if (filePath.includes('agent-relay-snippet') || 
filePath.includes('agent-relay-protocol')) { + return false; + } + return true; + }); + readFileSyncMock.mockImplementation((filePath: string) => { + // Return agents.json content for registry files + if (typeof filePath === 'string' && filePath.includes('agents.json')) { + return JSON.stringify({ agents: [] }); + } + // Return empty for other files + return ''; + }); writeFileSyncMock.mockImplementation(() => {}); mkdirSyncMock.mockImplementation(() => undefined); mockPtyWrapper.start.mockResolvedValue(undefined); @@ -95,7 +109,10 @@ describe('AgentSpawner', () => { }); expect(spawner.hasWorker('Dev1')).toBe(true); expect(mockPtyWrapper.start).toHaveBeenCalled(); - expect(mockPtyWrapper.write).toHaveBeenCalledWith('Finish the report\r'); + // Task is written to PTY (may include injected snippets, so check task is included) + expect(mockPtyWrapper.write).toHaveBeenCalled(); + const writeCall = mockPtyWrapper.write.mock.calls[0][0]; + expect(writeCall).toContain('Finish the report'); }); it('adds --dangerously-skip-permissions for Claude variants', async () => { diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index 2b784d6c..5c4b056f 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -11,6 +11,7 @@ import { getProjectPaths } from '../utils/project-namespace.js'; import { resolveCommand } from '../utils/command-resolver.js'; import { PtyWrapper, type PtyWrapperConfig, type SummaryEvent, type SessionEndEvent } from '../wrapper/pty-wrapper.js'; import { selectShadowCli } from './shadow-cli.js'; +import { AgentPolicyService, type CloudPolicyFetcher } from '../policy/agent-policy.js'; import type { SpawnRequest, SpawnResult, @@ -64,6 +65,19 @@ export type OnAgentDeathCallback = (info: { resumeInstructions?: string; }) => void; +/** + * Get a minimal relay reminder. + * Agents already have full relay docs via CLAUDE.md - this is just a brief reminder. + * Loading full docs (400+ lines) overwhelms agents and causes "meandering". 
+ */ +function getMinimalRelayReminder(): string { + return `# Quick Relay Reference +- Send: \`->relay:Name <<>>\` +- ACK tasks, send DONE when complete +- Use \`trail start/decision/complete\` for trajectories +- Output \`[[SESSION_END]]..[[/SESSION_END]]\` when done`; +} + export class AgentSpawner { private activeWorkers: Map = new Map(); private agentsPath: string; @@ -74,6 +88,8 @@ export class AgentSpawner { private dashboardPort?: number; private onAgentDeath?: OnAgentDeathCallback; private cloudPersistence?: CloudPersistenceHandler; + private policyService?: AgentPolicyService; + private policyEnforcementEnabled = false; constructor(projectRoot: string, _tmuxSession?: string, dashboardPort?: number) { const paths = getProjectPaths(projectRoot); @@ -86,6 +102,39 @@ export class AgentSpawner { // Ensure logs directory exists fs.mkdirSync(this.logsDir, { recursive: true }); + + // Initialize policy service if enforcement is enabled + if (process.env.AGENT_POLICY_ENFORCEMENT === '1') { + this.policyEnforcementEnabled = true; + this.policyService = new AgentPolicyService({ + projectRoot: this.projectRoot, + workspaceId: process.env.WORKSPACE_ID, + strictMode: process.env.AGENT_POLICY_STRICT === '1', + }); + console.log('[spawner] Policy enforcement enabled'); + } + } + + /** + * Set cloud policy fetcher for workspace-level policies + */ + setCloudPolicyFetcher(fetcher: CloudPolicyFetcher): void { + if (this.policyService) { + // Recreate policy service with cloud fetcher + this.policyService = new AgentPolicyService({ + projectRoot: this.projectRoot, + workspaceId: process.env.WORKSPACE_ID, + cloudFetcher: fetcher, + strictMode: process.env.AGENT_POLICY_STRICT === '1', + }); + } + } + + /** + * Get the policy service (for external access to policy checks) + */ + getPolicyService(): AgentPolicyService | undefined { + return this.policyService; } /** @@ -93,6 +142,7 @@ export class AgentSpawner { * Called after the dashboard server starts and we know the actual 
port. */ setDashboardPort(port: number): void { + console.log(`[spawner] Dashboard port set to ${port} - nested spawns now enabled`); this.dashboardPort = port; } @@ -166,7 +216,7 @@ export class AgentSpawner { * Spawn a new worker agent using node-pty */ async spawn(request: SpawnRequest): Promise<SpawnResult> { - const { name, cli, task, team } = request; + const { name, cli, task, team, spawnerName } = request; const debug = process.env.DEBUG_SPAWN === '1'; // Check if worker already exists @@ -178,6 +228,23 @@ }; } + // Policy enforcement: check if the spawner is authorized to spawn this agent + if (this.policyEnforcementEnabled && this.policyService && spawnerName) { + const decision = await this.policyService.canSpawn(spawnerName, name, cli); + if (!decision.allowed) { + console.warn(`[spawner] Policy blocked spawn: ${spawnerName} -> ${name}: ${decision.reason}`); + return { + success: false, + name, + error: `Policy denied: ${decision.reason}`, + policyDecision: decision, + }; + } + if (debug) { + console.log(`[spawner:debug] Policy allowed spawn: ${spawnerName} -> ${name} (source: ${decision.policySource})`); + } + } + try { // Parse CLI command const cliParts = cli.split(' '); @@ -208,15 +275,21 @@ // Create PtyWrapper config // Use dashboardPort for nested spawns (API-based, works in non-TTY contexts) - // Fall back to callbacks only if no dashboardPort is set + // Fall back to callbacks only if dashboardPort is not set // Note: Spawned agents CAN spawn sub-workers intentionally - the parser is strict enough // to avoid accidental spawns from documentation text (requires line start, PascalCase, known CLI) + // Use request.cwd if specified, otherwise use projectRoot + const agentCwd = request.cwd || this.projectRoot; + + // Log whether nested spawning will be enabled for this agent + console.log(`[spawner] Spawning ${name}: dashboardPort=${this.dashboardPort || 'none'} (${this.dashboardPort ? 
'nested spawns enabled' : 'nested spawns disabled'})`); + const ptyConfig: PtyWrapperConfig = { name, command, args, socketPath: this.socketPath, - cwd: this.projectRoot, + cwd: agentCwd, logsDir: this.logsDir, dashboardPort: this.dashboardPort, // Shadow agent configuration @@ -308,10 +381,30 @@ export class AgentSpawner { }; } + // Build the full message: minimal relay reminder + policy instructions (if any) + task + let fullMessage = task || ''; + + // Prepend a brief relay reminder (agents have full docs via CLAUDE.md) + // Note: Previously loaded full 400+ line docs which overwhelmed agents + const relayReminder = getMinimalRelayReminder(); + if (relayReminder) { + fullMessage = `${relayReminder}\n\n---\n\n${fullMessage}`; + if (debug) console.log(`[spawner:debug] Prepended relay reminder for ${name}`); + } + + // Prepend policy instructions if enforcement is enabled + if (this.policyEnforcementEnabled && this.policyService) { + const policyInstruction = await this.policyService.getPolicyInstruction(name); + if (policyInstruction) { + fullMessage = `${policyInstruction}\n\n${fullMessage}`; + if (debug) console.log(`[spawner:debug] Prepended policy instructions to task for ${name}`); + } + } + // Send task via relay message if provided (not via direct PTY injection) // This ensures the agent is ready to receive before processing the task - if (task && task.trim()) { - if (debug) console.log(`[spawner:debug] Will send task via relay: ${task.substring(0, 50)}...`); + if (fullMessage && fullMessage.trim()) { + if (debug) console.log(`[spawner:debug] Will send task via relay: ${fullMessage.substring(0, 50)}...`); // If we have dashboard API, send task as relay message if (this.dashboardPort) { @@ -323,7 +416,7 @@ export class AgentSpawner { headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ to: name, - message: task, + message: fullMessage, from: '__spawner__', }), }); @@ -333,16 +426,16 @@ export class AgentSpawner { } else { 
console.warn(`[spawner] Failed to send task via relay: ${result.error}`); // Fall back to direct injection - pty.write(task + '\r'); + pty.write(fullMessage + '\r'); } } catch (err: any) { console.warn(`[spawner] Relay send failed, falling back to direct injection: ${err.message}`); - pty.write(task + '\r'); + pty.write(fullMessage + '\r'); } } else { // No dashboard API available - use direct injection as fallback if (debug) console.log(`[spawner:debug] No dashboard API, using direct injection`); - pty.write(task + '\r'); + pty.write(fullMessage + '\r'); } } diff --git a/src/bridge/types.ts b/src/bridge/types.ts index 5d05e556..df04f8cc 100644 --- a/src/bridge/types.ts +++ b/src/bridge/types.ts @@ -41,6 +41,10 @@ export interface SpawnRequest { task: string; /** Optional team name to organize agents under */ team?: string; + /** Working directory for the agent (defaults to detected workspace) */ + cwd?: string; + /** Name of the agent requesting the spawn (for policy enforcement) */ + spawnerName?: string; /** Shadow execution mode (subagent = no extra process) */ shadowMode?: 'subagent' | 'process'; /** Primary agent to shadow (if this agent is a shadow) */ @@ -53,12 +57,21 @@ export interface SpawnRequest { shadowSpeakOn?: Array<'SESSION_END' | 'CODE_WRITTEN' | 'REVIEW_REQUEST' | 'EXPLICIT_ASK' | 'ALL_MESSAGES'>; } +/** Policy decision details */ +export interface PolicyDecision { + allowed: boolean; + reason: string; + policySource: 'repo' | 'local' | 'workspace' | 'default'; +} + export interface SpawnResult { success: boolean; name: string; /** PID of the spawned process (for pty-based workers) */ pid?: number; error?: string; + /** Policy decision details if spawn was blocked by policy */ + policyDecision?: PolicyDecision; } export interface WorkerInfo { diff --git a/src/cli/index.ts b/src/cli/index.ts index dad3972c..fb7f97b2 100644 --- a/src/cli/index.ts +++ b/src/cli/index.ts @@ -2459,7 +2459,7 @@ program console.log(' UNACKNOWLEDGED ALERTS:'); 
console.log(' ─────────────────────────────────────────────────────────────'); for (const alert of data.alerts.slice(0, 10)) { - const time = new Date(alert.createdAt).toLocaleString(); + const _time = new Date(alert.createdAt).toLocaleString(); const icon = alert.alertType === 'oom_imminent' ? '🔴' : alert.alertType === 'critical' ? '🟠' : '🟡'; console.log(` ${icon} ${alert.agentName} - ${alert.alertType}`); @@ -2505,8 +2505,6 @@ program outputDir?: string; exposeGc?: boolean; }) => { - const { spawn } = await import('child_process'); - const os = await import('node:os'); const { getProjectPaths } = await import('../utils/project-namespace.js'); if (!commandParts || commandParts.length === 0) { @@ -2570,8 +2568,6 @@ program inboxDir: paths.dataDir, }); - const snapshotCount = 0; - // Start memory sampling const sampleInterval = setInterval(() => { const memUsage = process.memoryUsage(); diff --git a/src/cloud/api/cli-pty-runner.ts b/src/cloud/api/cli-pty-runner.ts new file mode 100644 index 00000000..289c4bd7 --- /dev/null +++ b/src/cloud/api/cli-pty-runner.ts @@ -0,0 +1,184 @@ +/** + * CLI PTY Runner + * + * Shared module for running CLI auth flows via PTY. + * Used by both production (onboarding.ts) and tests (ci-test-real-clis.ts). + * + * This module has minimal dependencies (only node-pty) so it can be + * used in isolated test containers without the full server stack. 
+ */ + +import * as pty from 'node-pty'; + +// Import shared config and utilities +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs as validateAllConfigs, + getSupportedProviders, + type CLIAuthConfig, + type PromptHandler, +} from '../../shared/cli-auth-config.js'; + +// Re-export everything from shared config for backward compatibility +export { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + getSupportedProviders, + type CLIAuthConfig, + type PromptHandler, +}; + +// Wrapper that throws instead of returning array (backward compatible) +export function validateAllProviderConfigs(): void { + const errors = validateAllConfigs(); + if (errors.length > 0) { + throw new Error(`Invalid provider configurations:\n${errors.join('\n')}`); + } +} + +/** + * Result of running a CLI auth flow via PTY + */ +export interface PTYAuthResult { + authUrl: string | null; + success: boolean; + promptsHandled: string[]; + output: string; + exitCode: number | null; + error?: string; +} + +/** + * Options for running CLI auth via PTY + */ +export interface PTYAuthOptions { + /** Callback when auth URL is found */ + onAuthUrl?: (url: string) => void; + /** Callback when a prompt is handled */ + onPromptHandled?: (description: string) => void; + /** Callback for raw PTY output */ + onOutput?: (data: string) => void; + /** Environment variables override */ + env?: Record; + /** Working directory */ + cwd?: string; +} + +/** + * Run CLI auth flow via PTY + * + * This is the core PTY runner used by both production and tests. 
+ * It handles: + * - Spawning the CLI with proper TTY emulation + * - Auto-responding to interactive prompts + * - Extracting auth URLs from output + * - Detecting success patterns + * + * @param config - CLI auth configuration for the provider + * @param options - Optional callbacks and overrides + * @returns Promise resolving to auth result + */ +export async function runCLIAuthViaPTY( + config: CLIAuthConfig, + options: PTYAuthOptions = {} +): Promise { + const result: PTYAuthResult = { + authUrl: null, + success: false, + promptsHandled: [], + output: '', + exitCode: null, + }; + + const respondedPrompts = new Set(); + + return new Promise((resolve) => { + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: options.cwd || process.cwd(), + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + // Prevent CLIs from trying to open browsers + BROWSER: 'echo', + DISPLAY: '', + ...options.env, + } as Record, + }); + + // Timeout handler + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for auth URL'; + resolve(result); + }, config.waitTimeout + 5000); + + proc.onData((data: string) => { + result.output += data; + options.onOutput?.(data); + + // Check for matching prompts and auto-respond + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + result.promptsHandled.push(matchingPrompt.description); + options.onPromptHandled?.(matchingPrompt.description); + + const delay = matchingPrompt.delay ?? 
100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, delay); + } + + // Look for auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.authUrl) { + result.authUrl = match[1]; + options.onAuthUrl?.(result.authUrl); + } + + // Check for success indicators + if (matchesSuccessPattern(data, config.successPatterns)) { + result.success = true; + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + result.exitCode = exitCode; + + // Consider it a success if we got a URL (main goal) + // or if exit code was 0 with success pattern + if (result.authUrl || (exitCode === 0 && result.success)) { + result.success = true; + } + + if (!result.authUrl && !result.success && !result.error) { + result.error = 'Failed to extract auth URL from CLI output'; + } + + resolve(result); + }); + } catch (err) { + result.error = err instanceof Error ? err.message : 'Unknown error'; + resolve(result); + } + }); +} + diff --git a/src/cloud/api/generic-webhooks.ts b/src/cloud/api/generic-webhooks.ts new file mode 100644 index 00000000..bd68159a --- /dev/null +++ b/src/cloud/api/generic-webhooks.ts @@ -0,0 +1,145 @@ +/** + * Generic Webhooks API Routes + * + * Provides endpoints for receiving webhooks from any configured source. 
+ * Routes: POST /api/webhooks/:source + */ + +import { Router, Request, Response } from 'express'; +import { processWebhook, getWebhookConfig } from '../webhooks/index.js'; + +export const genericWebhooksRouter = Router(); + +/** + * POST /api/webhooks/:source + * Receive a webhook from any configured source + */ +genericWebhooksRouter.post('/:source', async (req: Request, res: Response) => { + const { source } = req.params; + + // For Slack URL verification challenge + if (source === 'slack' && req.body?.type === 'url_verification') { + return res.json({ challenge: req.body.challenge }); + } + + try { + // Get raw body for signature verification + // Note: This requires express.raw() middleware or similar + const rawBody = typeof req.body === 'string' + ? req.body + : JSON.stringify(req.body); + + const result = await processWebhook( + source, + rawBody, + req.headers as Record + ); + + if (!result.success && result.responses[0]?.error === 'Invalid signature') { + return res.status(401).json({ error: 'Invalid signature' }); + } + + if (!result.success && result.responses[0]?.error?.includes('Unknown webhook source')) { + return res.status(404).json({ error: `Unknown webhook source: ${source}` }); + } + + console.log(`[webhooks] Processed ${source} webhook: ${result.eventType} (${result.matchedRules.length} rules matched)`); + + res.json({ + success: result.success, + eventId: result.eventId, + eventType: result.eventType, + matchedRules: result.matchedRules, + actionsExecuted: result.actions.length, + }); + } catch (error) { + console.error(`[webhooks] Error processing ${source} webhook:`, error); + res.status(500).json({ + error: error instanceof Error ? 
error.message : 'Unknown error', + }); + } +}); + +/** + * GET /api/webhooks/config + * Get the current webhook configuration (for debugging) + */ +genericWebhooksRouter.get('/config', (_req: Request, res: Response) => { + const config = getWebhookConfig(); + + res.json({ + sources: Object.entries(config.sources).map(([id, source]) => ({ + id, + name: source.name, + enabled: source.enabled, + parser: source.parser, + responder: source.responder, + })), + rules: config.rules.map(rule => ({ + id: rule.id, + name: rule.name, + enabled: rule.enabled, + source: rule.source, + eventType: rule.eventType, + condition: rule.condition, + actionType: rule.action.type, + priority: rule.priority, + })), + }); +}); + +/** + * GET /api/webhooks/sources + * List available webhook sources with their setup instructions + */ +genericWebhooksRouter.get('/sources', (_req: Request, res: Response) => { + const baseUrl = process.env.PUBLIC_URL || 'https://your-domain.com'; + + res.json({ + sources: [ + { + id: 'github', + name: 'GitHub', + webhookUrl: `${baseUrl}/api/webhooks/github`, + setupInstructions: [ + '1. Go to your repository Settings > Webhooks > Add webhook', + `2. Set Payload URL to: ${baseUrl}/api/webhooks/github`, + '3. Set Content type to: application/json', + '4. Set Secret to your GITHUB_WEBHOOK_SECRET value', + '5. Select events: Check runs, Issues, Issue comments, Pull request review comments', + ], + requiredEnvVars: ['GITHUB_WEBHOOK_SECRET'], + events: ['check_run', 'issues', 'issue_comment', 'pull_request_review_comment'], + }, + { + id: 'linear', + name: 'Linear', + webhookUrl: `${baseUrl}/api/webhooks/linear`, + setupInstructions: [ + '1. Go to Linear Settings > API > Webhooks', + '2. Create a new webhook', + `3. Set URL to: ${baseUrl}/api/webhooks/linear`, + '4. Copy the signing secret to LINEAR_WEBHOOK_SECRET', + '5. 
Select events: Issues, Comments', + ], + requiredEnvVars: ['LINEAR_WEBHOOK_SECRET', 'LINEAR_API_KEY'], + events: ['Issue', 'Comment', 'IssueLabel'], + }, + { + id: 'slack', + name: 'Slack', + webhookUrl: `${baseUrl}/api/webhooks/slack`, + setupInstructions: [ + '1. Create a Slack App at api.slack.com/apps', + '2. Enable Event Subscriptions', + `3. Set Request URL to: ${baseUrl}/api/webhooks/slack`, + '4. Subscribe to bot events: app_mention, message.channels', + '5. Copy Signing Secret to SLACK_SIGNING_SECRET', + '6. Install the app to your workspace', + ], + requiredEnvVars: ['SLACK_SIGNING_SECRET', 'SLACK_BOT_TOKEN'], + events: ['app_mention', 'message', 'reaction_added'], + }, + ], + }); +}); diff --git a/src/cloud/api/git.ts b/src/cloud/api/git.ts new file mode 100644 index 00000000..e517ec21 --- /dev/null +++ b/src/cloud/api/git.ts @@ -0,0 +1,153 @@ +/** + * Git Gateway API Routes + * + * Provides fresh GitHub tokens to workspace containers for git operations. + * This gateway pattern ensures tokens are always valid (Nango handles refresh). 
+ */ + +import crypto from 'crypto'; +import { Router, Request, Response } from 'express'; +import { db } from '../db/index.js'; +import { nangoService } from '../services/nango.js'; +import { getConfig } from '../config.js'; + +export const gitRouter = Router(); + +/** + * Generate expected workspace token using HMAC + */ +function generateExpectedToken(workspaceId: string): string { + const config = getConfig(); + return crypto + .createHmac('sha256', config.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); +} + +/** + * Verify workspace access token + * Workspaces authenticate with a secret passed at provisioning time + */ +function verifyWorkspaceToken(req: Request, workspaceId: string): boolean { + const authHeader = req.get('authorization'); + if (!authHeader?.startsWith('Bearer ')) { + return false; + } + const providedToken = authHeader.slice(7); + const expectedToken = generateExpectedToken(workspaceId); + + // Use timing-safe comparison to prevent timing attacks + try { + return crypto.timingSafeEqual( + Buffer.from(providedToken), + Buffer.from(expectedToken) + ); + } catch { + return false; + } +} + +/** + * GET /api/git/token + * Get a fresh GitHub token for git operations + * + * Query params: + * - workspaceId: The workspace requesting the token + * + * Returns: { token: string, expiresAt?: string } + * + * This endpoint is called by the git credential helper in workspace containers. + * It fetches a fresh GitHub App installation token via Nango. 
+ */ +gitRouter.get('/token', async (req: Request, res: Response) => { + const { workspaceId } = req.query; + + if (!workspaceId || typeof workspaceId !== 'string') { + return res.status(400).json({ error: 'workspaceId is required' }); + } + + // Verify the request is from a valid workspace + if (!verifyWorkspaceToken(req, workspaceId)) { + return res.status(401).json({ error: 'Invalid workspace token' }); + } + + try { + // Get workspace to find the user + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + const userId = workspace.userId; + + // Find a repository with a Nango connection for this user + const repos = await db.repositories.findByUserId(userId); + const repoWithConnection = repos.find(r => r.nangoConnectionId); + + if (!repoWithConnection?.nangoConnectionId) { + return res.status(404).json({ + error: 'No GitHub App connection found', + hint: 'Connect a repository via the GitHub App to enable git operations', + }); + } + + // Get fresh token from Nango (auto-refreshes if needed) + const token = await nangoService.getGithubAppToken(repoWithConnection.nangoConnectionId); + + // GitHub App installation tokens expire after 1 hour + const expiresAt = new Date(Date.now() + 55 * 60 * 1000).toISOString(); // 55 min buffer + + res.json({ + token, + expiresAt, + username: 'x-access-token', // GitHub App tokens use this as username + }); + } catch (error) { + console.error('[git] Error getting token:', error); + res.status(500).json({ error: 'Failed to get GitHub token' }); + } +}); + +/** + * POST /api/git/token + * Same as GET but accepts body params (for compatibility with some git credential helpers) + */ +gitRouter.post('/token', async (req: Request, res: Response) => { + const workspaceId = req.body.workspaceId || req.query.workspaceId; + + if (!workspaceId || typeof workspaceId !== 'string') { + return res.status(400).json({ error: 'workspaceId is 
required' }); + } + + if (!verifyWorkspaceToken(req, workspaceId)) { + return res.status(401).json({ error: 'Invalid workspace token' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + const repos = await db.repositories.findByUserId(workspace.userId); + const repoWithConnection = repos.find(r => r.nangoConnectionId); + + if (!repoWithConnection?.nangoConnectionId) { + return res.status(404).json({ + error: 'No GitHub App connection found', + }); + } + + const token = await nangoService.getGithubAppToken(repoWithConnection.nangoConnectionId); + const expiresAt = new Date(Date.now() + 55 * 60 * 1000).toISOString(); + + res.json({ + token, + expiresAt, + username: 'x-access-token', + }); + } catch (error) { + console.error('[git] Error getting token:', error); + res.status(500).json({ error: 'Failed to get GitHub token' }); + } +}); diff --git a/src/cloud/api/github-app.ts b/src/cloud/api/github-app.ts index dc20ff70..526f0f0e 100644 --- a/src/cloud/api/github-app.ts +++ b/src/cloud/api/github-app.ts @@ -125,30 +125,16 @@ githubAppRouter.post('/repos/:id/issues', async (req: Request, res: Response) => return res.status(400).json({ error: 'Repository not connected via Nango' }); } - // Get token and create issue via GitHub API - const token = await nangoService.getGithubAppToken(repository.nangoConnectionId); + // Create issue via Nango Proxy (handles token injection automatically) const [owner, repo] = repository.githubFullName.split('/'); - - const response = await fetch(`https://api.github.com/repos/${owner}/${repo}/issues`, { - method: 'POST', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ title, body: body || '', labels }), - }); - - if (!response.ok) { - const error = await response.text(); - 
throw new Error(`Failed to create issue: ${response.status} ${error}`); - } - - const issue = await response.json() as { id: number; number: number; html_url: string }; + const issue = await nangoService.createGithubIssue( + repository.nangoConnectionId, + owner, + repo, + { title, body: body || '', labels } + ); res.json({ - id: issue.id, number: issue.number, url: issue.html_url, }); @@ -182,29 +168,16 @@ githubAppRouter.post('/repos/:id/pulls', async (req: Request, res: Response) => return res.status(400).json({ error: 'Repository not connected via Nango' }); } - const token = await nangoService.getGithubAppToken(repository.nangoConnectionId); + // Create PR via Nango Proxy (handles token injection automatically) const [owner, repo] = repository.githubFullName.split('/'); - - const response = await fetch(`https://api.github.com/repos/${owner}/${repo}/pulls`, { - method: 'POST', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ title, body: body || '', head, base }), - }); - - if (!response.ok) { - const error = await response.text(); - throw new Error(`Failed to create PR: ${response.status} ${error}`); - } - - const pr = await response.json() as { id: number; number: number; html_url: string }; + const pr = await nangoService.createGithubPullRequest( + repository.nangoConnectionId, + owner, + repo, + { title, body: body || '', head, base } + ); res.json({ - id: pr.id, number: pr.number, url: pr.html_url, }); @@ -237,30 +210,16 @@ githubAppRouter.post('/repos/:id/comments', async (req: Request, res: Response) return res.status(400).json({ error: 'Repository not connected via Nango' }); } - const token = await nangoService.getGithubAppToken(repository.nangoConnectionId); + // Add comment via Nango Proxy (handles token injection automatically) const [owner, repo] = repository.githubFullName.split('/'); - - const 
response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}/comments`, - { - method: 'POST', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ body }), - } + const comment = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + issueNumber, + body ); - if (!response.ok) { - const error = await response.text(); - throw new Error(`Failed to add comment: ${response.status} ${error}`); - } - - const comment = await response.json() as { id: number; html_url: string }; - res.json({ id: comment.id, url: comment.html_url, diff --git a/src/cloud/api/monitoring.ts b/src/cloud/api/monitoring.ts index 9dbdce78..d2d7bced 100644 --- a/src/cloud/api/monitoring.ts +++ b/src/cloud/api/monitoring.ts @@ -15,7 +15,7 @@ import { requireAuth } from './auth.js'; import { db as dbModule } from '../db/index.js'; import { getDb } from '../db/drizzle.js'; import { - linkedDaemons, + linkedDaemons as _linkedDaemons, agentMetrics, agentCrashes, memoryAlerts, diff --git a/src/cloud/api/nango-auth.ts b/src/cloud/api/nango-auth.ts index 32987796..8e245263 100644 --- a/src/cloud/api/nango-auth.ts +++ b/src/cloud/api/nango-auth.ts @@ -71,8 +71,13 @@ nangoAuthRouter.get('/login-status/:connectionId', async (req: Request, res: Res // Clear incoming connection ID await db.users.clearIncomingConnectionId(user.id); + // Check if user has any repos connected + const repos = await db.repositories.findByUserId(user.id); + const hasRepos = repos.length > 0; + res.json({ ready: true, + hasRepos, user: { id: user.id, githubUsername: user.githubUsername, @@ -145,6 +150,10 @@ nangoAuthRouter.get('/repo-status/:connectionId', requireAuth, async (req: Reque return res.json({ ready: false }); } + // Check workspace status for frontend visibility + const workspaces = await 
db.workspaces.findByUserId(userId); + const primaryWorkspace = workspaces[0]; + res.json({ ready: true, repos: reposFromConnection.map(r => ({ @@ -153,6 +162,13 @@ nangoAuthRouter.get('/repo-status/:connectionId', requireAuth, async (req: Reque isPrivate: r.isPrivate, defaultBranch: r.defaultBranch, })), + workspace: primaryWorkspace ? { + id: primaryWorkspace.id, + name: primaryWorkspace.name, + status: primaryWorkspace.status, + publicUrl: primaryWorkspace.publicUrl, + } : null, + workspaceProvisioning: primaryWorkspace?.status === 'provisioning', }); } catch (error) { console.error('Error checking repo status:', error); @@ -169,17 +185,28 @@ nangoAuthRouter.get('/repo-status/:connectionId', requireAuth, async (req: Reque * Handle Nango webhooks for auth and sync events */ nangoAuthRouter.post('/webhook', async (req: Request, res: Response) => { - const signature = req.headers['x-nango-signature'] as string | undefined; - const rawBody = JSON.stringify(req.body); + // Use the preserved raw body from express.json verify callback + const rawBody = (req as Request & { rawBody?: string }).rawBody || JSON.stringify(req.body); + + // Verify signature using the new verifyIncomingWebhookRequest method + const hasSignature = req.headers['x-nango-signature'] || req.headers['x-nango-hmac-sha256']; + const isDev = process.env.NODE_ENV !== 'production'; - // Verify signature - if (!nangoService.verifyWebhookSignature(rawBody, signature)) { - console.error('[nango-webhook] Invalid signature'); - return res.status(401).json({ error: 'Invalid signature' }); + if (hasSignature) { + if (!nangoService.verifyWebhookSignature(rawBody, req.headers as Record)) { + console.error('[nango-webhook] Invalid signature'); + return res.status(401).json({ error: 'Invalid signature' }); + } + console.log('[nango-webhook] Signature verified'); + } else if (!isDev) { + console.error('[nango-webhook] Missing signature in production'); + return res.status(401).json({ error: 'Missing signature' }); + 
} else { + console.warn('[nango-webhook] Skipping signature verification in development (no signature)'); } const payload = req.body; - console.log(`[nango-webhook] Received ${payload.type} event`); + console.log(`[nango-webhook] Received ${payload.type} event`, JSON.stringify(payload, null, 2)); try { switch (payload.type) { @@ -191,6 +218,11 @@ nangoAuthRouter.post('/webhook', async (req: Request, res: Response) => { console.log('[nango-webhook] Sync event received'); break; + case 'forward': + // Nango forwards events from providers - typically not needed for our flow + console.log('[nango-webhook] Forward event from provider (ignored)'); + break; + default: console.log(`[nango-webhook] Unhandled event type: ${payload.type}`); } @@ -224,6 +256,11 @@ async function handleAuthWebhook(payload: { /** * Handle GitHub login webhook + * + * Three scenarios: + * 1. New user - Create user record, keep connection as permanent + * 2. Returning user with existing connection - Store incoming ID for polling, delete temp connection + * 3. 
Existing user, first connection - Set connection ID as permanent */ async function handleLoginWebhook( connectionId: string, @@ -231,21 +268,15 @@ async function handleLoginWebhook( ): Promise { // Get GitHub user info via Nango proxy const githubUser = await nangoService.getGithubUser(connectionId); + const githubId = String(githubUser.id); // Check if user already exists - const existingUser = await db.users.findByGithubId(String(githubUser.id)); - - if (existingUser) { - // Returning user - store temp connection for polling - await db.users.update(existingUser.id, { - incomingConnectionId: connectionId, - }); + const existingUser = await db.users.findByGithubId(githubId); - console.log(`[nango-webhook] Returning user login: ${githubUser.login}`); - } else { - // New user - create record + // SCENARIO 1: New user + if (!existingUser) { const newUser = await db.users.upsert({ - githubId: String(githubUser.id), + githubId, githubUsername: githubUser.login, email: githubUser.email || null, avatarUrl: githubUser.avatar_url || null, @@ -260,7 +291,49 @@ async function handleLoginWebhook( }); console.log(`[nango-webhook] New user created: ${githubUser.login}`); + return; } + + // SCENARIO 2: Returning user with existing connection - delete temp connection + if (existingUser.nangoConnectionId && existingUser.nangoConnectionId !== connectionId) { + console.log(`[nango-webhook] Returning user: ${githubUser.login}`, { + permanentConnectionId: existingUser.nangoConnectionId, + incomingConnectionId: connectionId, + }); + + // Store incoming connection ID for polling + await db.users.update(existingUser.id, { + incomingConnectionId: connectionId, + githubUsername: githubUser.login, + avatarUrl: githubUser.avatar_url || null, + }); + + // Delete the temporary connection from Nango to prevent duplicates + try { + await nangoService.deleteConnection(connectionId, NANGO_INTEGRATIONS.GITHUB_USER); + console.log(`[nango-webhook] Deleted temp connection for returning user`); + } 
catch (error) { + console.error(`[nango-webhook] Failed to delete temp connection:`, error); + // Non-fatal - continue anyway + } + + return; + } + + // SCENARIO 3: Existing user, first connection (or same connection) + console.log(`[nango-webhook] First/same connection for existing user: ${githubUser.login}`); + await db.users.update(existingUser.id, { + nangoConnectionId: connectionId, + incomingConnectionId: connectionId, + githubUsername: githubUser.login, + avatarUrl: githubUser.avatar_url || null, + }); + + // Update connection with user ID + await nangoService.updateEndUser(connectionId, NANGO_INTEGRATIONS.GITHUB_USER, { + id: existingUser.id, + email: existingUser.email || undefined, + }); } /** @@ -270,9 +343,22 @@ async function handleRepoAuthWebhook( connectionId: string, endUser?: { id?: string; email?: string } ): Promise { - const userId = endUser?.id; + let userId = endUser?.id; + + // Fallback: If endUser.id not in webhook, fetch connection metadata from Nango if (!userId) { - console.error('[nango-webhook] No user ID in repo auth webhook'); + console.log('[nango-webhook] No user ID in webhook payload, fetching from connection metadata...'); + try { + const connection = await nangoService.getConnection(connectionId, NANGO_INTEGRATIONS.GITHUB_APP); + userId = connection.end_user?.id; + console.log(`[nango-webhook] Got user ID from connection: ${userId || 'not found'}`); + } catch (err) { + console.error('[nango-webhook] Failed to fetch connection metadata:', err); + } + } + + if (!userId) { + console.error('[nango-webhook] No user ID found - cannot sync repos'); return; } @@ -283,6 +369,34 @@ async function handleRepoAuthWebhook( } try { + // Get the GitHub App installation ID + const githubInstallationId = await nangoService.getGithubAppInstallationId(connectionId); + let installationUuid: string | null = null; + + if (githubInstallationId) { + // Find or create the github_installations record + let installation = await 
db.githubInstallations.findByInstallationId(String(githubInstallationId)); + + if (!installation) { + // Create a new installation record + // We need to get more info about the installation - for now use user info + installation = await db.githubInstallations.upsert({ + installationId: String(githubInstallationId), + accountType: 'user', // Could be 'organization' - we'd need to detect this + accountLogin: user.githubUsername || 'unknown', + accountId: user.githubId || 'unknown', + installedById: user.id, + permissions: {}, + events: [], + }); + console.log(`[nango-webhook] Created installation record for ${githubInstallationId}`); + } + + installationUuid = installation.id; + } else { + console.warn('[nango-webhook] Could not get installation ID from Nango connection'); + } + // Fetch repos the user has access to const { repositories: repos } = await nangoService.listGithubAppRepos(connectionId); @@ -295,6 +409,7 @@ async function handleRepoAuthWebhook( isPrivate: repo.private, defaultBranch: repo.default_branch, nangoConnectionId: connectionId, + installationId: installationUuid, syncStatus: 'synced', lastSyncedAt: new Date(), }); @@ -303,7 +418,13 @@ async function handleRepoAuthWebhook( // Clear any pending installation request await db.users.clearPendingInstallationRequest(user.id); - console.log(`[nango-webhook] Synced ${repos.length} repos for ${user.githubUsername}`); + console.log(`[nango-webhook] Synced ${repos.length} repos for ${user.githubUsername} (installation: ${githubInstallationId || 'unknown'})`); + + // Note: We intentionally do NOT auto-provision workspaces here. + // Users should go through the onboarding flow at /app to: + // 1. Name their workspace + // 2. Choose which repos to include + // 3. 
Understand what they're creating } catch (error: unknown) { const err = error as { message?: string }; @@ -316,3 +437,4 @@ async function handleRepoAuthWebhook( } } } + diff --git a/src/cloud/api/onboarding.test.ts b/src/cloud/api/onboarding.test.ts new file mode 100644 index 00000000..a0bde16d --- /dev/null +++ b/src/cloud/api/onboarding.test.ts @@ -0,0 +1,329 @@ +/** + * Onboarding OAuth Flow Tests + * + * Tests the CLI-based OAuth authentication flow for AI providers. + * These tests verify prompt detection, URL extraction, and success patterns + * without requiring actual CLI execution. + */ + +import { describe, it, expect } from 'vitest'; +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs, + getSupportedProviders, + type CLIAuthConfig, +} from './onboarding.js'; + +describe('CLI Auth Config', () => { + describe('anthropic (Claude)', () => { + const config = CLI_AUTH_CONFIG.anthropic; + + it('has correct command and args', () => { + expect(config.command).toBe('claude'); + expect(config.args).toEqual([]); + }); + + it('extracts auth URL from output', () => { + const output = 'Please visit https://console.anthropic.com/oauth/authorize?client_id=xxx to authenticate'; + const match = output.match(config.urlPattern); + expect(match).toBeTruthy(); + expect(match![1]).toBe('https://console.anthropic.com/oauth/authorize?client_id=xxx'); + }); + + it('handles URL with query params and fragments', () => { + const output = 'Open: https://auth.example.com/login?state=abc123&redirect=xyz#section'; + const match = output.match(config.urlPattern); + expect(match).toBeTruthy(); + expect(match![1]).toContain('https://auth.example.com/login'); + }); + + describe('prompt handlers', () => { + it('detects dark mode prompt', () => { + const respondedPrompts = new Set(); + + const prompt1 = findMatchingPrompt('Would you like dark mode?', config.prompts, respondedPrompts); + 
expect(prompt1).toBeTruthy(); + expect(prompt1!.description).toBe('Dark mode prompt'); + expect(prompt1!.response).toBe('\r'); + + const prompt2 = findMatchingPrompt('Enable dark theme?', config.prompts, respondedPrompts); + expect(prompt2).toBeTruthy(); + expect(prompt2!.description).toBe('Dark mode prompt'); + }); + + it('detects login method prompt', () => { + const respondedPrompts = new Set(); + + const prompt1 = findMatchingPrompt( + 'Would you like to use your Claude subscription or an API key?', + config.prompts, + respondedPrompts + ); + expect(prompt1).toBeTruthy(); + expect(prompt1!.description).toBe('Login method selection'); + + const prompt2 = findMatchingPrompt( + 'How would you like to authenticate?', + config.prompts, + respondedPrompts + ); + expect(prompt2).toBeTruthy(); + expect(prompt2!.description).toBe('Login method selection'); + }); + + it('detects trust directory prompt', () => { + const respondedPrompts = new Set(); + + const prompt = findMatchingPrompt( + 'Do you trust the files in this folder?', + config.prompts, + respondedPrompts + ); + expect(prompt).toBeTruthy(); + expect(prompt!.description).toBe('Trust directory prompt'); + expect(prompt!.response).toBe('\r'); // Press enter to select first option (Yes, proceed) + }); + + it('does not respond to same prompt twice', () => { + const respondedPrompts = new Set(); + + // First match + const prompt1 = findMatchingPrompt('dark mode?', config.prompts, respondedPrompts); + expect(prompt1).toBeTruthy(); + respondedPrompts.add(prompt1!.description); + + // Second attempt should return null + const prompt2 = findMatchingPrompt('dark mode?', config.prompts, respondedPrompts); + expect(prompt2).toBeNull(); + }); + }); + + describe('success patterns', () => { + it('detects success indicators', () => { + expect(matchesSuccessPattern('Authentication successful!', config.successPatterns)).toBe(true); + expect(matchesSuccessPattern('You are now authenticated', config.successPatterns)).toBe(true); + 
expect(matchesSuccessPattern('Logged in as user@example.com', config.successPatterns)).toBe(true); + }); + + it('handles case insensitivity', () => { + expect(matchesSuccessPattern('SUCCESS', config.successPatterns)).toBe(true); + expect(matchesSuccessPattern('Authenticated', config.successPatterns)).toBe(true); + }); + + it('does not false positive', () => { + expect(matchesSuccessPattern('Please enter your password', config.successPatterns)).toBe(false); + expect(matchesSuccessPattern('Waiting for authentication...', config.successPatterns)).toBe(false); + }); + }); + }); + + describe('openai (Codex)', () => { + const config = CLI_AUTH_CONFIG.openai; + + it('has correct command and args', () => { + expect(config.command).toBe('codex'); + expect(config.args).toEqual(['login']); + }); + + it('extracts auth URL from output', () => { + const output = 'Visit https://auth.openai.com/authorize?client_id=xxx to login'; + const match = output.match(config.urlPattern); + expect(match).toBeTruthy(); + expect(match![1]).toBe('https://auth.openai.com/authorize?client_id=xxx'); + }); + }); + + describe('all providers', () => { + it('have required fields', () => { + for (const [name, config] of Object.entries(CLI_AUTH_CONFIG)) { + expect(config.command, `${name} missing command`).toBeTruthy(); + expect(config.urlPattern, `${name} missing urlPattern`).toBeInstanceOf(RegExp); + expect(config.displayName, `${name} missing displayName`).toBeTruthy(); + expect(config.waitTimeout, `${name} missing waitTimeout`).toBeGreaterThan(0); + expect(Array.isArray(config.prompts), `${name} prompts should be array`).toBe(true); + expect(Array.isArray(config.successPatterns), `${name} successPatterns should be array`).toBe(true); + } + }); + + it('URL patterns have capture groups', () => { + for (const [name, config] of Object.entries(CLI_AUTH_CONFIG)) { + const testUrl = 'https://example.com/auth'; + const match = testUrl.match(config.urlPattern); + expect(match, `${name} urlPattern should 
match`).toBeTruthy(); + expect(match![1], `${name} urlPattern should have capture group`).toBe(testUrl); + } + }); + }); +}); + +describe('stripAnsiCodes', () => { + it('removes ANSI escape codes', () => { + const input = '\x1b[32mGreen text\x1b[0m and \x1b[1mbold\x1b[0m'; + expect(stripAnsiCodes(input)).toBe('Green text and bold'); + }); + + it('preserves text without ANSI codes', () => { + const input = 'Plain text without codes'; + expect(stripAnsiCodes(input)).toBe(input); + }); + + it('handles complex ANSI sequences', () => { + const input = '\x1b[38;5;196mRed\x1b[0m \x1b[48;2;0;255;0mGreen BG\x1b[0m'; + expect(stripAnsiCodes(input)).toBe('Red Green BG'); + }); +}); + +describe('matchesSuccessPattern', () => { + const patterns = [/success/i, /authenticated/i, /logged\s*in/i]; + + it('matches patterns case-insensitively', () => { + expect(matchesSuccessPattern('SUCCESS', patterns)).toBe(true); + expect(matchesSuccessPattern('Authenticated!', patterns)).toBe(true); + expect(matchesSuccessPattern('You are logged in', patterns)).toBe(true); + }); + + it('strips ANSI codes before matching', () => { + expect(matchesSuccessPattern('\x1b[32mSuccess!\x1b[0m', patterns)).toBe(true); + }); + + it('returns false when no match', () => { + expect(matchesSuccessPattern('Please wait...', patterns)).toBe(false); + expect(matchesSuccessPattern('Error occurred', patterns)).toBe(false); + }); +}); + +describe('findMatchingPrompt', () => { + const prompts = [ + { pattern: /dark mode/i, response: '\r', description: 'Dark mode' }, + { pattern: /api key/i, response: '2\r', description: 'API key option' }, + ]; + + it('finds matching prompt', () => { + const responded = new Set(); + const match = findMatchingPrompt('Enable dark mode?', prompts, responded); + expect(match).toBeTruthy(); + expect(match!.description).toBe('Dark mode'); + }); + + it('skips already responded prompts', () => { + const responded = new Set(['Dark mode']); + const match = findMatchingPrompt('Enable dark 
mode?', prompts, responded); + expect(match).toBeNull(); + }); + + it('returns null when no match', () => { + const responded = new Set(); + const match = findMatchingPrompt('Something else', prompts, responded); + expect(match).toBeNull(); + }); + + it('strips ANSI codes before matching', () => { + const responded = new Set(); + const match = findMatchingPrompt('\x1b[1mDark mode?\x1b[0m', prompts, responded); + expect(match).toBeTruthy(); + expect(match!.description).toBe('Dark mode'); + }); +}); + +describe('validateProviderConfig', () => { + it('validates a correct config', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 3000, + prompts: [ + { pattern: /test/i, response: '\r', description: 'Test prompt' }, + ], + successPatterns: [/success/i], + }; + expect(validateProviderConfig('test', config)).toBeNull(); + }); + + it('rejects missing command', () => { + const config = { + command: '', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 3000, + prompts: [], + successPatterns: [], + } as CLIAuthConfig; + expect(validateProviderConfig('test', config)).toContain('command'); + }); + + it('rejects urlPattern without capture group', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: [], + urlPattern: /https:\/\/[^\s]+/, // No capture group! 
+ displayName: 'Test', + waitTimeout: 3000, + prompts: [], + successPatterns: [], + }; + expect(validateProviderConfig('test', config)).toContain('capture group'); + }); + + it('rejects invalid waitTimeout', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 0, + prompts: [], + successPatterns: [], + }; + expect(validateProviderConfig('test', config)).toContain('waitTimeout'); + }); + + it('rejects prompt without description', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 3000, + prompts: [ + { pattern: /test/i, response: '\r', description: '' }, + ], + successPatterns: [], + }; + expect(validateProviderConfig('test', config)).toContain('description'); + }); +}); + +describe('validateAllProviderConfigs', () => { + it('validates all built-in providers', () => { + // Should not throw + expect(() => validateAllProviderConfigs()).not.toThrow(); + }); +}); + +describe('getSupportedProviders', () => { + it('returns list of providers', () => { + const providers = getSupportedProviders(); + expect(providers.length).toBeGreaterThan(0); + + // Check structure + for (const provider of providers) { + expect(provider.id).toBeTruthy(); + expect(provider.displayName).toBeTruthy(); + expect(provider.command).toBeTruthy(); + } + }); + + it('includes anthropic', () => { + const providers = getSupportedProviders(); + const anthropic = providers.find(p => p.id === 'anthropic'); + expect(anthropic).toBeTruthy(); + expect(anthropic!.displayName).toBe('Claude'); + }); +}); diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 670e045a..4788be8a 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -2,18 +2,61 @@ * Onboarding API Routes * * Handles CLI proxy authentication for Claude Code and other providers. 
- * Spawns CLI tools to get auth URLs, captures tokens. + * Spawns CLI tools via PTY to get auth URLs, captures tokens. + * + * We use node-pty instead of child_process.spawn because: + * 1. Many CLIs detect if they're in a TTY and behave differently + * 2. Interactive OAuth flows often require TTY for proper output + * 3. PTY ensures the CLI outputs auth URLs correctly */ import { Router, Request, Response } from 'express'; -import { spawn, ChildProcess } from 'child_process'; -import crypto from 'crypto'; +import type { IPty } from 'node-pty'; +import * as crypto from 'crypto'; import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; import { vault } from '../vault/index.js'; +// Import for local use +import { + CLI_AUTH_CONFIG, + runCLIAuthViaPTY, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs, + getSupportedProviders, + type CLIAuthConfig, + type PTYAuthResult, + type PTYAuthOptions, + type PromptHandler, +} from './cli-pty-runner.js'; + +// Re-export from shared module for backward compatibility +export { + CLI_AUTH_CONFIG, + runCLIAuthViaPTY, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs, + getSupportedProviders, + type CLIAuthConfig, + type PTYAuthResult, + type PTYAuthOptions, + type PromptHandler, +}; + export const onboardingRouter = Router(); +// Debug: log all requests to this router +onboardingRouter.use((req, res, next) => { + console.log(`[onboarding] ${req.method} ${req.path} - body:`, JSON.stringify(req.body)); + next(); +}); + // All routes require authentication onboardingRouter.use(requireAuth); @@ -24,13 +67,19 @@ onboardingRouter.use(requireAuth); interface CLIAuthSession { userId: string; provider: string; - process?: ChildProcess; + process?: IPty; authUrl?: string; callbackUrl?: string; status: 'starting' | 'waiting_auth' | 'success' | 'error' | 'timeout'; token?: string; + 
refreshToken?: string; + tokenExpiresAt?: Date; error?: string; createdAt: Date; + output: string; // Accumulated output for debugging + // Workspace delegation fields (set when auth runs in workspace daemon) + workspaceUrl?: string; + workspaceSessionId?: string; } const activeSessions = new Map(); @@ -38,52 +87,34 @@ const activeSessions = new Map(); // Clean up old sessions periodically setInterval(() => { const now = Date.now(); - for (const [id, session] of activeSessions) { + activeSessions.forEach((session, id) => { // Remove sessions older than 10 minutes if (now - session.createdAt.getTime() > 10 * 60 * 1000) { if (session.process) { - session.process.kill(); + try { + session.process.kill(); + } catch { + // Process may already be dead + } } activeSessions.delete(id); } - } + }); }, 60000); -/** - * CLI commands and URL patterns for each provider - */ -const CLI_AUTH_CONFIG: Record = { - anthropic: { - // Claude Code CLI login - command: 'claude', - args: ['login', '--no-open'], - // Claude outputs: "Please open: https://..." - urlPattern: /(?:open|visit|go to)[:\s]+(\S+anthropic\S+)/i, - // Token might be in output or in credentials file - credentialPath: '~/.claude/credentials.json', - }, - openai: { - // Codex CLI auth - command: 'codex', - args: ['auth', '--no-browser'], - urlPattern: /(?:open|visit|go to)[:\s]+(\S+openai\S+)/i, - credentialPath: '~/.codex/credentials.json', - }, -}; - /** * POST /api/onboarding/cli/:provider/start - * Start CLI-based auth - spawns the CLI and captures auth URL + * Start CLI-based auth - forwards to workspace daemon if available + * + * CLI auth requires a running workspace since CLI tools are installed there. + * For onboarding without a workspace, users should use the API key flow. */ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response) => { + console.log('[onboarding] Route handler entered! 
provider:', req.params.provider); const { provider } = req.params; const userId = req.session.userId!; + const { workspaceId, useDeviceFlow } = req.body; // Optional: specific workspace, device flow option + console.log('[onboarding] userId:', userId, 'workspaceId:', workspaceId, 'useDeviceFlow:', useDeviceFlow); const config = CLI_AUTH_CONFIG[provider]; if (!config) { @@ -93,89 +124,99 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response }); } - // Create session - const sessionId = crypto.randomUUID(); - const session: CLIAuthSession = { - userId, - provider, - status: 'starting', - createdAt: new Date(), - }; - activeSessions.set(sessionId, session); - try { - // Spawn CLI process - const proc = spawn(config.command, config.args, { - env: { ...process.env, NO_COLOR: '1' }, - stdio: ['pipe', 'pipe', 'pipe'], - }); - - session.process = proc; - let _output = ''; + // Find a running workspace to use for CLI auth + let workspace; + if (workspaceId) { + workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + console.log(`[onboarding] Workspace ${workspaceId} not found in database`); + return res.status(404).json({ error: 'Workspace not found' }); + } + if (workspace.userId !== userId) { + console.log(`[onboarding] Workspace ${workspaceId} belongs to ${workspace.userId}, not ${userId}`); + return res.status(404).json({ error: 'Workspace not found' }); + } + } else { + // Find any running workspace for this user + const workspaces = await db.workspaces.findByUserId(userId); + workspace = workspaces.find(w => w.status === 'running' && w.publicUrl); + } - // Capture stdout/stderr for auth URL - const handleOutput = (data: Buffer) => { - const text = data.toString(); - _output += text; + if (!workspace || workspace.status !== 'running' || !workspace.publicUrl) { + return res.status(400).json({ + error: 'CLI auth requires a running workspace', + code: 'NO_RUNNING_WORKSPACE', + message: 'Please start a workspace first, or 
use the API key input to connect your provider.', + hint: 'You can create a workspace without providers and connect them afterward using CLI auth.', + }); + } - // Look for auth URL - const match = text.match(config.urlPattern); - if (match && match[1]) { - session.authUrl = match[1]; - session.status = 'waiting_auth'; - } + // Forward auth request to workspace daemon + // When running in Docker, localhost refers to the container, not the host + // Use host.docker.internal on Mac/Windows to reach the host machine + let workspaceUrl = workspace.publicUrl.replace(/\/$/, ''); - // Look for success indicators - if (text.toLowerCase().includes('success') || - text.toLowerCase().includes('authenticated') || - text.toLowerCase().includes('logged in')) { - session.status = 'success'; - } - }; + // Detect Docker by checking for /.dockerenv file or RUNNING_IN_DOCKER env var + const isInDocker = process.env.RUNNING_IN_DOCKER === 'true' || + await import('fs').then(fs => fs.existsSync('/.dockerenv')).catch(() => false); - proc.stdout.on('data', handleOutput); - proc.stderr.on('data', handleOutput); + console.log('[onboarding] isInDocker:', isInDocker, 'RUNNING_IN_DOCKER:', process.env.RUNNING_IN_DOCKER); - proc.on('error', (err) => { - session.status = 'error'; - session.error = `Failed to start CLI: ${err.message}`; - }); + if (isInDocker && workspaceUrl.includes('localhost')) { + workspaceUrl = workspaceUrl.replace('localhost', 'host.docker.internal'); + console.log('[onboarding] Translated localhost to host.docker.internal'); + } + const targetUrl = `${workspaceUrl}/auth/cli/${provider}/start`; + console.log('[onboarding] Forwarding to workspace daemon:', targetUrl); - proc.on('exit', async (code) => { - if (code === 0 && session.status !== 'error') { - session.status = 'success'; - // Try to read credentials from file - await extractCredentials(session, config); - } else if (session.status === 'starting') { - session.status = 'error'; - session.error = `CLI exited with 
code ${code}`; - } + const authResponse = await fetch(targetUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ useDeviceFlow }), }); - // Wait a moment for URL to appear - await new Promise(resolve => setTimeout(resolve, 2000)); + console.log('[onboarding] Workspace daemon response:', authResponse.status); - // Return session info - if (session.authUrl) { - res.json({ - sessionId, - status: 'waiting_auth', - authUrl: session.authUrl, - message: 'Open the auth URL to complete login', - }); - } else if (session.status === 'error') { - activeSessions.delete(sessionId); - res.status(500).json({ error: session.error || 'CLI auth failed to start' }); - } else { - // Still starting, return session ID to poll - res.json({ - sessionId, - status: 'starting', - message: 'Auth session starting, poll for status', + if (!authResponse.ok) { + const errorData = await authResponse.json().catch(() => ({})) as { error?: string }; + console.log('[onboarding] Workspace daemon error:', errorData); + return res.status(authResponse.status).json({ + error: errorData.error || 'Failed to start CLI auth in workspace', }); } + + const workspaceSession = await authResponse.json() as { + sessionId: string; + status?: string; + authUrl?: string; + }; + + // Create cloud session to track this + const sessionId = crypto.randomUUID(); + const session: CLIAuthSession = { + userId, + provider, + status: (workspaceSession.status as CLIAuthSession['status']) || 'starting', + authUrl: workspaceSession.authUrl, + createdAt: new Date(), + output: '', + // Store workspace info for status polling and auth code forwarding + workspaceUrl, + workspaceSessionId: workspaceSession.sessionId, + }; + + activeSessions.set(sessionId, session); + console.log('[onboarding] Session created:', { sessionId, workspaceUrl, workspaceSessionId: workspaceSession.sessionId }); + + res.json({ + sessionId, + status: session.status, + authUrl: session.authUrl, + workspaceId: 
workspace.id, + message: session.authUrl ? 'Open the auth URL to complete login' : 'Auth session starting, poll for status', + }); } catch (error) { - activeSessions.delete(sessionId); console.error(`Error starting CLI auth for ${provider}:`, error); res.status(500).json({ error: 'Failed to start CLI authentication' }); } @@ -183,10 +224,10 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response /** * GET /api/onboarding/cli/:provider/status/:sessionId - * Check status of CLI auth session + * Check status of CLI auth session - forwards to workspace daemon */ -onboardingRouter.get('/cli/:provider/status/:sessionId', (req: Request, res: Response) => { - const { sessionId } = req.params; +onboardingRouter.get('/cli/:provider/status/:sessionId', async (req: Request, res: Response) => { + const { provider, sessionId } = req.params; const userId = req.session.userId!; const session = activeSessions.get(sessionId); @@ -198,6 +239,28 @@ onboardingRouter.get('/cli/:provider/status/:sessionId', (req: Request, res: Res return res.status(403).json({ error: 'Unauthorized' }); } + // If we have workspace info, poll the workspace for status + if (session.workspaceUrl && session.workspaceSessionId) { + try { + const statusResponse = await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/status/${session.workspaceSessionId}` + ); + if (statusResponse.ok) { + const workspaceStatus = await statusResponse.json() as { + status?: string; + authUrl?: string; + error?: string; + }; + // Update local session with workspace status + session.status = (workspaceStatus.status as CLIAuthSession['status']) || session.status; + session.authUrl = workspaceStatus.authUrl || session.authUrl; + session.error = workspaceStatus.error; + } + } catch (err) { + console.error('[onboarding] Failed to poll workspace status:', err); + } + } + res.json({ status: session.status, authUrl: session.authUrl, @@ -208,11 +271,17 @@ 
onboardingRouter.get('/cli/:provider/status/:sessionId', (req: Request, res: Res /** * POST /api/onboarding/cli/:provider/complete/:sessionId * Mark CLI auth as complete and store credentials + * + * Handles two modes: + * 1. Workspace delegation: Forwards to workspace daemon to complete auth, then fetches credentials + * 2. Direct: Uses token from body or session */ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, res: Response) => { const { provider, sessionId } = req.params; const userId = req.session.userId!; - const { token } = req.body; // Optional: user can paste token directly + const { token, authCode } = req.body; // token for direct mode, authCode for Codex redirect + + console.log(`[onboarding] POST /cli/${provider}/complete/${sessionId} - token: ${token ? 'provided' : 'none'}, authCode: ${authCode ? 'provided' : 'none'}`); const session = activeSessions.get(sessionId); if (!session) { @@ -224,15 +293,58 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, } try { - // If token provided directly, use it let accessToken = token || session.token; + let refreshToken = session.refreshToken; + let tokenExpiresAt = session.tokenExpiresAt; + + // If using workspace delegation, forward complete request first + if (session.workspaceUrl && session.workspaceSessionId) { + // Forward authCode to workspace if provided (for Codex-style redirects) + if (authCode) { + const backendProviderId = provider === 'anthropic' ? 
'anthropic' : provider; + const targetUrl = `${session.workspaceUrl}/auth/cli/${backendProviderId}/complete/${session.workspaceSessionId}`; + console.log('[onboarding] Forwarding complete request to workspace:', targetUrl); + + const completeResponse = await fetch(targetUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ authCode }), + }); + + if (!completeResponse.ok) { + const errorData = await completeResponse.json().catch(() => ({})) as { error?: string }; + return res.status(completeResponse.status).json({ + error: errorData.error || 'Failed to complete authentication in workspace', + }); + } + session.status = 'success'; + } - // If no token yet, try to read from credentials file - if (!accessToken) { - const config = CLI_AUTH_CONFIG[provider]; - if (config) { - await extractCredentials(session, config); - accessToken = session.token; + // Fetch credentials from workspace + if (!accessToken) { + try { + const credsResponse = await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/creds/${session.workspaceSessionId}` + ); + if (credsResponse.ok) { + const creds = await credsResponse.json() as { + token?: string; + refreshToken?: string; + expiresAt?: string; + }; + accessToken = creds.token; + refreshToken = creds.refreshToken; + if (creds.expiresAt) { + tokenExpiresAt = new Date(creds.expiresAt); + } + console.log('[onboarding] Fetched credentials from workspace:', { + hasToken: !!accessToken, + hasRefreshToken: !!refreshToken, + }); + } + } catch (err) { + console.error('[onboarding] Failed to get credentials from workspace:', err); + } } } @@ -242,18 +354,17 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, }); } - // Store in vault + // Store in vault with refresh token and expiry await vault.storeCredential({ userId, provider, accessToken, + refreshToken, + tokenExpiresAt, scopes: getProviderScopes(provider), }); // Clean up session - if (session.process) { - 
session.process.kill(); - } activeSessions.delete(sessionId); res.json({ @@ -266,18 +377,110 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, } }); +/** + * POST /api/onboarding/cli/:provider/code/:sessionId + * Submit auth code to the CLI PTY session + * Used when OAuth returns a code that must be pasted into the CLI + */ +onboardingRouter.post('/cli/:provider/code/:sessionId', async (req: Request, res: Response) => { + const { provider, sessionId } = req.params; + const userId = req.session.userId!; + const { code } = req.body; + + console.log('[onboarding] Auth code submission request:', { provider, sessionId, codeLength: code?.length }); + + if (!code || typeof code !== 'string') { + return res.status(400).json({ error: 'Auth code is required' }); + } + + const session = activeSessions.get(sessionId); + if (!session) { + console.log('[onboarding] Session not found:', { sessionId, activeSessions: Array.from(activeSessions.keys()) }); + return res.status(404).json({ error: 'Session not found or expired. Please try connecting again.' 
}); + } + + if (session.userId !== userId) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + console.log('[onboarding] Session found:', { + sessionId, + workspaceUrl: session.workspaceUrl, + workspaceSessionId: session.workspaceSessionId, + status: session.status, + }); + + // Forward to workspace daemon + if (session.workspaceUrl && session.workspaceSessionId) { + try { + const targetUrl = `${session.workspaceUrl}/auth/cli/${provider}/code/${session.workspaceSessionId}`; + console.log('[onboarding] Forwarding auth code to workspace:', targetUrl); + + const codeResponse = await fetch(targetUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ code }), + }); + + console.log('[onboarding] Workspace response:', { status: codeResponse.status }); + + if (codeResponse.ok) { + return res.json({ success: true, message: 'Auth code submitted' }); + } + + const errorData = await codeResponse.json().catch(() => ({})) as { error?: string }; + console.log('[onboarding] Workspace error:', errorData); + + // Provide more helpful error message + const needsRestart = (errorData as { needsRestart?: boolean }).needsRestart; + if (codeResponse.status === 404 || codeResponse.status === 400) { + return res.status(400).json({ + error: errorData.error || 'Auth session expired in workspace. The CLI process may have timed out. Please try connecting again.', + needsRestart: needsRestart ?? true, + }); + } + + return res.status(codeResponse.status).json({ + error: errorData.error || 'Failed to submit auth code to workspace', + needsRestart, + }); + } catch (err) { + console.error('[onboarding] Failed to submit auth code to workspace:', err); + return res.status(500).json({ + error: 'Failed to reach workspace. Please ensure your workspace is running and try again.', + }); + } + } + + console.log('[onboarding] No workspace session info available'); + return res.status(400).json({ + error: 'No workspace session available. 
This can happen if the workspace was restarted. Please try connecting again.', + }); +}); + +// Note: POST /cli/:provider/complete/:sessionId handler is defined above (lines 269-368) +// It handles both direct token storage and workspace delegation with authCode forwarding + /** * POST /api/onboarding/cli/:provider/cancel/:sessionId * Cancel a CLI auth session */ -onboardingRouter.post('/cli/:provider/cancel/:sessionId', (req: Request, res: Response) => { - const { sessionId } = req.params; +onboardingRouter.post('/cli/:provider/cancel/:sessionId', async (req: Request, res: Response) => { + const { provider, sessionId } = req.params; const userId = req.session.userId!; const session = activeSessions.get(sessionId); if (session?.userId === userId) { - if (session.process) { - session.process.kill(); + // Cancel on workspace side if applicable + if (session.workspaceUrl && session.workspaceSessionId) { + try { + await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/cancel/${session.workspaceSessionId}`, + { method: 'POST' } + ); + } catch { + // Ignore cancel errors + } } activeSessions.delete(sessionId); } @@ -382,8 +585,9 @@ onboardingRouter.post('/complete', async (req: Request, res: Response) => { /** * Helper: Extract credentials from CLI credential file + * @deprecated Currently unused - kept for potential future use */ -async function extractCredentials( +async function _extractCredentials( session: CLIAuthSession, config: typeof CLI_AUTH_CONFIG[string] ): Promise { @@ -398,11 +602,28 @@ async function extractCredentials( // Extract token based on provider structure if (session.provider === 'anthropic') { - // Claude stores: { "oauth_token": "...", ... } or { "api_key": "..." } - session.token = creds.oauth_token || creds.access_token || creds.api_key; + // Claude stores OAuth in: { claudeAiOauth: { accessToken: "...", refreshToken: "...", expiresAt: ... 
} } + if (creds.claudeAiOauth?.accessToken) { + session.token = creds.claudeAiOauth.accessToken; + session.refreshToken = creds.claudeAiOauth.refreshToken; + if (creds.claudeAiOauth.expiresAt) { + session.tokenExpiresAt = new Date(creds.claudeAiOauth.expiresAt); + } + } else { + // Fallback to legacy formats + session.token = creds.oauth_token || creds.access_token || creds.api_key; + } } else if (session.provider === 'openai') { - // Codex might store: { "token": "..." } or { "api_key": "..." } - session.token = creds.token || creds.access_token || creds.api_key; + // Codex stores OAuth in: { tokens: { access_token: "...", refresh_token: "...", ... } } + if (creds.tokens?.access_token) { + session.token = creds.tokens.access_token; + session.refreshToken = creds.tokens.refresh_token; + // Codex doesn't store expiry in the file, but JWTs have exp claim + // We could decode it, but for now just skip + } else { + // Fallback: API key or legacy formats + session.token = creds.OPENAI_API_KEY || creds.token || creds.access_token || creds.api_key; + } } } catch (error) { // Credentials file doesn't exist or isn't readable yet @@ -425,9 +646,37 @@ function getProviderScopes(provider: string): string[] { /** * Helper: Validate a provider token by making a test API call + * + * Note: OAuth tokens from CLI flows (like `claude` CLI) are different from API keys. + * - API keys: sk-ant-api03-... (can be validated via API) + * - OAuth tokens: Session tokens from OAuth flow (can't be validated the same way) + * + * For OAuth tokens, we accept them if they look valid (non-empty, reasonable length). + * The CLI already validated the OAuth flow, so we trust those tokens. 
*/ async function validateProviderToken(provider: string, token: string): Promise { + // Basic sanity check + if (!token || token.length < 10) { + return false; + } + try { + // Check if this looks like an API key vs OAuth token + const isAnthropicApiKey = token.startsWith('sk-ant-'); + const isOpenAIApiKey = token.startsWith('sk-'); + + // For OAuth tokens (not API keys), accept them without API validation + // The OAuth flow already authenticated the user + if (provider === 'anthropic' && !isAnthropicApiKey) { + console.log('[onboarding] Accepting OAuth token for anthropic (not an API key)'); + return true; + } + if (provider === 'openai' && !isOpenAIApiKey) { + console.log('[onboarding] Accepting OAuth token for openai (not an API key)'); + return true; + } + + // For API keys, validate via API call const endpoints: Record }> = { anthropic: { url: 'https://api.anthropic.com/v1/messages', diff --git a/src/cloud/api/policy.ts b/src/cloud/api/policy.ts new file mode 100644 index 00000000..4b6796ae --- /dev/null +++ b/src/cloud/api/policy.ts @@ -0,0 +1,261 @@ +/** + * Agent Policy API Routes + * + * Provides endpoints for managing workspace-level agent policies. + * These policies serve as fallbacks when repos don't have .claude/policies/ files. 
+ */ + +import { Router, Request, Response } from 'express'; +import { db } from '../db/index.js'; +import type { WorkspaceAgentPolicy } from '../db/schema.js'; + +export const policyRouter = Router(); + +/** + * GET /api/policy/:workspaceId + * Get the agent policy for a workspace + */ +policyRouter.get('/:workspaceId', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + const userId = (req as any).userId; + + if (!userId) { + return res.status(401).json({ error: 'Unauthorized' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Check user has access to this workspace + if (workspace.userId !== userId) { + const members = await db.workspaceMembers.findByWorkspaceId(workspaceId); + const member = members.find(m => m.userId === userId); + if (!member) { + return res.status(403).json({ error: 'Access denied' }); + } + } + + // Return the policy (or default if not set) + const policy = workspace.config?.agentPolicy ?? getDefaultPolicy(); + + res.json({ + workspaceId, + policy, + source: workspace.config?.agentPolicy ? 
'workspace' : 'default', + }); + } catch (error) { + console.error('[policy] Error getting policy:', error); + res.status(500).json({ error: 'Failed to get policy' }); + } +}); + +/** + * PUT /api/policy/:workspaceId + * Update the agent policy for a workspace + */ +policyRouter.put('/:workspaceId', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + const userId = (req as any).userId; + const policy = req.body.policy as WorkspaceAgentPolicy; + + if (!userId) { + return res.status(401).json({ error: 'Unauthorized' }); + } + + if (!policy || typeof policy !== 'object') { + return res.status(400).json({ error: 'Policy object is required' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Only owner can update policy + if (workspace.userId !== userId) { + const members = await db.workspaceMembers.findByWorkspaceId(workspaceId); + const member = members.find(m => m.userId === userId); + if (!member || !['owner', 'admin'].includes(member.role)) { + return res.status(403).json({ error: 'Only owners and admins can update policy' }); + } + } + + // Validate policy structure + const validationError = validatePolicy(policy); + if (validationError) { + return res.status(400).json({ error: validationError }); + } + + // Update workspace config with new policy + const newConfig = { + ...workspace.config, + agentPolicy: policy, + }; + + await db.workspaces.updateConfig(workspaceId, newConfig); + + res.json({ + success: true, + workspaceId, + policy, + }); + } catch (error) { + console.error('[policy] Error updating policy:', error); + res.status(500).json({ error: 'Failed to update policy' }); + } +}); + +/** + * DELETE /api/policy/:workspaceId + * Reset workspace policy to defaults + */ +policyRouter.delete('/:workspaceId', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + const userId = (req as 
any).userId; + + if (!userId) { + return res.status(401).json({ error: 'Unauthorized' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Only owner can reset policy + if (workspace.userId !== userId) { + const members = await db.workspaceMembers.findByWorkspaceId(workspaceId); + const member = members.find(m => m.userId === userId); + if (!member || member.role !== 'owner') { + return res.status(403).json({ error: 'Only owners can reset policy' }); + } + } + + // Remove policy from config + const { agentPolicy: _agentPolicy, ...restConfig } = workspace.config ?? {}; + await db.workspaces.updateConfig(workspaceId, restConfig as any); + + res.json({ + success: true, + workspaceId, + policy: getDefaultPolicy(), + source: 'default', + }); + } catch (error) { + console.error('[policy] Error resetting policy:', error); + res.status(500).json({ error: 'Failed to reset policy' }); + } +}); + +/** + * GET /api/policy/:workspaceId/internal + * Internal endpoint for workspace containers to fetch policy + * Uses workspace token authentication (not user auth) + */ +policyRouter.get('/:workspaceId/internal', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + + // This endpoint should be called with the workspace token + // The git.ts file has the token verification logic we can reuse + // For now, we'll trust the workspace ID from container requests + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + const policy = workspace.config?.agentPolicy ?? getDefaultPolicy(); + + res.json({ + defaultPolicy: policy.defaultPolicy, + agents: policy.agents ?? [], + settings: policy.settings ?? 
{ + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }, + }); + } catch (error) { + console.error('[policy] Error getting internal policy:', error); + res.status(500).json({ error: 'Failed to get policy' }); + } +}); + +/** + * Get default policy + */ +function getDefaultPolicy(): WorkspaceAgentPolicy { + return { + defaultPolicy: { + name: '*', + allowedTools: undefined, // All tools allowed + canSpawn: undefined, // Can spawn any + canMessage: undefined, // Can message any + maxSpawns: 10, + rateLimit: 60, + canBeSpawned: true, + }, + agents: [], + settings: { + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }, + }; +} + +/** + * Validate policy structure + */ +function validatePolicy(policy: WorkspaceAgentPolicy): string | null { + // Validate defaultPolicy + if (policy.defaultPolicy && typeof policy.defaultPolicy !== 'object') { + return 'defaultPolicy must be an object'; + } + + // Validate agents array + if (policy.agents) { + if (!Array.isArray(policy.agents)) { + return 'agents must be an array'; + } + + for (let i = 0; i < policy.agents.length; i++) { + const agent = policy.agents[i]; + if (!agent.name || typeof agent.name !== 'string') { + return `agents[${i}].name is required and must be a string`; + } + + // Validate arrays + if (agent.allowedTools && !Array.isArray(agent.allowedTools)) { + return `agents[${i}].allowedTools must be an array`; + } + if (agent.canSpawn && !Array.isArray(agent.canSpawn)) { + return `agents[${i}].canSpawn must be an array`; + } + if (agent.canMessage && !Array.isArray(agent.canMessage)) { + return `agents[${i}].canMessage must be an array`; + } + + // Validate numbers + if (agent.maxSpawns !== undefined && typeof agent.maxSpawns !== 'number') { + return `agents[${i}].maxSpawns must be a number`; + } + if (agent.rateLimit !== undefined && typeof agent.rateLimit !== 'number') { + return `agents[${i}].rateLimit must be a number`; + } + } + } + + // Validate settings + if 
(policy.settings && typeof policy.settings !== 'object') { + return 'settings must be an object'; + } + + return null; +} diff --git a/src/cloud/api/providers.ts b/src/cloud/api/providers.ts index 4e695dfd..7ad7448e 100644 --- a/src/cloud/api/providers.ts +++ b/src/cloud/api/providers.ts @@ -27,43 +27,74 @@ providersRouter.use(requireAuth); * * When providers add OAuth support, we can switch to device flow. */ -const PROVIDERS = { +// Base provider properties +interface BaseProvider { + name: string; + displayName: string; + description: string; + color: string; +} + +// CLI-based auth provider (Claude, OpenCode, Droid) +interface CliProvider extends BaseProvider { + authStrategy: 'cli'; + cliCommand: string; + credentialPath: string; +} + +// Device flow OAuth provider (Google) +interface DeviceFlowProvider extends BaseProvider { + authStrategy: 'device_flow'; + deviceCodeUrl: string; + tokenUrl: string; + userInfoUrl: string; + scopes: string[]; +} + +type Provider = CliProvider | DeviceFlowProvider; + +const PROVIDERS: Record<string, Provider> = { anthropic: { name: 'Anthropic', displayName: 'Claude', description: 'Claude Code - recommended for code tasks', - // Auth strategy: CLI-based until Anthropic adds OAuth - authStrategy: 'cli' as const, - cliCommand: 'claude login', - credentialPath: '~/.claude/credentials.json', // Where Claude stores tokens - // Future OAuth endpoints (hypothetical - for when Anthropic implements) - deviceCodeUrl: 'https://api.anthropic.com/oauth/device/code', - tokenUrl: 'https://api.anthropic.com/oauth/token', - userInfoUrl: 'https://api.anthropic.com/v1/user', - scopes: ['claude-code:execute', 'user:read'], + authStrategy: 'cli', + cliCommand: 'claude', + credentialPath: '~/.claude/credentials.json', color: '#D97757', }, - openai: { + codex: { name: 'OpenAI', displayName: 'Codex', - description: 'Codex CLI for AI-assisted coding', - // Auth strategy: CLI-based until OpenAI adds OAuth - authStrategy: 'cli' as const, - cliCommand: 'codex auth', + 
description: 'Codex - OpenAI coding assistant', + authStrategy: 'cli', + cliCommand: 'codex login', credentialPath: '~/.codex/credentials.json', - // Future OAuth endpoints (hypothetical) - deviceCodeUrl: 'https://auth.openai.com/device/code', - tokenUrl: 'https://auth.openai.com/oauth/token', - userInfoUrl: 'https://api.openai.com/v1/user', - scopes: ['openid', 'profile', 'email', 'codex:execute'], color: '#10A37F', }, + opencode: { + name: 'OpenCode', + displayName: 'OpenCode', + description: 'OpenCode - AI coding assistant', + authStrategy: 'cli', + cliCommand: 'opencode', + credentialPath: '~/.opencode/credentials.json', + color: '#00D4AA', + }, + droid: { + name: 'Factory', + displayName: 'Droid', + description: 'Droid - Factory AI coding agent', + authStrategy: 'cli', + cliCommand: 'droid', + credentialPath: '~/.factory/credentials.json', + color: '#6366F1', + }, google: { name: 'Google', displayName: 'Gemini', description: 'Gemini - multi-modal capabilities', - // Auth strategy: Real OAuth device flow (works today!) 
- authStrategy: 'device_flow' as const, + authStrategy: 'device_flow', deviceCodeUrl: 'https://oauth2.googleapis.com/device/code', tokenUrl: 'https://oauth2.googleapis.com/token', userInfoUrl: 'https://www.googleapis.com/oauth2/v2/userinfo', @@ -72,6 +103,11 @@ const PROVIDERS = { }, }; +// Type guard for device flow providers +function isDeviceFlowProvider(provider: Provider): provider is DeviceFlowProvider { + return provider.authStrategy === 'device_flow'; +} + type ProviderType = keyof typeof PROVIDERS; // In-memory store for active device flows (use Redis in production) @@ -206,7 +242,13 @@ providersRouter.post('/:provider/connect', async (req: Request, res: Response) = } // Device flow auth (Google) - start OAuth device flow - const clientConfig = config.providers[provider]; + // At this point, we know it's a device flow provider (CLI was handled above) + if (!isDeviceFlowProvider(providerConfig)) { + return res.status(400).json({ error: 'Provider does not support device flow' }); + } + + // Only google is configured for device flow in config + const clientConfig = provider === 'google' ? 
config.providers.google : undefined; if (!clientConfig) { return res.status(400).json({ error: `Provider ${provider} not configured` }); } @@ -221,7 +263,7 @@ providersRouter.post('/:provider/connect', async (req: Request, res: Response) = body: new URLSearchParams({ client_id: clientConfig.clientId, scope: providerConfig.scopes.join(' '), - ...((provider === 'google') && { client_secret: (clientConfig as any).clientSecret }), + ...((provider === 'google') && { client_secret: clientConfig.clientSecret }), }), }); @@ -298,7 +340,7 @@ providersRouter.post('/:provider/verify', async (req: Request, res: Response) => userId, provider, accessToken: 'cli-authenticated', // Placeholder - real token from CLI - scopes: providerConfig.scopes, + scopes: [], // CLI auth doesn't use scopes providerAccountEmail: req.body.email, // User can optionally provide }); @@ -313,6 +355,73 @@ providersRouter.post('/:provider/verify', async (req: Request, res: Response) => } }); +/** + * POST /api/providers/:provider/api-key + * Connect a provider using an API key (for cloud-hosted workspaces) + */ +providersRouter.post('/:provider/api-key', async (req: Request, res: Response) => { + const { provider } = req.params as { provider: ProviderType }; + const userId = req.session.userId!; + const { apiKey } = req.body; + + if (!apiKey || typeof apiKey !== 'string') { + return res.status(400).json({ error: 'API key is required' }); + } + + const providerConfig = PROVIDERS[provider]; + if (!providerConfig) { + return res.status(404).json({ error: 'Unknown provider' }); + } + + // Validate the API key by making a test request + try { + let isValid = false; + + if (provider === 'anthropic') { + // Test Anthropic API key + const testRes = await fetch('https://api.anthropic.com/v1/messages', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': apiKey, + 'anthropic-version': '2023-06-01', + }, + body: JSON.stringify({ + model: 'claude-3-haiku-20240307', + max_tokens: 1, 
+ messages: [{ role: 'user', content: 'hi' }], + }), + }); + // 200 = valid, 401 = invalid key, 400/other = might still be valid key + isValid = testRes.status !== 401; + } else { + // For other providers, just accept the key + isValid = true; + } + + if (!isValid) { + return res.status(400).json({ error: 'Invalid API key' }); + } + + // Store the API key - use scopes from device flow providers, empty for CLI providers + const scopes = isDeviceFlowProvider(providerConfig) ? providerConfig.scopes : []; + await vault.storeCredential({ + userId, + provider, + accessToken: apiKey, + scopes, + }); + + res.json({ + success: true, + message: `${providerConfig.displayName} connected`, + }); + } catch (error) { + console.error(`Error connecting ${provider} with API key:`, error); + res.status(500).json({ error: 'Failed to connect provider' }); + } +}); + /** * GET /api/providers/:provider/status/:flowId * Check status of device flow @@ -393,6 +502,12 @@ providersRouter.delete('/:provider/flow/:flowId', (req: Request, res: Response) async function pollForToken(flowId: string, provider: ProviderType, clientId: string) { const providerConfig = PROVIDERS[provider]; + // Only device flow providers can poll for tokens + if (!isDeviceFlowProvider(providerConfig)) { + console.error(`Provider ${provider} does not support device flow polling`); + return; + } + const poll = async (intervalMs: number) => { const current = await loadFlow(flowId); if (!current || current.status !== 'pending') return; @@ -491,19 +606,21 @@ async function storeProviderTokens( ) { const providerConfig = PROVIDERS[provider]; - // Fetch user info from provider + // Fetch user info from provider (only device flow providers have userInfoUrl) let userInfo: { id?: string; email?: string } = {}; - try { - const response = await fetch(providerConfig.userInfoUrl, { - headers: { - Authorization: `Bearer ${tokens.accessToken}`, - }, - }); - if (response.ok) { - userInfo = await response.json() as { id?: string; 
email?: string }; + if (isDeviceFlowProvider(providerConfig)) { + try { + const response = await fetch(providerConfig.userInfoUrl, { + headers: { + Authorization: `Bearer ${tokens.accessToken}`, + }, + }); + if (response.ok) { + userInfo = await response.json() as { id?: string; email?: string }; + } + } catch (error) { + console.error('Error fetching user info:', error); } - } catch (error) { - console.error('Error fetching user info:', error); } // Encrypt and store diff --git a/src/cloud/api/test-helpers.ts b/src/cloud/api/test-helpers.ts index ffc3e4e7..47d10d05 100644 --- a/src/cloud/api/test-helpers.ts +++ b/src/cloud/api/test-helpers.ts @@ -10,7 +10,10 @@ import { Router, Request, Response } from 'express'; import { randomUUID, createHash, randomBytes } from 'crypto'; import { getDb } from '../db/drizzle.js'; -import { users, linkedDaemons } from '../db/schema.js'; +import { users, linkedDaemons, workspaces, repositories } from '../db/schema.js'; +import { getProvisioner } from '../provisioner/index.js'; +import { db } from '../db/index.js'; +import { nangoService } from '../services/nango.js'; export const testHelpersRouter = Router(); @@ -134,7 +137,7 @@ testHelpersRouter.delete('/cleanup', async (req: Request, res: Response) => { } try { - const db = getDb(); + const _db = getDb(); // Delete test data (users with test- prefix in githubId) // Note: This cascades to linked daemons due to FK constraints @@ -157,3 +160,485 @@ testHelpersRouter.get('/status', (req: Request, res: Response) => { timestamp: new Date().toISOString(), }); }); + +/** + * POST /api/test/create-mock-workspace + * Creates a mock workspace pointing to a local dashboard server + * + * Use this to test the cloud flow locally without real provisioning. + * The workspace will have publicUrl pointing to localhost:3889. 
+ */ +testHelpersRouter.post('/create-mock-workspace', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { name, publicUrl } = req.body; + const userId = req.session.userId; + + if (!userId) { + return res.status(401).json({ error: 'Must be logged in. Use /api/test/create-user first or log in via OAuth.' }); + } + + const db = getDb(); + + // Create a mock workspace with local publicUrl + const [workspace] = await db.insert(workspaces).values({ + userId, + name: name || 'Local Test Workspace', + status: 'running', + publicUrl: publicUrl || 'http://localhost:3889', + computeProvider: 'docker', + computeId: `mock-${randomUUID().slice(0, 8)}`, + config: { + providers: ['anthropic'], + repositories: [], + supervisorEnabled: true, + maxAgents: 10, + }, + }).returning(); + + res.json({ + workspaceId: workspace.id, + name: workspace.name, + status: workspace.status, + publicUrl: workspace.publicUrl, + message: 'Mock workspace created. Start agent-relay locally and navigate to /app.', + }); + } catch (error) { + console.error('Error creating mock workspace:', error); + res.status(500).json({ error: 'Failed to create mock workspace' }); + } +}); + +/** + * POST /api/test/create-mock-repo + * Creates a mock repository for the current user + * + * Use this to test the cloud flow without connecting real GitHub repos. + */ +testHelpersRouter.post('/create-mock-repo', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { fullName, isPrivate } = req.body; + const userId = req.session.userId; + + if (!userId) { + return res.status(401).json({ error: 'Must be logged in. Use /api/test/create-user first or log in via OAuth.' 
}); + } + + if (!fullName) { + return res.status(400).json({ error: 'fullName is required (e.g., "owner/repo")' }); + } + + const db = getDb(); + + // Create a mock repository + const [repo] = await db.insert(repositories).values({ + userId, + githubId: Math.floor(Math.random() * 1000000), + githubFullName: fullName, + isPrivate: isPrivate ?? false, + defaultBranch: 'main', + syncStatus: 'synced', + nangoConnectionId: `mock-connection-${randomUUID().slice(0, 8)}`, + lastSyncedAt: new Date(), + }).returning(); + + res.json({ + repoId: repo.id, + fullName: repo.githubFullName, + isPrivate: repo.isPrivate, + message: 'Mock repository created.', + }); + } catch (error) { + console.error('Error creating mock repo:', error); + res.status(500).json({ error: 'Failed to create mock repo' }); + } +}); + +/** + * POST /api/test/login-as + * Quick login for testing - creates session for existing or new test user + */ +testHelpersRouter.post('/login-as', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { username } = req.body; + const db = getDb(); + + // Find or create user + let user; + const existingUsers = await db.select().from(users).limit(1); + + if (existingUsers.length > 0 && !username) { + user = existingUsers[0]; + } else { + const testId = `test-${randomUUID()}`; + const [newUser] = await db.insert(users).values({ + email: `${username || testId}@test.local`, + githubId: testId, + githubUsername: username || 'test-user', + avatarUrl: null, + plan: 'free', + }).returning(); + user = newUser; + } + + // Set session + req.session.userId = user.id; + + res.json({ + success: true, + userId: user.id, + username: user.githubUsername, + message: 'Logged in. 
You can now access /app and other authenticated routes.', + }); + } catch (error) { + console.error('Error in login-as:', error); + res.status(500).json({ error: 'Failed to login' }); + } +}); + +/** + * POST /api/test/setup-local-cloud + * One-shot setup: creates user, mock repo, and mock workspace + * + * After calling this, start agent-relay locally and go to /app + */ +testHelpersRouter.post('/setup-local-cloud', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { repoName, workspaceName } = req.body; + const db = getDb(); + + // 1. Create or get test user + const testId = `test-${randomUUID().slice(0, 8)}`; + const [user] = await db.insert(users).values({ + email: `${testId}@test.local`, + githubId: testId, + githubUsername: 'local-tester', + avatarUrl: null, + plan: 'free', + }).returning(); + + // Set session + req.session.userId = user.id; + + // 2. Create mock repository + const [repo] = await db.insert(repositories).values({ + userId: user.id, + githubId: Math.floor(Math.random() * 1000000), + githubFullName: repoName || 'test-org/test-repo', + isPrivate: false, + defaultBranch: 'main', + syncStatus: 'synced', + nangoConnectionId: `mock-${randomUUID().slice(0, 8)}`, + lastSyncedAt: new Date(), + }).returning(); + + // 3. 
Create mock workspace pointing to local dashboard + const [workspace] = await db.insert(workspaces).values({ + userId: user.id, + name: workspaceName || 'Local Development', + status: 'running', + publicUrl: 'http://localhost:3889', + computeProvider: 'docker', + computeId: `mock-${randomUUID().slice(0, 8)}`, + config: { + providers: ['anthropic'], + repositories: [repo.githubFullName], + supervisorEnabled: true, + maxAgents: 10, + }, + }).returning(); + + res.json({ + success: true, + user: { + id: user.id, + username: user.githubUsername, + }, + repo: { + id: repo.id, + fullName: repo.githubFullName, + }, + workspace: { + id: workspace.id, + name: workspace.name, + publicUrl: workspace.publicUrl, + }, + instructions: [ + '1. Start agent-relay daemon: npm run dev (or agent-relay daemon)', + '2. Go to http://localhost:4567/app', + '3. The app should auto-connect to the local workspace', + '4. The WebSocket will connect to ws://localhost:3889/ws', + ], + }); + } catch (error) { + console.error('Error in setup-local-cloud:', error); + res.status(500).json({ error: 'Failed to setup local cloud' }); + } +}); + +/** + * POST /api/test/provision-real-workspace + * Provision a REAL Docker container using your Nango GitHub App connection. + * + * This tests the full flow including: + * - Fetching GitHub App token from Nango + * - Spinning up a Docker container + * - Cloning your actual repositories + * + * Prerequisites: + * - Must be logged in (via real OAuth or /api/test/login-as) + * - Must have connected repos via /connect-repos (real Nango GitHub App OAuth) + * - Docker must be running locally + * - COMPUTE_PROVIDER must be 'docker' (default for dev) + */ +testHelpersRouter.post('/provision-real-workspace', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ + error: 'Must be logged in. 
Use real OAuth or /api/test/login-as first.', + }); + } + + try { + const { name, repositoryFullName, providers, githubToken } = req.body; + + // Get user's connected repositories + const userRepos = await db.repositories.findByUserId(userId); + const reposWithNango = userRepos.filter(r => r.nangoConnectionId); + + if (reposWithNango.length === 0) { + return res.status(400).json({ + error: 'No repositories with Nango connection found. Complete /connect-repos first with real GitHub OAuth.', + hint: 'Go to http://localhost:4567/connect-repos and connect your GitHub App, or pass githubToken directly', + }); + } + + // Determine which repo to use + let targetRepo = reposWithNango[0]; + if (repositoryFullName) { + const found = reposWithNango.find(r => r.githubFullName === repositoryFullName); + if (!found) { + return res.status(400).json({ + error: `Repository ${repositoryFullName} not found or not connected via Nango`, + availableRepos: reposWithNango.map(r => r.githubFullName), + }); + } + targetRepo = found; + } + + // Use the real provisioner (Docker in dev mode) + const provisioner = getProvisioner(); + + const result = await provisioner.provision({ + userId, + name: name || `Test Workspace - ${targetRepo.githubFullName}`, + providers: providers || ['anthropic'], // Default to anthropic if not specified + repositories: [targetRepo.githubFullName], + supervisorEnabled: true, + maxAgents: 10, + // Allow passing GitHub token directly for local testing + githubToken: githubToken || undefined, + }); + + if (result.status === 'error') { + return res.status(500).json({ + error: 'Provisioning failed', + details: result.error, + }); + } + + res.json({ + success: true, + workspace: { + id: result.workspaceId, + status: result.status, + publicUrl: result.publicUrl, + }, + repository: targetRepo.githubFullName, + instructions: [ + `1. Workspace is running at ${result.publicUrl}`, + `2. Repository ${targetRepo.githubFullName} should be cloned`, + `3. 
Go to http://localhost:4567/app to connect`, + `4. Check container: docker logs ar-${result.workspaceId.substring(0, 8)}`, + `5. Verify clone: docker exec ar-${result.workspaceId.substring(0, 8)} ls /workspace/repos`, + ], + }); + } catch (error) { + console.error('Error provisioning real workspace:', error); + res.status(500).json({ + error: 'Failed to provision workspace', + details: error instanceof Error ? error.message : 'Unknown error', + }); + } +}); + +/** + * GET /api/test/my-repos + * List current user's connected repositories (for debugging) + */ +testHelpersRouter.get('/my-repos', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const repos = await db.repositories.findByUserId(userId); + + res.json({ + userId, + repositories: repos.map(r => ({ + id: r.id, + fullName: r.githubFullName, + isPrivate: r.isPrivate, + hasNangoConnection: !!r.nangoConnectionId, + nangoConnectionId: r.nangoConnectionId, // For debugging + syncStatus: r.syncStatus, + })), + }); + } catch (error) { + console.error('Error fetching repos:', error); + res.status(500).json({ error: 'Failed to fetch repositories' }); + } +}); + +/** + * GET /api/test/my-workspaces + * List current user's workspaces (for debugging) + */ +testHelpersRouter.get('/my-workspaces', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const userWorkspaces = await db.workspaces.findByUserId(userId); + + res.json({ + userId, + workspaces: userWorkspaces.map(w => ({ + id: w.id, + name: w.name, + status: w.status, + publicUrl: w.publicUrl, + computeProvider: 
w.computeProvider, + computeId: w.computeId, + config: w.config, + })), + }); + } catch (error) { + console.error('Error fetching workspaces:', error); + res.status(500).json({ error: 'Failed to fetch workspaces' }); + } +}); + +/** + * GET /api/test/nango-token + * Test fetching GitHub App token from Nango (for debugging) + */ +testHelpersRouter.get('/nango-token', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const repos = await db.repositories.findByUserId(userId); + const repoWithConnection = repos.find(r => r.nangoConnectionId); + + if (!repoWithConnection?.nangoConnectionId) { + return res.status(400).json({ + error: 'No Nango connection found', + repos: repos.map(r => ({ fullName: r.githubFullName, nangoConnectionId: r.nangoConnectionId })), + }); + } + + console.log('[test] Fetching token for connection:', repoWithConnection.nangoConnectionId); + + const token = await nangoService.getGithubAppToken(repoWithConnection.nangoConnectionId); + + res.json({ + success: true, + connectionId: repoWithConnection.nangoConnectionId, + tokenLength: token.length, + tokenPrefix: token.substring(0, 10) + '...', + }); + } catch (error) { + console.error('[test] Nango token fetch error:', error); + res.status(500).json({ + error: 'Failed to fetch token', + details: error instanceof Error ? 
error.message : 'Unknown error', + }); + } +}); + +/** + * DELETE /api/test/workspace/:id + * Delete/deprovision a workspace (for cleanup) + */ +testHelpersRouter.delete('/workspace/:id', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const { id } = req.params; + const workspace = await db.workspaces.findById(id); + + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + if (workspace.userId !== userId) { + return res.status(403).json({ error: 'Not your workspace' }); + } + + const provisioner = getProvisioner(); + await provisioner.deprovision(id); + + res.json({ + success: true, + message: `Workspace ${id} deleted`, + }); + } catch (error) { + console.error('Error deleting workspace:', error); + res.status(500).json({ error: 'Failed to delete workspace' }); + } +}); diff --git a/src/cloud/api/webhooks.ts b/src/cloud/api/webhooks.ts index 6892b11c..3ffe2271 100644 --- a/src/cloud/api/webhooks.ts +++ b/src/cloud/api/webhooks.ts @@ -71,8 +71,23 @@ webhooksRouter.post('/github', async (req: Request, res: Response) => { break; case 'issues': - // Future: handle issue events - console.log(`[webhook] Issue ${req.body.action} on ${req.body.repository?.full_name}`); + await handleIssueEvent(req.body); + break; + + case 'issue_comment': + await handleIssueCommentEvent(req.body); + break; + + case 'pull_request_review_comment': + await handlePRReviewCommentEvent(req.body); + break; + + case 'check_run': + await handleCheckRunEvent(req.body); + break; + + case 'workflow_run': + await handleWorkflowRunEvent(req.body); break; default: @@ -270,3 +285,490 @@ async function handleInstallationRepositoriesEvent(payload: { console.log(`[webhook] Removed access to ${repositories_removed.length} repositories`); } } + 
+// ============================================================================ +// CI Failure Webhook Handlers +// ============================================================================ + +/** + * Check run payload from GitHub webhook + */ +interface CheckRunPayload { + action: string; + check_run: { + id: number; + name: string; + status: string; + conclusion: string | null; + output: { + title: string | null; + summary: string | null; + text?: string | null; + annotations?: Array<{ + path: string; + start_line: number; + end_line: number; + annotation_level: string; + message: string; + }>; + }; + pull_requests: Array<{ + number: number; + head: { ref: string; sha: string }; + }>; + }; + repository: { + full_name: string; + clone_url: string; + }; +} + +/** + * Workflow run payload from GitHub webhook + */ +interface WorkflowRunPayload { + action: string; + workflow_run: { + id: number; + name: string; + status: string; + conclusion: string | null; + head_branch: string; + head_sha: string; + pull_requests: Array<{ + number: number; + }>; + }; + repository: { + full_name: string; + }; +} + +/** + * Handle check_run webhook events + * + * When a CI check fails on a PR, we: + * 1. Record the failure in our database + * 2. Check if an agent is already working on the PR + * 3. 
Either message the existing agent or spawn a new one + */ +async function handleCheckRunEvent(payload: CheckRunPayload): Promise<void> { + const { action, check_run, repository } = payload; + + // Only handle completed checks + if (action !== 'completed') { + console.log(`[webhook] Ignoring check_run action: ${action}`); + return; + } + + // Only handle failures + if (check_run.conclusion !== 'failure') { + console.log(`[webhook] Check ${check_run.name} conclusion: ${check_run.conclusion} (not a failure)`); + return; + } + + // Only handle checks on PRs + if (check_run.pull_requests.length === 0) { + console.log(`[webhook] Check ${check_run.name} failed but not on a PR, skipping`); + return; + } + + const pr = check_run.pull_requests[0]; + + console.log( + `[webhook] CI failure: ${check_run.name} on ${repository.full_name}#${pr.number}` + ); + + // Build failure context + const failureContext = { + repository: repository.full_name, + prNumber: pr.number, + branch: pr.head.ref, + commitSha: pr.head.sha, + checkName: check_run.name, + checkId: check_run.id, + conclusion: check_run.conclusion, + failureTitle: check_run.output.title, + failureSummary: check_run.output.summary, + failureDetails: check_run.output.text, + annotations: (check_run.output.annotations || []).map(a => ({ + path: a.path, + startLine: a.start_line, + endLine: a.end_line, + annotationLevel: a.annotation_level, + message: a.message, + })), + }; + + // Record the failure in the database + try { + const failureEvent = await db.ciFailureEvents.create({ + repository: failureContext.repository, + prNumber: failureContext.prNumber, + branch: failureContext.branch, + commitSha: failureContext.commitSha, + checkName: failureContext.checkName, + checkId: failureContext.checkId, + conclusion: failureContext.conclusion, + failureTitle: failureContext.failureTitle, + failureSummary: failureContext.failureSummary, + failureDetails: failureContext.failureDetails, + annotations: failureContext.annotations, + }); + + 
console.log(`[webhook] Recorded CI failure event: ${failureEvent.id}`); + + // Check for existing active fix attempts on this repo + const activeAttempts = await db.ciFixAttempts.findActiveByRepository(repository.full_name); + + if (activeAttempts.length > 0) { + console.log(`[webhook] ${activeAttempts.length} active fix attempt(s) already exist, skipping spawn`); + await db.ciFailureEvents.markProcessed(failureEvent.id, false); + return; + } + + // Import and call the CI agent spawner (lazy import to avoid circular deps) + const { spawnCIFixAgent } = await import('../services/ci-agent-spawner.js'); + await spawnCIFixAgent(failureEvent); + + // Mark as processed with agent spawned + await db.ciFailureEvents.markProcessed(failureEvent.id, true); + console.log(`[webhook] Agent spawned for CI failure: ${failureEvent.id}`); + } catch (error) { + console.error(`[webhook] Failed to handle CI failure:`, error); + // Don't re-throw - we still want to return 200 to GitHub + } +} + +/** + * Handle workflow_run webhook events + * + * This handles the entire workflow completion. 
Useful for: + * - Waiting for all checks to complete before acting + * - Getting workflow-level context + */ +async function handleWorkflowRunEvent(payload: WorkflowRunPayload): Promise<void> { + const { action, workflow_run, repository } = payload; + + // Only handle completed workflows + if (action !== 'completed') { + console.log(`[webhook] Ignoring workflow_run action: ${action}`); + return; + } + + // Only handle failures + if (workflow_run.conclusion !== 'failure') { + console.log(`[webhook] Workflow ${workflow_run.name} conclusion: ${workflow_run.conclusion}`); + return; + } + + // Log for now - we primarily handle individual check_runs + // but workflow_run events can be used for aggregate failure handling + console.log( + `[webhook] Workflow failed: ${workflow_run.name} on ${repository.full_name} ` + + `(branch: ${workflow_run.head_branch}, PRs: ${workflow_run.pull_requests.map(p => p.number).join(', ')})` + ); + + // Future: Could use this to trigger workflow-level actions + // For now, individual check_run events handle the actual failure processing +} + +// ============================================================================ +// Issue and Comment Webhook Handlers +// ============================================================================ + +/** + * Issue payload from GitHub webhook + */ +interface IssuePayload { + action: string; // opened, edited, closed, reopened, assigned, unassigned, labeled, unlabeled + issue: { + id: number; + number: number; + title: string; + body: string | null; + html_url: string; + state: string; + labels: Array<{ name: string }>; + user: { login: string; id: number }; + assignees: Array<{ login: string; id: number }>; + }; + repository: { + full_name: string; + }; + sender: { + login: string; + id: number; + }; +} + +/** + * Issue comment payload from GitHub webhook + */ +interface IssueCommentPayload { + action: string; // created, edited, deleted + issue: { + number: number; + title: string; + pull_request?: { url: 
string }; // Present if this is a PR comment + }; + comment: { + id: number; + body: string; + html_url: string; + user: { login: string; id: number }; + }; + repository: { + full_name: string; + }; + sender: { + login: string; + id: number; + }; +} + +/** + * PR review comment payload from GitHub webhook + */ +interface PRReviewCommentPayload { + action: string; // created, edited, deleted + pull_request: { + number: number; + title: string; + }; + comment: { + id: number; + body: string; + html_url: string; + path: string; + line: number | null; + user: { login: string; id: number }; + }; + repository: { + full_name: string; + }; + sender: { + login: string; + id: number; + }; +} + +/** + * Extract @mentions from comment text + * Returns list of mentioned agent names (without @ prefix) + */ +function extractMentions(text: string): string[] { + // Match @agent-name patterns (alphanumeric, hyphens, underscores) + const mentionPattern = /@([a-zA-Z][a-zA-Z0-9_-]*)/g; + const mentions: string[] = []; + let match; + + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + + return [...new Set(mentions)]; // Remove duplicates +} + +/** + * Get context around a mention (for prompt building) + */ +function getMentionContext(text: string, mention: string, contextLength = 200): string { + const mentionIndex = text.toLowerCase().indexOf(`@${mention.toLowerCase()}`); + if (mentionIndex === -1) return text.slice(0, contextLength); + + const start = Math.max(0, mentionIndex - contextLength / 2); + const end = Math.min(text.length, mentionIndex + mention.length + 1 + contextLength / 2); + + let context = text.slice(start, end); + if (start > 0) context = '...' + context; + if (end < text.length) context = context + '...'; + + return context; +} + +/** + * Handle issues webhook events + * + * When a new issue is opened or labeled, we can: + * 1. Auto-assign an agent based on labels + * 2. 
Record the issue for later assignment + */ +async function handleIssueEvent(payload: IssuePayload): Promise { + const { action, issue, repository } = payload; + + console.log(`[webhook] Issue ${action}: #${issue.number} on ${repository.full_name}`); + + // Only handle opened issues for now + if (action !== 'opened' && action !== 'labeled') { + return; + } + + try { + // Check if we already have an assignment for this issue + const existing = await db.issueAssignments.findByIssue(repository.full_name, issue.number); + if (existing) { + console.log(`[webhook] Issue #${issue.number} already has an assignment`); + return; + } + + // Determine priority based on labels + const labels = issue.labels.map(l => l.name.toLowerCase()); + let priority: string | undefined; + if (labels.includes('critical') || labels.includes('p0')) priority = 'critical'; + else if (labels.includes('high') || labels.includes('p1')) priority = 'high'; + else if (labels.includes('medium') || labels.includes('p2')) priority = 'medium'; + else if (labels.includes('low') || labels.includes('p3')) priority = 'low'; + + // Create issue assignment record + const assignment = await db.issueAssignments.create({ + repository: repository.full_name, + issueNumber: issue.number, + issueTitle: issue.title, + issueBody: issue.body, + issueUrl: issue.html_url, + status: 'pending', + labels: issue.labels.map(l => l.name), + priority, + }); + + console.log(`[webhook] Created issue assignment: ${assignment.id}`); + + // Check if we should auto-assign an agent + // TODO: Load repo configuration for auto-assign settings + // For now, issues remain in 'pending' status for manual assignment + + } catch (error) { + console.error(`[webhook] Failed to handle issue event:`, error); + } +} + +/** + * Handle issue_comment webhook events + * + * When someone @mentions an agent in a comment: + * 1. Detect the mention + * 2. Record it for agent processing + * 3. 
Route to appropriate agent + */ +async function handleIssueCommentEvent(payload: IssueCommentPayload): Promise { + const { action, issue, comment, repository, sender } = payload; + + // Only handle new comments + if (action !== 'created') { + return; + } + + const isPR = !!issue.pull_request; + const sourceType = isPR ? 'pr_comment' : 'issue_comment'; + + console.log( + `[webhook] ${sourceType} on ${repository.full_name}#${issue.number} by @${sender.login}` + ); + + // Extract @mentions from comment + const mentions = extractMentions(comment.body); + if (mentions.length === 0) { + return; // No mentions to process + } + + console.log(`[webhook] Found mentions: ${mentions.join(', ')}`); + + try { + for (const mention of mentions) { + // Check if this is a known agent mention + // TODO: Load configured agents from repo/workspace settings + // For now, we accept any mention that looks like an agent name + + const context = getMentionContext(comment.body, mention); + + // Create mention record + const mentionRecord = await db.commentMentions.create({ + repository: repository.full_name, + sourceType, + sourceId: comment.id, + issueOrPrNumber: issue.number, + commentBody: comment.body, + commentUrl: comment.html_url, + authorLogin: sender.login, + authorId: sender.id, + mentionedAgent: mention, + mentionContext: context, + status: 'pending', + }); + + console.log(`[webhook] Created mention record for @${mention}: ${mentionRecord.id}`); + + // Import and call the mention handler (lazy import) + try { + const { handleMention } = await import('../services/mention-handler.js'); + await handleMention(mentionRecord); + } catch (_importError) { + // Handler not implemented yet - mentions will be processed later + console.log(`[webhook] Mention handler not available, mention queued for later processing`); + } + } + } catch (error) { + console.error(`[webhook] Failed to handle comment mentions:`, error); + } +} + +/** + * Handle pull_request_review_comment webhook events + * + * 
Similar to issue_comment, but for PR review comments (inline code comments) + */ +async function handlePRReviewCommentEvent(payload: PRReviewCommentPayload): Promise { + const { action, pull_request, comment, repository, sender } = payload; + + // Only handle new comments + if (action !== 'created') { + return; + } + + console.log( + `[webhook] PR review comment on ${repository.full_name}#${pull_request.number} ` + + `(${comment.path}:${comment.line}) by @${sender.login}` + ); + + // Extract @mentions from comment + const mentions = extractMentions(comment.body); + if (mentions.length === 0) { + return; // No mentions to process + } + + console.log(`[webhook] Found mentions in review comment: ${mentions.join(', ')}`); + + try { + for (const mention of mentions) { + const context = getMentionContext(comment.body, mention); + + // Create mention record + const mentionRecord = await db.commentMentions.create({ + repository: repository.full_name, + sourceType: 'pr_review', + sourceId: comment.id, + issueOrPrNumber: pull_request.number, + commentBody: comment.body, + commentUrl: comment.html_url, + authorLogin: sender.login, + authorId: sender.id, + mentionedAgent: mention, + mentionContext: `${comment.path}:${comment.line || '?'}\n\n${context}`, + status: 'pending', + }); + + console.log(`[webhook] Created review mention for @${mention}: ${mentionRecord.id}`); + + // Try to handle mention immediately + try { + const { handleMention } = await import('../services/mention-handler.js'); + await handleMention(mentionRecord); + } catch { + console.log(`[webhook] Mention handler not available, mention queued for later processing`); + } + } + } catch (error) { + console.error(`[webhook] Failed to handle PR review comment mentions:`, error); + } +} diff --git a/src/cloud/api/workspaces.ts b/src/cloud/api/workspaces.ts index 65e3adcb..dd1b5b86 100644 --- a/src/cloud/api/workspaces.ts +++ b/src/cloud/api/workspaces.ts @@ -104,6 +104,136 @@ workspacesRouter.post('/', 
checkWorkspaceLimit, async (req: Request, res: Respon } }); +/** + * GET /api/workspaces/summary + * Get summary of all user workspaces for dashboard status indicator + * NOTE: This route MUST be before /:id to avoid being caught by parameterized route + */ +workspacesRouter.get('/summary', async (req: Request, res: Response) => { + const userId = req.session.userId!; + + try { + const workspaces = await db.workspaces.findByUserId(userId); + const provisioner = getProvisioner(); + + // Get live status for each workspace + const workspaceSummaries = await Promise.all( + workspaces.map(async (w) => { + let liveStatus = w.status; + try { + liveStatus = await provisioner.getStatus(w.id); + } catch { + // Fall back to DB status + } + + return { + id: w.id, + name: w.name, + status: liveStatus, + publicUrl: w.publicUrl, + isStopped: liveStatus === 'stopped', + isRunning: liveStatus === 'running', + isProvisioning: liveStatus === 'provisioning', + hasError: liveStatus === 'error', + }; + }) + ); + + // Overall status for quick dashboard indicator + const hasRunningWorkspace = workspaceSummaries.some(w => w.isRunning); + const hasStoppedWorkspace = workspaceSummaries.some(w => w.isStopped); + const hasProvisioningWorkspace = workspaceSummaries.some(w => w.isProvisioning); + + res.json({ + workspaces: workspaceSummaries, + summary: { + total: workspaceSummaries.length, + running: workspaceSummaries.filter(w => w.isRunning).length, + stopped: workspaceSummaries.filter(w => w.isStopped).length, + provisioning: workspaceSummaries.filter(w => w.isProvisioning).length, + error: workspaceSummaries.filter(w => w.hasError).length, + }, + overallStatus: hasRunningWorkspace + ? 'ready' + : hasProvisioningWorkspace + ? 'provisioning' + : hasStoppedWorkspace + ? 'stopped' + : workspaceSummaries.length === 0 + ? 
'none' + : 'error', + }); + } catch (error) { + console.error('Error getting workspace summary:', error); + res.status(500).json({ error: 'Failed to get workspace summary' }); + } +}); + +/** + * GET /api/workspaces/primary + * Get the user's primary workspace (first/default) with live status + * Used by dashboard to show quick status indicator + * NOTE: This route MUST be before /:id to avoid being caught by parameterized route + */ +workspacesRouter.get('/primary', async (req: Request, res: Response) => { + const userId = req.session.userId!; + + try { + const workspaces = await db.workspaces.findByUserId(userId); + + if (workspaces.length === 0) { + return res.json({ + exists: false, + message: 'No workspace found. Connect a repository to auto-provision one.', + }); + } + + const primary = workspaces[0]; + const provisioner = getProvisioner(); + + let liveStatus = primary.status; + try { + liveStatus = await provisioner.getStatus(primary.id); + } catch { + // Fall back to DB status + } + + res.json({ + exists: true, + workspace: { + id: primary.id, + name: primary.name, + status: liveStatus, + publicUrl: primary.publicUrl, + isStopped: liveStatus === 'stopped', + isRunning: liveStatus === 'running', + isProvisioning: liveStatus === 'provisioning', + hasError: liveStatus === 'error', + config: { + providers: primary.config.providers || [], + repositories: primary.config.repositories || [], + }, + }, + // Quick messages for UI + statusMessage: liveStatus === 'running' + ? 'Workspace is running' + : liveStatus === 'stopped' + ? 'Workspace is idle (will start automatically when needed)' + : liveStatus === 'provisioning' + ? 'Workspace is being provisioned...' + : 'Workspace has an error', + actionNeeded: liveStatus === 'stopped' + ? 'wakeup' + : liveStatus === 'error' + ? 
'check_error' + : null, + }); + } catch (error) { + console.error('Error getting primary workspace:', error); + res.status(500).json({ error: 'Failed to get primary workspace' }); + } +}); + /** * GET /api/workspaces/:id * Get workspace details @@ -530,9 +660,122 @@ async function removeDomainFromCompute(workspace: Workspace): Promise { // Railway and Docker: similar cleanup } +/** + * POST /api/workspaces/:id/proxy/* + * Proxy API requests to the workspace container + * This allows the dashboard to make REST calls through the cloud server + */ +workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Response) => { + const userId = req.session.userId!; + const { id } = req.params; + // Express 5 wildcard params return an array of path segments, not a slash-separated string + const proxyPathParam = req.params.proxyPath; + const proxyPath = Array.isArray(proxyPathParam) ? proxyPathParam.join('/') : proxyPathParam; + + try { + const workspace = await db.workspaces.findById(id); + + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + if (workspace.userId !== userId) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + if (workspace.status !== 'running' || !workspace.publicUrl) { + return res.status(400).json({ error: 'Workspace is not running' }); + } + + // Determine the internal URL for proxying + // When running inside Docker, localhost URLs won't work - use the container name instead + let targetBaseUrl = workspace.publicUrl; + const runningInDocker = process.env.RUNNING_IN_DOCKER === 'true'; + + if (runningInDocker && workspace.computeId && targetBaseUrl.includes('localhost')) { + // Replace localhost URL with container name for Docker networking + // workspace.computeId is the container name (e.g., "ar-abc12345") + // The workspace port is 3888 inside the container + targetBaseUrl = `http://${workspace.computeId}:3888`; + } + + const targetUrl = `${targetBaseUrl}/api/${proxyPath}`; + 
console.log(`[workspace-proxy] ${req.method} ${targetUrl}`); + + // Store targetUrl for error handling + (req as any)._proxyTargetUrl = targetUrl; + + // Add timeout to prevent hanging requests + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 15000); // 15s timeout + + const fetchOptions: RequestInit = { + method: req.method, + headers: { + 'Content-Type': 'application/json', + }, + signal: controller.signal, + }; + + if (req.method !== 'GET' && req.method !== 'HEAD') { + fetchOptions.body = JSON.stringify(req.body); + } + + let proxyRes: globalThis.Response; + try { + proxyRes = await fetch(targetUrl, fetchOptions); + } finally { + clearTimeout(timeout); + } + console.log(`[workspace-proxy] Response: ${proxyRes.status} ${proxyRes.statusText}`); + + // Handle non-JSON responses gracefully + const contentType = proxyRes.headers.get('content-type'); + if (contentType?.includes('application/json')) { + const data = await proxyRes.json(); + res.status(proxyRes.status).json(data); + } else { + const text = await proxyRes.text(); + res.status(proxyRes.status).send(text); + } + } catch (error) { + const targetUrl = (req as any)._proxyTargetUrl || 'unknown'; + console.error('[workspace-proxy] Error proxying to:', targetUrl); + console.error('[workspace-proxy] Error details:', error); + + // Check for timeout/abort errors + if (error instanceof Error && error.name === 'AbortError') { + res.status(504).json({ + error: 'Workspace request timed out', + details: 'The workspace did not respond within 15 seconds', + targetUrl: targetUrl, + }); + return; + } + + // Check for connection refused (workspace not running) + const errorMessage = error instanceof Error ? 
error.message : 'Unknown error'; + if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('fetch failed')) { + res.status(503).json({ + error: 'Workspace is not reachable', + details: 'The workspace container may not be running or accepting connections', + targetUrl: targetUrl, + }); + return; + } + + res.status(500).json({ + error: 'Failed to proxy request to workspace', + details: errorMessage, + targetUrl: targetUrl, // Include target URL for debugging + }); + } +}); + /** * POST /api/workspaces/quick * Quick provision: one-click with defaults + * Providers are optional - can be connected after workspace creation via CLI login */ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: Response) => { const userId = req.session.userId!; @@ -543,18 +786,12 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R } try { - // Get user's connected providers + // Get user's connected providers (optional now) const credentials = await db.credentials.findByUserId(userId); const providers = credentials .filter((c) => c.provider !== 'github') .map((c) => c.provider); - if (providers.length === 0) { - return res.status(400).json({ - error: 'No AI providers connected. Please connect at least one provider.', - }); - } - // Create workspace with defaults const provisioner = getProvisioner(); const workspaceName = name || `Workspace for ${repositoryFullName}`; @@ -562,7 +799,7 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R const result = await provisioner.provision({ userId, name: workspaceName, - providers, + providers: providers.length > 0 ? 
providers : [], // Empty is OK now repositories: [repositoryFullName], supervisorEnabled: true, maxAgents: 10, @@ -579,7 +816,10 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R workspaceId: result.workspaceId, status: result.status, publicUrl: result.publicUrl, - message: 'Workspace provisioned successfully!', + providersConnected: providers.length > 0, + message: providers.length > 0 + ? 'Workspace provisioned successfully!' + : 'Workspace provisioned! Connect an AI provider to start using agents.', }); } catch (error) { console.error('Error quick provisioning:', error); diff --git a/src/cloud/config.ts b/src/cloud/config.ts index b2dcadcc..9173b2d7 100644 --- a/src/cloud/config.ts +++ b/src/cloud/config.ts @@ -85,8 +85,8 @@ function optionalEnv(name: string): string | undefined { export function loadConfig(): CloudConfig { return { - port: parseInt(process.env.PORT || '3000', 10), - publicUrl: process.env.PUBLIC_URL || 'http://localhost:3000', + port: parseInt(process.env.PORT || '4567', 10), + publicUrl: process.env.PUBLIC_URL || 'http://localhost:4567', sessionSecret: requireEnv('SESSION_SECRET'), databaseUrl: requireEnv('DATABASE_URL'), diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index cb9c6736..93255155 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -380,7 +380,11 @@ export interface WorkspaceQueries { updateStatus( id: string, status: string, - options?: { computeId?: string; publicUrl?: string; errorMessage?: string } + options?: { + computeId?: string; + publicUrl?: string; + errorMessage?: string; + } ): Promise; updateConfig(id: string, config: schema.WorkspaceConfig): Promise; setCustomDomain(id: string, customDomain: string, status?: string): Promise; @@ -423,7 +427,11 @@ export const workspaceQueries: WorkspaceQueries = { async updateStatus( id: string, status: string, - options?: { computeId?: string; publicUrl?: string; errorMessage?: string } + options?: { + computeId?: 
string; + publicUrl?: string; + errorMessage?: string; + } ): Promise { const db = getDb(); await db @@ -944,6 +952,7 @@ export const projectGroupQueries: ProjectGroupQueries = { export interface RepositoryQueries { findById(id: string): Promise; + findByFullName(fullName: string): Promise; findByUserId(userId: string): Promise; findByWorkspaceId(workspaceId: string): Promise; findByProjectGroupId(projectGroupId: string): Promise; @@ -962,6 +971,15 @@ export const repositoryQueries: RepositoryQueries = { return result[0] ?? null; }, + async findByFullName(fullName: string): Promise { + const db = getDb(); + const result = await db + .select() + .from(schema.repositories) + .where(eq(schema.repositories.githubFullName, fullName)); + return result[0] ?? null; + }, + async findByUserId(userId: string): Promise { const db = getDb(); return db @@ -1156,6 +1174,382 @@ export const agentSummaryQueries: AgentSummaryQueries = { }, }; +// ============================================================================ +// CI Failure Event Queries +// ============================================================================ + +export interface CIFailureEventQueries { + findById(id: string): Promise; + findByRepository(repository: string, limit?: number): Promise; + findByPR(repository: string, prNumber: number): Promise; + findRecentUnprocessed(limit?: number): Promise; + create(data: schema.NewCIFailureEvent): Promise; + markProcessed(id: string, agentSpawned: boolean): Promise; + delete(id: string): Promise; +} + +export const ciFailureEventQueries: CIFailureEventQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.ciFailureEvents).where(eq(schema.ciFailureEvents.id, id)); + return result[0] ?? 
null; + }, + + async findByRepository(repository: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFailureEvents) + .where(eq(schema.ciFailureEvents.repository, repository)) + .orderBy(desc(schema.ciFailureEvents.createdAt)) + .limit(limit); + }, + + async findByPR(repository: string, prNumber: number): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFailureEvents) + .where( + and( + eq(schema.ciFailureEvents.repository, repository), + eq(schema.ciFailureEvents.prNumber, prNumber) + ) + ) + .orderBy(desc(schema.ciFailureEvents.createdAt)); + }, + + async findRecentUnprocessed(limit = 100): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFailureEvents) + .where(isNull(schema.ciFailureEvents.processedAt)) + .orderBy(schema.ciFailureEvents.createdAt) + .limit(limit); + }, + + async create(data: schema.NewCIFailureEvent): Promise { + const db = getDb(); + const result = await db.insert(schema.ciFailureEvents).values(data).returning(); + return result[0]; + }, + + async markProcessed(id: string, agentSpawned: boolean): Promise { + const db = getDb(); + await db + .update(schema.ciFailureEvents) + .set({ processedAt: new Date(), agentSpawned }) + .where(eq(schema.ciFailureEvents.id, id)); + }, + + async delete(id: string): Promise { + const db = getDb(); + await db.delete(schema.ciFailureEvents).where(eq(schema.ciFailureEvents.id, id)); + }, +}; + +// ============================================================================ +// CI Fix Attempt Queries +// ============================================================================ + +export interface CIFixAttemptQueries { + findById(id: string): Promise; + findByFailureEvent(failureEventId: string): Promise; + findActiveByRepository(repository: string): Promise; + create(data: schema.NewCIFixAttempt): Promise; + updateStatus(id: string, status: string, errorMessage?: string): Promise; + complete(id: string, status: 'success' 
| 'failed', commitSha?: string, errorMessage?: string): Promise; +} + +export const ciFixAttemptQueries: CIFixAttemptQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.ciFixAttempts).where(eq(schema.ciFixAttempts.id, id)); + return result[0] ?? null; + }, + + async findByFailureEvent(failureEventId: string): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFixAttempts) + .where(eq(schema.ciFixAttempts.failureEventId, failureEventId)) + .orderBy(desc(schema.ciFixAttempts.startedAt)); + }, + + async findActiveByRepository(repository: string): Promise { + const db = getDb(); + // Find active fix attempts by joining with failure events + return db + .select({ + id: schema.ciFixAttempts.id, + failureEventId: schema.ciFixAttempts.failureEventId, + agentId: schema.ciFixAttempts.agentId, + agentName: schema.ciFixAttempts.agentName, + status: schema.ciFixAttempts.status, + commitSha: schema.ciFixAttempts.commitSha, + errorMessage: schema.ciFixAttempts.errorMessage, + startedAt: schema.ciFixAttempts.startedAt, + completedAt: schema.ciFixAttempts.completedAt, + }) + .from(schema.ciFixAttempts) + .innerJoin(schema.ciFailureEvents, eq(schema.ciFixAttempts.failureEventId, schema.ciFailureEvents.id)) + .where( + and( + eq(schema.ciFailureEvents.repository, repository), + sql`${schema.ciFixAttempts.status} IN ('pending', 'in_progress')` + ) + ); + }, + + async create(data: schema.NewCIFixAttempt): Promise { + const db = getDb(); + const result = await db.insert(schema.ciFixAttempts).values(data).returning(); + return result[0]; + }, + + async updateStatus(id: string, status: string, errorMessage?: string): Promise { + const db = getDb(); + const updates: Record = { status }; + if (errorMessage) { + updates.errorMessage = errorMessage; + } + await db + .update(schema.ciFixAttempts) + .set(updates) + .where(eq(schema.ciFixAttempts.id, id)); + }, + + async complete( + id: string, + status: 
'success' | 'failed', + commitSha?: string, + errorMessage?: string + ): Promise { + const db = getDb(); + await db + .update(schema.ciFixAttempts) + .set({ + status, + completedAt: new Date(), + commitSha: commitSha ?? null, + errorMessage: errorMessage ?? null, + }) + .where(eq(schema.ciFixAttempts.id, id)); + }, +}; + +// ============================================================================ +// Issue Assignment Queries +// ============================================================================ + +export interface IssueAssignmentQueries { + findById(id: string): Promise; + findByRepository(repository: string, limit?: number): Promise; + findByIssue(repository: string, issueNumber: number): Promise; + findByAgent(agentId: string): Promise; + findPending(limit?: number): Promise; + create(data: schema.NewIssueAssignment): Promise; + assignAgent(id: string, agentId: string, agentName: string): Promise; + updateStatus(id: string, status: string, resolution?: string): Promise; + linkPR(id: string, prNumber: number): Promise; +} + +export const issueAssignmentQueries: IssueAssignmentQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.issueAssignments).where(eq(schema.issueAssignments.id, id)); + return result[0] ?? null; + }, + + async findByRepository(repository: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.issueAssignments) + .where(eq(schema.issueAssignments.repository, repository)) + .orderBy(desc(schema.issueAssignments.createdAt)) + .limit(limit); + }, + + async findByIssue(repository: string, issueNumber: number): Promise { + const db = getDb(); + const result = await db + .select() + .from(schema.issueAssignments) + .where( + and( + eq(schema.issueAssignments.repository, repository), + eq(schema.issueAssignments.issueNumber, issueNumber) + ) + ); + return result[0] ?? 
null; + }, + + async findByAgent(agentId: string): Promise { + const db = getDb(); + return db + .select() + .from(schema.issueAssignments) + .where(eq(schema.issueAssignments.agentId, agentId)) + .orderBy(desc(schema.issueAssignments.createdAt)); + }, + + async findPending(limit = 100): Promise { + const db = getDb(); + return db + .select() + .from(schema.issueAssignments) + .where(eq(schema.issueAssignments.status, 'pending')) + .orderBy(schema.issueAssignments.createdAt) + .limit(limit); + }, + + async create(data: schema.NewIssueAssignment): Promise { + const db = getDb(); + const result = await db.insert(schema.issueAssignments).values(data).returning(); + return result[0]; + }, + + async assignAgent(id: string, agentId: string, agentName: string): Promise { + const db = getDb(); + await db + .update(schema.issueAssignments) + .set({ + agentId, + agentName, + assignedAt: new Date(), + status: 'assigned', + updatedAt: new Date(), + }) + .where(eq(schema.issueAssignments.id, id)); + }, + + async updateStatus(id: string, status: string, resolution?: string): Promise { + const db = getDb(); + const updates: Record = { status, updatedAt: new Date() }; + if (resolution) { + updates.resolution = resolution; + } + await db + .update(schema.issueAssignments) + .set(updates) + .where(eq(schema.issueAssignments.id, id)); + }, + + async linkPR(id: string, prNumber: number): Promise { + const db = getDb(); + await db + .update(schema.issueAssignments) + .set({ linkedPrNumber: prNumber, updatedAt: new Date() }) + .where(eq(schema.issueAssignments.id, id)); + }, +}; + +// ============================================================================ +// Comment Mention Queries +// ============================================================================ + +export interface CommentMentionQueries { + findById(id: string): Promise; + findByRepository(repository: string, limit?: number): Promise; + findBySource(sourceType: string, sourceId: number): Promise; + 
findPending(limit?: number): Promise; + findByMentionedAgent(mentionedAgent: string, limit?: number): Promise; + create(data: schema.NewCommentMention): Promise; + markProcessing(id: string, agentId: string, agentName: string): Promise; + markResponded(id: string, responseCommentId: number, responseBody: string): Promise; + markIgnored(id: string): Promise; +} + +export const commentMentionQueries: CommentMentionQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.commentMentions).where(eq(schema.commentMentions.id, id)); + return result[0] ?? null; + }, + + async findByRepository(repository: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.commentMentions) + .where(eq(schema.commentMentions.repository, repository)) + .orderBy(desc(schema.commentMentions.createdAt)) + .limit(limit); + }, + + async findBySource(sourceType: string, sourceId: number): Promise { + const db = getDb(); + const result = await db + .select() + .from(schema.commentMentions) + .where( + and( + eq(schema.commentMentions.sourceType, sourceType), + eq(schema.commentMentions.sourceId, sourceId) + ) + ); + return result[0] ?? 
null; + }, + + async findPending(limit = 100): Promise { + const db = getDb(); + return db + .select() + .from(schema.commentMentions) + .where(eq(schema.commentMentions.status, 'pending')) + .orderBy(schema.commentMentions.createdAt) + .limit(limit); + }, + + async findByMentionedAgent(mentionedAgent: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.commentMentions) + .where(eq(schema.commentMentions.mentionedAgent, mentionedAgent)) + .orderBy(desc(schema.commentMentions.createdAt)) + .limit(limit); + }, + + async create(data: schema.NewCommentMention): Promise { + const db = getDb(); + const result = await db.insert(schema.commentMentions).values(data).returning(); + return result[0]; + }, + + async markProcessing(id: string, agentId: string, agentName: string): Promise { + const db = getDb(); + await db + .update(schema.commentMentions) + .set({ status: 'processing', agentId, agentName }) + .where(eq(schema.commentMentions.id, id)); + }, + + async markResponded(id: string, responseCommentId: number, responseBody: string): Promise { + const db = getDb(); + await db + .update(schema.commentMentions) + .set({ + status: 'responded', + responseCommentId, + responseBody, + respondedAt: new Date(), + }) + .where(eq(schema.commentMentions.id, id)); + }, + + async markIgnored(id: string): Promise { + const db = getDb(); + await db + .update(schema.commentMentions) + .set({ status: 'ignored' }) + .where(eq(schema.commentMentions.id, id)); + }, +}; + // ============================================================================ // Migration helper // ============================================================================ diff --git a/src/cloud/db/index.ts b/src/cloud/db/index.ts index ed846149..f83e8951 100644 --- a/src/cloud/db/index.ts +++ b/src/cloud/db/index.ts @@ -19,6 +19,8 @@ export type { Workspace, NewWorkspace, WorkspaceConfig, + WorkspaceAgentPolicy, + AgentPolicyRule, WorkspaceMember, NewWorkspaceMember, 
ProjectGroup, @@ -33,6 +35,20 @@ export type { NewSubscription, UsageRecord, NewUsageRecord, + // CI failure types + CIAnnotation, + CIFailureEvent, + NewCIFailureEvent, + CIFixAttempt, + NewCIFixAttempt, + CICheckStrategy, + CIWebhookConfig, + // Issue and comment types + IssueAssignment, + NewIssueAssignment, + CommentMention, + NewCommentMention, + AgentTriggerConfig, } from './schema.js'; // Re-export schema tables for direct access if needed @@ -47,6 +63,10 @@ export { linkedDaemons as linkedDaemonsTable, subscriptions as subscriptionsTable, usageRecords as usageRecordsTable, + ciFailureEvents as ciFailureEventsTable, + ciFixAttempts as ciFixAttemptsTable, + issueAssignments as issueAssignmentsTable, + commentMentions as commentMentionsTable, } from './schema.js'; // Import query modules @@ -62,6 +82,10 @@ import { linkedDaemonQueries, projectGroupQueries, repositoryQueries, + ciFailureEventQueries, + ciFixAttemptQueries, + issueAssignmentQueries, + commentMentionQueries, } from './drizzle.js'; // Legacy type aliases for backwards compatibility @@ -86,6 +110,12 @@ export const db = { repositories: repositoryQueries, // Linked daemon operations (for local agent-relay instances) linkedDaemons: linkedDaemonQueries, + // CI failure tracking + ciFailureEvents: ciFailureEventQueries, + ciFixAttempts: ciFixAttemptQueries, + // Issue and comment tracking + issueAssignments: issueAssignmentQueries, + commentMentions: commentMentionQueries, // Database utilities getDb, close: closeDb, @@ -102,6 +132,10 @@ export { projectGroupQueries, repositoryQueries, linkedDaemonQueries, + ciFailureEventQueries, + ciFixAttemptQueries, + issueAssignmentQueries, + commentMentionQueries, }; // Export database utilities diff --git a/src/cloud/db/migrations/0003_nango_user_columns.sql b/src/cloud/db/migrations/0003_nango_user_columns.sql new file mode 100644 index 00000000..1f0313ea --- /dev/null +++ b/src/cloud/db/migrations/0003_nango_user_columns.sql @@ -0,0 +1,15 @@ +-- Add Nango 
OAuth connection columns to users table +-- These columns support the two-connection pattern: +-- - nango_connection_id: Permanent login connection +-- - incoming_connection_id: Temp connection for polling during login +-- - pending_installation_request: Tracks org approval wait state + +ALTER TABLE users ADD COLUMN IF NOT EXISTS nango_connection_id VARCHAR(255); +--> statement-breakpoint +ALTER TABLE users ADD COLUMN IF NOT EXISTS incoming_connection_id VARCHAR(255); +--> statement-breakpoint +ALTER TABLE users ADD COLUMN IF NOT EXISTS pending_installation_request TIMESTAMP; +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_users_nango_connection ON users(nango_connection_id); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_users_incoming_connection ON users(incoming_connection_id); diff --git a/src/cloud/db/migrations/0004_repositories_nango_columns.sql b/src/cloud/db/migrations/0004_repositories_nango_columns.sql new file mode 100644 index 00000000..e4baafc2 --- /dev/null +++ b/src/cloud/db/migrations/0004_repositories_nango_columns.sql @@ -0,0 +1,10 @@ +-- Add Nango connection columns to repositories table +-- These columns support GitHub App OAuth via Nango + +ALTER TABLE repositories ADD COLUMN IF NOT EXISTS installation_id UUID; +--> statement-breakpoint +ALTER TABLE repositories ADD COLUMN IF NOT EXISTS nango_connection_id VARCHAR(255); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_repositories_installation_id ON repositories(installation_id); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_repositories_nango_connection ON repositories(nango_connection_id); diff --git a/src/cloud/db/migrations/0005_github_installations.sql b/src/cloud/db/migrations/0005_github_installations.sql new file mode 100644 index 00000000..7137d068 --- /dev/null +++ b/src/cloud/db/migrations/0005_github_installations.sql @@ -0,0 +1,35 @@ +-- Create github_installations table and add foreign key to repositories +-- This table tracks GitHub App 
installations for accessing repos + +CREATE TABLE IF NOT EXISTS github_installations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + installation_id VARCHAR(255) UNIQUE NOT NULL, + account_type VARCHAR(50) NOT NULL, + account_login VARCHAR(255) NOT NULL, + account_id VARCHAR(255) NOT NULL, + installed_by_id UUID REFERENCES users(id) ON DELETE SET NULL, + permissions JSONB DEFAULT '{}', + events TEXT[], + suspended BOOLEAN NOT NULL DEFAULT false, + suspended_at TIMESTAMP, + suspended_by VARCHAR(255), + created_at TIMESTAMP DEFAULT NOW() NOT NULL, + updated_at TIMESTAMP DEFAULT NOW() NOT NULL +); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_github_installations_account_login ON github_installations(account_login); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_github_installations_installed_by ON github_installations(installed_by_id); +--> statement-breakpoint +-- Add foreign key constraint to repositories.installation_id +ALTER TABLE repositories + ADD CONSTRAINT fk_repositories_installation + FOREIGN KEY (installation_id) + REFERENCES github_installations(id) + ON DELETE SET NULL; +--> statement-breakpoint +-- Add updated_at trigger for github_installations +DROP TRIGGER IF EXISTS trg_github_installations_updated_at ON github_installations; +CREATE TRIGGER trg_github_installations_updated_at + BEFORE UPDATE ON github_installations + FOR EACH ROW EXECUTE FUNCTION touch_updated_at(); diff --git a/src/cloud/db/migrations/0006_workspace_ssh.sql b/src/cloud/db/migrations/0006_workspace_ssh.sql new file mode 100644 index 00000000..347269ca --- /dev/null +++ b/src/cloud/db/migrations/0006_workspace_ssh.sql @@ -0,0 +1,6 @@ +-- Add SSH access columns to workspaces for port forwarding (e.g., Codex OAuth callback tunneling) +ALTER TABLE "workspaces" ADD COLUMN IF NOT EXISTS "ssh_host" varchar(255); +--> statement-breakpoint +ALTER TABLE "workspaces" ADD COLUMN IF NOT EXISTS "ssh_port" integer; +--> statement-breakpoint +ALTER TABLE "workspaces" 
ADD COLUMN IF NOT EXISTS "ssh_password" varchar(255); diff --git a/src/cloud/db/migrations/0007_drop_workspace_ssh.sql b/src/cloud/db/migrations/0007_drop_workspace_ssh.sql new file mode 100644 index 00000000..36259c35 --- /dev/null +++ b/src/cloud/db/migrations/0007_drop_workspace_ssh.sql @@ -0,0 +1,6 @@ +-- Drop SSH columns from workspaces table (no longer needed - CLI auth uses device flow) +ALTER TABLE workspaces DROP COLUMN IF EXISTS ssh_host; +--> statement-breakpoint +ALTER TABLE workspaces DROP COLUMN IF EXISTS ssh_port; +--> statement-breakpoint +ALTER TABLE workspaces DROP COLUMN IF EXISTS ssh_password; diff --git a/src/cloud/db/migrations/meta/0005_snapshot.json b/src/cloud/db/migrations/meta/0005_snapshot.json new file mode 100644 index 00000000..41e109f0 --- /dev/null +++ b/src/cloud/db/migrations/meta/0005_snapshot.json @@ -0,0 +1,2965 @@ +{ + "id": "3c5a4f23-1159-482b-8920-9147e12ced8f", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.agent_crashes": { + "name": "agent_crashes", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "daemon_id": { + "name": "daemon_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "pid": { + "name": "pid", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "exit_code": { + "name": "exit_code", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "signal": { + "name": "signal", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "reason": { + "name": "reason", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "likely_cause": { + "name": "likely_cause", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "confidence": { + 
"name": "confidence", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "summary": { + "name": "summary", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "peak_memory": { + "name": "peak_memory", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "last_known_memory": { + "name": "last_known_memory", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "memory_trend": { + "name": "memory_trend", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "insight_data": { + "name": "insight_data", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "last_output": { + "name": "last_output", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "crashed_at": { + "name": "crashed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_agent_crashes_daemon_id": { + "name": "idx_agent_crashes_daemon_id", + "columns": [ + { + "expression": "daemon_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_crashes_agent_name": { + "name": "idx_agent_crashes_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_crashes_crashed_at": { + "name": "idx_agent_crashes_crashed_at", + "columns": [ + { + "expression": "crashed_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_crashes_likely_cause": { + "name": "idx_agent_crashes_likely_cause", + "columns": [ + { + "expression": "likely_cause", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, 
+ "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_crashes_daemon_id_linked_daemons_id_fk": { + "name": "agent_crashes_daemon_id_linked_daemons_id_fk", + "tableFrom": "agent_crashes", + "tableTo": "linked_daemons", + "columnsFrom": [ + "daemon_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.agent_metrics": { + "name": "agent_metrics", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "daemon_id": { + "name": "daemon_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "pid": { + "name": "pid", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'unknown'" + }, + "rss_bytes": { + "name": "rss_bytes", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "heap_used_bytes": { + "name": "heap_used_bytes", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "cpu_percent": { + "name": "cpu_percent", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "trend": { + "name": "trend", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "trend_rate_per_minute": { + "name": "trend_rate_per_minute", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "alert_level": { + "name": "alert_level", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false, + "default": "'normal'" + }, + "high_watermark": { + "name": "high_watermark", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "average_rss": { + 
"name": "average_rss", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "metrics_data": { + "name": "metrics_data", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "uptime_ms": { + "name": "uptime_ms", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "started_at": { + "name": "started_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "recorded_at": { + "name": "recorded_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_agent_metrics_daemon_id": { + "name": "idx_agent_metrics_daemon_id", + "columns": [ + { + "expression": "daemon_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_metrics_agent_name": { + "name": "idx_agent_metrics_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_metrics_recorded_at": { + "name": "idx_agent_metrics_recorded_at", + "columns": [ + { + "expression": "recorded_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_metrics_alert_level": { + "name": "idx_agent_metrics_alert_level", + "columns": [ + { + "expression": "alert_level", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_metrics_daemon_id_linked_daemons_id_fk": { + "name": "agent_metrics_daemon_id_linked_daemons_id_fk", + "tableFrom": "agent_metrics", + "tableTo": "linked_daemons", + "columnsFrom": [ + "daemon_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no 
action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.agent_sessions": { + "name": "agent_sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "started_at": { + "name": "started_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "ended_at": { + "name": "ended_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "end_marker": { + "name": "end_marker", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + } + }, + "indexes": { + "idx_agent_sessions_workspace_id": { + "name": "idx_agent_sessions_workspace_id", + "columns": [ + { + "expression": "workspace_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_sessions_agent_name": { + "name": "idx_agent_sessions_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_sessions_status": { + "name": "idx_agent_sessions_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + 
"concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_sessions_workspace_id_workspaces_id_fk": { + "name": "agent_sessions_workspace_id_workspaces_id_fk", + "tableFrom": "agent_sessions", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.agent_summaries": { + "name": "agent_summaries", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "session_id": { + "name": "session_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "summary": { + "name": "summary", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_agent_summaries_session_id": { + "name": "idx_agent_summaries_session_id", + "columns": [ + { + "expression": "session_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_summaries_agent_name": { + "name": "idx_agent_summaries_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_summaries_session_id_agent_sessions_id_fk": { + "name": "agent_summaries_session_id_agent_sessions_id_fk", + "tableFrom": "agent_summaries", + "tableTo": "agent_sessions", + "columnsFrom": [ + 
"session_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.ci_failure_events": { + "name": "ci_failure_events", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "repository_id": { + "name": "repository_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "repository": { + "name": "repository", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "pr_number": { + "name": "pr_number", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "branch": { + "name": "branch", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "commit_sha": { + "name": "commit_sha", + "type": "varchar(40)", + "primaryKey": false, + "notNull": false + }, + "check_name": { + "name": "check_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "check_id": { + "name": "check_id", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "conclusion": { + "name": "conclusion", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "failure_title": { + "name": "failure_title", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "failure_summary": { + "name": "failure_summary", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "failure_details": { + "name": "failure_details", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "annotations": { + "name": "annotations", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'[]'::jsonb" + }, + "workflow_name": { + "name": "workflow_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "workflow_run_id": { + "name": "workflow_run_id", + 
"type": "bigint", + "primaryKey": false, + "notNull": false + }, + "processed_at": { + "name": "processed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "agent_spawned": { + "name": "agent_spawned", + "type": "boolean", + "primaryKey": false, + "notNull": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_ci_failure_events_repository": { + "name": "idx_ci_failure_events_repository", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_pr_number": { + "name": "idx_ci_failure_events_pr_number", + "columns": [ + { + "expression": "pr_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_check_name": { + "name": "idx_ci_failure_events_check_name", + "columns": [ + { + "expression": "check_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_created_at": { + "name": "idx_ci_failure_events_created_at", + "columns": [ + { + "expression": "created_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_repo_pr": { + "name": "idx_ci_failure_events_repo_pr", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "pr_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + 
"foreignKeys": { + "ci_failure_events_repository_id_repositories_id_fk": { + "name": "ci_failure_events_repository_id_repositories_id_fk", + "tableFrom": "ci_failure_events", + "tableTo": "repositories", + "columnsFrom": [ + "repository_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.ci_fix_attempts": { + "name": "ci_fix_attempts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "failure_event_id": { + "name": "failure_event_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_id": { + "name": "agent_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "commit_sha": { + "name": "commit_sha", + "type": "varchar(40)", + "primaryKey": false, + "notNull": false + }, + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "started_at": { + "name": "started_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "completed_at": { + "name": "completed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "idx_ci_fix_attempts_failure_event": { + "name": "idx_ci_fix_attempts_failure_event", + "columns": [ + { + "expression": "failure_event_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_fix_attempts_status": { + "name": 
"idx_ci_fix_attempts_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_fix_attempts_agent_id": { + "name": "idx_ci_fix_attempts_agent_id", + "columns": [ + { + "expression": "agent_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "ci_fix_attempts_failure_event_id_ci_failure_events_id_fk": { + "name": "ci_fix_attempts_failure_event_id_ci_failure_events_id_fk", + "tableFrom": "ci_fix_attempts", + "tableTo": "ci_failure_events", + "columnsFrom": [ + "failure_event_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.comment_mentions": { + "name": "comment_mentions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "repository_id": { + "name": "repository_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "repository": { + "name": "repository", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "source_type": { + "name": "source_type", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "source_id": { + "name": "source_id", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "issue_or_pr_number": { + "name": "issue_or_pr_number", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "comment_body": { + "name": "comment_body", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "comment_url": { + "name": "comment_url", + "type": "varchar(512)", + "primaryKey": false, + "notNull": 
false + }, + "author_login": { + "name": "author_login", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "author_id": { + "name": "author_id", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "mentioned_agent": { + "name": "mentioned_agent", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "mention_context": { + "name": "mention_context", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "agent_id": { + "name": "agent_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "response_comment_id": { + "name": "response_comment_id", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "response_body": { + "name": "response_body", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "responded_at": { + "name": "responded_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_comment_mentions_repository": { + "name": "idx_comment_mentions_repository", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_comment_mentions_source": { + "name": "idx_comment_mentions_source", + "columns": [ + { + "expression": "source_type", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "source_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + 
}, + "idx_comment_mentions_status": { + "name": "idx_comment_mentions_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_comment_mentions_mentioned_agent": { + "name": "idx_comment_mentions_mentioned_agent", + "columns": [ + { + "expression": "mentioned_agent", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "comment_mentions_repository_id_repositories_id_fk": { + "name": "comment_mentions_repository_id_repositories_id_fk", + "tableFrom": "comment_mentions", + "tableTo": "repositories", + "columnsFrom": [ + "repository_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.credentials": { + "name": "credentials", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "provider": { + "name": "provider", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "access_token": { + "name": "access_token", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "refresh_token": { + "name": "refresh_token", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "token_expires_at": { + "name": "token_expires_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "scopes": { + "name": "scopes", + "type": "text[]", + "primaryKey": false, + "notNull": false + }, + "provider_account_id": { + "name": "provider_account_id", + "type": "varchar(255)", + 
"primaryKey": false, + "notNull": false + }, + "provider_account_email": { + "name": "provider_account_email", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_credentials_user_id": { + "name": "idx_credentials_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "credentials_user_id_users_id_fk": { + "name": "credentials_user_id_users_id_fk", + "tableFrom": "credentials", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "credentials_user_provider_unique": { + "name": "credentials_user_provider_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "provider" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.github_installations": { + "name": "github_installations", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "installation_id": { + "name": "installation_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "account_type": { + "name": "account_type", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "account_login": { + "name": "account_login", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "account_id": { + "name": "account_id", + "type": "varchar(255)", + "primaryKey": false, + 
"notNull": true + }, + "installed_by_id": { + "name": "installed_by_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'{}'::jsonb" + }, + "events": { + "name": "events", + "type": "text[]", + "primaryKey": false, + "notNull": false + }, + "suspended": { + "name": "suspended", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "suspended_at": { + "name": "suspended_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "suspended_by": { + "name": "suspended_by", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_github_installations_account_login": { + "name": "idx_github_installations_account_login", + "columns": [ + { + "expression": "account_login", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_github_installations_installed_by": { + "name": "idx_github_installations_installed_by", + "columns": [ + { + "expression": "installed_by_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "github_installations_installed_by_id_users_id_fk": { + "name": "github_installations_installed_by_id_users_id_fk", + "tableFrom": "github_installations", + "tableTo": "users", + "columnsFrom": [ + "installed_by_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + 
"uniqueConstraints": { + "github_installations_installation_id_unique": { + "name": "github_installations_installation_id_unique", + "nullsNotDistinct": false, + "columns": [ + "installation_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.issue_assignments": { + "name": "issue_assignments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "repository_id": { + "name": "repository_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "repository": { + "name": "repository", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "issue_number": { + "name": "issue_number", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "issue_title": { + "name": "issue_title", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "issue_body": { + "name": "issue_body", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "issue_url": { + "name": "issue_url", + "type": "varchar(512)", + "primaryKey": false, + "notNull": false + }, + "agent_id": { + "name": "agent_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "assigned_at": { + "name": "assigned_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "resolution": { + "name": "resolution", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "linked_pr_number": { + "name": "linked_pr_number", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "labels": { + "name": "labels", + "type": "text[]", + "primaryKey": false, + "notNull": false + }, + "priority": { + "name": 
"priority", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_issue_assignments_repository": { + "name": "idx_issue_assignments_repository", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_issue_assignments_issue_number": { + "name": "idx_issue_assignments_issue_number", + "columns": [ + { + "expression": "issue_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_issue_assignments_status": { + "name": "idx_issue_assignments_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_issue_assignments_agent_id": { + "name": "idx_issue_assignments_agent_id", + "columns": [ + { + "expression": "agent_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "issue_assignments_repository_id_repositories_id_fk": { + "name": "issue_assignments_repository_id_repositories_id_fk", + "tableFrom": "issue_assignments", + "tableTo": "repositories", + "columnsFrom": [ + "repository_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "issue_assignments_repo_issue_unique": { + "name": "issue_assignments_repo_issue_unique", + 
"nullsNotDistinct": false, + "columns": [ + "repository", + "issue_number" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.linked_daemons": { + "name": "linked_daemons", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "machine_id": { + "name": "machine_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "api_key_hash": { + "name": "api_key_hash", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'offline'" + }, + "last_seen_at": { + "name": "last_seen_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "pending_updates": { + "name": "pending_updates", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + }, + "message_queue": { + "name": "message_queue", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_linked_daemons_user_id": { + "name": "idx_linked_daemons_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": 
"btree", + "with": {} + }, + "idx_linked_daemons_api_key_hash": { + "name": "idx_linked_daemons_api_key_hash", + "columns": [ + { + "expression": "api_key_hash", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_linked_daemons_status": { + "name": "idx_linked_daemons_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "linked_daemons_user_id_users_id_fk": { + "name": "linked_daemons_user_id_users_id_fk", + "tableFrom": "linked_daemons", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "linked_daemons_user_machine_unique": { + "name": "linked_daemons_user_machine_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "machine_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.memory_alerts": { + "name": "memory_alerts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "daemon_id": { + "name": "daemon_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alert_type": { + "name": "alert_type", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "current_rss": { + "name": "current_rss", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "threshold": { + "name": "threshold", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "message": { + "name": "message", + "type": "text", + 
"primaryKey": false, + "notNull": false + }, + "recommendation": { + "name": "recommendation", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "acknowledged": { + "name": "acknowledged", + "type": "boolean", + "primaryKey": false, + "notNull": false, + "default": false + }, + "acknowledged_at": { + "name": "acknowledged_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_memory_alerts_daemon_id": { + "name": "idx_memory_alerts_daemon_id", + "columns": [ + { + "expression": "daemon_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_memory_alerts_agent_name": { + "name": "idx_memory_alerts_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_memory_alerts_alert_type": { + "name": "idx_memory_alerts_alert_type", + "columns": [ + { + "expression": "alert_type", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_memory_alerts_created_at": { + "name": "idx_memory_alerts_created_at", + "columns": [ + { + "expression": "created_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "memory_alerts_daemon_id_linked_daemons_id_fk": { + "name": "memory_alerts_daemon_id_linked_daemons_id_fk", + "tableFrom": "memory_alerts", + "tableTo": "linked_daemons", + "columnsFrom": [ + "daemon_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } 
+ }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.project_groups": { + "name": "project_groups", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "color": { + "name": "color", + "type": "varchar(7)", + "primaryKey": false, + "notNull": false + }, + "icon": { + "name": "icon", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "coordinator_agent": { + "name": "coordinator_agent", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'{\"enabled\":false}'::jsonb" + }, + "sort_order": { + "name": "sort_order", + "type": "bigint", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_project_groups_user_id": { + "name": "idx_project_groups_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "project_groups_user_id_users_id_fk": { + "name": "project_groups_user_id_users_id_fk", + "tableFrom": "project_groups", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + 
}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "project_groups_user_name_unique": { + "name": "project_groups_user_name_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "name" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.repositories": { + "name": "repositories", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "project_group_id": { + "name": "project_group_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "installation_id": { + "name": "installation_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "nango_connection_id": { + "name": "nango_connection_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "github_full_name": { + "name": "github_full_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "github_id": { + "name": "github_id", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "default_branch": { + "name": "default_branch", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true, + "default": "'main'" + }, + "is_private": { + "name": "is_private", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "sync_status": { + "name": "sync_status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "last_synced_at": { + "name": "last_synced_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "project_agent": { + "name": "project_agent", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": 
"'{\"enabled\":false}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_repositories_user_id": { + "name": "idx_repositories_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_workspace_id": { + "name": "idx_repositories_workspace_id", + "columns": [ + { + "expression": "workspace_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_project_group_id": { + "name": "idx_repositories_project_group_id", + "columns": [ + { + "expression": "project_group_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_installation_id": { + "name": "idx_repositories_installation_id", + "columns": [ + { + "expression": "installation_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_nango_connection": { + "name": "idx_repositories_nango_connection", + "columns": [ + { + "expression": "nango_connection_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "repositories_user_id_users_id_fk": { + "name": "repositories_user_id_users_id_fk", + "tableFrom": "repositories", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": 
"cascade", + "onUpdate": "no action" + }, + "repositories_workspace_id_workspaces_id_fk": { + "name": "repositories_workspace_id_workspaces_id_fk", + "tableFrom": "repositories", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "repositories_project_group_id_project_groups_id_fk": { + "name": "repositories_project_group_id_project_groups_id_fk", + "tableFrom": "repositories", + "tableTo": "project_groups", + "columnsFrom": [ + "project_group_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "repositories_installation_id_github_installations_id_fk": { + "name": "repositories_installation_id_github_installations_id_fk", + "tableFrom": "repositories", + "tableTo": "github_installations", + "columnsFrom": [ + "installation_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "repositories_user_github_unique": { + "name": "repositories_user_github_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "github_full_name" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.subscriptions": { + "name": "subscriptions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "stripe_subscription_id": { + "name": "stripe_subscription_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "stripe_customer_id": { + "name": "stripe_customer_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "plan": { + "name": "plan", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", 
+ "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "current_period_start": { + "name": "current_period_start", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "current_period_end": { + "name": "current_period_end", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "subscriptions_user_id_users_id_fk": { + "name": "subscriptions_user_id_users_id_fk", + "tableFrom": "subscriptions", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "subscriptions_stripe_subscription_id_unique": { + "name": "subscriptions_stripe_subscription_id_unique", + "nullsNotDistinct": false, + "columns": [ + "stripe_subscription_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.usage_records": { + "name": "usage_records", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "metric": { + "name": "metric", + "type": "varchar(100)", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "recorded_at": { + "name": "recorded_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": 
"now()" + } + }, + "indexes": { + "idx_usage_records_user_id": { + "name": "idx_usage_records_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_usage_records_recorded_at": { + "name": "idx_usage_records_recorded_at", + "columns": [ + { + "expression": "recorded_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "usage_records_user_id_users_id_fk": { + "name": "usage_records_user_id_users_id_fk", + "tableFrom": "usage_records", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "usage_records_workspace_id_workspaces_id_fk": { + "name": "usage_records_workspace_id_workspaces_id_fk", + "tableFrom": "usage_records", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "github_id": { + "name": "github_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "github_username": { + "name": "github_username", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "email": { + "name": "email", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "avatar_url": { + "name": "avatar_url", + "type": "varchar(512)", + "primaryKey": false, + "notNull": false + }, + "plan": { + "name": "plan", + "type": 
"varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'free'" + }, + "nango_connection_id": { + "name": "nango_connection_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "incoming_connection_id": { + "name": "incoming_connection_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "pending_installation_request": { + "name": "pending_installation_request", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "onboarding_completed_at": { + "name": "onboarding_completed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_users_nango_connection": { + "name": "idx_users_nango_connection", + "columns": [ + { + "expression": "nango_connection_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_users_incoming_connection": { + "name": "idx_users_incoming_connection", + "columns": [ + { + "expression": "incoming_connection_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_github_id_unique": { + "name": "users_github_id_unique", + "nullsNotDistinct": false, + "columns": [ + "github_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.workspace_members": { + "name": "workspace_members", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": 
"gen_random_uuid()" + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "role": { + "name": "role", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'member'" + }, + "invited_by": { + "name": "invited_by", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "invited_at": { + "name": "invited_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "accepted_at": { + "name": "accepted_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "idx_workspace_members_workspace_id": { + "name": "idx_workspace_members_workspace_id", + "columns": [ + { + "expression": "workspace_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_workspace_members_user_id": { + "name": "idx_workspace_members_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "workspace_members_workspace_id_workspaces_id_fk": { + "name": "workspace_members_workspace_id_workspaces_id_fk", + "tableFrom": "workspace_members", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "workspace_members_user_id_users_id_fk": { + "name": "workspace_members_user_id_users_id_fk", + "tableFrom": "workspace_members", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "workspace_members_invited_by_users_id_fk": { + "name": 
"workspace_members_invited_by_users_id_fk", + "tableFrom": "workspace_members", + "tableTo": "users", + "columnsFrom": [ + "invited_by" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "workspace_members_workspace_user_unique": { + "name": "workspace_members_workspace_user_unique", + "nullsNotDistinct": false, + "columns": [ + "workspace_id", + "user_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.workspaces": { + "name": "workspaces", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'provisioning'" + }, + "compute_provider": { + "name": "compute_provider", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "compute_id": { + "name": "compute_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "public_url": { + "name": "public_url", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "custom_domain": { + "name": "custom_domain", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "custom_domain_status": { + "name": "custom_domain_status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "ssh_host": { + "name": "ssh_host", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "ssh_port": { + "name": "ssh_port", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "ssh_password": { + "name": "ssh_password", + "type": "varchar(255)", + 
"primaryKey": false, + "notNull": false + }, + "config": { + "name": "config", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_workspaces_user_id": { + "name": "idx_workspaces_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_workspaces_custom_domain": { + "name": "idx_workspaces_custom_domain", + "columns": [ + { + "expression": "custom_domain", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "workspaces_user_id_users_id_fk": { + "name": "workspaces_user_id_users_id_fk", + "tableFrom": "workspaces", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/src/cloud/db/migrations/meta/_journal.json b/src/cloud/db/migrations/meta/_journal.json index 7e7e1959..f5e486f5 100644 --- a/src/cloud/db/migrations/meta/_journal.json +++ b/src/cloud/db/migrations/meta/_journal.json @@ -15,6 +15,41 @@ "when": 1735776000000, 
"tag": "0002_agent_sessions", "breakpoints": true + }, + { + "idx": 2, + "version": "5", + "when": 1735862400000, + "tag": "0003_nango_user_columns", + "breakpoints": true + }, + { + "idx": 3, + "version": "5", + "when": 1735948800000, + "tag": "0004_repositories_nango_columns", + "breakpoints": true + }, + { + "idx": 4, + "version": "5", + "when": 1736035200000, + "tag": "0005_github_installations", + "breakpoints": true + }, + { + "idx": 5, + "version": "5", + "when": 1736121600000, + "tag": "0006_workspace_ssh", + "breakpoints": true + }, + { + "idx": 6, + "version": "5", + "when": 1736208000000, + "tag": "0007_drop_workspace_ssh", + "breakpoints": true } ] -} +} \ No newline at end of file diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index 8335277e..6c8fe0cc 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -118,6 +118,40 @@ export const credentialsRelations = relations(credentials, ({ one }) => ({ // Workspaces // ============================================================================ +// Agent policy types for workspace-level enforcement +export interface AgentPolicyRule { + /** Agent name pattern (supports wildcards: "Lead", "Worker*", "*") */ + name: string; + /** Allowed tools (empty = all allowed, ["none"] = no tools) */ + allowedTools?: string[]; + /** Agents this agent can spawn (empty = can spawn any) */ + canSpawn?: string[]; + /** Agents this agent can message (empty = can message any) */ + canMessage?: string[]; + /** Maximum concurrent spawns allowed */ + maxSpawns?: number; + /** Rate limit: messages per minute */ + rateLimit?: number; + /** Whether this agent can be spawned by others */ + canBeSpawned?: boolean; +} + +export interface WorkspaceAgentPolicy { + /** Default policy for agents without explicit config */ + defaultPolicy?: AgentPolicyRule; + /** Named agent policies */ + agents?: AgentPolicyRule[]; + /** Global settings */ + settings?: { + /** Require explicit agent definitions (reject unknown 
agents) */ + requireExplicitAgents?: boolean; + /** Enable audit logging */ + auditEnabled?: boolean; + /** Maximum total agents */ + maxTotalAgents?: number; + }; +} + // Workspace configuration type export interface WorkspaceConfig { providers?: string[]; @@ -125,6 +159,8 @@ export interface WorkspaceConfig { supervisorEnabled?: boolean; maxAgents?: number; resourceTier?: 'small' | 'medium' | 'large' | 'xlarge'; + /** Agent policy for this workspace (enforced when repos don't have agents.md) */ + agentPolicy?: WorkspaceAgentPolicy; } export const workspaces = pgTable('workspaces', { @@ -559,3 +595,220 @@ export type AgentCrash = typeof agentCrashes.$inferSelect; export type NewAgentCrash = typeof agentCrashes.$inferInsert; export type MemoryAlert = typeof memoryAlerts.$inferSelect; export type NewMemoryAlert = typeof memoryAlerts.$inferInsert; + +// ============================================================================ +// CI Failure Events (GitHub CI check failures) +// ============================================================================ + +export interface CIAnnotation { + path: string; + startLine: number; + endLine: number; + annotationLevel: string; + message: string; +} + +export const ciFailureEvents = pgTable('ci_failure_events', { + id: uuid('id').primaryKey().defaultRandom(), + repositoryId: uuid('repository_id').references(() => repositories.id, { onDelete: 'cascade' }), + repository: varchar('repository', { length: 255 }).notNull(), // org/repo format + prNumber: bigint('pr_number', { mode: 'number' }), + branch: varchar('branch', { length: 255 }), + commitSha: varchar('commit_sha', { length: 40 }), + checkName: varchar('check_name', { length: 255 }).notNull(), + checkId: bigint('check_id', { mode: 'number' }).notNull(), + conclusion: varchar('conclusion', { length: 50 }).notNull(), // failure, cancelled, timed_out, etc. 
+ failureTitle: text('failure_title'), + failureSummary: text('failure_summary'), + failureDetails: text('failure_details'), + annotations: jsonb('annotations').$type().default([]), + workflowName: varchar('workflow_name', { length: 255 }), + workflowRunId: bigint('workflow_run_id', { mode: 'number' }), + // Processing state + processedAt: timestamp('processed_at'), + agentSpawned: boolean('agent_spawned').default(false), + createdAt: timestamp('created_at').defaultNow().notNull(), +}, (table) => ({ + repositoryIdx: index('idx_ci_failure_events_repository').on(table.repository), + prNumberIdx: index('idx_ci_failure_events_pr_number').on(table.prNumber), + checkNameIdx: index('idx_ci_failure_events_check_name').on(table.checkName), + createdAtIdx: index('idx_ci_failure_events_created_at').on(table.createdAt), + repoPrIdx: index('idx_ci_failure_events_repo_pr').on(table.repository, table.prNumber), +})); + +export const ciFailureEventsRelations = relations(ciFailureEvents, ({ one, many }) => ({ + repositoryRef: one(repositories, { + fields: [ciFailureEvents.repositoryId], + references: [repositories.id], + }), + fixAttempts: many(ciFixAttempts), +})); + +// ============================================================================ +// CI Fix Attempts (agent responses to failures) +// ============================================================================ + +export const ciFixAttempts = pgTable('ci_fix_attempts', { + id: uuid('id').primaryKey().defaultRandom(), + failureEventId: uuid('failure_event_id').notNull().references(() => ciFailureEvents.id, { onDelete: 'cascade' }), + agentId: varchar('agent_id', { length: 255 }).notNull(), + agentName: varchar('agent_name', { length: 255 }).notNull(), + status: varchar('status', { length: 50 }).notNull().default('pending'), // pending, in_progress, success, failed + commitSha: varchar('commit_sha', { length: 40 }), + errorMessage: text('error_message'), + // Timing + startedAt: 
timestamp('started_at').defaultNow().notNull(), + completedAt: timestamp('completed_at'), +}, (table) => ({ + failureEventIdx: index('idx_ci_fix_attempts_failure_event').on(table.failureEventId), + statusIdx: index('idx_ci_fix_attempts_status').on(table.status), + agentIdIdx: index('idx_ci_fix_attempts_agent_id').on(table.agentId), +})); + +export const ciFixAttemptsRelations = relations(ciFixAttempts, ({ one }) => ({ + failureEvent: one(ciFailureEvents, { + fields: [ciFixAttempts.failureEventId], + references: [ciFailureEvents.id], + }), +})); + +// ============================================================================ +// CI Webhook Configuration (per-repository settings) +// ============================================================================ + +export interface CICheckStrategy { + autoFix: boolean; + command?: string; + agentProfile?: string; + notifyOnly?: boolean; +} + +export interface CIWebhookConfig { + enabled: boolean; + autoFix?: { + lint?: boolean; + typecheck?: boolean; + test?: boolean; + build?: boolean; + }; + notifyExistingAgent?: boolean; + spawnNewAgent?: boolean; + maxConcurrentAgents?: number; + cooldownMinutes?: number; + checkStrategies?: Record; +} + +// Type exports for CI tables +export type CIFailureEvent = typeof ciFailureEvents.$inferSelect; +export type NewCIFailureEvent = typeof ciFailureEvents.$inferInsert; +export type CIFixAttempt = typeof ciFixAttempts.$inferSelect; +export type NewCIFixAttempt = typeof ciFixAttempts.$inferInsert; + +// ============================================================================ +// GitHub Issue Assignments (agent handling of issues) +// ============================================================================ + +export const issueAssignments = pgTable('issue_assignments', { + id: uuid('id').primaryKey().defaultRandom(), + repositoryId: uuid('repository_id').references(() => repositories.id, { onDelete: 'cascade' }), + repository: varchar('repository', { length: 255 }).notNull(), 
// org/repo format + issueNumber: bigint('issue_number', { mode: 'number' }).notNull(), + issueTitle: text('issue_title').notNull(), + issueBody: text('issue_body'), + issueUrl: varchar('issue_url', { length: 512 }), + // Assignment details + agentId: varchar('agent_id', { length: 255 }), + agentName: varchar('agent_name', { length: 255 }), + assignedAt: timestamp('assigned_at'), + // Status tracking + status: varchar('status', { length: 50 }).notNull().default('pending'), // pending, assigned, in_progress, resolved, closed + resolution: text('resolution'), + // PR created to fix the issue + linkedPrNumber: bigint('linked_pr_number', { mode: 'number' }), + // Metadata + labels: text('labels').array(), + priority: varchar('priority', { length: 20 }), // low, medium, high, critical + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull(), +}, (table) => ({ + repositoryIdx: index('idx_issue_assignments_repository').on(table.repository), + issueNumberIdx: index('idx_issue_assignments_issue_number').on(table.issueNumber), + statusIdx: index('idx_issue_assignments_status').on(table.status), + agentIdIdx: index('idx_issue_assignments_agent_id').on(table.agentId), + repoIssueIdx: unique('issue_assignments_repo_issue_unique').on(table.repository, table.issueNumber), +})); + +export const issueAssignmentsRelations = relations(issueAssignments, ({ one }) => ({ + repositoryRef: one(repositories, { + fields: [issueAssignments.repositoryId], + references: [repositories.id], + }), +})); + +// ============================================================================ +// Comment Mentions (tracking @mentions to agents) +// ============================================================================ + +export const commentMentions = pgTable('comment_mentions', { + id: uuid('id').primaryKey().defaultRandom(), + repositoryId: uuid('repository_id').references(() => repositories.id, { onDelete: 'cascade' }), + repository: 
varchar('repository', { length: 255 }).notNull(), + // Source of the mention + sourceType: varchar('source_type', { length: 50 }).notNull(), // issue_comment, pr_comment, pr_review + sourceId: bigint('source_id', { mode: 'number' }).notNull(), // GitHub comment ID + issueOrPrNumber: bigint('issue_or_pr_number', { mode: 'number' }).notNull(), + // Comment details + commentBody: text('comment_body').notNull(), + commentUrl: varchar('comment_url', { length: 512 }), + authorLogin: varchar('author_login', { length: 255 }).notNull(), + authorId: bigint('author_id', { mode: 'number' }), + // Mention details + mentionedAgent: varchar('mentioned_agent', { length: 255 }).notNull(), // e.g., "agent-relay", "ci-fix", "lead" + mentionContext: text('mention_context'), // Text surrounding the mention + // Response tracking + agentId: varchar('agent_id', { length: 255 }), + agentName: varchar('agent_name', { length: 255 }), + status: varchar('status', { length: 50 }).notNull().default('pending'), // pending, processing, responded, ignored + responseCommentId: bigint('response_comment_id', { mode: 'number' }), + responseBody: text('response_body'), + respondedAt: timestamp('responded_at'), + // Metadata + createdAt: timestamp('created_at').defaultNow().notNull(), +}, (table) => ({ + repositoryIdx: index('idx_comment_mentions_repository').on(table.repository), + sourceIdx: index('idx_comment_mentions_source').on(table.sourceType, table.sourceId), + statusIdx: index('idx_comment_mentions_status').on(table.status), + mentionedAgentIdx: index('idx_comment_mentions_mentioned_agent').on(table.mentionedAgent), +})); + +export const commentMentionsRelations = relations(commentMentions, ({ one }) => ({ + repositoryRef: one(repositories, { + fields: [commentMentions.repositoryId], + references: [repositories.id], + }), +})); + +// ============================================================================ +// Agent Webhook Configuration (per-repo settings for agent triggers) +// 
============================================================================ + +export interface AgentTriggerConfig { + // Which agents can be mentioned + mentionableAgents?: string[]; // e.g., ["lead", "ci-fix", "reviewer"] + // Default agent for issue handling + defaultIssueAgent?: string; + // Labels that trigger agent assignment + autoAssignLabels?: Record; // e.g., { "bug": "debugger", "enhancement": "developer" } + // Whether to auto-respond to mentions + autoRespondToMentions?: boolean; + // Rate limiting + maxResponsesPerHour?: number; + // Who can trigger agents + allowedTriggerUsers?: string[]; // Empty = everyone, list = only these users +} + +// Type exports for issue/comment tables +export type IssueAssignment = typeof issueAssignments.$inferSelect; +export type NewIssueAssignment = typeof issueAssignments.$inferInsert; +export type CommentMention = typeof commentMentions.$inferSelect; +export type NewCommentMention = typeof commentMentions.$inferInsert; diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index 183205a8..460240e5 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -4,6 +4,7 @@ * One-click provisioning for compute resources (Fly.io, Railway, Docker). */ +import * as crypto from 'crypto'; import { getConfig } from '../config.js'; import { db, Workspace } from '../db/index.js'; import { vault } from '../vault/index.js'; @@ -11,6 +12,7 @@ import { nangoService } from '../services/nango.js'; const WORKSPACE_PORT = 3888; const FETCH_TIMEOUT_MS = 10_000; +const WORKSPACE_IMAGE = process.env.WORKSPACE_IMAGE || 'ghcr.io/agentworkforce/relay-workspace:latest'; /** * Get a fresh GitHub App installation token from Nango. 
@@ -104,6 +106,8 @@ export interface ProvisionConfig { repositories: string[]; supervisorEnabled?: boolean; maxAgents?: number; + /** Direct GitHub token for testing (bypasses Nango lookup) */ + githubToken?: string; } export interface ProvisionResult { @@ -164,6 +168,8 @@ class FlyProvisioner implements ComputeProvisioner { private org: string; private region: string; private workspaceDomain?: string; + private cloudApiUrl: string; + private sessionSecret: string; constructor() { const config = getConfig(); @@ -174,6 +180,19 @@ class FlyProvisioner implements ComputeProvisioner { this.org = config.compute.fly.org; this.region = config.compute.fly.region || 'sjc'; this.workspaceDomain = config.compute.fly.workspaceDomain; + this.cloudApiUrl = config.publicUrl; + this.sessionSecret = config.sessionSecret; + } + + /** + * Generate a workspace token for API authentication + * This is a simple HMAC - in production, consider using JWTs + */ + private generateWorkspaceToken(workspaceId: string): string { + return crypto + .createHmac('sha256', this.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); } async provision( @@ -195,20 +214,22 @@ class FlyProvisioner implements ComputeProvisioner { }), }); - // Set secrets (credentials) + // Set secrets (provider credentials) const secrets: Record = {}; for (const [provider, token] of credentials) { secrets[`${provider.toUpperCase()}_TOKEN`] = token; } - await fetchWithRetry(`https://api.machines.dev/v1/apps/${appName}/secrets`, { - method: 'POST', - headers: { - Authorization: `Bearer ${this.apiToken}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify(secrets), - }); + if (Object.keys(secrets).length > 0) { + await fetchWithRetry(`https://api.machines.dev/v1/apps/${appName}/secrets`, { + method: 'POST', + headers: { + Authorization: `Bearer ${this.apiToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(secrets), + }); + } // If custom workspace domain is configured, add 
certificate const customHostname = this.workspaceDomain @@ -231,7 +252,7 @@ class FlyProvisioner implements ComputeProvisioner { body: JSON.stringify({ region: this.region, config: { - image: 'ghcr.io/khaliqgant/agent-relay-workspace:latest', + image: WORKSPACE_IMAGE, env: { WORKSPACE_ID: workspace.id, SUPERVISOR_ENABLED: String(workspace.config.supervisorEnabled ?? false), @@ -240,6 +261,9 @@ class FlyProvisioner implements ComputeProvisioner { PROVIDERS: (workspace.config.providers ?? []).join(','), PORT: String(WORKSPACE_PORT), AGENT_RELAY_DASHBOARD_PORT: String(WORKSPACE_PORT), + // Git gateway configuration + CLOUD_API_URL: this.cloudApiUrl, + WORKSPACE_TOKEN: this.generateWorkspaceToken(workspace.id), }, services: [ { @@ -479,6 +503,8 @@ class FlyProvisioner implements ComputeProvisioner { */ class RailwayProvisioner implements ComputeProvisioner { private apiToken: string; + private cloudApiUrl: string; + private sessionSecret: string; constructor() { const config = getConfig(); @@ -486,6 +512,15 @@ class RailwayProvisioner implements ComputeProvisioner { throw new Error('Railway configuration missing'); } this.apiToken = config.compute.railway.apiToken; + this.cloudApiUrl = config.publicUrl; + this.sessionSecret = config.sessionSecret; + } + + private generateWorkspaceToken(workspaceId: string): string { + return crypto + .createHmac('sha256', this.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); } async provision( @@ -539,7 +574,7 @@ class RailwayProvisioner implements ComputeProvisioner { projectId, name: 'workspace', source: { - image: 'ghcr.io/khaliqgant/agent-relay-workspace:latest', + image: WORKSPACE_IMAGE, }, }, }, @@ -558,6 +593,8 @@ class RailwayProvisioner implements ComputeProvisioner { PROVIDERS: (workspace.config.providers ?? 
[]).join(','), PORT: String(WORKSPACE_PORT), AGENT_RELAY_DASHBOARD_PORT: String(WORKSPACE_PORT), + CLOUD_API_URL: this.cloudApiUrl, + WORKSPACE_TOKEN: this.generateWorkspaceToken(workspace.id), }; for (const [provider, token] of credentials) { @@ -724,6 +761,64 @@ class RailwayProvisioner implements ComputeProvisioner { * Local Docker provisioner (for development/self-hosted) */ class DockerProvisioner implements ComputeProvisioner { + private cloudApiUrl: string; + private cloudApiUrlForContainer: string; + private sessionSecret: string; + + constructor() { + const config = getConfig(); + this.cloudApiUrl = config.publicUrl; + this.sessionSecret = config.sessionSecret; + + // For Docker containers, localhost won't work - they need to reach the host + // Convert localhost URLs to host.docker.internal for container access + if (this.cloudApiUrl.includes('localhost') || this.cloudApiUrl.includes('127.0.0.1')) { + this.cloudApiUrlForContainer = this.cloudApiUrl + .replace('localhost', 'host.docker.internal') + .replace('127.0.0.1', 'host.docker.internal'); + console.log(`[docker] Container API URL: ${this.cloudApiUrlForContainer} (host: ${this.cloudApiUrl})`); + } else { + this.cloudApiUrlForContainer = this.cloudApiUrl; + } + } + + private generateWorkspaceToken(workspaceId: string): string { + return crypto + .createHmac('sha256', this.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); + } + + /** + * Wait for container to be healthy by polling the health endpoint + */ + private async waitForHealthy(publicUrl: string, timeoutMs: number = 60_000): Promise { + const startTime = Date.now(); + const pollInterval = 2000; + + console.log(`[docker] Waiting for container to be healthy at ${publicUrl}...`); + + while (Date.now() - startTime < timeoutMs) { + try { + const response = await fetch(`${publicUrl}/health`, { + method: 'GET', + signal: AbortSignal.timeout(5000), + }); + + if (response.ok) { + console.log(`[docker] Container healthy after 
${Date.now() - startTime}ms`); + return; + } + } catch { + // Container not ready yet, continue polling + } + + await wait(pollInterval); + } + + throw new Error(`Container did not become healthy within ${timeoutMs}ms`); + } + async provision( workspace: Workspace, credentials: Map @@ -739,6 +834,8 @@ class DockerProvisioner implements ComputeProvisioner { `-e PROVIDERS=${(workspace.config.providers ?? []).join(',')}`, `-e PORT=${WORKSPACE_PORT}`, `-e AGENT_RELAY_DASHBOARD_PORT=${WORKSPACE_PORT}`, + `-e CLOUD_API_URL=${this.cloudApiUrlForContainer}`, + `-e WORKSPACE_TOKEN=${this.generateWorkspaceToken(workspace.id)}`, ]; for (const [provider, token] of credentials) { @@ -749,17 +846,47 @@ class DockerProvisioner implements ComputeProvisioner { const { execSync } = await import('child_process'); const hostPort = 3000 + Math.floor(Math.random() * 1000); + // When running in Docker, connect to the same network for container-to-container communication + const runningInDocker = process.env.RUNNING_IN_DOCKER === 'true'; + const networkArg = runningInDocker ? '--network agent-relay-dev' : ''; + + // In development, mount local dist and docs folders for faster iteration + // Set WORKSPACE_DEV_MOUNT=true to enable + const devMount = process.env.WORKSPACE_DEV_MOUNT === 'true'; + const volumeArgs = devMount + ? 
`-v "${process.cwd()}/dist:/app/dist:ro" -v "${process.cwd()}/docs:/app/docs:ro"` + : ''; + if (devMount) { + console.log('[provisioner] Dev mode: mounting local dist/ and docs/ folders into workspace container'); + } + try { execSync( - `docker run -d --name ${containerName} -p ${hostPort}:${WORKSPACE_PORT} ${envArgs.join(' ')} ghcr.io/khaliqgant/agent-relay-workspace:latest`, + `docker run -d --user root --name ${containerName} ${networkArg} ${volumeArgs} -p ${hostPort}:${WORKSPACE_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, { stdio: 'pipe' } ); + const publicUrl = `http://localhost:${hostPort}`; + + // Wait for container to be healthy before returning + // When running in Docker, use the internal container name for health check + const healthCheckUrl = runningInDocker + ? `http://${containerName}:${WORKSPACE_PORT}` + : publicUrl; + await this.waitForHealthy(healthCheckUrl); + return { computeId: containerName, - publicUrl: `http://localhost:${hostPort}`, + publicUrl, }; } catch (error) { + // Clean up container if it was created but health check failed + try { + const { execSync: execSyncCleanup } = await import('child_process'); + execSyncCleanup(`docker rm -f ${containerName}`, { stdio: 'pipe' }); + } catch { + // Ignore cleanup errors + } throw new Error(`Failed to start Docker container: ${error}`); } } @@ -853,6 +980,16 @@ export class WorkspaceProvisioner { }, }); + // Add creator as owner in workspace_members for team collaboration support + await db.workspaceMembers.addMember({ + workspaceId: workspace.id, + userId: config.userId, + role: 'owner', + invitedBy: config.userId, // Self-invited as creator + }); + // Auto-accept the creator's membership + await db.workspaceMembers.acceptInvite(workspace.id, config.userId); + // Get credentials const credentials = new Map(); for (const provider of config.providers) { @@ -863,13 +1000,20 @@ export class WorkspaceProvisioner { } // GitHub token is required for cloning repositories - // Use Nango GitHub App 
token (fresh installation token, not from vault) + // Use direct token if provided (for testing), otherwise get from Nango if (config.repositories.length > 0) { - const githubToken = await getGithubAppTokenForUser(config.userId); - if (githubToken) { - credentials.set('github', githubToken); + if (config.githubToken) { + // Direct token provided (for testing) + credentials.set('github', config.githubToken); + console.log('[provisioner] Using provided GitHub token'); } else { - console.warn(`[provisioner] No GitHub App token for user ${config.userId}; repository cloning may fail.`); + // Get fresh installation token from Nango GitHub App + const githubToken = await getGithubAppTokenForUser(config.userId); + if (githubToken) { + credentials.set('github', githubToken); + } else { + console.warn(`[provisioner] No GitHub App token for user ${config.userId}; repository cloning may fail.`); + } } } diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 52c2ed9e..6529d356 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -8,9 +8,11 @@ import cors from 'cors'; import helmet from 'helmet'; import crypto from 'crypto'; import path from 'node:path'; +import http from 'node:http'; import { fileURLToPath } from 'node:url'; import { createClient, RedisClientType } from 'redis'; import { RedisStore } from 'connect-redis'; +import { WebSocketServer, WebSocket } from 'ws'; import { getConfig } from './config.js'; import { runMigrations } from './db/index.js'; import { getScalingOrchestrator, ScalingOrchestrator } from './services/index.js'; @@ -26,7 +28,7 @@ declare module 'express-session' { } // API routers -import { authRouter } from './api/auth.js'; +import { authRouter, requireAuth } from './api/auth.js'; import { providersRouter } from './api/providers.js'; import { workspacesRouter } from './api/workspaces.js'; import { reposRouter } from './api/repos.js'; @@ -41,6 +43,39 @@ import { testHelpersRouter } from './api/test-helpers.js'; import { webhooksRouter } 
from './api/webhooks.js'; import { githubAppRouter } from './api/github-app.js'; import { nangoAuthRouter } from './api/nango-auth.js'; +import { gitRouter } from './api/git.js'; +import { db } from './db/index.js'; + +/** + * Proxy a request to the user's primary running workspace + */ +async function proxyToUserWorkspace(req: Request, res: Response, path: string): Promise { + const userId = req.session.userId; + if (!userId) { + res.status(401).json({ error: 'Unauthorized' }); + return; + } + + try { + // Find user's running workspace + const workspaces = await db.workspaces.findByUserId(userId); + const runningWorkspace = workspaces.find(w => w.status === 'running' && w.publicUrl); + + if (!runningWorkspace || !runningWorkspace.publicUrl) { + res.status(404).json({ error: 'No running workspace found', success: false }); + return; + } + + // Proxy to workspace + const targetUrl = `${runningWorkspace.publicUrl}${path}`; + const proxyRes = await fetch(targetUrl); + const data = await proxyRes.json(); + res.status(proxyRes.status).json(data); + } catch (error) { + console.error('[trajectory-proxy] Error:', error); + res.status(500).json({ error: 'Failed to proxy request to workspace', success: false }); + } +} export interface CloudServer { app: Express; @@ -64,16 +99,19 @@ export async function createServer(): Promise { await redisClient.connect(); // Middleware - // Configure helmet to allow Next.js inline scripts + // Configure helmet to allow Next.js inline scripts and Nango Connect UI app.use(helmet({ contentSecurityPolicy: { directives: { defaultSrc: ["'self'"], - scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'"], - styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"], - fontSrc: ["'self'", "https://fonts.gstatic.com"], - imgSrc: ["'self'", "data:", "https:"], - connectSrc: ["'self'", "wss:", "ws:", "https:"], + scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'", "https://connect.nango.dev"], + styleSrc: ["'self'", 
"'unsafe-inline'", "https://fonts.googleapis.com", "https://connect.nango.dev"], + fontSrc: ["'self'", "https://fonts.gstatic.com", "data:"], + imgSrc: ["'self'", "data:", "https:", "blob:"], + connectSrc: ["'self'", "wss:", "ws:", "https:", "https://api.nango.dev", "https://connect.nango.dev"], + frameSrc: ["'self'", "https://connect.nango.dev", "https://github.com"], + childSrc: ["'self'", "https://connect.nango.dev", "blob:"], + workerSrc: ["'self'", "blob:"], }, }, })); @@ -83,7 +121,13 @@ export async function createServer(): Promise { credentials: true, }) ); - app.use(express.json()); + // Custom JSON parser that preserves raw body for webhook signature verification + app.use(express.json({ + verify: (req: Request, _res, buf) => { + // Store raw body for webhook signature verification + (req as Request & { rawBody?: string }).rawBody = buf.toString(); + }, + })); // Session middleware app.use( @@ -116,10 +160,19 @@ export async function createServer(): Promise { // Simple in-memory rate limiting per IP const RATE_LIMIT_WINDOW_MS = 60_000; - const RATE_LIMIT_MAX = 300; + // Higher limit in development mode + const RATE_LIMIT_MAX = process.env.NODE_ENV === 'development' ? 
1000 : 300; const rateLimits = new Map(); app.use((req: Request, res: Response, next: NextFunction) => { + // Skip rate limiting for localhost in development + if (process.env.NODE_ENV === 'development') { + const ip = req.ip || ''; + if (ip === '127.0.0.1' || ip === '::1' || ip === '::ffff:127.0.0.1') { + return next(); + } + } + const now = Date.now(); const key = req.ip || 'unknown'; const entry = rateLimits.get(key); @@ -148,18 +201,32 @@ export async function createServer(): Promise { // Lightweight CSRF protection using session token const SAFE_METHODS = new Set(['GET', 'HEAD', 'OPTIONS']); - // Paths exempt from CSRF (webhooks from external services) - const CSRF_EXEMPT_PATHS = ['/api/webhooks/', '/api/auth/nango/webhook']; + // Paths exempt from CSRF (webhooks from external services, workspace proxy) + const CSRF_EXEMPT_PATHS = [ + '/api/webhooks/', + '/api/auth/nango/webhook', + ]; + // Additional pattern for workspace proxy routes (contains /proxy/) + const isWorkspaceProxyRoute = (path: string) => /^\/api\/workspaces\/[^/]+\/proxy\//.test(path); app.use((req: Request, res: Response, next: NextFunction) => { - // Skip CSRF for webhook endpoints - if (CSRF_EXEMPT_PATHS.some(path => req.path.startsWith(path))) { + // Skip CSRF for webhook endpoints and workspace proxy routes + if (CSRF_EXEMPT_PATHS.some(path => req.path.startsWith(path)) || isWorkspaceProxyRoute(req.path)) { return next(); } if (!req.session) return res.status(500).json({ error: 'Session unavailable' }); + // Generate CSRF token if not present + // Use session.save() to ensure the session is persisted even for unauthenticated users + // This is necessary because saveUninitialized: false won't auto-save new sessions if (!req.session.csrfToken) { req.session.csrfToken = crypto.randomBytes(32).toString('hex'); + // Explicitly save session to persist the CSRF token + req.session.save((err) => { + if (err) { + console.error('[csrf] Failed to save session:', err); + } + }); } 
res.setHeader('X-CSRF-Token', req.session.csrfToken); @@ -180,6 +247,7 @@ export async function createServer(): Promise { const token = req.get('x-csrf-token'); if (!token || token !== req.session.csrfToken) { + console.log(`[csrf] Token mismatch: received=${token?.substring(0, 8)}... expected=${req.session.csrfToken?.substring(0, 8)}...`); return res.status(403).json({ error: 'CSRF token invalid or missing', code: 'CSRF_MISMATCH', @@ -208,6 +276,7 @@ export async function createServer(): Promise { app.use('/api/webhooks', webhooksRouter); app.use('/api/github-app', githubAppRouter); app.use('/api/auth/nango', nangoAuthRouter); + app.use('/api/git', gitRouter); // Test helper routes (only available in non-production) if (process.env.NODE_ENV !== 'production') { @@ -215,12 +284,32 @@ export async function createServer(): Promise { console.log('[cloud] Test helper routes enabled (non-production mode)'); } + // Trajectory proxy routes - auto-detect user's workspace and forward + // These are convenience routes so the dashboard doesn't need to know the workspace ID + app.get('/api/trajectory', requireAuth, async (req, res) => { + await proxyToUserWorkspace(req, res, '/api/trajectory'); + }); + + app.get('/api/trajectory/steps', requireAuth, async (req, res) => { + const queryString = req.query.trajectoryId + ? 
`?trajectoryId=${encodeURIComponent(req.query.trajectoryId as string)}` + : ''; + await proxyToUserWorkspace(req, res, `/api/trajectory/steps${queryString}`); + }); + + app.get('/api/trajectory/history', requireAuth, async (req, res) => { + await proxyToUserWorkspace(req, res, '/api/trajectory/history'); + }); + // Serve static dashboard files (Next.js static export) // Path: dist/cloud/server.js -> ../../src/dashboard/out const dashboardPath = path.join(__dirname, '../../src/dashboard/out'); - app.use(express.static(dashboardPath)); - // SPA fallback - serve index.html for all non-API routes + // Serve static files with .html extension fallback for clean URLs + // e.g., /signup will try /signup.html + app.use(express.static(dashboardPath, { extensions: ['html'] })); + + // SPA fallback - serve index.html for all non-API routes that don't match static files // Express 5 requires named wildcard params instead of bare '*' app.get('/{*splat}', (req, res, next) => { // Don't serve index.html for API routes @@ -240,9 +329,236 @@ export async function createServer(): Promise { }); // Server lifecycle - let server: ReturnType | null = null; + let server: http.Server | null = null; let scalingOrchestrator: ScalingOrchestrator | null = null; + // Create HTTP server for WebSocket upgrade handling + const httpServer = http.createServer(app); + + // ===== Presence WebSocket ===== + const wssPresence = new WebSocketServer({ + noServer: true, + perMessageDeflate: false, + maxPayload: 1024 * 1024, // 1MB - presence messages are small + }); + + // Track online users for presence with multi-tab support + interface UserPresenceInfo { + username: string; + avatarUrl?: string; + connectedAt: string; + lastSeen: string; + } + interface UserPresenceState { + info: UserPresenceInfo; + connections: Set; + } + const onlineUsers = new Map(); + + // Validation helpers + const isValidUsername = (username: unknown): username is string => { + if (typeof username !== 'string') return false; + 
return /^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,37}[a-zA-Z0-9])?$/.test(username); + }; + + const isValidAvatarUrl = (url: unknown): url is string | undefined => { + if (url === undefined || url === null) return true; + if (typeof url !== 'string') return false; + try { + const parsed = new URL(url); + return parsed.protocol === 'https:' && + (parsed.hostname === 'avatars.githubusercontent.com' || + parsed.hostname === 'github.com' || + parsed.hostname.endsWith('.githubusercontent.com')); + } catch { + return false; + } + }; + + // Handle HTTP upgrade for WebSocket + httpServer.on('upgrade', (request, socket, head) => { + const pathname = new URL(request.url || '', `http://${request.headers.host}`).pathname; + + if (pathname === '/ws/presence') { + wssPresence.handleUpgrade(request, socket, head, (ws) => { + wssPresence.emit('connection', ws, request); + }); + } else { + // Unknown WebSocket path - destroy socket + socket.destroy(); + } + }); + + // Broadcast to all presence clients + const broadcastPresence = (message: object, exclude?: WebSocket) => { + const payload = JSON.stringify(message); + wssPresence.clients.forEach((client) => { + if (client !== exclude && client.readyState === WebSocket.OPEN) { + client.send(payload); + } + }); + }; + + // Get online users list + const getOnlineUsersList = (): UserPresenceInfo[] => { + return Array.from(onlineUsers.values()).map((state) => state.info); + }; + + // Heartbeat interval to detect dead connections (30 seconds) + const PRESENCE_HEARTBEAT_INTERVAL = 30000; + const _PRESENCE_HEARTBEAT_TIMEOUT = 35000; // Allow 5s grace period (reserved for future use) + + // Track connection health for heartbeat + const connectionHealth = new WeakMap(); + + // Heartbeat interval to clean up dead connections + const presenceHeartbeat = setInterval(() => { + const now = Date.now(); + wssPresence.clients.forEach((ws) => { + const health = connectionHealth.get(ws); + if (!health) { + // New connection without health tracking - initialize it + 
connectionHealth.set(ws, { isAlive: true, lastPing: now }); + return; + } + + if (!health.isAlive) { + // Connection didn't respond to last ping - terminate it + ws.terminate(); + return; + } + + // Mark as not alive until we get a pong + health.isAlive = false; + health.lastPing = now; + ws.ping(); + }); + }, PRESENCE_HEARTBEAT_INTERVAL); + + // Clean up interval on server close + wssPresence.on('close', () => { + clearInterval(presenceHeartbeat); + }); + + // Handle presence connections + wssPresence.on('connection', (ws) => { + // Initialize health tracking (no log - too noisy) + connectionHealth.set(ws, { isAlive: true, lastPing: Date.now() }); + + // Handle pong responses (heartbeat) + ws.on('pong', () => { + const health = connectionHealth.get(ws); + if (health) { + health.isAlive = true; + } + }); + + let clientUsername: string | undefined; + + ws.on('message', (data) => { + try { + const msg = JSON.parse(data.toString()); + + if (msg.type === 'presence') { + if (msg.action === 'join' && msg.user?.username) { + const username = msg.user.username; + const avatarUrl = msg.user.avatarUrl; + + if (!isValidUsername(username)) { + console.warn(`[cloud] Invalid username rejected: ${username}`); + return; + } + if (!isValidAvatarUrl(avatarUrl)) { + console.warn(`[cloud] Invalid avatar URL rejected for user ${username}`); + return; + } + + clientUsername = username; + const now = new Date().toISOString(); + + const existing = onlineUsers.get(username); + if (existing) { + existing.connections.add(ws); + existing.info.lastSeen = now; + // Only log at milestones to reduce noise + const count = existing.connections.size; + if (count === 2 || count === 5 || count === 10 || count % 50 === 0) { + console.log(`[cloud] User ${username} has ${count} connections`); + } + } else { + onlineUsers.set(username, { + info: { username, avatarUrl, connectedAt: now, lastSeen: now }, + connections: new Set([ws]), + }); + + console.log(`[cloud] User ${username} came online`); + 
broadcastPresence({ + type: 'presence_join', + user: { username, avatarUrl, connectedAt: now, lastSeen: now }, + }, ws); + } + + ws.send(JSON.stringify({ + type: 'presence_list', + users: getOnlineUsersList(), + })); + + } else if (msg.action === 'leave') { + if (!clientUsername || msg.username !== clientUsername) return; + + const userState = onlineUsers.get(clientUsername); + if (userState) { + userState.connections.delete(ws); + if (userState.connections.size === 0) { + onlineUsers.delete(clientUsername); + console.log(`[cloud] User ${clientUsername} went offline`); + broadcastPresence({ type: 'presence_leave', username: clientUsername }); + } + } + } + } else if (msg.type === 'typing') { + if (!clientUsername || msg.username !== clientUsername) return; + + const userState = onlineUsers.get(clientUsername); + if (userState) { + userState.info.lastSeen = new Date().toISOString(); + } + + broadcastPresence({ + type: 'typing', + username: clientUsername, + avatarUrl: userState?.info.avatarUrl, + isTyping: msg.isTyping, + }, ws); + } + } catch (err) { + console.error('[cloud] Invalid presence message:', err); + } + }); + + ws.on('close', () => { + if (clientUsername) { + const userState = onlineUsers.get(clientUsername); + if (userState) { + userState.connections.delete(ws); + if (userState.connections.size === 0) { + onlineUsers.delete(clientUsername); + console.log(`[cloud] User ${clientUsername} disconnected`); + broadcastPresence({ type: 'presence_leave', username: clientUsername }); + } + } + } + }); + + ws.on('error', (err) => { + console.error('[cloud] Presence WebSocket error:', err); + }); + }); + + wssPresence.on('error', (err) => { + console.error('[cloud] Presence WebSocket server error:', err); + }); + return { app, @@ -278,9 +594,10 @@ export async function createServer(): Promise { } return new Promise((resolve) => { - server = app.listen(config.port, () => { + server = httpServer.listen(config.port, () => { console.log(`Agent Relay Cloud running on 
port ${config.port}`); console.log(`Public URL: ${config.publicUrl}`); + console.log(`WebSocket: ws://localhost:${config.port}/ws/presence`); resolve(); }); }); @@ -292,6 +609,9 @@ export async function createServer(): Promise { await scalingOrchestrator.shutdown(); } + // Close WebSocket server + wssPresence.close(); + if (server) { await new Promise((resolve) => server!.close(() => resolve())); } diff --git a/src/cloud/services/ci-agent-spawner.ts b/src/cloud/services/ci-agent-spawner.ts new file mode 100644 index 00000000..f9236d93 --- /dev/null +++ b/src/cloud/services/ci-agent-spawner.ts @@ -0,0 +1,500 @@ +/** + * CI Agent Spawner Service + * + * Spawns agents to fix CI failures automatically. + * Called by the webhook handler when CI checks fail on PRs. + * + * Flow: + * 1. App posts acknowledgment comment on the PR + * 2. Finds a linked daemon for the repository + * 3. Queues spawn command for the daemon + * 4. Agent works and posts response comment + */ + +import { db, CIFailureEvent, CIAnnotation, Repository } from '../db/index.js'; +import { nangoService } from './nango.js'; + +/** + * Get the GitHub App name for comments + */ +function getAppName(): string { + return process.env.GITHUB_APP_NAME || 'Agent Relay'; +} + +/** + * Post a CI failure acknowledgment comment on GitHub + */ +async function postCIAcknowledgmentComment( + repository: Repository, + prNumber: number, + checkName: string, + failureTitle: string | null +): Promise<{ id: number; url: string } | null> { + if (!repository.nangoConnectionId) { + console.warn(`[ci-spawner] Repository ${repository.githubFullName} has no Nango connection`); + return null; + } + + const [owner, repo] = repository.githubFullName.split('/'); + const appName = getAppName(); + + const body = `🔴 **CI Failure Detected** + +The \`${checkName}\` check has failed${failureTitle ? `: ${failureTitle}` : ''}. + +I'm spawning an agent to investigate and fix this issue. 
The **@ci-fix** agent will analyze the failure and attempt to resolve it. + +You'll be notified when the fix is ready or if manual intervention is needed. + +_— ${appName}_`; + + try { + const result = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + prNumber, + body + ); + console.log(`[ci-spawner] Posted CI acknowledgment comment: ${result.html_url}`); + return { id: result.id, url: result.html_url }; + } catch (error) { + console.error(`[ci-spawner] Failed to post CI acknowledgment comment:`, error); + return null; + } +} + +/** + * Post a completion comment on GitHub + */ +async function postCompletionComment( + repository: Repository, + prNumber: number, + success: boolean, + summary: string, + commitSha?: string +): Promise { + if (!repository.nangoConnectionId) { + return; + } + + const [owner, repo] = repository.githubFullName.split('/'); + const appName = getAppName(); + + let body: string; + if (success) { + body = `✅ **CI Fix Applied** + +${summary} + +${commitSha ? `**Commit:** ${commitSha.substring(0, 7)}` : ''} + +Please review the changes and re-run the CI checks. + +_— ${appName}_`; + } else { + body = `⚠️ **CI Fix Unsuccessful** + +${summary} + +Manual intervention may be required. Please check the failure details and fix the issue manually. 
+ +_— ${appName}_`; + } + + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + prNumber, + body + ); + console.log(`[ci-spawner] Posted completion comment for PR #${prNumber}`); + } catch (error) { + console.error(`[ci-spawner] Failed to post completion comment:`, error); + } +} + +/** + * Find a linked daemon that can handle this repository + */ +async function findAvailableDaemon(repository: Repository): Promise<{ id: string; userId: string } | null> { + if (!repository.userId) { + console.warn(`[ci-spawner] Repository ${repository.githubFullName} has no userId`); + return null; + } + + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + console.warn(`[ci-spawner] No online daemon found for user ${repository.userId}`); + return null; + } + + return { id: onlineDaemon.id, userId: repository.userId }; +} + +/** + * Queue a spawn command for a linked daemon + */ +async function queueSpawnCommand( + daemonId: string, + agentName: string, + prompt: string, + metadata: { + failureEventId: string; + fixAttemptId: string; + repository: string; + prNumber: number; + checkName: string; + } +): Promise { + const command = { + type: 'spawn_agent', + agentName, + cli: 'claude', + task: prompt, + metadata, + timestamp: new Date().toISOString(), + }; + + await db.linkedDaemons.queueMessage(daemonId, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: '__spawner__', + content: JSON.stringify(command), + metadata: { type: 'spawn_command' }, + timestamp: new Date().toISOString(), + }); + + console.log(`[ci-spawner] Queued spawn command for daemon ${daemonId}`); +} + +/** + * Spawn an agent to fix CI failures + * + * This function: + * 1. Finds the workspace for the repository + * 2. Posts acknowledgment comment + * 3. Creates a fix attempt record + * 4. 
Queues spawn command for a linked daemon + * + * @param failureEvent - The CI failure event from the database + */ +export async function spawnCIFixAgent(failureEvent: CIFailureEvent): Promise { + console.log(`[ci-spawner] Spawning agent for failure: ${failureEvent.id}`); + console.log(`[ci-spawner] Repository: ${failureEvent.repository}`); + console.log(`[ci-spawner] Check: ${failureEvent.checkName}`); + console.log(`[ci-spawner] PR: #${failureEvent.prNumber}`); + + // Only handle failures on PRs + if (failureEvent.prNumber === null) { + console.log(`[ci-spawner] Failure not on a PR, skipping`); + return; + } + + const prNumber = failureEvent.prNumber; + + // Find the repository + const repository = await db.repositories.findByFullName(failureEvent.repository); + if (!repository) { + console.error(`[ci-spawner] Repository not found: ${failureEvent.repository}`); + return; + } + + // Generate agent name and ID + const agentName = `ci-fix-${failureEvent.checkName.replace(/[^a-zA-Z0-9-]/g, '-')}-${prNumber}`; + const agentId = `ci-${failureEvent.id}`; + + // Create fix attempt record + const fixAttempt = await db.ciFixAttempts.create({ + failureEventId: failureEvent.id, + agentId, + agentName, + status: 'pending', + }); + + console.log(`[ci-spawner] Created fix attempt: ${fixAttempt.id}`); + + try { + // Step 1: Post acknowledgment comment + await postCIAcknowledgmentComment( + repository, + prNumber, + failureEvent.checkName, + failureEvent.failureTitle + ); + + // Step 2: Find a linked daemon + const daemon = await findAvailableDaemon(repository); + + if (!daemon) { + console.warn(`[ci-spawner] No available daemon for ${failureEvent.repository}`); + + // Post a comment explaining the situation + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + prNumber, + `⚠️ I couldn't spawn an agent to fix this CI failure because 
no Agent Relay daemon is available. + +Please ensure you have a linked daemon running by executing \`agent-relay cloud link\` on your development machine. + +You can also fix this issue manually by reviewing the failure output above. + +_— ${getAppName()}_` + ); + } catch (error) { + console.error(`[ci-spawner] Failed to post error comment:`, error); + } + } + + await db.ciFixAttempts.complete( + fixAttempt.id, + 'failed', + undefined, + 'No available daemon to spawn agent' + ); + return; + } + + // Step 3: Build the agent prompt + const prompt = buildAgentPrompt(failureEvent, repository); + + // Step 4: Update status to in_progress + await db.ciFixAttempts.updateStatus(fixAttempt.id, 'in_progress'); + + // Step 5: Queue spawn command for the daemon + await queueSpawnCommand(daemon.id, agentName, prompt, { + failureEventId: failureEvent.id, + fixAttemptId: fixAttempt.id, + repository: failureEvent.repository, + prNumber, + checkName: failureEvent.checkName, + }); + + console.log(`[ci-spawner] Successfully queued CI fix agent for ${failureEvent.repository}#${prNumber}`); + + } catch (error) { + console.error(`[ci-spawner] Failed to spawn agent:`, error); + await db.ciFixAttempts.complete( + fixAttempt.id, + 'failed', + undefined, + error instanceof Error ? error.message : 'Unknown error' + ); + throw error; + } +} + +/** + * Build the prompt for the CI fix agent + */ +function buildAgentPrompt(failureEvent: CIFailureEvent, _repository: Repository): string { + const annotations = failureEvent.annotations as CIAnnotation[] | null; + const annotationsList = annotations && annotations.length > 0 + ? annotations + .slice(0, 20) // Limit to first 20 annotations + .map(a => `- ${a.path}:${a.startLine} - ${a.message}`) + .join('\n') + : null; + + const responseInstructions = ` +## Response Instructions + +When you complete your work: +1. Commit and push your changes +2. 
Post a comment on the PR summarizing what you fixed + +Use the GitHub CLI (\`gh\`) to post your response: +\`\`\`bash +gh pr comment ${failureEvent.prNumber} --repo ${failureEvent.repository} --body "## CI Fix Applied + +Summary of changes... + +**Files modified:** +- file1.ts +- file2.ts + +Please re-run the CI checks to verify the fix." +\`\`\` +`; + + return ` +# CI Failure Fix Task + +A CI check has failed on PR #${failureEvent.prNumber} in ${failureEvent.repository}. + +## Failure Details + +**Check Name:** ${failureEvent.checkName} +**Branch:** ${failureEvent.branch || 'unknown'} +**Commit:** ${failureEvent.commitSha || 'unknown'} + +${failureEvent.failureTitle ? `**Title:** ${failureEvent.failureTitle}` : ''} + +${failureEvent.failureSummary ? `**Summary:**\n${failureEvent.failureSummary}` : ''} + +${failureEvent.failureDetails ? `**Details:**\n${failureEvent.failureDetails}` : ''} + +${annotationsList ? `## Annotations\n\n${annotationsList}` : ''} + +## Your Task + +1. Checkout the branch: \`${failureEvent.branch || 'unknown'}\` +2. Analyze the failure based on the annotations and error messages +3. Fix the issues in the affected files +4. Run the relevant checks locally to verify the fix +5. Commit and push your changes with a clear commit message +6. Report back with a summary of what was fixed + +${responseInstructions} + +## Important + +- Only fix the specific issues causing the CI failure +- Do not refactor or improve unrelated code +- If you cannot fix the issue, explain why and what manual intervention is needed +- Keep your commit message descriptive and reference the CI check name +`.trim(); +} + +/** + * Notify an existing agent about a CI failure + * + * Used when an agent is already working on a PR and a new failure occurs. 
+ * + * @param agentId - The ID of the existing agent + * @param failureEvent - The new CI failure event + */ +export async function notifyAgentOfCIFailure( + agentId: string, + failureEvent: CIFailureEvent +): Promise { + console.log(`[ci-spawner] Notifying agent ${agentId} of new failure`); + + // Find the repository + const repository = await db.repositories.findByFullName(failureEvent.repository); + if (!repository || !repository.userId) { + console.warn(`[ci-spawner] Repository not found or has no userId: ${failureEvent.repository}`); + return; + } + + // Find the daemon that should have this agent + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + console.warn(`[ci-spawner] No online daemon to notify agent ${agentId}`); + return; + } + + // Build notification message + const annotations = failureEvent.annotations as CIAnnotation[] | null; + const annotationsList = annotations && annotations.length > 0 + ? annotations + .slice(0, 10) + .map(a => ` - ${a.path}:${a.startLine}: ${a.message}`) + .join('\n') + : null; + + const message = ` +CI FAILURE: ${failureEvent.checkName} + +${failureEvent.failureTitle || 'Check failed'} + +${failureEvent.failureSummary || ''} + +${annotationsList ? `Issues:\n${annotationsList}` : ''} + +Please investigate and fix these issues, then push your changes. 
+`.trim(); + + // Queue message for the agent via daemon + await db.linkedDaemons.queueMessage(onlineDaemon.id, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: agentId, + content: message, + metadata: { type: 'ci_failure_notification', failureEventId: failureEvent.id }, + timestamp: new Date().toISOString(), + }); + + console.log(`[ci-spawner] Queued CI failure notification for agent ${agentId}`); +} + +/** + * Mark a fix attempt as complete + * + * Called when an agent reports completion (success or failure) + */ +export async function completeFixAttempt( + fixAttemptId: string, + success: boolean, + commitSha?: string, + errorMessage?: string +): Promise { + console.log(`[ci-spawner] Completing fix attempt ${fixAttemptId}: ${success ? 'success' : 'failed'}`); + + // Update the fix attempt record + await db.ciFixAttempts.complete( + fixAttemptId, + success ? 'success' : 'failed', + commitSha, + errorMessage + ); + + // Get the fix attempt to find the failure event + const fixAttempt = await db.ciFixAttempts.findById(fixAttemptId); + if (!fixAttempt) { + console.warn(`[ci-spawner] Fix attempt not found: ${fixAttemptId}`); + return; + } + + // Get the failure event to find the repository and PR + const failureEvent = await db.ciFailureEvents.findById(fixAttempt.failureEventId); + if (!failureEvent) { + console.warn(`[ci-spawner] Failure event not found: ${fixAttempt.failureEventId}`); + return; + } + + // Find the repository to post completion comment + const repository = await db.repositories.findByFullName(failureEvent.repository); + if (repository && failureEvent.prNumber !== null) { + const summary = success + ? 
`The @ci-fix agent has fixed the \`${failureEvent.checkName}\` check failure.` + : errorMessage || 'The @ci-fix agent was unable to fix the issue.'; + + await postCompletionComment( + repository, + failureEvent.prNumber, + success, + summary, + commitSha + ); + } +} + +/** + * Get failure history for a repository + */ +export async function getFailureHistory( + repository: string, + limit = 50 +): Promise { + return db.ciFailureEvents.findByRepository(repository, limit); +} + +/** + * Get failure history for a specific PR + */ +export async function getPRFailureHistory( + repository: string, + prNumber: number +): Promise { + return db.ciFailureEvents.findByPR(repository, prNumber); +} diff --git a/src/cloud/services/index.ts b/src/cloud/services/index.ts index a1961ce8..53cb1b73 100644 --- a/src/cloud/services/index.ts +++ b/src/cloud/services/index.ts @@ -44,3 +44,24 @@ export { getScalingOrchestrator, createScalingOrchestrator, } from './scaling-orchestrator.js'; + +// CI failure handling +export { + spawnCIFixAgent, + notifyAgentOfCIFailure, + completeFixAttempt, + getFailureHistory, + getPRFailureHistory, +} from './ci-agent-spawner.js'; + +// Issue and mention handling +export { + handleMention, + handleIssueAssignment, + getPendingMentions, + getPendingIssueAssignments, + processPendingMentions, + processPendingIssueAssignments, + KNOWN_AGENTS, + isKnownAgent, +} from './mention-handler.js'; diff --git a/src/cloud/services/mention-handler.ts b/src/cloud/services/mention-handler.ts new file mode 100644 index 00000000..6cecaafb --- /dev/null +++ b/src/cloud/services/mention-handler.ts @@ -0,0 +1,496 @@ +/** + * Mention Handler Service + * + * Handles @mentions of agents in GitHub issues and PR comments. + * Routes mentions to appropriate agents for response. + * + * Flow: + * 1. App posts acknowledgment comment + * 2. Finds a linked daemon for the repository + * 3. Queues spawn command for the daemon + * 4. 
Agent works and posts response comment + */ + +import { db, CommentMention, IssueAssignment, Repository } from '../db/index.js'; +import { nangoService } from './nango.js'; + +/** + * Known agent types that can be mentioned + */ +export const KNOWN_AGENTS = { + // Generic agents + 'agent-relay': 'General purpose agent for any task', + 'lead': 'Lead agent for coordination and delegation', + 'developer': 'Developer agent for coding tasks', + 'reviewer': 'Code review agent', + + // Specialized agents + 'ci-fix': 'CI failure fixing agent', + 'debugger': 'Bug investigation and fixing agent', + 'docs': 'Documentation agent', + 'test': 'Test writing agent', + 'refactor': 'Code refactoring agent', +} as const; + +export type KnownAgentType = keyof typeof KNOWN_AGENTS; + +/** + * Check if a mention is for a known agent type + */ +export function isKnownAgent(mention: string): mention is KnownAgentType { + return mention in KNOWN_AGENTS; +} + +/** + * Get the GitHub App name for comments + */ +function getAppName(): string { + return process.env.GITHUB_APP_NAME || 'Agent Relay'; +} + +/** + * Post an acknowledgment comment on GitHub + */ +async function postAcknowledgmentComment( + repository: Repository, + issueNumber: number, + mentionedAgent: string, + authorLogin: string +): Promise<{ id: number; url: string } | null> { + if (!repository.nangoConnectionId) { + console.warn(`[mention-handler] Repository ${repository.githubFullName} has no Nango connection`); + return null; + } + + const [owner, repo] = repository.githubFullName.split('/'); + const appName = getAppName(); + const agentDescription = isKnownAgent(mentionedAgent) + ? KNOWN_AGENTS[mentionedAgent] + : 'Custom agent'; + + const body = `👋 @${authorLogin}, I've received your request and am routing it to **@${mentionedAgent}** (${agentDescription}). + +The agent will respond shortly. You can track progress in this thread. 
+ +_— ${appName}_`; + + try { + const result = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + issueNumber, + body + ); + console.log(`[mention-handler] Posted acknowledgment comment: ${result.html_url}`); + return { id: result.id, url: result.html_url }; + } catch (error) { + console.error(`[mention-handler] Failed to post acknowledgment comment:`, error); + return null; + } +} + +/** + * Find a linked daemon that can handle this repository + */ +async function findAvailableDaemon(repository: Repository): Promise<{ id: string; userId: string } | null> { + // The daemon must belong to the repository owner + if (!repository.userId) { + console.warn(`[mention-handler] Repository ${repository.githubFullName} has no userId`); + return null; + } + + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + console.warn(`[mention-handler] No online daemon found for user ${repository.userId}`); + return null; + } + + return { id: onlineDaemon.id, userId: repository.userId }; +} + +/** + * Queue a spawn command for a linked daemon + */ +async function queueSpawnCommand( + daemonId: string, + agentName: string, + prompt: string, + metadata: { + mentionId: string; + repository: string; + issueNumber: number; + authorLogin: string; + } +): Promise { + const command = { + type: 'spawn_agent', + agentName, + cli: 'claude', // Default to Claude CLI + task: prompt, + metadata, + timestamp: new Date().toISOString(), + }; + + await db.linkedDaemons.queueMessage(daemonId, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: '__spawner__', + content: JSON.stringify(command), + metadata: { type: 'spawn_command' }, + timestamp: new Date().toISOString(), + }); + + console.log(`[mention-handler] Queued spawn command for daemon ${daemonId}`); +} + +/** + * Handle a mention record + * + * This function: + 
* 1. Validates the mention is for a known agent + * 2. Posts an acknowledgment comment + * 3. Finds a linked daemon + * 4. Queues a spawn command for the agent + */ +export async function handleMention(mention: CommentMention): Promise { + console.log(`[mention-handler] Processing mention: @${mention.mentionedAgent} in ${mention.repository}`); + + // Check if this is a known agent type + if (!isKnownAgent(mention.mentionedAgent)) { + console.log(`[mention-handler] Unknown agent: @${mention.mentionedAgent}, checking workspace config`); + // TODO: Check workspace configuration for custom agent names + // For now, mark as ignored + await db.commentMentions.markIgnored(mention.id); + return; + } + + // Find the repository to get Nango connection + const repository = await db.repositories.findByFullName(mention.repository); + if (!repository) { + console.error(`[mention-handler] Repository not found: ${mention.repository}`); + await db.commentMentions.markIgnored(mention.id); + return; + } + + // Generate agent info + const agentId = `mention-${mention.id}`; + const agentName = `${mention.mentionedAgent}-${mention.issueOrPrNumber}`; + + // Update status to processing + await db.commentMentions.markProcessing(mention.id, agentId, agentName); + + // Step 1: Post acknowledgment comment + const ackResult = await postAcknowledgmentComment( + repository, + mention.issueOrPrNumber, + mention.mentionedAgent, + mention.authorLogin + ); + + if (!ackResult) { + console.warn(`[mention-handler] Could not post acknowledgment, continuing anyway`); + } + + // Step 2: Find a linked daemon + const daemon = await findAvailableDaemon(repository); + + if (!daemon) { + console.warn(`[mention-handler] No available daemon for ${mention.repository}`); + // Post a comment explaining the situation + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + 
mention.issueOrPrNumber, + `⚠️ @${mention.authorLogin}, I couldn't find an available agent to handle this request. Please ensure you have a linked Agent Relay daemon running. + +You can set this up by running \`agent-relay cloud link\` on your development machine. + +_— ${getAppName()}_` + ); + } catch (error) { + console.error(`[mention-handler] Failed to post error comment:`, error); + } + } + return; + } + + // Step 3: Build the prompt for the agent + const prompt = buildMentionPrompt(mention, repository); + + // Step 4: Queue spawn command for the daemon + await queueSpawnCommand(daemon.id, agentName, prompt, { + mentionId: mention.id, + repository: mention.repository, + issueNumber: mention.issueOrPrNumber, + authorLogin: mention.authorLogin, + }); + + console.log(`[mention-handler] Spawned agent @${mention.mentionedAgent} for mention ${mention.id}`); +} + +/** + * Build a prompt for handling a mention + */ +function buildMentionPrompt(mention: CommentMention, _repository: Repository): string { + const agentDescription = isKnownAgent(mention.mentionedAgent) + ? KNOWN_AGENTS[mention.mentionedAgent] + : 'Custom agent'; + + const sourceTypeDescription = { + issue_comment: 'GitHub issue comment', + pr_comment: 'GitHub PR comment', + pr_review: 'GitHub PR review comment', + }[mention.sourceType] || 'GitHub comment'; + + const responseInstructions = ` +## Response Instructions + +When you complete your work: +1. Post a comment on GitHub to notify @${mention.authorLogin} +2. Reference specific files and line numbers when relevant +3. 
If you made code changes, push them and reference the commit + +Use the GitHub CLI (\`gh\`) to post your response: +\`\`\`bash +gh issue comment ${mention.issueOrPrNumber} --repo ${mention.repository} --body "Your response here @${mention.authorLogin}" +\`\`\` + +Or for PR comments: +\`\`\`bash +gh pr comment ${mention.issueOrPrNumber} --repo ${mention.repository} --body "Your response here @${mention.authorLogin}" +\`\`\` +`; + + return ` +# Agent Mention Task + +You (@${mention.mentionedAgent}) have been mentioned in a ${sourceTypeDescription}. + +## Your Role +${agentDescription} + +## Context + +**Repository:** ${mention.repository} +**Issue/PR:** #${mention.issueOrPrNumber} +**Comment by:** @${mention.authorLogin} +**Comment URL:** ${mention.commentUrl || 'N/A'} + +## Comment + +${mention.commentBody} + +## Your Task + +Analyze the comment and respond appropriately: + +1. If a question was asked, provide a helpful answer +2. If a task was requested, either complete it or explain what's needed +3. 
If feedback was given, acknowledge it and act on it if needed + +${responseInstructions} + +## Important + +- Be concise and helpful +- If you need to make code changes, create a commit and push +- If the request is unclear, ask for clarification in your response +- Always @mention ${mention.authorLogin} in your response so they get notified +`.trim(); +} + +/** + * Handle an issue assignment + * + * Called when an issue should be assigned to an agent + */ +export async function handleIssueAssignment(assignment: IssueAssignment): Promise { + console.log(`[mention-handler] Processing issue assignment: #${assignment.issueNumber} in ${assignment.repository}`); + + // Find the repository + const repository = await db.repositories.findByFullName(assignment.repository); + if (!repository) { + console.error(`[mention-handler] Repository not found: ${assignment.repository}`); + return; + } + + // Post acknowledgment comment + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + assignment.issueNumber, + `🤖 I've been assigned to work on this issue. I'll analyze the problem and get started. + +You can track my progress in this thread. I'll update you when I have a solution or need more information. + +_— ${getAppName()}_` + ); + } catch (error) { + console.error(`[mention-handler] Failed to post assignment comment:`, error); + } + } + + // Find a linked daemon + const daemon = await findAvailableDaemon(repository); + + if (!daemon) { + console.warn(`[mention-handler] No available daemon for ${assignment.repository}`); + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + assignment.issueNumber, + `⚠️ I couldn't start working on this issue because no Agent Relay daemon is available. 
+ +Please ensure you have a linked daemon running by executing \`agent-relay cloud link\` on your development machine. + +_— ${getAppName()}_` + ); + } catch (error) { + console.error(`[mention-handler] Failed to post error comment:`, error); + } + } + return; + } + + // Build prompt for the issue + const prompt = buildIssuePrompt(assignment, repository); + + // Queue spawn command + const agentName = `issue-${assignment.issueNumber}`; + await queueSpawnCommand(daemon.id, agentName, prompt, { + mentionId: assignment.id, + repository: assignment.repository, + issueNumber: assignment.issueNumber, + authorLogin: 'issue-author', // TODO: Get from issue + }); + + // Update assignment status and assign agent + await db.issueAssignments.assignAgent(assignment.id, agentName, agentName); + await db.issueAssignments.updateStatus(assignment.id, 'in_progress'); + + console.log(`[mention-handler] Spawned agent for issue #${assignment.issueNumber}`); +} + +/** + * Build a prompt for an issue assignment + */ +function buildIssuePrompt(assignment: IssueAssignment, _repository: Repository): string { + const priorityNote = assignment.priority + ? `\n**Priority:** ${assignment.priority.toUpperCase()}` + : ''; + + const labelsNote = assignment.labels && assignment.labels.length > 0 + ? `\n**Labels:** ${assignment.labels.join(', ')}` + : ''; + + return ` +# Issue Assignment + +You have been assigned to work on GitHub issue #${assignment.issueNumber}. + +## Issue Details + +**Repository:** ${assignment.repository} +**Title:** ${assignment.issueTitle}${priorityNote}${labelsNote} +**URL:** ${assignment.issueUrl || 'N/A'} + +## Description + +${assignment.issueBody || 'No description provided.'} + +## Your Task + +1. Analyze the issue and understand what needs to be done +2. Investigate the codebase to find relevant files +3. Implement a solution if possible +4. Create a PR with your changes +5. 
Link the PR to this issue + +## Response Instructions + +Keep the issue updated with your progress: +\`\`\`bash +gh issue comment ${assignment.issueNumber} --repo ${assignment.repository} --body "Your update here" +\`\`\` + +When you create a PR: +\`\`\`bash +gh pr create --repo ${assignment.repository} --title "Fix #${assignment.issueNumber}: Brief description" --body "Fixes #${assignment.issueNumber} + +Description of changes..." +\`\`\` + +## Important + +- Start with a comment on the issue acknowledging you're working on it +- If you need clarification, ask in the issue comments +- Create a draft PR early if the fix is complex +- Reference the issue number in your commit messages (e.g., "Fix #${assignment.issueNumber}") +`.trim(); +} + +/** + * Get pending mentions for processing + */ +export async function getPendingMentions(limit = 50): Promise { + return db.commentMentions.findPending(limit); +} + +/** + * Get pending issue assignments for processing + */ +export async function getPendingIssueAssignments(limit = 50): Promise { + return db.issueAssignments.findPending(limit); +} + +/** + * Process all pending mentions (background job) + */ +export async function processPendingMentions(): Promise { + const pending = await getPendingMentions(); + let processed = 0; + + for (const mention of pending) { + try { + await handleMention(mention); + processed++; + } catch (error) { + console.error(`[mention-handler] Failed to process mention ${mention.id}:`, error); + } + } + + return processed; +} + +/** + * Process all pending issue assignments (background job) + */ +export async function processPendingIssueAssignments(): Promise { + const pending = await getPendingIssueAssignments(); + let processed = 0; + + for (const assignment of pending) { + try { + await handleIssueAssignment(assignment); + processed++; + } catch (error) { + console.error(`[mention-handler] Failed to process assignment ${assignment.id}:`, error); + } + } + + return processed; +} diff --git 
a/src/cloud/services/nango.ts b/src/cloud/services/nango.ts index 64ef4181..11d0d556 100644 --- a/src/cloud/services/nango.ts +++ b/src/cloud/services/nango.ts @@ -1,6 +1,6 @@ -import crypto from 'crypto'; import { Nango } from '@nangohq/node'; import type { AxiosResponse } from 'axios'; +import crypto from 'node:crypto'; import { getConfig } from '../config.js'; export const NANGO_INTEGRATIONS = { @@ -56,7 +56,8 @@ class NangoService { /** * Retrieve an installation access token from a GitHub App connection. - * Nango will refresh the token when refreshGithubAppJwtToken=true. + * Use this ONLY when you need the raw token (e.g., for git clone URLs). + * For API calls, use the proxy methods instead. */ async getGithubAppToken(connectionId: string): Promise { const token = await this.client.getToken( @@ -65,29 +66,115 @@ class NangoService { false, true ); - if (typeof token !== 'string') { - throw new Error('Expected GitHub App token to be a string'); + + // Handle different return formats from Nango + if (typeof token === 'string') { + return token; + } + + // Nango may return an object with access_token + if (token && typeof token === 'object') { + const tokenObj = token as { access_token?: string; token?: string }; + if (tokenObj.access_token) { + return tokenObj.access_token; + } + if (tokenObj.token) { + return tokenObj.token; + } } - return token; + + console.error('[nango] Unexpected token format:', typeof token, token); + throw new Error('Expected GitHub App token to be a string'); } /** - * List repositories available to a GitHub App installation using the Nango connection. + * List repositories available to a GitHub App installation using the Nango Proxy. + * The proxy automatically handles token injection and refresh. 
+ * @see https://nango.dev/docs/implementation-guides/requests-proxy/implement-requests-proxy */ async listGithubAppRepos(connectionId: string): Promise<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }> { - const token = await this.getGithubAppToken(connectionId); - const response = await fetch('https://api.github.com/installation/repositories?per_page=100', { - method: 'GET', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - }, - }); - if (!response.ok) { - const text = await response.text(); - throw new Error(`Failed to list installation repositories: ${text}`); + const response = await this.client.get<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: '/installation/repositories', + params: { per_page: '100' }, + }) as AxiosResponse<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }>; + return response.data; + } + + /** + * Get the GitHub App installation ID from a connection. 
+ * The installation ID is stored in connection_config.installation_id + */ + async getGithubAppInstallationId(connectionId: string): Promise { + try { + const connection = await this.client.getConnection(NANGO_INTEGRATIONS.GITHUB_APP, connectionId); + // Extract installation_id from connection_config (where Nango stores it for GitHub App OAuth) + const connectionConfig = (connection as { connection_config?: Record }).connection_config; + if (connectionConfig?.installation_id) { + return Number(connectionConfig.installation_id); + } + console.warn('[nango] No installation_id in connection_config'); + return null; + } catch (err) { + console.error('[nango] Failed to get installation ID:', err); + return null; } - return response.json() as Promise<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }>; + } + + /** + * Create an issue via Nango Proxy. + */ + async createGithubIssue( + connectionId: string, + owner: string, + repo: string, + data: { title: string; body?: string; labels?: string[] } + ): Promise<{ number: number; html_url: string }> { + const response = await this.client.post<{ number: number; html_url: string }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: `/repos/${owner}/${repo}/issues`, + data, + }) as AxiosResponse<{ number: number; html_url: string }>; + return response.data; + } + + /** + * Create a pull request via Nango Proxy. 
+ */ + async createGithubPullRequest( + connectionId: string, + owner: string, + repo: string, + data: { title: string; body?: string; head: string; base: string } + ): Promise<{ number: number; html_url: string }> { + const response = await this.client.post<{ number: number; html_url: string }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: `/repos/${owner}/${repo}/pulls`, + data, + }) as AxiosResponse<{ number: number; html_url: string }>; + return response.data; + } + + /** + * Add a comment to an issue via Nango Proxy. + */ + async addGithubIssueComment( + connectionId: string, + owner: string, + repo: string, + issueNumber: number, + body: string + ): Promise<{ id: number; html_url: string }> { + const response = await this.client.post<{ id: number; html_url: string }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: `/repos/${owner}/${repo}/issues/${issueNumber}/comments`, + data: { body }, + }) as AxiosResponse<{ id: number; html_url: string }>; + return response.data; } /** @@ -101,12 +188,63 @@ class NangoService { } /** - * Verify webhook signature sent by Nango using HMAC SHA256 with the secret key. + * Delete a connection from Nango. + * + * Used to remove temporary session connections for returning users + * to prevent duplicate connections in Nango. In the two-connection pattern, + * new users get a permanent connection but returning users authenticate + * with a temporary one that gets deleted. 
+ * + * @param connectionId - Nango connection ID to delete + * @param providerConfigKey - The integration key (e.g., 'github') */ - verifyWebhookSignature(rawBody: string, signature?: string | string[] | null): boolean { - if (!signature || typeof signature !== 'string') return false; - const expected = crypto.createHmac('sha256', this.secret).update(rawBody).digest('hex'); - return signature === expected; + async deleteConnection(connectionId: string, providerConfigKey: string): Promise { + await this.client.deleteConnection(providerConfigKey, connectionId); + } + + /** + * Get connection metadata including end_user info. + * Useful when webhook doesn't include end_user data. + */ + async getConnection(connectionId: string, providerConfigKey: string): Promise<{ + id: number; + connection_id: string; + provider_config_key: string; + end_user?: { id?: string; email?: string }; + metadata?: Record; + }> { + const connection = await this.client.getConnection(providerConfigKey, connectionId); + return connection as unknown as { + id: number; + connection_id: string; + provider_config_key: string; + end_user?: { id?: string; email?: string }; + metadata?: Record; + }; + } + + /** + * Verify webhook signature sent by Nango. + * Uses the new verifyIncomingWebhookRequest method. 
+ * @see https://nango.dev/docs/reference/sdks/node#verify-webhook-signature + */ + verifyWebhookSignature(rawBody: string, headers: Record): boolean { + try { + // Use the new method: verifyIncomingWebhookRequest(body, headers) + return this.client.verifyIncomingWebhookRequest(rawBody, headers as Record); + } catch (err) { + console.error('[nango] verifyIncomingWebhookRequest error:', err); + // Fall back to manual HMAC verification using the secret key + const signature = headers['x-nango-signature'] as string | undefined; + const hmacSha256 = headers['x-nango-hmac-sha256'] as string | undefined; + if (!signature && !hmacSha256) return false; + + const expectedSignature = crypto + .createHmac('sha256', this.secret) + .update(rawBody) + .digest('hex'); + return signature === expectedSignature || hmacSha256 === expectedSignature; + } } } diff --git a/src/cloud/vault/index.ts b/src/cloud/vault/index.ts index bcccfc71..c6448b9b 100644 --- a/src/cloud/vault/index.ts +++ b/src/cloud/vault/index.ts @@ -4,7 +4,7 @@ * Secure storage for OAuth tokens with AES-256-GCM encryption. */ -import crypto from 'crypto'; +import * as crypto from 'crypto'; import { getConfig } from '../config.js'; import { db } from '../db/index.js'; diff --git a/src/cloud/webhooks/index.ts b/src/cloud/webhooks/index.ts new file mode 100644 index 00000000..de883ab4 --- /dev/null +++ b/src/cloud/webhooks/index.ts @@ -0,0 +1,42 @@ +/** + * Generic Webhook System + * + * A configurable webhook system that can handle events from any source + * (GitHub, GitLab, Linear, Slack, Jira, etc.) and route them to agents. 
+ * + * Components: + * - Parsers: Transform source-specific payloads to normalized events + * - Responders: Send responses back to source systems + * - Rules Engine: Match events to actions based on configuration + * - Router: Orchestrates the full webhook processing pipeline + */ + +// Types +export * from './types.js'; + +// Parsers +export { getParser, registerParser, parsers } from './parsers/index.js'; +export { githubParser } from './parsers/github.js'; +export { linearParser } from './parsers/linear.js'; +export { slackParser } from './parsers/slack.js'; + +// Responders +export { getResponder, registerResponder, responders } from './responders/index.js'; +export { githubResponder } from './responders/github.js'; +export { linearResponder } from './responders/linear.js'; +export { slackResponder, formatSlackBlocks } from './responders/slack.js'; + +// Rules Engine +export { + matchesRule, + findMatchingRules, + resolveActionTemplate, + defaultRules, +} from './rules-engine.js'; + +// Router +export { + processWebhook, + getWebhookConfig, + defaultSources, +} from './router.js'; diff --git a/src/cloud/webhooks/parsers/github.test.ts b/src/cloud/webhooks/parsers/github.test.ts new file mode 100644 index 00000000..cb103787 --- /dev/null +++ b/src/cloud/webhooks/parsers/github.test.ts @@ -0,0 +1,412 @@ +/** + * GitHub Parser Tests + */ + +import { describe, it, expect } from 'vitest'; +import { githubParser } from './github.js'; + +describe('githubParser', () => { + describe('check_run events', () => { + it('should parse CI failure event', () => { + const payload = { + action: 'completed', + check_run: { + id: 12345, + name: 'build', + conclusion: 'failure', + html_url: 'https://github.com/owner/repo/runs/12345', + pull_requests: [ + { + number: 42, + head: { ref: 'feature-branch', sha: 'abc123' }, + }, + ], + output: { + title: 'Build failed', + summary: 'TypeScript compilation errors', + text: 'Error details here', + annotations: [ + { + path: 'src/index.ts', 
+ start_line: 10, + end_line: 10, + annotation_level: 'failure', + message: "Cannot find name 'foo'", + }, + ], + }, + }, + repository: { + full_name: 'owner/repo', + html_url: 'https://github.com/owner/repo', + }, + sender: { + id: 123, + login: 'github-actions', + }, + }; + + const headers = { + 'x-github-event': 'check_run', + 'x-github-delivery': 'delivery-123', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('ci_failure'); + expect(events[0].source).toBe('github'); + expect(events[0].context.name).toBe('owner/repo'); + expect(events[0].item?.type).toBe('check'); + expect(events[0].item?.number).toBe(42); + expect(events[0].metadata?.checkName).toBe('build'); + expect(events[0].metadata?.annotations).toHaveLength(1); + }); + + it('should not create CI failure event for successful check run', () => { + const payload = { + action: 'completed', + check_run: { + id: 12345, + name: 'build', + conclusion: 'success', + pull_requests: [{ number: 42, head: { ref: 'main', sha: 'abc' } }], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'check_run', + 'x-github-delivery': 'delivery-123', + }; + + const events = githubParser.parse(payload, headers); + + // Should not create a ci_failure event (may create generic event or none) + const ciFailureEvents = events.filter(e => e.type === 'ci_failure'); + expect(ciFailureEvents).toHaveLength(0); + }); + + it('should not create CI failure event for check run without PR', () => { + const payload = { + action: 'completed', + check_run: { + id: 12345, + name: 'build', + conclusion: 'failure', + pull_requests: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'check_run', + 'x-github-delivery': 'delivery-123', + }; + + const events = githubParser.parse(payload, headers); + + // Should 
not create a ci_failure event (may create generic event or none) + const ciFailureEvents = events.filter(e => e.type === 'ci_failure'); + expect(ciFailureEvents).toHaveLength(0); + }); + }); + + describe('issue_comment events', () => { + it('should parse mention in issue comment', () => { + const payload = { + action: 'created', + issue: { + number: 42, + title: 'Bug report', + html_url: 'https://github.com/owner/repo/issues/42', + }, + comment: { + id: 789, + body: '@developer please fix this bug', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { + full_name: 'owner/repo', + html_url: 'https://github.com/owner/repo', + }, + sender: { + id: 123, + login: 'reporter', + }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].item?.number).toBe(42); + expect(events[0].item?.body).toBe('@developer please fix this bug'); + }); + + it('should extract multiple mentions', () => { + const payload = { + action: 'created', + issue: { number: 42, title: 'Issue' }, + comment: { + id: 789, + body: '@lead please assign this to @developer or @reviewer', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].mentions).toContain('reviewer'); + }); + + it('should not create mention event if no mentions', () => { + const payload = { + action: 'created', + issue: { 
number: 42, title: 'Issue' }, + comment: { + id: 789, + body: 'This is a regular comment', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(0); + }); + + it('should identify PR comments vs issue comments', () => { + const payload = { + action: 'created', + issue: { + number: 42, + title: 'Fix bug', + pull_request: { url: 'https://api.github.com/repos/owner/repo/pulls/42' }, + }, + comment: { + id: 789, + body: '@reviewer please check this', + html_url: 'https://github.com/owner/repo/pull/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'developer' }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].item?.type).toBe('pull_request'); + expect(events[0].metadata?.isPR).toBe(true); + }); + }); + + describe('issues events', () => { + it('should parse issue created event', () => { + const payload = { + action: 'opened', + issue: { + id: 123, + number: 42, + title: 'Critical bug in production', + body: 'The app crashes when users try to login', + html_url: 'https://github.com/owner/repo/issues/42', + state: 'open', + labels: [ + { name: 'bug' }, + { name: 'critical' }, + ], + assignees: [], + }, + repository: { + full_name: 'owner/repo', + html_url: 'https://github.com/owner/repo', + }, + sender: { + id: 123, + login: 'reporter', + }, + }; + + const headers = { + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-789', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + 
expect(events[0].type).toBe('issue_created'); + expect(events[0].item?.title).toBe('Critical bug in production'); + expect(events[0].labels).toContain('bug'); + expect(events[0].labels).toContain('critical'); + expect(events[0].priority).toBe('critical'); + }); + + it('should extract mentions from issue body', () => { + const payload = { + action: 'opened', + issue: { + id: 123, + number: 42, + title: 'Feature request', + body: 'Hey @lead, can we add this feature? cc @developer', + html_url: 'https://github.com/owner/repo/issues/42', + state: 'open', + labels: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-789', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + }); + + it('should map priority labels correctly', () => { + const testCases = [ + { labels: [{ name: 'p0' }], expected: 'critical' }, + { labels: [{ name: 'p1' }], expected: 'high' }, + { labels: [{ name: 'high' }], expected: 'high' }, + { labels: [{ name: 'p2' }], expected: 'medium' }, + { labels: [{ name: 'medium' }], expected: 'medium' }, + { labels: [{ name: 'p3' }], expected: 'low' }, + { labels: [{ name: 'low' }], expected: 'low' }, + { labels: [{ name: 'enhancement' }], expected: undefined }, + ]; + + for (const { labels, expected } of testCases) { + const payload = { + action: 'opened', + issue: { + id: 123, + number: 42, + title: 'Test', + body: '', + html_url: 'https://github.com/owner/repo/issues/42', + state: 'open', + labels, + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-789', + }; + + const events = githubParser.parse(payload, headers); + expect(events[0].priority).toBe(expected); + } + 
}); + }); + + describe('pull_request_review_comment events', () => { + it('should parse review comment with mention', () => { + const payload = { + action: 'created', + pull_request: { + number: 42, + title: 'Add feature', + }, + comment: { + id: 789, + body: '@developer this needs to be refactored', + html_url: 'https://github.com/owner/repo/pull/42#discussion_r789', + path: 'src/index.ts', + line: 25, + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'reviewer' }, + }; + + const headers = { + 'x-github-event': 'pull_request_review_comment', + 'x-github-delivery': 'delivery-abc', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].metadata?.filePath).toBe('src/index.ts'); + expect(events[0].metadata?.line).toBe(25); + expect(events[0].metadata?.isReviewComment).toBe(true); + }); + }); + + describe('pull_request events', () => { + it('should parse PR opened event', () => { + const payload = { + action: 'opened', + pull_request: { + id: 123, + number: 42, + title: 'Add new feature', + body: 'This PR adds the requested feature', + html_url: 'https://github.com/owner/repo/pull/42', + state: 'open', + draft: false, + head: { ref: 'feature-branch' }, + base: { ref: 'main' }, + labels: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'developer' }, + }; + + const headers = { + 'x-github-event': 'pull_request', + 'x-github-delivery': 'delivery-def', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('pr_opened'); + expect(events[0].item?.title).toBe('Add new feature'); + expect(events[0].metadata?.head).toBe('feature-branch'); + expect(events[0].metadata?.base).toBe('main'); + }); + }); +}); diff --git a/src/cloud/webhooks/parsers/github.ts b/src/cloud/webhooks/parsers/github.ts new 
file mode 100644 index 00000000..3fab490c --- /dev/null +++ b/src/cloud/webhooks/parsers/github.ts @@ -0,0 +1,249 @@ +/** + * GitHub Webhook Parser + * + * Transforms GitHub webhook payloads into normalized events. + */ + +import type { NormalizedEvent, WebhookParser } from '../types.js'; + +/** + * Extract @mentions from text + */ +function extractMentions(text: string | null | undefined): string[] { + if (!text) return []; + const mentionPattern = /@([a-zA-Z][a-zA-Z0-9_-]*)/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + return [...new Set(mentions)]; +} + +/** + * Map GitHub priority labels to normalized priority + */ +function extractPriority(labels: Array<{ name: string }>): 'critical' | 'high' | 'medium' | 'low' | undefined { + const labelNames = labels.map(l => l.name.toLowerCase()); + if (labelNames.includes('critical') || labelNames.includes('p0')) return 'critical'; + if (labelNames.includes('high') || labelNames.includes('p1')) return 'high'; + if (labelNames.includes('medium') || labelNames.includes('p2')) return 'medium'; + if (labelNames.includes('low') || labelNames.includes('p3')) return 'low'; + return undefined; +} + +export const githubParser: WebhookParser = { + id: 'github', + + parse(payload: unknown, headers: Record): NormalizedEvent[] { + const eventType = headers['x-github-event'] as string; + const deliveryId = headers['x-github-delivery'] as string; + const data = payload as Record; + + const events: NormalizedEvent[] = []; + const repository = data.repository as Record | undefined; + const sender = data.sender as Record | undefined; + + const baseEvent: Partial = { + id: deliveryId || `github-${Date.now()}`, + source: 'github', + timestamp: new Date(), + actor: { + id: String(sender?.id || 'unknown'), + name: String(sender?.login || 'unknown'), + }, + context: { + name: String(repository?.full_name || 'unknown'), + url: 
String(repository?.html_url || ''), + }, + labels: [], + mentions: [], + metadata: {}, + rawPayload: payload, + }; + + switch (eventType) { + case 'check_run': { + const checkRun = data.check_run as Record; + const action = data.action as string; + const conclusion = checkRun?.conclusion as string | null; + const pullRequests = checkRun?.pull_requests as Array> | undefined; + + if (action === 'completed' && conclusion === 'failure' && pullRequests?.length) { + const pr = pullRequests[0]; + const output = checkRun.output as Record | undefined; + const annotations = output?.annotations as Array> | undefined; + + events.push({ + ...baseEvent, + type: 'ci_failure', + item: { + type: 'check', + id: String(checkRun.id), + number: pr.number as number, + title: String(checkRun.name), + body: String(output?.summary || ''), + url: String(checkRun.html_url || ''), + state: 'failure', + }, + metadata: { + checkName: checkRun.name, + conclusion, + branch: (pr.head as Record)?.ref, + commitSha: (pr.head as Record)?.sha, + failureTitle: output?.title, + failureSummary: output?.summary, + failureDetails: output?.text, + annotations: annotations?.map(a => ({ + path: a.path, + startLine: a.start_line, + endLine: a.end_line, + level: a.annotation_level, + message: a.message, + })), + }, + } as NormalizedEvent); + } + break; + } + + case 'issues': { + const issue = data.issue as Record; + const action = data.action as string; + const labels = (issue?.labels || []) as Array<{ name: string }>; + + if (action === 'opened' || action === 'labeled') { + events.push({ + ...baseEvent, + type: 'issue_created', + item: { + type: 'issue', + id: String(issue.id), + number: issue.number as number, + title: String(issue.title), + body: String(issue.body || ''), + url: String(issue.html_url), + state: String(issue.state), + }, + labels: labels.map(l => l.name), + priority: extractPriority(labels), + mentions: extractMentions(issue.body as string), + metadata: { + action, + assignees: 
(issue.assignees as Array> || []).map(a => a.login), + }, + } as NormalizedEvent); + } + break; + } + + case 'issue_comment': { + const issue = data.issue as Record; + const comment = data.comment as Record; + const action = data.action as string; + const isPR = !!(issue?.pull_request); + + if (action === 'created') { + const mentions = extractMentions(comment.body as string); + if (mentions.length > 0) { + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: isPR ? 'pull_request' : 'issue', + id: String(comment.id), + number: issue.number as number, + title: String(issue.title), + body: String(comment.body), + url: String(comment.html_url), + }, + mentions, + metadata: { + commentId: comment.id, + commentUrl: comment.html_url, + isPR, + }, + } as NormalizedEvent); + } + } + break; + } + + case 'pull_request_review_comment': { + const pr = data.pull_request as Record; + const comment = data.comment as Record; + const action = data.action as string; + + if (action === 'created') { + const mentions = extractMentions(comment.body as string); + if (mentions.length > 0) { + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: 'pull_request', + id: String(comment.id), + number: pr.number as number, + title: String(pr.title), + body: String(comment.body), + url: String(comment.html_url), + }, + mentions, + metadata: { + commentId: comment.id, + commentUrl: comment.html_url, + filePath: comment.path, + line: comment.line, + isPR: true, + isReviewComment: true, + }, + } as NormalizedEvent); + } + } + break; + } + + case 'pull_request': { + const pr = data.pull_request as Record; + const action = data.action as string; + const labels = (pr?.labels || []) as Array<{ name: string }>; + + if (action === 'opened') { + events.push({ + ...baseEvent, + type: 'pr_opened', + item: { + type: 'pull_request', + id: String(pr.id), + number: pr.number as number, + title: String(pr.title), + body: String(pr.body || ''), + url: String(pr.html_url), + state: 
String(pr.state), + }, + labels: labels.map(l => l.name), + priority: extractPriority(labels), + mentions: extractMentions(pr.body as string), + metadata: { + action, + head: (pr.head as Record)?.ref, + base: (pr.base as Record)?.ref, + draft: pr.draft, + }, + } as NormalizedEvent); + } + break; + } + + default: + // Unknown event type - create a generic event + events.push({ + ...baseEvent, + type: `github.${eventType}`, + metadata: { action: data.action }, + } as NormalizedEvent); + } + + return events; + }, +}; diff --git a/src/cloud/webhooks/parsers/index.ts b/src/cloud/webhooks/parsers/index.ts new file mode 100644 index 00000000..1ca4bf17 --- /dev/null +++ b/src/cloud/webhooks/parsers/index.ts @@ -0,0 +1,35 @@ +/** + * Webhook Parsers Index + * + * Registry of all available parsers. + */ + +import type { WebhookParser } from '../types.js'; +import { githubParser } from './github.js'; +import { linearParser } from './linear.js'; +import { slackParser } from './slack.js'; + +/** + * Registry of all available parsers + */ +export const parsers: Record = { + github: githubParser, + linear: linearParser, + slack: slackParser, +}; + +/** + * Get a parser by ID + */ +export function getParser(id: string): WebhookParser | undefined { + return parsers[id]; +} + +/** + * Register a custom parser + */ +export function registerParser(parser: WebhookParser): void { + parsers[parser.id] = parser; +} + +export { githubParser, linearParser, slackParser }; diff --git a/src/cloud/webhooks/parsers/linear.test.ts b/src/cloud/webhooks/parsers/linear.test.ts new file mode 100644 index 00000000..21a02028 --- /dev/null +++ b/src/cloud/webhooks/parsers/linear.test.ts @@ -0,0 +1,405 @@ +/** + * Linear Parser Tests + */ + +import { describe, it, expect } from 'vitest'; +import { linearParser } from './linear.js'; + +describe('linearParser', () => { + describe('Issue events', () => { + it('should parse issue created event', () => { + const payload = { + action: 'create', + type: 
'Issue', + webhookId: 'webhook-123', + createdAt: '2024-01-15T10:00:00Z', + data: { + id: 'issue-123', + number: 42, + title: 'Implement new feature', + description: 'We need to add a new dashboard component', + url: 'https://linear.app/team/issue/ENG-42', + identifier: 'ENG-42', + priority: 2, // High + estimate: 3, + dueDate: '2024-01-30', + state: { name: 'Todo' }, + labels: [ + { name: 'feature' }, + { name: 'frontend' }, + ], + assignee: { + id: 'user-1', + name: 'John Developer', + email: 'john@example.com', + }, + creator: { + id: 'user-2', + name: 'Jane PM', + email: 'jane@example.com', + }, + team: { + key: 'ENG', + name: 'Engineering', + }, + cycle: { + name: 'Sprint 5', + }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_created'); + expect(events[0].source).toBe('linear'); + expect(events[0].item?.type).toBe('ticket'); + expect(events[0].item?.title).toBe('Implement new feature'); + expect(events[0].item?.number).toBe(42); + expect(events[0].priority).toBe('high'); + expect(events[0].labels).toContain('feature'); + expect(events[0].labels).toContain('frontend'); + expect(events[0].metadata?.identifier).toBe('ENG-42'); + expect(events[0].metadata?.assignee).toBe('John Developer'); + expect(events[0].actor.name).toBe('Jane PM'); + expect(events[0].context.name).toBe('ENG'); + }); + + it('should map Linear priority correctly', () => { + const testCases = [ + { priority: 1, expected: 'critical' }, + { priority: 2, expected: 'high' }, + { priority: 3, expected: 'medium' }, + { priority: 4, expected: 'low' }, + { priority: 0, expected: undefined }, + { priority: undefined, expected: undefined }, + ]; + + for (const { priority, expected } of testCases) { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-123', + data: { + id: 'issue-123', + title: 'Test', + priority, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, 
{}); + expect(events[0].priority).toBe(expected); + } + }); + + it('should detect agent assignment', () => { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-456', + createdAt: '2024-01-15T11:00:00Z', + updatedFrom: { + assigneeId: null, // Was unassigned + }, + data: { + id: 'issue-123', + number: 42, + title: 'Fix authentication bug', + description: 'Users cannot log in', + url: 'https://linear.app/team/issue/ENG-42', + identifier: 'ENG-42', + state: { name: 'In Progress' }, + labels: [], + assignee: { + id: 'agent-developer-1', + name: 'Developer Agent', + email: 'developer@agents.local', + }, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_assigned'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].metadata?.action).toBe('assigned'); + }); + + it('should detect various agent name patterns', () => { + const agentNames = [ + { name: 'Lead Agent', expectedAgent: 'lead' }, + { name: 'Developer Bot', expectedAgent: 'developer' }, + { name: 'Code Reviewer', expectedAgent: 'reviewer' }, + { name: 'CI-Fix Agent', expectedAgent: 'ci-fix' }, + { name: 'Test Bot', expectedAgent: 'test' }, + { name: 'Docs Agent', expectedAgent: 'docs' }, + { name: 'Refactor Bot', expectedAgent: 'refactor' }, + { name: 'Debugger', expectedAgent: 'debugger' }, + ]; + + for (const { name, expectedAgent } of agentNames) { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-456', + updatedFrom: { assigneeId: null }, + data: { + id: 'issue-123', + title: 'Test issue', + state: { name: 'Todo' }, + labels: [], + assignee: { id: 'agent-1', name }, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + expect(events[0].type).toBe('issue_assigned'); + expect(events[0].mentions).toContain(expectedAgent); + } + }); + + it('should not treat regular user assignment as agent 
assignment', () => { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-456', + updatedFrom: { assigneeId: null }, + data: { + id: 'issue-123', + title: 'Test issue', + state: { name: 'Todo' }, + labels: [], + assignee: { id: 'user-1', name: 'John Smith' }, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + expect(events[0].type).toBe('issue_updated'); + expect(events[0].type).not.toBe('issue_assigned'); + }); + + it('should parse regular issue update', () => { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-789', + updatedFrom: { stateId: 'state-1' }, + data: { + id: 'issue-123', + title: 'Test issue', + state: { name: 'In Progress' }, + labels: [], + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_updated'); + }); + + it('should extract mentions from issue description', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-123', + data: { + id: 'issue-123', + title: 'Review request', + description: 'Hey @lead, please review this. 
cc @developer', + state: { name: 'Todo' }, + labels: [], + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + }); + }); + + describe('Comment events', () => { + it('should parse comment created event', () => { + const payload = { + action: 'create', + type: 'Comment', + webhookId: 'webhook-comment-123', + createdAt: '2024-01-15T12:00:00Z', + data: { + id: 'comment-1', + body: 'I found the root cause of this issue', + url: 'https://linear.app/team/issue/ENG-42#comment-1', + issue: { + id: 'issue-123', + number: 42, + title: 'Bug report', + identifier: 'ENG-42', + }, + user: { + id: 'user-1', + name: 'Developer', + email: 'dev@example.com', + }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('comment_created'); + expect(events[0].item?.type).toBe('comment'); + expect(events[0].item?.body).toBe('I found the root cause of this issue'); + expect(events[0].metadata?.issueIdentifier).toBe('ENG-42'); + }); + + it('should parse comment with mentions', () => { + const payload = { + action: 'create', + type: 'Comment', + webhookId: 'webhook-comment-456', + data: { + id: 'comment-2', + body: '@reviewer please take a look at this fix', + url: 'https://linear.app/team/issue/ENG-42#comment-2', + issue: { + id: 'issue-123', + number: 42, + title: 'Bug report', + identifier: 'ENG-42', + }, + user: { id: 'user-1', name: 'Developer' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('reviewer'); + }); + + it('should not create event for comment without issue context', () => { + const payload = { + action: 'create', + type: 'Comment', + webhookId: 'webhook-comment-789', + data: { + id: 'comment-3', + body: 'Orphan comment', + // No issue 
field + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + }); + + describe('Project events', () => { + it('should parse project created event', () => { + const payload = { + action: 'create', + type: 'Project', + webhookId: 'webhook-project-123', + data: { + id: 'project-1', + name: 'Q1 Roadmap', + description: 'Features for Q1 2024', + url: 'https://linear.app/team/project/q1-roadmap', + targetDate: '2024-03-31', + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('project_created'); + expect(events[0].context.name).toBe('Q1 Roadmap'); + }); + }); + + describe('IssueLabel events', () => { + it('should parse label change event', () => { + const payload = { + action: 'create', + type: 'IssueLabel', + webhookId: 'webhook-label-123', + data: { + id: 'label-1', + name: 'bug', + color: '#ff0000', + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('label_change'); + expect(events[0].labels).toContain('bug'); + }); + }); + + describe('Unknown events', () => { + it('should create generic event for unknown types', () => { + const payload = { + action: 'create', + type: 'Workflow', + webhookId: 'webhook-unknown-123', + data: { + id: 'workflow-1', + name: 'Custom workflow', + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('linear.workflow.create'); + }); + }); + + describe('Edge cases', () => { + it('should handle missing data gracefully', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-edge-1', + // Missing data field + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should handle null/undefined fields', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-edge-2', + 
data: { + id: 'issue-123', + title: null, + description: undefined, + state: null, + labels: null, + team: null, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].item?.title).toBe(''); + }); + }); +}); diff --git a/src/cloud/webhooks/parsers/linear.ts b/src/cloud/webhooks/parsers/linear.ts new file mode 100644 index 00000000..eb8cf38a --- /dev/null +++ b/src/cloud/webhooks/parsers/linear.ts @@ -0,0 +1,276 @@ +/** + * Linear Webhook Parser + * + * Transforms Linear webhook payloads into normalized events. + * Linear webhooks: https://developers.linear.app/docs/graphql/webhooks + */ + +import type { NormalizedEvent, WebhookParser } from '../types.js'; + +/** + * Extract @mentions from text (Linear uses @username format) + */ +function extractMentions(text: string | null | undefined): string[] { + if (!text) return []; + const mentionPattern = /@([a-zA-Z][a-zA-Z0-9_-]*)/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + return [...new Set(mentions)]; +} + +/** + * Map Linear priority to normalized priority + * Linear: 0 = No priority, 1 = Urgent, 2 = High, 3 = Normal, 4 = Low + */ +function mapPriority(priority: number | undefined): 'critical' | 'high' | 'medium' | 'low' | undefined { + switch (priority) { + case 1: return 'critical'; + case 2: return 'high'; + case 3: return 'medium'; + case 4: return 'low'; + default: return undefined; + } +} + +export const linearParser: WebhookParser = { + id: 'linear', + + parse(payload: unknown): NormalizedEvent[] { + const data = payload as Record; + const events: NormalizedEvent[] = []; + + const action = data.action as string; // create, update, remove + const type = data.type as string; // Issue, Comment, Project, etc. 
+ const webhookData = data.data as Record | undefined; + const webhookId = data.webhookId as string | undefined; + const createdAt = data.createdAt as string | undefined; + + if (!webhookData) return events; + + const baseEvent: Partial = { + id: webhookId || `linear-${Date.now()}`, + source: 'linear', + timestamp: createdAt ? new Date(createdAt) : new Date(), + actor: { + id: 'unknown', + name: 'unknown', + }, + context: { + name: 'unknown', + }, + labels: [], + mentions: [], + metadata: {}, + rawPayload: payload, + }; + + // Extract actor from various fields + const creator = webhookData.creator as Record | undefined; + const user = webhookData.user as Record | undefined; + const actor = creator || user; + if (actor) { + baseEvent.actor = { + id: String(actor.id || 'unknown'), + name: String(actor.name || actor.email || 'unknown'), + email: actor.email as string | undefined, + }; + } + + // Extract team/project context + const team = webhookData.team as Record | undefined; + const project = webhookData.project as Record | undefined; + if (team) { + baseEvent.context = { + name: String(team.key || team.name || 'unknown'), + url: `https://linear.app/team/${team.key}`, + }; + } else if (project) { + baseEvent.context = { + name: String(project.name || 'unknown'), + url: project.url as string | undefined, + }; + } + + switch (type) { + case 'Issue': { + const issue = webhookData; + const labels = (issue.labels as Array> || []); + const labelNames = labels.map(l => String(l.name)); + const assignee = issue.assignee as Record | undefined; + + if (action === 'create') { + events.push({ + ...baseEvent, + type: 'issue_created', + item: { + type: 'ticket', + id: String(issue.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(issue.description || ''), + url: String(issue.url || ''), + state: String((issue.state as Record)?.name || issue.state || 'unknown'), + }, + labels: labelNames, + priority: mapPriority(issue.priority 
as number | undefined), + mentions: extractMentions(issue.description as string), + metadata: { + action, + identifier: issue.identifier, // e.g., "ENG-123" + estimate: issue.estimate, + dueDate: issue.dueDate, + assignee: assignee?.name, + assigneeEmail: assignee?.email, + cycle: (issue.cycle as Record)?.name, + }, + } as NormalizedEvent); + } else if (action === 'update') { + // Check for assignment changes + const updatedFrom = data.updatedFrom as Record | undefined; + const wasAssigned = updatedFrom?.assigneeId !== undefined && + !updatedFrom?.assigneeId && + assignee?.id; + + // Check if assigned to an agent (name matches agent pattern) + const assigneeName = String(assignee?.name || '').toLowerCase(); + // Order matters: more specific patterns first, generic 'agent' and 'bot' last + const agentPatterns = ['developer', 'reviewer', 'debugger', 'ci-fix', 'refactor', 'lead', 'test', 'docs', 'agent', 'bot']; + const isAgentAssignment = wasAssigned && agentPatterns.some(p => assigneeName.includes(p)); + + if (isAgentAssignment) { + // Extract the agent type from the assignee name (finds first/most-specific match) + const matchedAgent = agentPatterns.find(p => assigneeName.includes(p)) || 'developer'; + + events.push({ + ...baseEvent, + type: 'issue_assigned', + item: { + type: 'ticket', + id: String(issue.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(issue.description || ''), + url: String(issue.url || ''), + state: String((issue.state as Record)?.name || issue.state || 'unknown'), + }, + labels: labelNames, + priority: mapPriority(issue.priority as number | undefined), + mentions: [matchedAgent], // The assigned agent type + metadata: { + action: 'assigned', + identifier: issue.identifier, + assignee: assignee?.name, + assigneeEmail: assignee?.email, + previousAssignee: updatedFrom?.assigneeId, + }, + } as NormalizedEvent); + } else { + // Regular update event + events.push({ + ...baseEvent, + type: 
'issue_updated', + item: { + type: 'ticket', + id: String(issue.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(issue.description || ''), + url: String(issue.url || ''), + state: String((issue.state as Record)?.name || issue.state || 'unknown'), + }, + labels: labelNames, + priority: mapPriority(issue.priority as number | undefined), + metadata: { + action, + identifier: issue.identifier, + updatedFrom, + }, + } as NormalizedEvent); + } + } + break; + } + + case 'Comment': { + const comment = webhookData; + const issue = comment.issue as Record | undefined; + + if (action === 'create' && issue) { + const mentions = extractMentions(comment.body as string); + + events.push({ + ...baseEvent, + type: mentions.length > 0 ? 'mention' : 'comment_created', + item: { + type: 'comment', + id: String(comment.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(comment.body || ''), + url: String(comment.url || issue.url || ''), + }, + mentions, + metadata: { + action, + issueId: issue.id, + issueIdentifier: issue.identifier, + parentCommentId: (comment.parent as Record)?.id, + }, + } as NormalizedEvent); + } + break; + } + + case 'Project': { + const project = webhookData; + + if (action === 'create') { + events.push({ + ...baseEvent, + type: 'project_created', + context: { + name: String(project.name || 'unknown'), + url: String(project.url || ''), + }, + metadata: { + action, + projectId: project.id, + description: project.description, + targetDate: project.targetDate, + }, + } as NormalizedEvent); + } + break; + } + + case 'IssueLabel': { + // Label added/removed from issue + const label = webhookData; + events.push({ + ...baseEvent, + type: 'label_change', + labels: [String(label.name || '')], + metadata: { + action, + labelId: label.id, + color: label.color, + }, + } as NormalizedEvent); + break; + } + + default: + // Unknown type - create generic event + events.push({ + 
...baseEvent, + type: `linear.${type?.toLowerCase() || 'unknown'}.${action || 'unknown'}`, + metadata: { action, type }, + } as NormalizedEvent); + } + + return events; + }, +}; diff --git a/src/cloud/webhooks/parsers/slack.test.ts b/src/cloud/webhooks/parsers/slack.test.ts new file mode 100644 index 00000000..9bdcb94f --- /dev/null +++ b/src/cloud/webhooks/parsers/slack.test.ts @@ -0,0 +1,383 @@ +/** + * Slack Parser Tests + */ + +import { describe, it, expect } from 'vitest'; +import { slackParser } from './slack.js'; + +describe('slackParser', () => { + describe('URL verification', () => { + it('should return empty array for url_verification', () => { + const payload = { + type: 'url_verification', + challenge: 'test-challenge-token', + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + }); + + describe('Non-event payloads', () => { + it('should return empty array for non-event_callback type', () => { + const payload = { + type: 'interactive_message', + callback_id: 'some-callback', + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + }); + + describe('app_mention events', () => { + it('should parse app mention event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12345', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> can you help me with this?', + ts: '1705320000.000100', + channel: 'C12345', + channel_type: 'channel', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].source).toBe('slack'); + expect(events[0].actor.id).toBe('U12345'); + expect(events[0].item?.type).toBe('message'); + // Should default to 'lead' when no specific agent mentioned + expect(events[0].mentions).toContain('lead'); + expect(events[0].metadata?.channelId).toBe('C12345'); + }); + + it('should 
extract agent mentions from message', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12346', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> @developer please help with this bug', + ts: '1705320000.000200', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('developer'); + }); + + it('should clean Slack user mentions from text', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12347', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> hey <@U67890|john> check this', + ts: '1705320000.000300', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + // Body should have cleaned text + expect(events[0].item?.body).toContain('@john'); + expect(events[0].item?.body).not.toContain('<@'); + }); + + it('should capture thread context', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12348', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> replying in thread', + ts: '1705320000.000400', + thread_ts: '1705310000.000100', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events[0].metadata?.threadTs).toBe('1705310000.000100'); + }); + }); + + describe('message events', () => { + it('should parse message with agent mention', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12349', + event_time: 1705320000, + event: { + type: 'message', + user: 'U12345', + text: '@reviewer can you check this PR?', + ts: '1705320000.000500', + channel: 'C12345', + channel_type: 'channel', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + 
expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('reviewer'); + }); + + it('should not create event for regular message without agent mention', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12350', + event_time: 1705320000, + event: { + type: 'message', + user: 'U12345', + text: 'Just a regular message', + ts: '1705320000.000600', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should ignore bot messages', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12351', + event_time: 1705320000, + event: { + type: 'message', + subtype: 'bot_message', + user: 'U_BOT', + text: '@developer check this', + ts: '1705320000.000700', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should ignore message_changed subtypes', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12352', + event_time: 1705320000, + event: { + type: 'message', + subtype: 'message_changed', + user: 'U12345', + text: '@developer check this', + ts: '1705320000.000800', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should allow thread_broadcast subtype', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12353', + event_time: 1705320000, + event: { + type: 'message', + subtype: 'thread_broadcast', + user: 'U12345', + text: '@lead important update', + ts: '1705320000.000900', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('lead'); + }); + }); + + describe('reaction_added events', () => { + it('should parse reaction added event', () => { + const payload = { + type: 
'event_callback', + team_id: 'T12345', + event_id: 'Ev12354', + event_time: 1705320000, + event: { + type: 'reaction_added', + user: 'U12345', + reaction: 'thumbsup', + item: { + type: 'message', + channel: 'C12345', + ts: '1705310000.000100', + }, + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('reaction_added'); + expect(events[0].labels).toContain('thumbsup'); + expect(events[0].metadata?.reaction).toBe('thumbsup'); + }); + }); + + describe('channel_created events', () => { + it('should parse channel created event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12355', + event_time: 1705320000, + event: { + type: 'channel_created', + channel: { + id: 'C_NEW', + name: 'project-alpha', + creator: 'U12345', + }, + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('channel_created'); + expect(events[0].context.name).toBe('project-alpha'); + }); + }); + + describe('member_joined_channel events', () => { + it('should parse member joined event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12356', + event_time: 1705320000, + event: { + type: 'member_joined_channel', + user: 'U_NEW', + channel: 'C12345', + inviter: 'U12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('member_joined'); + expect(events[0].actor.id).toBe('U_NEW'); + expect(events[0].metadata?.inviter).toBe('U12345'); + }); + }); + + describe('Unknown events', () => { + it('should create generic event for unknown types', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12357', + event_time: 1705320000, + event: { + type: 'file_shared', + user: 'U12345', + file_id: 'F12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + 
expect(events).toHaveLength(1); + expect(events[0].type).toBe('slack.file_shared'); + }); + }); + + describe('Text cleaning', () => { + it('should clean URLs from text', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12358', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT> check and ', + ts: '1705320000.001000', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events[0].item?.body).toContain('this link'); + expect(events[0].item?.body).toContain('https://other.com'); + expect(events[0].item?.body).not.toContain(' { + it('should extract all mentioned agents', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12359', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT> @lead please assign @developer to review this with @reviewer', + ts: '1705320000.001100', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].mentions).toContain('reviewer'); + }); + }); +}); diff --git a/src/cloud/webhooks/parsers/slack.ts b/src/cloud/webhooks/parsers/slack.ts new file mode 100644 index 00000000..11ae3ec5 --- /dev/null +++ b/src/cloud/webhooks/parsers/slack.ts @@ -0,0 +1,234 @@ +/** + * Slack Webhook Parser + * + * Transforms Slack Events API payloads into normalized events. 
+ * https://api.slack.com/apis/connections/events-api + */ + +import type { NormalizedEvent, WebhookParser } from '../types.js'; + +/** + * Extract user mentions from Slack message text + * Slack format: <@U12345678> or <@U12345678|username> + */ +function extractSlackMentions(text: string | null | undefined): string[] { + if (!text) return []; + const mentionPattern = /<@([A-Z0-9]+)(?:\|([^>]+))?>/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + // Prefer display name if available, otherwise use ID + mentions.push(match[2] || match[1]); + } + return [...new Set(mentions)]; +} + +/** + * Extract agent mentions from text (our custom @agent-name format) + */ +function extractAgentMentions(text: string | null | undefined): string[] { + if (!text) return []; + // Match @agent-name patterns that aren't Slack user mentions + const mentionPattern = /(?])/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + return [...new Set(mentions)]; +} + +/** + * Clean Slack message text (remove user mention formatting) + */ +function cleanSlackText(text: string | null | undefined): string { + if (!text) return ''; + // Replace <@U12345678|username> with @username + return text.replace(/<@[A-Z0-9]+\|([^>]+)>/g, '@$1') + .replace(/<@[A-Z0-9]+>/g, '@user') + // Replace with text + .replace(/<([^|>]+)\|([^>]+)>/g, '$2') + // Replace with URL + .replace(/<([^>]+)>/g, '$1'); +} + +export const slackParser: WebhookParser = { + id: 'slack', + + parse(payload: unknown): NormalizedEvent[] { + const data = payload as Record; + const events: NormalizedEvent[] = []; + + // Handle URL verification challenge + if (data.type === 'url_verification') { + // This is handled separately in the router + return []; + } + + // Events API wrapper + if (data.type !== 'event_callback') { + return []; + } + + const event = data.event as Record | 
undefined; + if (!event) return []; + + const eventType = event.type as string; + const teamId = data.team_id as string || 'unknown'; + const eventId = data.event_id as string || `slack-${Date.now()}`; + const eventTime = data.event_time as number | undefined; + + const baseEvent: Partial = { + id: eventId, + source: 'slack', + timestamp: eventTime ? new Date(eventTime * 1000) : new Date(), + actor: { + id: String(event.user || 'unknown'), + name: String(event.user || 'unknown'), + }, + context: { + name: teamId, + }, + labels: [], + mentions: [], + metadata: { + teamId, + channelId: event.channel, + channelType: event.channel_type, + }, + rawPayload: payload, + }; + + switch (eventType) { + case 'app_mention': { + // Bot was mentioned in a channel + const text = event.text as string; + const agentMentions = extractAgentMentions(text); + + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: 'message', + id: String(event.ts), + body: cleanSlackText(text), + }, + mentions: agentMentions.length > 0 ? agentMentions : ['lead'], // Default to lead if no specific agent + metadata: { + ...baseEvent.metadata, + ts: event.ts, + threadTs: event.thread_ts, + userMentions: extractSlackMentions(text), + }, + } as NormalizedEvent); + break; + } + + case 'message': { + // Regular message in channel + const text = event.text as string; + const subtype = event.subtype as string | undefined; + + // Ignore bot messages, message changes, etc. 
+ if (subtype && subtype !== 'thread_broadcast') { + break; + } + + const agentMentions = extractAgentMentions(text); + + // Only create event if there are agent mentions + if (agentMentions.length > 0) { + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: 'message', + id: String(event.ts), + body: cleanSlackText(text), + }, + mentions: agentMentions, + metadata: { + ...baseEvent.metadata, + ts: event.ts, + threadTs: event.thread_ts, + userMentions: extractSlackMentions(text), + }, + } as NormalizedEvent); + } + break; + } + + case 'reaction_added': { + // Reaction added to a message + const reaction = event.reaction as string; + const item = event.item as Record; + + events.push({ + ...baseEvent, + type: 'reaction_added', + item: { + type: 'message', + id: String(item?.ts || 'unknown'), + }, + labels: [reaction], + metadata: { + ...baseEvent.metadata, + reaction, + itemType: item?.type, + itemChannel: item?.channel, + itemTs: item?.ts, + }, + } as NormalizedEvent); + break; + } + + case 'channel_created': { + const channel = event.channel as Record; + + events.push({ + ...baseEvent, + type: 'channel_created', + context: { + name: String(channel?.name || 'unknown'), + }, + metadata: { + ...baseEvent.metadata, + channelId: channel?.id, + channelName: channel?.name, + creator: channel?.creator, + }, + } as NormalizedEvent); + break; + } + + case 'member_joined_channel': { + events.push({ + ...baseEvent, + type: 'member_joined', + actor: { + id: String(event.user), + name: String(event.user), + }, + metadata: { + ...baseEvent.metadata, + inviter: event.inviter, + }, + } as NormalizedEvent); + break; + } + + default: + // Unknown event type + events.push({ + ...baseEvent, + type: `slack.${eventType}`, + metadata: { + ...baseEvent.metadata, + subtype: event.subtype, + }, + } as NormalizedEvent); + } + + return events; + }, +}; diff --git a/src/cloud/webhooks/responders/github.ts b/src/cloud/webhooks/responders/github.ts new file mode 100644 index 
00000000..e718045b --- /dev/null +++ b/src/cloud/webhooks/responders/github.ts @@ -0,0 +1,94 @@ +/** + * GitHub Responder + * + * Sends responses back to GitHub via the GitHub App API. + */ + +import type { NormalizedEvent, WebhookResponder, WebhookResponse } from '../types.js'; +import { nangoService } from '../../services/nango.js'; +import { db } from '../../db/index.js'; + +export const githubResponder: WebhookResponder = { + id: 'github', + + async respond( + event: NormalizedEvent, + response: WebhookResponse, + _config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }> { + try { + // Get repository info from event context + const repoFullName = event.context.name; + const [owner, repo] = repoFullName.split('/'); + + if (!owner || !repo) { + return { success: false, error: `Invalid repository name: ${repoFullName}` }; + } + + // Find the repository in our database to get the Nango connection + const repository = await db.repositories.findByFullName(repoFullName); + if (!repository?.nangoConnectionId) { + return { + success: false, + error: `Repository ${repoFullName} not found or has no Nango connection`, + }; + } + + switch (response.type) { + case 'comment': { + // Post a comment on an issue or PR + const issueNumber = typeof response.target === 'number' + ? 
response.target + : parseInt(String(response.target), 10); + + if (isNaN(issueNumber)) { + return { success: false, error: `Invalid issue number: ${response.target}` }; + } + + const result = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + issueNumber, + response.body + ); + + return { + success: true, + id: String(result.id), + url: result.html_url, + }; + } + + case 'reaction': { + // Add a reaction to a comment or issue + // Note: This would need to be added to NangoService + return { + success: false, + error: 'Reactions not yet implemented for GitHub', + }; + } + + case 'status': { + // Update a check run status + // Note: This would need to be added to NangoService + return { + success: false, + error: 'Status updates not yet implemented for GitHub', + }; + } + + default: + return { + success: false, + error: `Unknown response type: ${response.type}`, + }; + } + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }, +}; diff --git a/src/cloud/webhooks/responders/index.ts b/src/cloud/webhooks/responders/index.ts new file mode 100644 index 00000000..89b0f4c0 --- /dev/null +++ b/src/cloud/webhooks/responders/index.ts @@ -0,0 +1,35 @@ +/** + * Webhook Responders Index + * + * Registry of all available responders. 
+ */ + +import type { WebhookResponder } from '../types.js'; +import { githubResponder } from './github.js'; +import { linearResponder } from './linear.js'; +import { slackResponder, formatSlackBlocks } from './slack.js'; + +/** + * Registry of all available responders + */ +export const responders: Record = { + github: githubResponder, + linear: linearResponder, + slack: slackResponder, +}; + +/** + * Get a responder by ID + */ +export function getResponder(id: string): WebhookResponder | undefined { + return responders[id]; +} + +/** + * Register a custom responder + */ +export function registerResponder(responder: WebhookResponder): void { + responders[responder.id] = responder; +} + +export { githubResponder, linearResponder, slackResponder, formatSlackBlocks }; diff --git a/src/cloud/webhooks/responders/linear.ts b/src/cloud/webhooks/responders/linear.ts new file mode 100644 index 00000000..a64936ed --- /dev/null +++ b/src/cloud/webhooks/responders/linear.ts @@ -0,0 +1,181 @@ +/** + * Linear Responder + * + * Sends responses back to Linear via their GraphQL API. 
+ * https://developers.linear.app/docs/graphql/working-with-the-graphql-api + */ + +import type { NormalizedEvent, WebhookResponder, WebhookResponse } from '../types.js'; + +/** + * Execute a Linear GraphQL mutation + */ +async function linearGraphQL( + apiKey: string, + query: string, + variables: Record +): Promise<{ data?: Record; errors?: Array<{ message: string }> }> { + const response = await fetch('https://api.linear.app/graphql', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': apiKey, + }, + body: JSON.stringify({ query, variables }), + }); + + return response.json() as Promise<{ data?: Record; errors?: Array<{ message: string }> }>; +} + +export const linearResponder: WebhookResponder = { + id: 'linear', + + async respond( + event: NormalizedEvent, + response: WebhookResponse, + config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }> { + const apiKey = config?.apiKey as string || process.env.LINEAR_API_KEY; + + if (!apiKey) { + return { + success: false, + error: 'Linear API key not configured', + }; + } + + try { + switch (response.type) { + case 'comment': { + // Create a comment on an issue + const issueId = String(response.target); + + const mutation = ` + mutation CreateComment($issueId: String!, $body: String!) 
{ + commentCreate(input: { issueId: $issueId, body: $body }) { + success + comment { + id + url + } + } + } + `; + + const result = await linearGraphQL(apiKey, mutation, { + issueId, + body: response.body, + }); + + if (result.errors?.length) { + return { + success: false, + error: result.errors.map(e => e.message).join(', '), + }; + } + + const commentCreate = result.data?.commentCreate as Record; + const comment = commentCreate?.comment as Record; + + return { + success: !!commentCreate?.success, + id: comment?.id as string, + url: comment?.url as string, + }; + } + + case 'reaction': { + // Add a reaction/emoji to a comment + const commentId = String(response.target); + const emoji = response.metadata?.emoji as string || '👍'; + + const mutation = ` + mutation CreateReaction($commentId: String!, $emoji: String!) { + reactionCreate(input: { commentId: $commentId, emoji: $emoji }) { + success + reaction { + id + } + } + } + `; + + const result = await linearGraphQL(apiKey, mutation, { + commentId, + emoji, + }); + + if (result.errors?.length) { + return { + success: false, + error: result.errors.map(e => e.message).join(', '), + }; + } + + const reactionCreate = result.data?.reactionCreate as Record; + return { + success: !!reactionCreate?.success, + id: (reactionCreate?.reaction as Record)?.id as string, + }; + } + + case 'status': { + // Update issue state + const issueId = String(response.target); + const stateId = response.metadata?.stateId as string; + + if (!stateId) { + return { + success: false, + error: 'State ID required for status update', + }; + } + + const mutation = ` + mutation UpdateIssue($issueId: String!, $stateId: String!) 
{ + issueUpdate(id: $issueId, input: { stateId: $stateId }) { + success + issue { + id + url + } + } + } + `; + + const result = await linearGraphQL(apiKey, mutation, { + issueId, + stateId, + }); + + if (result.errors?.length) { + return { + success: false, + error: result.errors.map(e => e.message).join(', '), + }; + } + + const issueUpdate = result.data?.issueUpdate as Record; + const issue = issueUpdate?.issue as Record; + + return { + success: !!issueUpdate?.success, + id: issue?.id as string, + url: issue?.url as string, + }; + } + + default: + return { + success: false, + error: `Unknown response type: ${response.type}`, + }; + } + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }, +}; diff --git a/src/cloud/webhooks/responders/slack.ts b/src/cloud/webhooks/responders/slack.ts new file mode 100644 index 00000000..20560ced --- /dev/null +++ b/src/cloud/webhooks/responders/slack.ts @@ -0,0 +1,220 @@ +/** + * Slack Responder + * + * Sends responses back to Slack via their Web API. 
+ * https://api.slack.com/methods + */ + +import type { NormalizedEvent, WebhookResponder, WebhookResponse } from '../types.js'; + +/** + * Call a Slack Web API method + */ +async function slackAPI( + token: string, + method: string, + body: Record +): Promise<{ ok: boolean; error?: string; ts?: string; channel?: string; message?: Record }> { + const response = await fetch(`https://slack.com/api/${method}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json; charset=utf-8', + 'Authorization': `Bearer ${token}`, + }, + body: JSON.stringify(body), + }); + + return response.json() as Promise<{ ok: boolean; error?: string; ts?: string; channel?: string; message?: Record }>; +} + +export const slackResponder: WebhookResponder = { + id: 'slack', + + async respond( + event: NormalizedEvent, + response: WebhookResponse, + config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }> { + const botToken = config?.botToken as string || process.env.SLACK_BOT_TOKEN; + + if (!botToken) { + return { + success: false, + error: 'Slack bot token not configured', + }; + } + + try { + // Get channel from event metadata or response target + const channelId = response.metadata?.channel as string + || event.metadata?.channelId as string + || String(response.target); + + if (!channelId) { + return { + success: false, + error: 'Channel ID required', + }; + } + + switch (response.type) { + case 'message': { + // Post a message to a channel + const threadTs = response.metadata?.threadTs as string + || event.metadata?.threadTs as string + || event.metadata?.ts as string; + + const result = await slackAPI(botToken, 'chat.postMessage', { + channel: channelId, + text: response.body, + thread_ts: threadTs, // Reply in thread if available + unfurl_links: false, + unfurl_media: false, + }); + + if (!result.ok) { + return { + success: false, + error: result.error || 'Failed to post message', + }; + } + + return { + success: true, + id: result.ts, + 
// Construct Slack message URL + url: `https://slack.com/archives/${channelId}/p${result.ts?.replace('.', '')}`, + }; + } + + case 'comment': { + // Same as message, but explicitly in a thread + const threadTs = String(response.target); + + const result = await slackAPI(botToken, 'chat.postMessage', { + channel: channelId, + text: response.body, + thread_ts: threadTs, + reply_broadcast: response.metadata?.broadcast === true, + }); + + if (!result.ok) { + return { + success: false, + error: result.error || 'Failed to post reply', + }; + } + + return { + success: true, + id: result.ts, + }; + } + + case 'reaction': { + // Add a reaction to a message + const ts = String(response.target); + const emoji = response.metadata?.emoji as string || response.body.replace(/:/g, ''); + + const result = await slackAPI(botToken, 'reactions.add', { + channel: channelId, + timestamp: ts, + name: emoji, + }); + + if (!result.ok && result.error !== 'already_reacted') { + return { + success: false, + error: result.error || 'Failed to add reaction', + }; + } + + return { + success: true, + }; + } + + case 'status': { + // Update bot status/presence (not commonly used) + return { + success: false, + error: 'Status updates not implemented for Slack', + }; + } + + default: + return { + success: false, + error: `Unknown response type: ${response.type}`, + }; + } + } catch (error) { + return { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error', + }; + } + }, +}; + +/** + * Helper to format a message with blocks for richer formatting + */ +export function formatSlackBlocks( + text: string, + options?: { + header?: string; + context?: string; + actions?: Array<{ text: string; url: string }>; + } +): Array> { + const blocks: Array> = []; + + if (options?.header) { + blocks.push({ + type: 'header', + text: { + type: 'plain_text', + text: options.header, + emoji: true, + }, + }); + } + + blocks.push({ + type: 'section', + text: { + type: 'mrkdwn', + text, + }, + }); + + if (options?.context) { + blocks.push({ + type: 'context', + elements: [ + { + type: 'mrkdwn', + text: options.context, + }, + ], + }); + } + + if (options?.actions?.length) { + blocks.push({ + type: 'actions', + elements: options.actions.map(action => ({ + type: 'button', + text: { + type: 'plain_text', + text: action.text, + emoji: true, + }, + url: action.url, + })), + }); + } + + return blocks; +} diff --git a/src/cloud/webhooks/router.test.ts b/src/cloud/webhooks/router.test.ts new file mode 100644 index 00000000..6c88d0cb --- /dev/null +++ b/src/cloud/webhooks/router.test.ts @@ -0,0 +1,391 @@ +/** + * Webhook Router Tests + */ + +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import crypto from 'crypto'; +import { processWebhook, getWebhookConfig, defaultSources } from './router.js'; +import type { WebhookConfig } from './types.js'; + +// Mock the db module +vi.mock('../db/index.js', () => ({ + db: { + repositories: { + findByFullName: vi.fn().mockResolvedValue({ + id: 'repo-1', + userId: 'user-1', + nangoConnectionId: 'nango-conn-1', + githubFullName: 'owner/repo', + }), + }, + linkedDaemons: { + findByUserId: vi.fn().mockResolvedValue([ + { id: 'daemon-1', userId: 'user-1', status: 'online' }, + ]), + queueMessage: vi.fn().mockResolvedValue(undefined), + }, + }, +})); + +// Mock the responders +vi.mock('./responders/index.js', () => ({ + getResponder: vi.fn().mockReturnValue({ 
+ id: 'github', + respond: vi.fn().mockResolvedValue({ success: true, id: '123', url: 'https://example.com' }), + }), +})); + +describe('getWebhookConfig', () => { + it('should return default configuration', () => { + const config = getWebhookConfig(); + + expect(config.sources).toBeDefined(); + expect(config.rules).toBeDefined(); + expect(config.sources.github).toBeDefined(); + expect(config.sources.linear).toBeDefined(); + expect(config.sources.slack).toBeDefined(); + }); +}); + +describe('defaultSources', () => { + it('should have GitHub source configured', () => { + const github = defaultSources.github; + + expect(github.id).toBe('github'); + expect(github.enabled).toBe(true); + expect(github.signature.header).toBe('x-hub-signature-256'); + expect(github.signature.algorithm).toBe('sha256'); + expect(github.parser).toBe('github'); + expect(github.responder).toBe('github'); + }); + + it('should have Linear source configured', () => { + const linear = defaultSources.linear; + + expect(linear.id).toBe('linear'); + expect(linear.enabled).toBe(true); + expect(linear.signature.algorithm).toBe('sha256'); + expect(linear.parser).toBe('linear'); + expect(linear.responder).toBe('linear'); + }); + + it('should have Slack source configured', () => { + const slack = defaultSources.slack; + + expect(slack.id).toBe('slack'); + expect(slack.enabled).toBe(true); + expect(slack.signature.algorithm).toBe('slack-v0'); + expect(slack.parser).toBe('slack'); + expect(slack.responder).toBe('slack'); + }); +}); + +describe('processWebhook', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe('unknown source', () => { + it('should return error for unknown source', async () => { + const result = await processWebhook( + 'unknown-source', + '{}', + {} + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toContain('Unknown webhook source'); + }); + }); + + describe('disabled source', () => { + 
it('should return error for disabled source', async () => { + const config: WebhookConfig = { + sources: { + github: { + ...defaultSources.github, + enabled: false, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'github', + '{}', + {}, + config + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toContain('disabled'); + }); + }); + + describe('signature verification', () => { + const secret = 'test-secret'; + const payload = JSON.stringify({ test: true }); + + beforeEach(() => { + vi.stubEnv('GITHUB_WEBHOOK_SECRET', secret); + }); + + afterEach(() => { + vi.unstubAllEnvs(); + }); + + it('should reject invalid signature', async () => { + const result = await processWebhook( + 'github', + payload, + { 'x-hub-signature-256': 'sha256=invalid' } + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toBe('Invalid signature'); + }); + + it('should accept valid signature', async () => { + const signature = 'sha256=' + crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + + const result = await processWebhook( + 'github', + payload, + { + 'x-hub-signature-256': signature, + 'x-github-event': 'ping', + 'x-github-delivery': 'test-delivery', + } + ); + + // May not be fully successful depending on mock setup, but shouldn't fail signature + expect(result.responses[0]?.error).not.toBe('Invalid signature'); + }); + + it('should reject missing signature', async () => { + const result = await processWebhook( + 'github', + payload, + {} // No signature header + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toBe('Invalid signature'); + }); + }); + + describe('invalid payload', () => { + beforeEach(() => { + vi.stubEnv('GITHUB_WEBHOOK_SECRET', ''); + }); + + afterEach(() => { + vi.unstubAllEnvs(); + }); + + it('should handle non-JSON payload', async () => { + // Create a config that skips signature verification + const config: WebhookConfig = { + 
sources: { + github: { + ...defaultSources.github, + signature: { + ...defaultSources.github.signature, + algorithm: 'none', + }, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'github', + 'not valid json', + {}, + config + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toBe('Invalid JSON payload'); + }); + }); + + describe('event processing', () => { + const mentionPayload = { + action: 'created', + issue: { number: 42, title: 'Test' }, + comment: { + id: 789, + body: '@developer please fix this', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + beforeEach(() => { + vi.stubEnv('GITHUB_WEBHOOK_SECRET', ''); + }); + + afterEach(() => { + vi.unstubAllEnvs(); + }); + + it('should process GitHub mention event', async () => { + const payload = JSON.stringify(mentionPayload); + const config: WebhookConfig = { + sources: { + github: { + ...defaultSources.github, + signature: { ...defaultSources.github.signature, algorithm: 'none' }, + }, + }, + rules: [ + { + id: 'test-mention', + name: 'Test Mention', + enabled: true, + source: 'github', + eventType: 'mention', + action: { type: 'spawn_agent', agentType: '$.mentions' }, + priority: 10, + }, + ], + }; + + const result = await processWebhook( + 'github', + payload, + { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'test-delivery', + }, + config + ); + + expect(result.eventType).toBe('mention'); + expect(result.matchedRules).toContain('test-mention'); + }); + + it('should return empty result for no matching events', async () => { + const payload = JSON.stringify({ + action: 'completed', + check_run: { + id: 123, + name: 'build', + conclusion: 'success', // Not a failure + pull_requests: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'github-actions' }, + }); + + const config: WebhookConfig = { + sources: { 
+ github: { + ...defaultSources.github, + signature: { ...defaultSources.github.signature, algorithm: 'none' }, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'github', + payload, + { + 'x-github-event': 'check_run', + 'x-github-delivery': 'test-delivery', + }, + config + ); + + // Should have processed but with no specific events + expect(result.matchedRules).toHaveLength(0); + }); + }); + + describe('Slack URL verification', () => { + it('should handle Slack URL verification (handled at API level)', async () => { + // Note: URL verification is actually handled at the API level, + // but the parser should return empty events for it + const payload = JSON.stringify({ + type: 'url_verification', + challenge: 'test-challenge', + }); + + const config: WebhookConfig = { + sources: { + slack: { + ...defaultSources.slack, + signature: { ...defaultSources.slack.signature, algorithm: 'none' }, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'slack', + payload, + {}, + config + ); + + // Parser returns empty for url_verification + expect(result.success).toBe(true); + expect(result.matchedRules).toHaveLength(0); + }); + }); +}); + +describe('signature verification algorithms', () => { + describe('sha256', () => { + it('should verify SHA256 HMAC signature', () => { + const secret = 'test-secret'; + const payload = '{"test": true}'; + const signature = crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + + // Signature should match expected format + expect(signature).toMatch(/^[a-f0-9]{64}$/); + }); + }); + + describe('sha1', () => { + it('should verify SHA1 HMAC signature', () => { + const secret = 'test-secret'; + const payload = '{"test": true}'; + const signature = crypto + .createHmac('sha1', secret) + .update(payload) + .digest('hex'); + + // Signature should match expected format + expect(signature).toMatch(/^[a-f0-9]{40}$/); + }); + }); + + describe('slack-v0', () => { + it('should create 
Slack-format signature', () => { + const secret = 'test-secret'; + const timestamp = Math.floor(Date.now() / 1000); + const payload = '{"test": true}'; + const sigBasestring = `v0:${timestamp}:${payload}`; + const signature = 'v0=' + crypto + .createHmac('sha256', secret) + .update(sigBasestring) + .digest('hex'); + + expect(signature).toMatch(/^v0=[a-f0-9]{64}$/); + }); + }); +}); diff --git a/src/cloud/webhooks/router.ts b/src/cloud/webhooks/router.ts new file mode 100644 index 00000000..3cb7d4df --- /dev/null +++ b/src/cloud/webhooks/router.ts @@ -0,0 +1,591 @@ +/** + * Generic Webhook Router + * + * Routes incoming webhooks from any source through the configurable pipeline: + * 1. Verify signature + * 2. Parse payload into normalized events + * 3. Match events against rules + * 4. Execute actions + * 5. Send responses + */ + +import crypto from 'crypto'; +import type { + WebhookConfig, + WebhookSourceConfig, + NormalizedEvent, + WebhookAction, + WebhookResult, +} from './types.js'; +import { getParser } from './parsers/index.js'; +import { getResponder } from './responders/index.js'; +import { findMatchingRules, resolveActionTemplate, defaultRules } from './rules-engine.js'; +import { db } from '../db/index.js'; + +/** + * Default webhook source configurations + */ +export const defaultSources: Record = { + github: { + id: 'github', + name: 'GitHub', + enabled: true, + signature: { + header: 'x-hub-signature-256', + algorithm: 'sha256', + secretEnvVar: 'GITHUB_WEBHOOK_SECRET', + signaturePrefix: 'sha256=', + }, + parser: 'github', + responder: 'github', + }, + linear: { + id: 'linear', + name: 'Linear', + enabled: true, + signature: { + header: 'linear-signature', + algorithm: 'sha256', + secretEnvVar: 'LINEAR_WEBHOOK_SECRET', + }, + parser: 'linear', + responder: 'linear', + }, + slack: { + id: 'slack', + name: 'Slack', + enabled: true, + signature: { + header: 'x-slack-signature', + algorithm: 'slack-v0', + secretEnvVar: 'SLACK_SIGNING_SECRET', + }, + parser: 
'slack', + responder: 'slack', + }, +}; + +/** + * Get webhook configuration + * In the future, this could load from database per-workspace + */ +export function getWebhookConfig(): WebhookConfig { + return { + sources: defaultSources, + rules: defaultRules, + }; +} + +/** + * Verify webhook signature + */ +function verifySignature( + payload: string, + signature: string | undefined, + config: WebhookSourceConfig, + headers?: Record +): boolean { + if (config.signature.algorithm === 'none') { + return true; + } + + if (!signature) { + return false; + } + + const secret = process.env[config.signature.secretEnvVar]; + if (!secret) { + console.warn(`[webhook-router] Secret not configured: ${config.signature.secretEnvVar}`); + return false; + } + + try { + let expectedSignature: string; + let actualSignature = signature; + + // Remove prefix if configured + if (config.signature.signaturePrefix && actualSignature.startsWith(config.signature.signaturePrefix)) { + actualSignature = actualSignature.slice(config.signature.signaturePrefix.length); + } + + switch (config.signature.algorithm) { + case 'sha256': + expectedSignature = crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + break; + + case 'sha1': + expectedSignature = crypto + .createHmac('sha1', secret) + .update(payload) + .digest('hex'); + break; + + case 'token': + // Direct token comparison + return actualSignature === secret; + + case 'slack-v0': { + // Slack signature verification + // Format: v0= + const timestamp = headers?.['x-slack-request-timestamp'] as string; + if (!timestamp) return false; + + // Check timestamp is within 5 minutes + const now = Math.floor(Date.now() / 1000); + if (Math.abs(now - parseInt(timestamp, 10)) > 300) { + console.warn('[webhook-router] Slack request timestamp too old'); + return false; + } + + const sigBasestring = `v0:${timestamp}:${payload}`; + expectedSignature = 'v0=' + crypto + .createHmac('sha256', secret) + .update(sigBasestring) + 
.digest('hex'); + + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expectedSignature) + ); + } + + default: + console.warn(`[webhook-router] Unknown signature algorithm: ${config.signature.algorithm}`); + return false; + } + + return crypto.timingSafeEqual( + Buffer.from(actualSignature), + Buffer.from(expectedSignature) + ); + } catch (error) { + console.error('[webhook-router] Signature verification error:', error); + return false; + } +} + +/** + * Execute an action for an event + */ +async function executeAction( + action: WebhookAction, + event: NormalizedEvent, + responder: ReturnType, + responderConfig?: Record +): Promise<{ success: boolean; error?: string }> { + const resolvedAction = resolveActionTemplate(action, event); + + switch (resolvedAction.type) { + case 'spawn_agent': { + const agentType = resolvedAction.agentType || 'lead'; + const prompt = buildPrompt(resolvedAction.prompt || 'default', event); + + // Find the repository and queue spawn command + const repository = await db.repositories.findByFullName(event.context.name); + if (!repository?.userId) { + return { success: false, error: 'Repository not found or not linked' }; + } + + // Find an available daemon + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + // Post a response indicating no daemon available + if (responder && event.item?.number) { + await responder.respond(event, { + type: 'comment', + target: event.item.number, + body: `⚠️ No Agent Relay daemon is available to handle this request. Please ensure you have a linked daemon running.`, + }, responderConfig); + } + return { success: false, error: 'No available daemon' }; + } + + // Post acknowledgment + if (responder && event.item?.number) { + await responder.respond(event, { + type: 'comment', + target: event.item.number, + body: `👋 Routing to **@${agentType}** agent. 
The agent will respond shortly.`, + }, responderConfig); + } + + // Queue spawn command + const agentName = `${agentType}-${event.id.slice(0, 8)}`; + await db.linkedDaemons.queueMessage(onlineDaemon.id, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: '__spawner__', + content: JSON.stringify({ + type: 'spawn_agent', + agentName, + cli: 'claude', + task: prompt, + metadata: { + eventId: event.id, + source: event.source, + eventType: event.type, + repository: event.context.name, + itemNumber: event.item?.number, + }, + }), + metadata: { type: 'spawn_command' }, + timestamp: new Date().toISOString(), + }); + + console.log(`[webhook-router] Queued spawn command for ${agentName}`); + return { success: true }; + } + + case 'message_agent': { + // Send message to existing agent + return { success: false, error: 'message_agent not yet implemented' }; + } + + case 'post_comment': { + if (!responder) { + return { success: false, error: 'No responder available' }; + } + + const body = resolvedAction.config?.body as string || 'Action received.'; + const target = event.item?.number || event.item?.id || ''; + + const result = await responder.respond(event, { + type: 'comment', + target, + body, + }, responderConfig); + + return { success: result.success, error: result.error }; + } + + case 'create_issue': { + return { success: false, error: 'create_issue not yet implemented' }; + } + + case 'custom': { + // Custom action handler + const handler = resolvedAction.config?.handler as ((event: NormalizedEvent) => Promise) | undefined; + if (handler) { + await handler(event); + return { success: true }; + } + return { success: false, error: 'No custom handler defined' }; + } + + default: + return { success: false, error: `Unknown action type: ${resolvedAction.type}` }; + } +} + +/** + * Build a prompt from a template name and event + */ +function buildPrompt(templateName: string, event: NormalizedEvent): string { + const templates: Record 
string> = { + 'ci-failure': (e) => ` +# CI Failure Fix Task + +A CI check has failed in ${e.context.name}. + +## Failure Details + +**Check Name:** ${e.item?.title || 'Unknown'} +**Branch:** ${e.metadata?.branch || 'unknown'} +**Commit:** ${e.metadata?.commitSha || 'unknown'} + +${e.metadata?.failureSummary ? `**Summary:**\n${e.metadata.failureSummary}` : ''} + +${e.metadata?.annotations ? `## Annotations\n\n${formatAnnotations(e.metadata.annotations as Array>)}` : ''} + +## Your Task + +1. Analyze the failure +2. Fix the issues +3. Push your changes +4. Report back with a summary +`.trim(), + + 'mention': (e) => ` +# Agent Mention Task + +You were mentioned in ${e.source} in ${e.context.name}. + +## Context + +**Item:** ${e.item?.title || 'N/A'} (#${e.item?.number || e.item?.id || 'N/A'}) +**Author:** @${e.actor.name} + +## Message + +${e.item?.body || 'No message content'} + +## Your Task + +Respond helpfully to the mention. If code changes are needed, make them and push. +`.trim(), + + 'issue': (e) => ` +# Issue Assignment + +You've been assigned to work on an issue in ${e.context.name}. + +## Issue Details + +**Title:** ${e.item?.title} +**Priority:** ${e.priority || 'normal'} +**Labels:** ${e.labels.join(', ') || 'none'} + +## Description + +${e.item?.body || 'No description provided.'} + +## Your Task + +1. Analyze the issue +2. Implement a solution +3. Create a PR +`.trim(), + + 'linear-issue': (e) => ` +# Linear Issue + +A new issue was created in ${e.context.name}. + +## Issue Details + +**Identifier:** ${e.metadata?.identifier || 'N/A'} +**Title:** ${e.item?.title} +**Priority:** ${e.priority || 'normal'} +**State:** ${e.item?.state || 'unknown'} + +## Description + +${e.item?.body || 'No description provided.'} + +## Your Task + +Analyze and work on this issue if appropriate. +`.trim(), + + 'slack-request': (e) => ` +# Slack Request + +Someone mentioned you in Slack. 
+ +## Message + +${e.item?.body || 'No message content'} + +## Your Task + +Respond to the request. Use the Slack API to post your response. +`.trim(), + + 'default': (e) => ` +# Webhook Event + +A webhook event was received from ${e.source}. + +## Event Details + +**Type:** ${e.type} +**Context:** ${e.context.name} +**Actor:** ${e.actor.name} + +## Item + +${e.item ? `**${e.item.type}:** ${e.item.title || e.item.id}` : 'No item'} + +## Body + +${e.item?.body || 'No content'} +`.trim(), + }; + + const template = templates[templateName] || templates['default']; + return template(event); +} + +/** + * Format annotations for prompt + */ +function formatAnnotations(annotations: Array>): string { + return annotations + .slice(0, 20) + .map(a => `- ${a.path}:${a.startLine} - ${a.message}`) + .join('\n'); +} + +/** + * Process a webhook from any source + */ +export async function processWebhook( + source: string, + payload: string, + headers: Record, + config?: WebhookConfig +): Promise { + const webhookConfig = config || getWebhookConfig(); + const sourceConfig = webhookConfig.sources[source]; + + if (!sourceConfig) { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: `Unknown webhook source: ${source}`, + }], + }; + } + + if (!sourceConfig.enabled) { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: `Webhook source disabled: ${source}`, + }], + }; + } + + // Verify signature + const signature = headers[sourceConfig.signature.header] as string | undefined; + if (!verifySignature(payload, signature, sourceConfig, headers)) { + console.error(`[webhook-router] Invalid signature for source: ${source}`); + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], 
+ responses: [{ + type: 'error', + success: false, + error: 'Invalid signature', + }], + }; + } + + // Parse payload + const parser = getParser(sourceConfig.parser); + if (!parser) { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: `Parser not found: ${sourceConfig.parser}`, + }], + }; + } + + let parsedPayload: unknown; + try { + parsedPayload = JSON.parse(payload); + } catch { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: 'Invalid JSON payload', + }], + }; + } + + const events = parser.parse(parsedPayload, headers, sourceConfig.parserConfig); + + if (events.length === 0) { + return { + success: true, + eventId: 'none', + source, + eventType: 'none', + matchedRules: [], + actions: [], + responses: [], + }; + } + + // Get responder + const responder = getResponder(sourceConfig.responder); + + // Process each event + const results: WebhookResult[] = []; + + for (const event of events) { + const matchedRules = findMatchingRules(webhookConfig.rules, event); + const actionResults: WebhookResult['actions'] = []; + const responseResults: WebhookResult['responses'] = []; + + console.log(`[webhook-router] Event ${event.id}: type=${event.type}, matched ${matchedRules.length} rules`); + + for (const rule of matchedRules) { + const result = await executeAction( + rule.action, + event, + responder, + sourceConfig.responderConfig + ); + + actionResults.push({ + ruleId: rule.id, + action: rule.action, + success: result.success, + error: result.error, + }); + } + + results.push({ + success: actionResults.every(a => a.success), + eventId: event.id, + source: event.source, + eventType: event.type, + matchedRules: matchedRules.map(r => r.id), + actions: actionResults, + responses: responseResults, + }); + } + + // Return 
combined result + if (results.length === 1) { + return results[0]; + } + + return { + success: results.every(r => r.success), + eventId: events[0].id, + source, + eventType: events.map(e => e.type).join(','), + matchedRules: results.flatMap(r => r.matchedRules), + actions: results.flatMap(r => r.actions), + responses: results.flatMap(r => r.responses), + }; +} diff --git a/src/cloud/webhooks/rules-engine.test.ts b/src/cloud/webhooks/rules-engine.test.ts new file mode 100644 index 00000000..76f8ef78 --- /dev/null +++ b/src/cloud/webhooks/rules-engine.test.ts @@ -0,0 +1,346 @@ +/** + * Rules Engine Tests + */ + +import { describe, it, expect } from 'vitest'; +import { + matchesRule, + findMatchingRules, + resolveActionTemplate, + defaultRules, +} from './rules-engine.js'; +import type { NormalizedEvent, WebhookRule } from './types.js'; + +const createEvent = (overrides: Partial = {}): NormalizedEvent => ({ + id: 'test-event-1', + source: 'github', + type: 'mention', + timestamp: new Date(), + actor: { id: 'user-1', name: 'testuser' }, + context: { name: 'owner/repo' }, + mentions: ['developer'], + labels: [], + metadata: {}, + rawPayload: {}, + ...overrides, +}); + +const createRule = (overrides: Partial = {}): WebhookRule => ({ + id: 'test-rule', + name: 'Test Rule', + enabled: true, + source: '*', + eventType: '*', + action: { type: 'spawn_agent', agentType: 'developer' }, + priority: 10, + ...overrides, +}); + +describe('matchesRule', () => { + describe('enabled/disabled', () => { + it('should not match disabled rules', () => { + const rule = createRule({ enabled: false }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(false); + }); + + it('should match enabled rules', () => { + const rule = createRule({ enabled: true }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(true); + }); + }); + + describe('source matching', () => { + it('should match wildcard source', () => { + const rule = createRule({ source: '*' 
}); + const event = createEvent({ source: 'github' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match exact source', () => { + const rule = createRule({ source: 'github' }); + const event = createEvent({ source: 'github' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should not match different source', () => { + const rule = createRule({ source: 'linear' }); + const event = createEvent({ source: 'github' }); + + expect(matchesRule(rule, event)).toBe(false); + }); + }); + + describe('eventType matching', () => { + it('should match wildcard eventType', () => { + const rule = createRule({ eventType: '*' }); + const event = createEvent({ type: 'ci_failure' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match exact eventType', () => { + const rule = createRule({ eventType: 'mention' }); + const event = createEvent({ type: 'mention' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match prefix wildcard', () => { + const rule = createRule({ eventType: 'ci_*' }); + + expect(matchesRule(rule, createEvent({ type: 'ci_failure' }))).toBe(true); + expect(matchesRule(rule, createEvent({ type: 'ci_success' }))).toBe(true); + expect(matchesRule(rule, createEvent({ type: 'issue_created' }))).toBe(false); + }); + + it('should not match different eventType', () => { + const rule = createRule({ eventType: 'ci_failure' }); + const event = createEvent({ type: 'mention' }); + + expect(matchesRule(rule, event)).toBe(false); + }); + }); + + describe('condition evaluation', () => { + it('should match without condition', () => { + const rule = createRule({ condition: undefined }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match empty condition', () => { + const rule = createRule({ condition: '' }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should evaluate == condition', () => { + 
const rule = createRule({ condition: '$.priority == "high"' }); + + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'low' }))).toBe(false); + }); + + it('should evaluate != condition', () => { + const rule = createRule({ condition: '$.priority != "low"' }); + + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'low' }))).toBe(false); + }); + + it('should evaluate "in" condition with array', () => { + const rule = createRule({ condition: '$.priority in ["critical", "high"]' }); + + expect(matchesRule(rule, createEvent({ priority: 'critical' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'medium' }))).toBe(false); + }); + + it('should evaluate "contains" condition for arrays', () => { + const rule = createRule({ condition: '$.labels contains "bug"' }); + + expect(matchesRule(rule, createEvent({ labels: ['bug', 'critical'] }))).toBe(true); + expect(matchesRule(rule, createEvent({ labels: ['feature'] }))).toBe(false); + }); + + it('should evaluate "contains" condition for strings', () => { + const rule = createRule({ condition: '$.actor.name contains "test"' }); + + expect(matchesRule(rule, createEvent({ actor: { id: '1', name: 'testuser' } }))).toBe(true); + expect(matchesRule(rule, createEvent({ actor: { id: '1', name: 'admin' } }))).toBe(false); + }); + + it('should evaluate numeric comparisons', () => { + const event = createEvent({ metadata: { count: 5 } }); + + expect(matchesRule(createRule({ condition: '$.metadata.count > 3' }), event)).toBe(true); + expect(matchesRule(createRule({ condition: '$.metadata.count < 3' }), event)).toBe(false); + expect(matchesRule(createRule({ condition: '$.metadata.count >= 5' }), event)).toBe(true); + expect(matchesRule(createRule({ condition: '$.metadata.count <= 5' }), 
event)).toBe(true); + }); + + it('should evaluate boolean conditions', () => { + const rule = createRule({ condition: '$.metadata.urgent == true' }); + + expect(matchesRule(rule, createEvent({ metadata: { urgent: true } }))).toBe(true); + expect(matchesRule(rule, createEvent({ metadata: { urgent: false } }))).toBe(false); + }); + + it('should evaluate null conditions', () => { + const rule = createRule({ condition: '$.priority == null' }); + + expect(matchesRule(rule, createEvent({ priority: undefined }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(false); + }); + + it('should handle nested path access', () => { + const rule = createRule({ condition: '$.metadata.check.name == "build"' }); + const event = createEvent({ + metadata: { check: { name: 'build' } }, + }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should handle invalid condition gracefully', () => { + const rule = createRule({ condition: 'invalid condition syntax' }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(false); + }); + }); +}); + +describe('findMatchingRules', () => { + it('should return matching rules sorted by priority', () => { + const rules: WebhookRule[] = [ + createRule({ id: 'rule-3', priority: 30 }), + createRule({ id: 'rule-1', priority: 10 }), + createRule({ id: 'rule-2', priority: 20 }), + ]; + const event = createEvent(); + + const matched = findMatchingRules(rules, event); + + expect(matched).toHaveLength(3); + expect(matched[0].id).toBe('rule-1'); + expect(matched[1].id).toBe('rule-2'); + expect(matched[2].id).toBe('rule-3'); + }); + + it('should filter out non-matching rules', () => { + const rules: WebhookRule[] = [ + createRule({ id: 'match-1', source: 'github' }), + createRule({ id: 'no-match', source: 'linear' }), + createRule({ id: 'match-2', source: '*' }), + ]; + const event = createEvent({ source: 'github' }); + + const matched = findMatchingRules(rules, event); + + 
expect(matched).toHaveLength(2); + expect(matched.map(r => r.id)).toContain('match-1'); + expect(matched.map(r => r.id)).toContain('match-2'); + }); + + it('should return empty array if no rules match', () => { + const rules: WebhookRule[] = [ + createRule({ source: 'linear' }), + createRule({ eventType: 'ci_failure' }), + ]; + const event = createEvent({ source: 'github', type: 'mention' }); + + const matched = findMatchingRules(rules, event); + + expect(matched).toHaveLength(0); + }); +}); + +describe('resolveActionTemplate', () => { + it('should resolve $.mentions to first mention', () => { + const action = { type: 'spawn_agent' as const, agentType: '$.mentions' }; + const event = createEvent({ mentions: ['developer', 'reviewer'] }); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.agentType).toBe('developer'); + }); + + it('should resolve nested path', () => { + const action = { type: 'spawn_agent' as const, agentType: '$.metadata.agentType' }; + const event = createEvent({ metadata: { agentType: 'ci-fix' } }); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.agentType).toBe('ci-fix'); + }); + + it('should keep literal agent type', () => { + const action = { type: 'spawn_agent' as const, agentType: 'developer' }; + const event = createEvent(); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.agentType).toBe('developer'); + }); + + it('should resolve prompt template references', () => { + const action = { type: 'spawn_agent' as const, prompt: '${item.body}' }; + const event = createEvent({ item: { type: 'issue', id: '1', body: 'Fix the bug' } }); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.prompt).toBe('Fix the bug'); + }); +}); + +describe('defaultRules', () => { + it('should have CI failure rule for GitHub', () => { + const ciRule = defaultRules.find(r => r.id === 'ci-failure'); + + expect(ciRule).toBeDefined(); + 
expect(ciRule?.source).toBe('github'); + expect(ciRule?.eventType).toBe('ci_failure'); + expect(ciRule?.action.agentType).toBe('ci-fix'); + }); + + it('should have mention rules for all sources', () => { + const githubMention = defaultRules.find(r => r.id === 'github-mention'); + const linearMention = defaultRules.find(r => r.id === 'linear-mention'); + const slackMention = defaultRules.find(r => r.id === 'slack-mention'); + + expect(githubMention).toBeDefined(); + expect(linearMention).toBeDefined(); + expect(slackMention).toBeDefined(); + }); + + it('should have assignment rules', () => { + const linearAssignment = defaultRules.find(r => r.id === 'linear-assignment'); + const githubAssignment = defaultRules.find(r => r.id === 'github-assignment'); + + expect(linearAssignment).toBeDefined(); + expect(githubAssignment).toBeDefined(); + expect(linearAssignment?.eventType).toBe('issue_assigned'); + }); + + it('should have all rules enabled by default', () => { + for (const rule of defaultRules) { + expect(rule.enabled).toBe(true); + } + }); + + it('should match CI failure event', () => { + const ciRule = defaultRules.find(r => r.id === 'ci-failure')!; + const event = createEvent({ + source: 'github', + type: 'ci_failure', + }); + + expect(matchesRule(ciRule, event)).toBe(true); + }); + + it('should match GitHub high priority issue', () => { + const issueRule = defaultRules.find(r => r.id === 'github-issue')!; + const highPriorityEvent = createEvent({ + source: 'github', + type: 'issue_created', + priority: 'high', + }); + const lowPriorityEvent = createEvent({ + source: 'github', + type: 'issue_created', + priority: 'low', + }); + + expect(matchesRule(issueRule, highPriorityEvent)).toBe(true); + expect(matchesRule(issueRule, lowPriorityEvent)).toBe(false); + }); +}); diff --git a/src/cloud/webhooks/rules-engine.ts b/src/cloud/webhooks/rules-engine.ts new file mode 100644 index 00000000..464ab033 --- /dev/null +++ b/src/cloud/webhooks/rules-engine.ts @@ -0,0 +1,296 @@ 
+/** + * Webhook Rules Engine + * + * Matches normalized events against configured rules and determines actions to take. + */ + +import type { NormalizedEvent, WebhookRule, WebhookAction } from './types.js'; + +/** + * Simple JSONPath-like evaluator for conditions + * Supports: $.field, $.field.subfield, comparisons (==, !=, in, contains) + */ +function evaluateCondition(condition: string, event: NormalizedEvent): boolean { + if (!condition || condition.trim() === '') return true; + + try { + // Parse condition: $.path operator value + // Note: >= and <= must come before > and < in the alternation to match correctly + const conditionPattern = /^\$\.([a-zA-Z0-9_.]+)\s*(==|!=|>=|<=|>|<|in|contains)\s*(.+)$/; + const match = condition.match(conditionPattern); + + if (!match) { + console.warn(`[rules-engine] Invalid condition format: ${condition}`); + return false; + } + + const [, path, operator, rawValue] = match; + const value = rawValue.trim(); + + // Get the value from the event + const eventValue = getValueByPath(event, path); + + // Parse the comparison value + let compareValue: unknown; + if (value.startsWith('[') && value.endsWith(']')) { + // Array literal + compareValue = JSON.parse(value); + } else if (value.startsWith('"') && value.endsWith('"')) { + // String literal + compareValue = value.slice(1, -1); + } else if (value === 'true') { + compareValue = true; + } else if (value === 'false') { + compareValue = false; + } else if (value === 'null') { + compareValue = null; + } else if (!isNaN(Number(value))) { + compareValue = Number(value); + } else { + // Treat as string + compareValue = value; + } + + switch (operator) { + case '==': + // Handle null/undefined equivalence + if (compareValue === null) { + return eventValue === null || eventValue === undefined; + } + return eventValue === compareValue; + case '!=': + return eventValue !== compareValue; + case 'in': + return Array.isArray(compareValue) && compareValue.includes(eventValue); + case 'contains': 
+ if (Array.isArray(eventValue)) { + return eventValue.includes(compareValue); + } + if (typeof eventValue === 'string' && typeof compareValue === 'string') { + return eventValue.includes(compareValue); + } + return false; + case '>': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue > compareValue; + case '<': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue < compareValue; + case '>=': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue >= compareValue; + case '<=': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue <= compareValue; + default: + return false; + } + } catch (error) { + console.error(`[rules-engine] Error evaluating condition: ${condition}`, error); + return false; + } +} + +/** + * Get a value from an object by dot-separated path + */ +function getValueByPath(obj: unknown, path: string): unknown { + const parts = path.split('.'); + let current: unknown = obj; + + for (const part of parts) { + if (current === null || current === undefined) return undefined; + if (typeof current !== 'object') return undefined; + current = (current as Record)[part]; + } + + return current; +} + +/** + * Check if a rule matches an event + */ +export function matchesRule(rule: WebhookRule, event: NormalizedEvent): boolean { + // Check if rule is enabled + if (!rule.enabled) return false; + + // Check source match + if (rule.source !== '*' && rule.source !== event.source) { + return false; + } + + // Check event type match + if (rule.eventType !== '*' && rule.eventType !== event.type) { + // Support wildcard prefix matching (e.g., 'ci_*' matches 'ci_failure') + if (rule.eventType.endsWith('*')) { + const prefix = rule.eventType.slice(0, -1); + if (!event.type.startsWith(prefix)) { + return false; + } + } else { + return false; + } + } + + // Check condition if present + if (rule.condition && 
!evaluateCondition(rule.condition, event)) { + return false; + } + + return true; +} + +/** + * Find all matching rules for an event, sorted by priority + */ +export function findMatchingRules(rules: WebhookRule[], event: NormalizedEvent): WebhookRule[] { + return rules + .filter(rule => matchesRule(rule, event)) + .sort((a, b) => a.priority - b.priority); +} + +/** + * Resolve template variables in action configuration + * Supports: ${event.field}, ${event.field.subfield} + */ +export function resolveActionTemplate(action: WebhookAction, event: NormalizedEvent): WebhookAction { + const resolvedAction = { ...action }; + + // Resolve agentType if it references an event field + if (resolvedAction.agentType?.startsWith('$.')) { + const path = resolvedAction.agentType.slice(2); + const value = getValueByPath(event, path); + if (typeof value === 'string') { + resolvedAction.agentType = value; + } else if (Array.isArray(value) && value.length > 0) { + // Use first mentioned agent + resolvedAction.agentType = String(value[0]); + } + } + + // Resolve prompt template references + if (resolvedAction.prompt?.startsWith('${') && resolvedAction.prompt?.endsWith('}')) { + const path = resolvedAction.prompt.slice(2, -1); + const value = getValueByPath(event, path); + if (typeof value === 'string') { + resolvedAction.prompt = value; + } + } + + return resolvedAction; +} + +/** + * Default rules for common patterns + */ +export const defaultRules: WebhookRule[] = [ + // CI Failures + { + id: 'ci-failure', + name: 'CI Failure Handler', + enabled: true, + source: 'github', + eventType: 'ci_failure', + action: { + type: 'spawn_agent', + agentType: 'ci-fix', + prompt: 'ci-failure', + }, + priority: 10, + }, + // GitHub Mentions + { + id: 'github-mention', + name: 'GitHub Mention Handler', + enabled: true, + source: 'github', + eventType: 'mention', + action: { + type: 'spawn_agent', + agentType: '$.mentions', // Use first mentioned agent + prompt: 'mention', + }, + priority: 20, + }, + 
// GitHub Issues + { + id: 'github-issue', + name: 'GitHub Issue Handler', + enabled: true, + source: 'github', + eventType: 'issue_created', + condition: '$.priority in ["critical", "high"]', + action: { + type: 'spawn_agent', + agentType: 'developer', + prompt: 'issue', + }, + priority: 30, + }, + // Linear Issues + { + id: 'linear-issue', + name: 'Linear Issue Handler', + enabled: true, + source: 'linear', + eventType: 'issue_created', + action: { + type: 'spawn_agent', + agentType: 'developer', + prompt: 'linear-issue', + }, + priority: 20, + }, + // Linear Mentions + { + id: 'linear-mention', + name: 'Linear Mention Handler', + enabled: true, + source: 'linear', + eventType: 'mention', + action: { + type: 'spawn_agent', + agentType: '$.mentions', + prompt: 'mention', + }, + priority: 20, + }, + // Slack App Mentions + { + id: 'slack-mention', + name: 'Slack App Mention Handler', + enabled: true, + source: 'slack', + eventType: 'mention', + action: { + type: 'spawn_agent', + agentType: '$.mentions', + prompt: 'slack-request', + }, + priority: 20, + }, + // Linear Issue Assignments (native integration) + { + id: 'linear-assignment', + name: 'Linear Issue Assignment Handler', + enabled: true, + source: 'linear', + eventType: 'issue_assigned', + action: { + type: 'spawn_agent', + agentType: '$.mentions', // Use the assigned agent type + prompt: 'linear-issue', + }, + priority: 15, + }, + // GitHub Issue Assignments + { + id: 'github-assignment', + name: 'GitHub Issue Assignment Handler', + enabled: true, + source: 'github', + eventType: 'issue_assigned', + action: { + type: 'spawn_agent', + agentType: '$.mentions', + prompt: 'issue', + }, + priority: 15, + }, +]; diff --git a/src/cloud/webhooks/types.ts b/src/cloud/webhooks/types.ts new file mode 100644 index 00000000..2f78f339 --- /dev/null +++ b/src/cloud/webhooks/types.ts @@ -0,0 +1,198 @@ +/** + * Generic Webhook System - Type Definitions + * + * Defines the core types for a configurable webhook system + * 
that can handle events from any source (GitHub, GitLab, Linear, Slack, etc.) + */ + +/** + * Normalized event format that all parsers produce + */ +export interface NormalizedEvent { + /** Unique event ID */ + id: string; + /** Source system (github, gitlab, linear, slack, etc.) */ + source: string; + /** Event type (e.g., 'ci_failure', 'mention', 'issue_created') */ + type: string; + /** Timestamp of the event */ + timestamp: Date; + /** Actor who triggered the event */ + actor: { + id: string; + name: string; + email?: string; + }; + /** Repository or project context */ + context: { + /** Full name (e.g., 'owner/repo' or project ID) */ + name: string; + /** URL to the repository/project */ + url?: string; + }; + /** The item this event relates to (issue, PR, ticket, message) */ + item?: { + type: 'issue' | 'pull_request' | 'ticket' | 'message' | 'comment' | 'check'; + id: string | number; + number?: number; + title?: string; + body?: string; + url?: string; + state?: string; + }; + /** Mentioned agents or users */ + mentions: string[]; + /** Labels, tags, or categories */ + labels: string[]; + /** Priority level if applicable */ + priority?: 'critical' | 'high' | 'medium' | 'low'; + /** Additional source-specific data */ + metadata: Record; + /** Raw payload for debugging */ + rawPayload: unknown; +} + +/** + * Action to take in response to an event + */ +export interface WebhookAction { + type: 'spawn_agent' | 'message_agent' | 'post_comment' | 'create_issue' | 'custom'; + /** Agent type or name to spawn/message */ + agentType?: string; + /** Prompt template name or inline prompt */ + prompt?: string; + /** Additional action-specific config */ + config?: Record; +} + +/** + * Signature verification configuration + */ +export interface SignatureConfig { + /** Header containing the signature */ + header: string; + /** Algorithm to use for verification */ + algorithm: 'sha256' | 'sha1' | 'token' | 'slack-v0' | 'none'; + /** Environment variable containing the 
secret */ + secretEnvVar: string; + /** Optional prefix to strip from signature (e.g., 'sha256=') */ + signaturePrefix?: string; +} + +/** + * Webhook source configuration + */ +export interface WebhookSourceConfig { + /** Source identifier */ + id: string; + /** Display name */ + name: string; + /** Whether this source is enabled */ + enabled: boolean; + /** Signature verification config */ + signature: SignatureConfig; + /** Parser to use for this source */ + parser: string; + /** Responder to use for sending responses */ + responder: string; + /** Parser-specific configuration */ + parserConfig?: Record; + /** Responder-specific configuration */ + responderConfig?: Record; +} + +/** + * Event routing rule + */ +export interface WebhookRule { + /** Rule identifier */ + id: string; + /** Display name */ + name: string; + /** Whether this rule is enabled */ + enabled: boolean; + /** Source to match (* for any) */ + source: string; + /** Event type to match (* for any) */ + eventType: string; + /** JSONPath condition (optional) */ + condition?: string; + /** Action to take when matched */ + action: WebhookAction; + /** Priority (lower = higher priority) */ + priority: number; +} + +/** + * Complete webhook configuration + */ +export interface WebhookConfig { + sources: Record; + rules: WebhookRule[]; +} + +/** + * Parser interface - transforms source-specific payloads to normalized events + */ +export interface WebhookParser { + /** Parser identifier */ + id: string; + /** Parse raw payload into normalized event(s) */ + parse( + payload: unknown, + headers: Record, + config?: Record + ): NormalizedEvent[]; +} + +/** + * Response to send back to the source system + */ +export interface WebhookResponse { + /** Type of response */ + type: 'comment' | 'message' | 'reaction' | 'status'; + /** Target (issue number, channel ID, etc.) 
*/ + target: string | number; + /** Response body/content */ + body: string; + /** Additional response metadata */ + metadata?: Record; +} + +/** + * Responder interface - sends responses back to source systems + */ +export interface WebhookResponder { + /** Responder identifier */ + id: string; + /** Send a response to the source system */ + respond( + event: NormalizedEvent, + response: WebhookResponse, + config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }>; +} + +/** + * Result of processing a webhook + */ +export interface WebhookResult { + success: boolean; + eventId: string; + source: string; + eventType: string; + matchedRules: string[]; + actions: Array<{ + ruleId: string; + action: WebhookAction; + success: boolean; + error?: string; + }>; + responses: Array<{ + type: string; + success: boolean; + id?: string; + url?: string; + error?: string; + }>; +} diff --git a/src/daemon/agent-registry.ts b/src/daemon/agent-registry.ts index cae29d11..0bbba06e 100644 --- a/src/daemon/agent-registry.ts +++ b/src/daemon/agent-registry.ts @@ -10,6 +10,26 @@ import { createLogger } from '../utils/logger.js'; const log = createLogger('registry'); +/** + * Agent profile information for display and understanding agent behavior + */ +export interface AgentProfileRecord { + /** Display title/role (e.g., "Lead Developer", "Code Reviewer") */ + title?: string; + /** Short description of what this agent does */ + description?: string; + /** The prompt/task the agent was spawned with */ + spawnPrompt?: string; + /** Agent profile/persona prompt (e.g., lead agent instructions) */ + personaPrompt?: string; + /** Name of the persona preset used (e.g., "lead", "reviewer", "shadow-auditor") */ + personaName?: string; + /** Capabilities or tools available to the agent */ + capabilities?: string[]; + /** Tags for categorization */ + tags?: string[]; +} + export interface AgentRecord { id: string; name: string; @@ -23,6 +43,8 @@ export interface 
AgentRecord { lastSeen: string; messagesSent: number; messagesReceived: number; + /** Profile information for understanding agent behavior */ + profile?: AgentProfileRecord; } type AgentInput = { @@ -33,6 +55,7 @@ type AgentInput = { task?: string; workingDirectory?: string; team?: string; + profile?: AgentProfileRecord; }; export class AgentRegistry { @@ -60,6 +83,11 @@ export class AgentRegistry { const existing = this.agents.get(agent.name); if (existing) { + // Merge profile data if provided + const mergedProfile = agent.profile + ? { ...existing.profile, ...agent.profile } + : existing.profile; + const updated: AgentRecord = { ...existing, cli: agent.cli ?? existing.cli, @@ -68,6 +96,7 @@ export class AgentRegistry { task: agent.task ?? existing.task, workingDirectory: agent.workingDirectory ?? existing.workingDirectory, team: agent.team ?? existing.team, + profile: mergedProfile, lastSeen: now, }; this.agents.set(agent.name, updated); @@ -84,6 +113,7 @@ export class AgentRegistry { task: agent.task, workingDirectory: agent.workingDirectory, team: agent.team, + profile: agent.profile, firstSeen: now, lastSeen: now, messagesSent: 0, @@ -134,6 +164,42 @@ export class AgentRegistry { return Array.from(this.agents.values()); } + /** + * Remove an agent from the registry. + */ + remove(agentName: string): boolean { + const deleted = this.agents.delete(agentName); + if (deleted) { + this.save(); + } + return deleted; + } + + /** + * Remove agents that haven't been seen for longer than the threshold. 
+ * @param thresholdMs - Time in milliseconds (default: 24 hours) + * @returns Number of agents removed + */ + pruneStale(thresholdMs: number = 24 * 60 * 60 * 1000): number { + const cutoff = Date.now() - thresholdMs; + let removed = 0; + + for (const [name, record] of this.agents) { + const lastSeenTime = new Date(record.lastSeen).getTime(); + if (lastSeenTime < cutoff) { + this.agents.delete(name); + removed++; + log.info('Pruned stale agent', { name, lastSeen: record.lastSeen }); + } + } + + if (removed > 0) { + this.save(); + } + + return removed; + } + private ensureRecord(agentName: string): AgentRecord { const existing = this.agents.get(agentName); if (existing) return existing; @@ -182,6 +248,7 @@ export class AgentRegistry { task: raw.task, workingDirectory: raw.workingDirectory, team: raw.team, + profile: raw.profile, firstSeen: raw.firstSeen ?? new Date().toISOString(), lastSeen: raw.lastSeen ?? new Date().toISOString(), messagesSent: typeof raw.messagesSent === 'number' ? raw.messagesSent : 0, diff --git a/src/daemon/api.ts b/src/daemon/api.ts index 74a20d18..174f6998 100644 --- a/src/daemon/api.ts +++ b/src/daemon/api.ts @@ -19,6 +19,12 @@ import type { AddWorkspaceRequest, SpawnAgentRequest, } from './types.js'; +import { + startCLIAuth, + getAuthSession, + cancelAuthSession, + getSupportedProviders, +} from './cli-auth.js'; const logger = createLogger('daemon-api'); @@ -308,6 +314,82 @@ export class DaemonApi extends EventEmitter { const agents = this.agentManager.getAll(); return { status: 200, body: { agents } }; }); + + // === CLI Auth (for cloud server to call) === + + // List supported providers + this.routes.set('GET /auth/providers', async (): Promise => { + return { status: 200, body: { providers: getSupportedProviders() } }; + }); + + // Start CLI auth flow + this.routes.set('POST /auth/cli/:provider/start', async (req): Promise => { + const { provider } = req.params; + try { + const session = await startCLIAuth(provider); + return { + 
status: 200, + body: { + sessionId: session.id, + status: session.status, + authUrl: session.authUrl, + }, + }; + } catch (err) { + return { + status: 400, + body: { error: err instanceof Error ? err.message : 'Failed to start auth' }, + }; + } + }); + + // Get auth session status + this.routes.set('GET /auth/cli/:provider/status/:sessionId', async (req): Promise => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return { status: 404, body: { error: 'Session not found' } }; + } + return { + status: 200, + body: { + sessionId: session.id, + status: session.status, + authUrl: session.authUrl, + error: session.error, + promptsHandled: session.promptsHandled, + }, + }; + }); + + // Get credentials from completed auth + this.routes.set('GET /auth/cli/:provider/creds/:sessionId', async (req): Promise => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return { status: 404, body: { error: 'Session not found' } }; + } + if (session.status !== 'success') { + return { status: 400, body: { error: 'Auth not complete', status: session.status } }; + } + return { + status: 200, + body: { + token: session.token, + provider: session.provider, + }, + }; + }); + + // Cancel auth session + this.routes.set('POST /auth/cli/:provider/cancel/:sessionId', async (req): Promise => { + const { sessionId } = req.params; + const cancelled = cancelAuthSession(sessionId); + if (!cancelled) { + return { status: 404, body: { error: 'Session not found' } }; + } + return { status: 200, body: { success: true } }; + }); } /** diff --git a/src/daemon/cli-auth.ts b/src/daemon/cli-auth.ts new file mode 100644 index 00000000..908e6d5b --- /dev/null +++ b/src/daemon/cli-auth.ts @@ -0,0 +1,636 @@ +/** + * CLI Auth Handler for Workspace Daemon + * + * Handles CLI-based authentication (claude, codex, etc.) via PTY. + * Runs inside the workspace container where CLI tools are installed. 
+ */ + +import * as pty from 'node-pty'; +import * as crypto from 'crypto'; +import * as fs from 'fs/promises'; +import * as os from 'os'; +import { createLogger } from '../resiliency/logger.js'; +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + getSupportedProviders, + type CLIAuthConfig, + type PromptHandler, +} from '../shared/cli-auth-config.js'; + +const logger = createLogger('cli-auth'); + +// Re-export for consumers +export { CLI_AUTH_CONFIG, getSupportedProviders }; +export type { CLIAuthConfig, PromptHandler }; + +/** + * Auth session state + */ +interface AuthSession { + id: string; + provider: string; + status: 'starting' | 'waiting_auth' | 'success' | 'error'; + authUrl?: string; + token?: string; + refreshToken?: string; + tokenExpiresAt?: Date; + error?: string; + output: string; + promptsHandled: string[]; + createdAt: Date; + process?: pty.IPty; +} + +// Active sessions +const sessions = new Map(); + +// Clean up old sessions periodically +setInterval(() => { + const now = Date.now(); + for (const [id, session] of sessions) { + if (now - session.createdAt.getTime() > 10 * 60 * 1000) { + if (session.process) { + try { + session.process.kill(); + } catch { + // Process may already be dead + } + } + sessions.delete(id); + } + } +}, 60000); + +export interface StartCLIAuthOptions { + /** Use device flow instead of standard OAuth (if provider supports it) */ + useDeviceFlow?: boolean; +} + +/** + * Start CLI auth flow + * + * This function waits for the auth URL to be captured before returning, + * ensuring the caller can immediately open the OAuth popup. 
+ */ +export async function startCLIAuth( + provider: string, + options: StartCLIAuthOptions = {} +): Promise { + const config = CLI_AUTH_CONFIG[provider]; + if (!config) { + throw new Error(`Unknown provider: ${provider}`); + } + + const sessionId = crypto.randomUUID(); + const session: AuthSession = { + id: sessionId, + provider, + status: 'starting', + output: '', + promptsHandled: [], + createdAt: new Date(), + }; + sessions.set(sessionId, session); + + logger.info('CLI auth session created', { + sessionId, + provider, + totalActiveSessions: sessions.size, + allSessionIds: Array.from(sessions.keys()), + }); + + // Check if already authenticated (credentials exist) + try { + const existingCreds = await extractCredentials(provider, config); + if (existingCreds?.token) { + logger.info('Already authenticated - existing credentials found', { provider, sessionId }); + session.status = 'success'; + session.token = existingCreds.token; + session.refreshToken = existingCreds.refreshToken; + session.tokenExpiresAt = existingCreds.expiresAt; + return session; + } + } catch { + // No existing credentials, proceed with auth flow + } + + // Use device flow args if requested and supported + const args = options.useDeviceFlow && config.deviceFlowArgs + ? 
config.deviceFlowArgs + : config.args; + + logger.info('Starting CLI auth', { + provider, + sessionId, + useDeviceFlow: options.useDeviceFlow, + args, + }); + + const respondedPrompts = new Set(); + + // Create a promise that resolves when authUrl is captured or timeout + let resolveAuthUrl: () => void; + const authUrlPromise = new Promise((resolve) => { + resolveAuthUrl = resolve; + }); + + // Timeout for waiting for auth URL (shorter than the full OAuth timeout) + const AUTH_URL_WAIT_TIMEOUT = 15000; // 15 seconds to capture auth URL + const authUrlTimeout = setTimeout(() => { + logger.warn('Auth URL wait timeout, returning session without URL', { provider, sessionId }); + resolveAuthUrl(); + }, AUTH_URL_WAIT_TIMEOUT); + + try { + const proc = pty.spawn(config.command, args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: process.cwd(), + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + // Don't set BROWSER - let CLI fail to open browser and fall back to manual paste mode + // Setting BROWSER: 'echo' caused CLI to think browser opened and wait for callback that never came + DISPLAY: '', + } as Record, + }); + + session.process = proc; + + // Timeout handler - give user plenty of time to complete OAuth flow + // 5 minutes should be enough for even slow OAuth flows + const OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes + const timeout = setTimeout(() => { + if (session.status === 'starting' || session.status === 'waiting_auth') { + logger.warn('CLI auth timed out', { provider, sessionId, status: session.status }); + proc.kill(); + session.status = 'error'; + session.error = 'Timeout waiting for auth completion (5 minutes). 
Please try again.'; + } + }, config.waitTimeout + OAUTH_COMPLETION_TIMEOUT); + + // Keep-alive: Some CLIs timeout if they don't receive stdin input + // Send a space+backspace every 20 seconds to simulate user presence + const keepAliveInterval = setInterval(() => { + if (session.status === 'waiting_auth' && session.process) { + try { + // Send space then backspace - appears as user typing but no net effect + session.process.write(' \b'); + logger.debug('Keep-alive ping sent', { + sessionId, + status: session.status, + ageSeconds: Math.round((Date.now() - session.createdAt.getTime()) / 1000), + }); + } catch { + // Process may have exited + } + } + }, 20000); + + proc.onData((data: string) => { + session.output += data; + + // Handle prompts + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + session.promptsHandled.push(matchingPrompt.description); + logger.info('Auto-responding to prompt', { description: matchingPrompt.description }); + + const delay = matchingPrompt.delay ?? 
100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, delay); + } + + // Extract auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !session.authUrl) { + session.authUrl = match[1]; + session.status = 'waiting_auth'; + logger.info('Auth URL captured', { provider, url: session.authUrl }); + // Signal that we have the auth URL + clearTimeout(authUrlTimeout); + resolveAuthUrl(); + } + + // Log all output after auth URL is captured (for debugging) + if (session.authUrl) { + const trimmedData = stripAnsiCodes(data).trim(); + if (trimmedData.length > 0) { + logger.info('PTY output after auth URL', { + provider, + sessionId, + output: trimmedData.substring(0, 500), + }); + } + } + + // Check for success and try to extract credentials + if (matchesSuccessPattern(data, config.successPatterns)) { + session.status = 'success'; + logger.info('Success pattern detected, attempting credential extraction', { provider }); + + // Try to extract credentials immediately (CLI may not exit after success) + // Use a small delay to let the CLI finish writing the file + setTimeout(async () => { + try { + const creds = await extractCredentials(provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + logger.info('Credentials extracted successfully', { provider, hasRefreshToken: !!creds.refreshToken }); + } + } catch (err) { + logger.error('Failed to extract credentials on success', { error: String(err) }); + } + }, 500); + } + }); + + proc.onExit(async ({ exitCode }) => { + clearTimeout(timeout); + clearTimeout(authUrlTimeout); + clearInterval(keepAliveInterval); + + // Clear process reference so submitAuthCode knows PTY is gone + session.process = undefined; + + // Log full output for debugging PTY exit issues + const cleanOutput = 
stripAnsiCodes(session.output); + logger.info('CLI process exited', { + provider, + exitCode, + outputLength: session.output.length, + hasAuthUrl: !!session.authUrl, + sessionStatus: session.status, + promptsHandled: session.promptsHandled, + // Last 500 chars of output for debugging + outputTail: cleanOutput.slice(-500), + }); + + // Try to extract credentials + if (session.authUrl || exitCode === 0) { + try { + const creds = await extractCredentials(provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + session.status = 'success'; + } + } catch (err) { + logger.error('Failed to extract credentials', { error: String(err) }); + } + } + + if (!session.authUrl && !session.token && session.status !== 'error') { + session.status = 'error'; + session.error = 'CLI exited without auth URL or credentials'; + } + + // Resolve in case we're still waiting + resolveAuthUrl(); + }); + } catch (err) { + session.status = 'error'; + session.error = err instanceof Error ? 
err.message : 'Failed to spawn CLI'; + logger.error('Failed to start CLI auth', { error: session.error }); + clearTimeout(authUrlTimeout); + resolveAuthUrl!(); + } + + // Wait for auth URL to be captured (or timeout) + await authUrlPromise; + + return session; +} + +/** + * Get auth session status + */ +export function getAuthSession(sessionId: string): AuthSession | null { + return sessions.get(sessionId) || null; +} + +/** + * Submit auth code to a waiting session + * This writes the code to the PTY process stdin + * + * @returns Object with success status and optional error message + */ +export async function submitAuthCode( + sessionId: string, + code: string +): Promise<{ success: boolean; error?: string; needsRestart?: boolean }> { + // Log all active sessions for debugging + const activeSessionIds = Array.from(sessions.keys()); + logger.info('submitAuthCode called', { + sessionId, + codeLength: code.length, + activeSessionCount: activeSessionIds.length, + activeSessionIds, + }); + + const session = sessions.get(sessionId); + if (!session) { + logger.warn('Auth code submission failed: session not found', { + sessionId, + activeSessionIds, + hint: 'Session may have been cleaned up or never created', + }); + return { success: false, error: 'Session not found or expired', needsRestart: true }; + } + + logger.info('Session found for code submission', { + sessionId, + provider: session.provider, + status: session.status, + hasProcess: !!session.process, + hasAuthUrl: !!session.authUrl, + hasToken: !!session.token, + promptsHandled: session.promptsHandled, + createdAt: session.createdAt.toISOString(), + ageSeconds: Math.round((Date.now() - session.createdAt.getTime()) / 1000), + }); + + if (!session.process) { + logger.warn('Auth code submission failed: no PTY process', { + sessionId, + sessionStatus: session.status, + provider: session.provider, + outputLength: session.output?.length || 0, + outputTail: session.output ? 
stripAnsiCodes(session.output).slice(-500) : 'no output', + }); + + // Try to extract credentials as a fallback - maybe auth completed in browser + const config = CLI_AUTH_CONFIG[session.provider]; + if (config) { + try { + const creds = await extractCredentials(session.provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + session.status = 'success'; + logger.info('Credentials found despite PTY exit', { provider: session.provider }); + return { success: true }; + } + } catch { + // No credentials found + } + } + + // For providers like Claude that need the code pasted into CLI, + // if the PTY is gone, user needs to restart the auth flow + return { + success: false, + error: 'The authentication session has ended. The CLI process exited before the code could be entered. Please click "Try Again" to restart.', + needsRestart: true, + }; + } + + try { + // Clean the code - trim whitespace + const cleanCode = code.trim(); + + logger.info('Writing auth code to PTY', { + sessionId, + originalLength: code.length, + cleanLength: cleanCode.length, + codePreview: cleanCode.substring(0, 20) + '...', + }); + + // Write the auth code WITHOUT Enter first + // Claude CLI's Ink text input needs time to process the input + // before receiving Enter (tested: immediate Enter fails, delayed Enter works) + session.process.write(cleanCode); + logger.info('Auth code written, waiting before sending Enter...', { sessionId }); + + // Wait 1 second for CLI to process the typed input + await new Promise(resolve => setTimeout(resolve, 1000)); + + // Now send Enter to submit + session.process.write('\r'); + logger.info('Enter key sent', { sessionId }); + + // Start polling for credentials after code submission + // The CLI should write credentials shortly after receiving the code + const config = CLI_AUTH_CONFIG[session.provider]; + if (config) { + pollForCredentials(session, config); + } + + 
return { success: true }; + } catch (err) { + logger.error('Failed to submit auth code', { sessionId, error: String(err) }); + return { + success: false, + error: 'Failed to write to CLI process. The process may have exited. Please try again.', + needsRestart: true, + }; + } +} + +/** + * Poll for credentials file after auth code submission + * Some CLIs don't output success patterns, so we check the file directly + */ +async function pollForCredentials(session: AuthSession, config: CLIAuthConfig): Promise { + const maxAttempts = 10; + const pollInterval = 1000; // 1 second + + for (let i = 0; i < maxAttempts; i++) { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + + // Skip if session already has credentials or errored + if (session.token || session.status === 'error') { + return; + } + + try { + const creds = await extractCredentials(session.provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + session.status = 'success'; + logger.info('Credentials found via polling', { + provider: session.provider, + attempt: i + 1, + hasRefreshToken: !!creds.refreshToken, + }); + return; + } + } catch { + // File doesn't exist yet, continue polling + } + } + + logger.warn('Credential polling completed without finding credentials', { + provider: session.provider, + sessionId: session.id, + }); +} + +/** + * Complete auth session by polling for credentials + * Called when user indicates they've completed auth in browser + */ +export async function completeAuthSession(sessionId: string): Promise<{ + success: boolean; + error?: string; + token?: string; +}> { + const session = sessions.get(sessionId); + if (!session) { + return { success: false, error: 'Session not found or expired' }; + } + + // Already have credentials + if (session.token) { + return { success: true, token: session.token }; + } + + const config = CLI_AUTH_CONFIG[session.provider]; + if 
(!config) { + return { success: false, error: 'Unknown provider' }; + } + + // Poll for credentials (user just completed auth in browser) + const maxAttempts = 15; + const pollInterval = 1000; + + for (let i = 0; i < maxAttempts; i++) { + try { + const creds = await extractCredentials(session.provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + session.status = 'success'; + logger.info('Credentials found via complete polling', { + provider: session.provider, + attempt: i + 1, + }); + return { success: true, token: creds.token }; + } + } catch { + // File doesn't exist yet + } + await new Promise(resolve => setTimeout(resolve, pollInterval)); + } + + return { + success: false, + error: 'Credentials not found. Please ensure you completed authentication in the browser.', + }; +} + +/** + * Cancel auth session + */ +export function cancelAuthSession(sessionId: string): boolean { + const session = sessions.get(sessionId); + if (!session) return false; + + if (session.process) { + try { + session.process.kill(); + } catch { + // Already dead + } + } + + sessions.delete(sessionId); + return true; +} + +interface ExtractedCredentials { + token: string; + refreshToken?: string; + expiresAt?: Date; +} + +/** + * Extract credentials from CLI credential file + */ +async function extractCredentials( + provider: string, + config: CLIAuthConfig +): Promise { + if (!config.credentialPath) return null; + + try { + const credPath = config.credentialPath.replace('~', os.homedir()); + const content = await fs.readFile(credPath, 'utf8'); + const creds = JSON.parse(content); + + // Extract token based on provider + if (provider === 'anthropic') { + // Claude stores OAuth in: { claudeAiOauth: { accessToken: "...", refreshToken: "...", expiresAt: ... 
} } + if (creds.claudeAiOauth?.accessToken) { + return { + token: creds.claudeAiOauth.accessToken, + refreshToken: creds.claudeAiOauth.refreshToken, + expiresAt: creds.claudeAiOauth.expiresAt ? new Date(creds.claudeAiOauth.expiresAt) : undefined, + }; + } + // Fallback to legacy formats + const token = creds.oauth_token || creds.access_token || creds.api_key; + return token ? { token } : null; + } else if (provider === 'openai') { + // Codex stores OAuth in: { tokens: { access_token: "...", refresh_token: "...", ... } } + if (creds.tokens?.access_token) { + return { + token: creds.tokens.access_token, + refreshToken: creds.tokens.refresh_token, + }; + } + // Fallback: API key or legacy formats + const token = creds.OPENAI_API_KEY || creds.token || creds.access_token || creds.api_key; + return token ? { token } : null; + } else if (provider === 'opencode') { + // OpenCode stores multiple providers: { opencode: {...}, anthropic: {...}, openai: {...}, google: {...} } + // Check for any valid credential - prefer OpenCode Zen, then Anthropic + if (creds.opencode?.key) { + return { token: creds.opencode.key }; + } + if (creds.anthropic?.access) { + return { + token: creds.anthropic.access, + refreshToken: creds.anthropic.refresh, + expiresAt: creds.anthropic.expires ? new Date(creds.anthropic.expires) : undefined, + }; + } + if (creds.openai?.access) { + return { + token: creds.openai.access, + refreshToken: creds.openai.refresh, + expiresAt: creds.openai.expires ? new Date(creds.openai.expires) : undefined, + }; + } + if (creds.google?.key) { + return { token: creds.google.key }; + } + return null; + } + + const token = creds.token || creds.access_token || creds.api_key; + return token ? 
{ token } : null; + } catch { + return null; + } +} + diff --git a/src/daemon/services/browser-testing.ts b/src/daemon/services/browser-testing.ts new file mode 100644 index 00000000..e0283f0c --- /dev/null +++ b/src/daemon/services/browser-testing.ts @@ -0,0 +1,320 @@ +/** + * Browser Testing Service + * + * Provides browser automation capabilities for agents running in the workspace. + * Uses Playwright for browser control and Xvfb for headless display. + * + * Features: + * - Screenshot capture + * - Browser automation via Playwright + * - Visual regression testing + * - PDF generation + */ + +import { spawn, execSync } from 'child_process'; +import { existsSync, writeFileSync, mkdirSync } from 'fs'; +import { join } from 'path'; + +export interface ScreenshotOptions { + /** Output path for screenshot (default: /tmp/screenshot-{timestamp}.png) */ + outputPath?: string; + /** Full page screenshot */ + fullPage?: boolean; + /** Clip region */ + clip?: { x: number; y: number; width: number; height: number }; +} + +export interface BrowserTestOptions { + /** Browser to use (chromium, firefox, webkit) */ + browser?: 'chromium' | 'firefox' | 'webkit'; + /** Headless mode (default: true in container, false with VNC) */ + headless?: boolean; + /** Viewport size */ + viewport?: { width: number; height: number }; + /** Timeout in ms */ + timeout?: number; +} + +/** + * Check if browser testing is available + */ +export function isBrowserTestingAvailable(): boolean { + try { + // Check if DISPLAY is set (Xvfb running) + if (!process.env.DISPLAY) { + return false; + } + + // Check if Playwright is installed + execSync('npx playwright --version', { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * Take a screenshot of the current display + */ +export async function takeDisplayScreenshot( + options: ScreenshotOptions = {} +): Promise { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const outputPath = options.outputPath || 
`/tmp/screenshot-${timestamp}.png`; + + // Ensure output directory exists + const dir = join(outputPath, '..'); + if (!existsSync(dir)) { + mkdirSync(dir, { recursive: true }); + } + + return new Promise((resolve, reject) => { + const args = [outputPath]; + if (options.fullPage) { + args.unshift('-u'); // Capture including window decorations + } + + const proc = spawn('scrot', args, { + env: { ...process.env, DISPLAY: process.env.DISPLAY || ':99' }, + }); + + proc.on('close', (code) => { + if (code === 0) { + resolve(outputPath); + } else { + reject(new Error(`Screenshot failed with code ${code}`)); + } + }); + + proc.on('error', reject); + }); +} + +/** + * Run a Playwright test file + */ +export async function runPlaywrightTest( + testFile: string, + options: BrowserTestOptions = {} +): Promise<{ success: boolean; output: string; screenshots: string[] }> { + const browser = options.browser || 'chromium'; + const timeout = options.timeout || 30000; + + return new Promise((resolve) => { + const args = ['playwright', 'test', testFile, `--project=${browser}`]; + + if (options.headless !== false) { + args.push('--headed=false'); + } + + const proc = spawn('npx', args, { + env: { + ...process.env, + DISPLAY: process.env.DISPLAY || ':99', + PLAYWRIGHT_BROWSERS_PATH: '/ms-playwright', + }, + timeout, + }); + + let output = ''; + const screenshots: string[] = []; + + proc.stdout.on('data', (data) => { + output += data.toString(); + // Parse screenshot paths from output + const matches = data.toString().match(/Screenshot saved: (.+\.png)/g); + if (matches) { + screenshots.push(...matches.map((m: string) => m.replace('Screenshot saved: ', ''))); + } + }); + + proc.stderr.on('data', (data) => { + output += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + output, + screenshots, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + output: err.message, + screenshots: [], + }); + }); + }); +} + +/** + * Launch a 
browser and navigate to a URL + * Returns the browser PID for later control + */ +export async function launchBrowser( + url: string, + options: { browser?: 'chromium' | 'firefox' } = {} +): Promise<{ pid: number }> { + const browser = options.browser || 'chromium'; + const command = browser === 'firefox' ? 'firefox' : 'chromium'; + + return new Promise((resolve, reject) => { + const args = + browser === 'chromium' + ? ['--no-sandbox', '--disable-gpu', '--start-maximized', url] + : ['--new-window', url]; + + const proc = spawn(command, args, { + env: { ...process.env, DISPLAY: process.env.DISPLAY || ':99' }, + detached: true, + stdio: 'ignore', + }); + + proc.unref(); + + // Give browser time to start + setTimeout(() => { + if (proc.pid) { + resolve({ pid: proc.pid }); + } else { + reject(new Error('Failed to launch browser')); + } + }, 1000); + }); +} + +/** + * Generate a Playwright test file from a description + */ +export function generatePlaywrightTest( + name: string, + steps: Array<{ + action: 'goto' | 'click' | 'fill' | 'screenshot' | 'wait'; + target?: string; + value?: string; + }> +): string { + const testCode = ` +import { test, expect } from '@playwright/test'; + +test('${name}', async ({ page }) => { +${steps + .map((step) => { + switch (step.action) { + case 'goto': + return ` await page.goto('${step.target}');`; + case 'click': + return ` await page.click('${step.target}');`; + case 'fill': + return ` await page.fill('${step.target}', '${step.value}');`; + case 'screenshot': + return ` await page.screenshot({ path: '${step.target || 'screenshot.png'}' });`; + case 'wait': + return ` await page.waitForTimeout(${step.value || 1000});`; + default: + return ` // Unknown action: ${step.action}`; + } + }) + .join('\n')} +}); +`.trim(); + + return testCode; +} + +/** + * Run inline Playwright script + */ +export async function runPlaywrightScript( + script: string, + options: BrowserTestOptions = {} +): Promise<{ success: boolean; output: string; result?: 
unknown }> { + const tempDir = '/tmp/playwright-scripts'; + if (!existsSync(tempDir)) { + mkdirSync(tempDir, { recursive: true }); + } + + const scriptPath = join(tempDir, `script-${Date.now()}.mjs`); + + // Wrap script with Playwright imports and browser launch + const wrappedScript = ` +import { chromium, firefox, webkit } from 'playwright'; + +async function run() { + const browser = await ${options.browser || 'chromium'}.launch({ + headless: ${options.headless !== false}, + }); + const context = await browser.newContext({ + viewport: ${JSON.stringify(options.viewport || { width: 1920, height: 1080 })}, + }); + const page = await context.newPage(); + + try { + ${script} + } finally { + await browser.close(); + } +} + +run().catch(console.error); +`; + + writeFileSync(scriptPath, wrappedScript); + + return new Promise((resolve) => { + const proc = spawn('node', [scriptPath], { + env: { + ...process.env, + DISPLAY: process.env.DISPLAY || ':99', + }, + timeout: options.timeout || 30000, + }); + + let output = ''; + + proc.stdout.on('data', (data) => { + output += data.toString(); + }); + + proc.stderr.on('data', (data) => { + output += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + output, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + output: err.message, + }); + }); + }); +} + +/** + * Get VNC connection info + */ +export function getVNCInfo(): { + available: boolean; + vncUrl?: string; + noVncUrl?: string; +} { + const vncEnabled = process.env.VNC_ENABLED !== 'false'; + const vncPort = process.env.VNC_PORT || '5900'; + const noVncPort = process.env.NOVNC_PORT || '6080'; + const hostname = process.env.HOSTNAME || 'localhost'; + + return { + available: vncEnabled, + vncUrl: vncEnabled ? `vnc://${hostname}:${vncPort}` : undefined, + noVncUrl: vncEnabled ? 
`http://${hostname}:${noVncPort}/vnc.html` : undefined, + }; +} diff --git a/src/daemon/services/container-spawner.ts b/src/daemon/services/container-spawner.ts new file mode 100644 index 00000000..8eb4f887 --- /dev/null +++ b/src/daemon/services/container-spawner.ts @@ -0,0 +1,418 @@ +/** + * Container Spawner Service + * + * Allows agents to spawn isolated Docker containers for specific tasks. + * Requires Docker socket to be mounted: -v /var/run/docker.sock:/var/run/docker.sock + * + * Use cases: + * - Running untrusted code in isolation + * - Testing against different environments (Node versions, OS variants) + * - Parallel task execution + * - Language-specific toolchains + */ + +import { spawn, execSync } from 'child_process'; +import { existsSync } from 'fs'; + +export interface ContainerConfig { + /** Docker image to use */ + image: string; + /** Command to run (default: shell) */ + command?: string[]; + /** Working directory inside container */ + workdir?: string; + /** Environment variables */ + env?: Record; + /** Volumes to mount (host:container format) */ + volumes?: string[]; + /** Port mappings (host:container format) */ + ports?: string[]; + /** Memory limit (e.g., '512m', '2g') */ + memory?: string; + /** CPU limit (e.g., '0.5', '2') */ + cpus?: string; + /** Network mode (bridge, host, none) */ + network?: 'bridge' | 'host' | 'none'; + /** Remove container after exit */ + autoRemove?: boolean; + /** Container name */ + name?: string; + /** Timeout in ms */ + timeout?: number; +} + +export interface ContainerResult { + success: boolean; + exitCode: number | null; + stdout: string; + stderr: string; + containerId?: string; +} + +/** + * Check if Docker is available + */ +export function isDockerAvailable(): boolean { + // Check if socket exists + if (!existsSync('/var/run/docker.sock')) { + return false; + } + + try { + execSync('docker info', { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * Build Docker command 
arguments from config + */ +function buildDockerArgs(config: ContainerConfig): string[] { + const args: string[] = ['run']; + + // Auto-remove + if (config.autoRemove !== false) { + args.push('--rm'); + } + + // Name + if (config.name) { + args.push('--name', config.name); + } + + // Working directory + if (config.workdir) { + args.push('-w', config.workdir); + } + + // Environment variables + if (config.env) { + for (const [key, value] of Object.entries(config.env)) { + args.push('-e', `${key}=${value}`); + } + } + + // Volumes + if (config.volumes) { + for (const vol of config.volumes) { + args.push('-v', vol); + } + } + + // Ports + if (config.ports) { + for (const port of config.ports) { + args.push('-p', port); + } + } + + // Resource limits + if (config.memory) { + args.push('--memory', config.memory); + } + if (config.cpus) { + args.push('--cpus', config.cpus); + } + + // Network + if (config.network) { + args.push('--network', config.network); + } + + // Image + args.push(config.image); + + // Command + if (config.command && config.command.length > 0) { + args.push(...config.command); + } + + return args; +} + +/** + * Run a command in a new container and wait for completion + */ +export async function runInContainer(config: ContainerConfig): Promise { + if (!isDockerAvailable()) { + return { + success: false, + exitCode: null, + stdout: '', + stderr: 'Docker is not available. 
Mount /var/run/docker.sock to enable container spawning.', + }; + } + + const args = buildDockerArgs(config); + + return new Promise((resolve) => { + const proc = spawn('docker', args, { + timeout: config.timeout || 60000, + }); + + let stdout = ''; + let stderr = ''; + + proc.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + proc.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + exitCode: code, + stdout, + stderr, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + exitCode: null, + stdout, + stderr: err.message, + }); + }); + }); +} + +/** + * Run a command in a container interactively (for TTY) + */ +export function runInteractive(config: ContainerConfig): { pid: number; containerId?: string } { + if (!isDockerAvailable()) { + throw new Error('Docker is not available'); + } + + const args = buildDockerArgs({ ...config, autoRemove: true }); + args.splice(1, 0, '-it'); // Add interactive + TTY flags + + const proc = spawn('docker', args, { + stdio: 'inherit', + detached: false, + }); + + return { pid: proc.pid || 0 }; +} + +/** + * Start a container in the background + */ +export async function startContainer(config: ContainerConfig): Promise<{ containerId: string }> { + if (!isDockerAvailable()) { + throw new Error('Docker is not available'); + } + + const args = buildDockerArgs({ ...config, autoRemove: false }); + args.splice(1, 0, '-d'); // Add detach flag + + const result = execSync(`docker ${args.join(' ')}`, { encoding: 'utf-8' }); + const containerId = result.trim(); + + return { containerId }; +} + +/** + * Stop a running container + */ +export async function stopContainer(containerId: string): Promise { + execSync(`docker stop ${containerId}`, { stdio: 'pipe' }); +} + +/** + * Execute a command in a running container + */ +export async function execInContainer( + containerId: string, + command: string[], + options: { workdir?: 
string; env?: Record } = {} +): Promise { + const args = ['exec']; + + if (options.workdir) { + args.push('-w', options.workdir); + } + + if (options.env) { + for (const [key, value] of Object.entries(options.env)) { + args.push('-e', `${key}=${value}`); + } + } + + args.push(containerId, ...command); + + return new Promise((resolve) => { + const proc = spawn('docker', args); + + let stdout = ''; + let stderr = ''; + + proc.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + proc.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + exitCode: code, + stdout, + stderr, + containerId, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + exitCode: null, + stdout, + stderr: err.message, + containerId, + }); + }); + }); +} + +/** + * Pull a Docker image + */ +export async function pullImage(image: string): Promise { + if (!isDockerAvailable()) { + return false; + } + + try { + execSync(`docker pull ${image}`, { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * List running containers + */ +export function listContainers(): Array<{ + id: string; + image: string; + name: string; + status: string; +}> { + if (!isDockerAvailable()) { + return []; + } + + try { + const output = execSync( + 'docker ps --format "{{.ID}}|{{.Image}}|{{.Names}}|{{.Status}}"', + { encoding: 'utf-8' } + ); + + return output + .trim() + .split('\n') + .filter(Boolean) + .map((line) => { + const [id, image, name, status] = line.split('|'); + return { id, image, name, status }; + }); + } catch { + return []; + } +} + +// ============================================================================ +// Predefined container configurations for common tasks +// ============================================================================ + +export const PRESET_CONTAINERS = { + /** Node.js 20 environment */ + node20: { + image: 'node:20-slim', + workdir: 
'/workspace', + }, + + /** Python 3.11 environment */ + python311: { + image: 'python:3.11-slim', + workdir: '/workspace', + }, + + /** Go 1.21 environment */ + go121: { + image: 'golang:1.21-alpine', + workdir: '/workspace', + }, + + /** Rust environment */ + rust: { + image: 'rust:slim', + workdir: '/workspace', + }, + + /** Ubuntu with common tools */ + ubuntu: { + image: 'ubuntu:22.04', + workdir: '/workspace', + }, + + /** Alpine minimal */ + alpine: { + image: 'alpine:3.18', + workdir: '/workspace', + }, + + /** Playwright with browsers */ + playwright: { + image: 'mcr.microsoft.com/playwright:latest', + workdir: '/workspace', + }, +} as const; + +/** + * Run code in a language-specific container + */ +export async function runCode( + language: 'node' | 'python' | 'go' | 'rust' | 'bash', + code: string, + options: { workspaceDir?: string; timeout?: number } = {} +): Promise { + const configs: Record = { + node: { image: 'node:20-slim', command: ['node', '-e', code] }, + python: { image: 'python:3.11-slim', command: ['python', '-c', code] }, + go: { image: 'golang:1.21-alpine', command: ['go', 'run', '-'] }, + rust: { image: 'rust:slim', command: ['rustc', '--edition', '2021', '-', '-o', '/tmp/a', '&&', '/tmp/a'] }, + bash: { image: 'ubuntu:22.04', command: ['bash', '-c', code] }, + }; + + const config = configs[language]; + if (!config) { + return { + success: false, + exitCode: null, + stdout: '', + stderr: `Unknown language: ${language}`, + }; + } + + return runInContainer({ + image: config.image, + command: config.command, + workdir: '/workspace', + volumes: options.workspaceDir ? 
[`${options.workspaceDir}:/workspace`] : [], + timeout: options.timeout, + memory: '512m', + cpus: '1', + }); +} diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index 1c53a6db..c00a2864 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -18,6 +18,15 @@ import type { ProjectConfig, SpawnRequest } from '../bridge/types.js'; import { listTrajectorySteps, getTrajectoryStatus, getTrajectoryHistory } from '../trajectory/integration.js'; import { loadTeamsConfig } from '../bridge/teams-config.js'; import { getMemoryMonitor } from '../resiliency/memory-monitor.js'; +import { detectWorkspacePath } from '../utils/project-namespace.js'; +import { + startCLIAuth, + getAuthSession, + cancelAuthSession, + submitAuthCode, + completeAuthSession, + getSupportedProviders, +} from '../daemon/cli-auth.js'; /** * Initialize cloud persistence for session tracking. @@ -400,8 +409,13 @@ export async function startDashboard( : undefined; // Initialize spawner if enabled + // Use detectWorkspacePath to find the actual repo directory in cloud workspaces + const workspacePath = detectWorkspacePath(projectRoot || dataDir); + console.log(`[dashboard] Workspace path: ${workspacePath}`); + + // Pass dashboard port to spawner so spawned agents can call spawn/release APIs for nested spawning const spawner: AgentSpawner | undefined = enableSpawner - ? new AgentSpawner(projectRoot || dataDir, tmuxSession) + ? new AgentSpawner(workspacePath, tmuxSession, port) : undefined; // Initialize cloud persistence and memory monitoring if enabled (RELAY_CLOUD_ENABLED=true) @@ -1363,30 +1377,54 @@ export async function startDashboard( getAgentSummaries(), ]); - // Filter agents: + // Filter and separate agents from human users: // 1. Exclude "Dashboard" (internal agent, not a real team member) // 2. Exclude offline agents (no lastSeen or lastSeen > threshold) + // 3. Exclude agents without a known CLI (these are improperly registered or stale) + // 4. 
Separate human users (cli === 'dashboard') from AI agents const now = Date.now(); // 30 seconds - aligns with heartbeat timeout (5s heartbeat * 6 multiplier = 30s) // This ensures agents disappear quickly after they stop responding to heartbeats const OFFLINE_THRESHOLD_MS = 30 * 1000; - const filteredAgents = Array.from(agentsMap.values()).filter(agent => { - // Exclude Dashboard - if (agent.name === 'Dashboard') return false; - // Exclude agents starting with __ (internal/system agents) - if (agent.name.startsWith('__')) return false; + // First pass: filter out invalid/offline entries + const validEntries = Array.from(agentsMap.values()) + .filter(agent => { + // Exclude Dashboard + if (agent.name === 'Dashboard') return false; - // Exclude offline agents (no lastSeen or too old) - if (!agent.lastSeen) return false; - const lastSeenTime = new Date(agent.lastSeen).getTime(); - if (now - lastSeenTime > OFFLINE_THRESHOLD_MS) return false; + // Exclude agents starting with __ (internal/system agents) + if (agent.name.startsWith('__')) return false; - return true; - }); + // Exclude agents without a proper CLI (improperly registered or stale) + if (!agent.cli || agent.cli === 'Unknown') return false; + + // Exclude offline agents (no lastSeen or too old) + if (!agent.lastSeen) return false; + const lastSeenTime = new Date(agent.lastSeen).getTime(); + if (now - lastSeenTime > OFFLINE_THRESHOLD_MS) return false; + + return true; + }); + + // Separate AI agents from human users + const filteredAgents = validEntries + .filter(agent => agent.cli !== 'dashboard') + .map(agent => ({ + ...agent, + isHuman: false, + })); + + const humanUsers = validEntries + .filter(agent => agent.cli === 'dashboard') + .map(agent => ({ + ...agent, + isHuman: true, + })); return { agents: filteredAgents, + users: humanUsers, messages: allMessages, activity: allMessages, // For now, activity log is just the message log sessions, @@ -1726,11 +1764,56 @@ export async function startDashboard( }); 
}); + // Deduplication for log output - prevent same content from being broadcast multiple times + // Key: agentName -> Set of recent content hashes (rolling window) + const recentLogHashes = new Map>(); + const MAX_LOG_HASH_WINDOW = 50; // Keep last 50 hashes per agent + + // Simple hash function for log dedup + const hashLogContent = (content: string): string => { + // Normalize whitespace and create a simple hash + const normalized = content.replace(/\s+/g, ' ').trim().slice(0, 200); + let hash = 0; + for (let i = 0; i < normalized.length; i++) { + const char = normalized.charCodeAt(i); + hash = ((hash << 5) - hash) + char; + hash = hash & hash; + } + return hash.toString(36); + }; + // Function to broadcast log output to subscribed clients const broadcastLogOutput = (agentName: string, output: string) => { const clients = logSubscriptions.get(agentName); if (!clients || clients.size === 0) return; + // Skip empty or whitespace-only output + const trimmed = output.trim(); + if (!trimmed) return; + + // Dedup: Check if we've recently broadcast this content + const hash = hashLogContent(output); + let agentHashes = recentLogHashes.get(agentName); + if (!agentHashes) { + agentHashes = new Set(); + recentLogHashes.set(agentName, agentHashes); + } + + if (agentHashes.has(hash)) { + // Already broadcast this content recently, skip + return; + } + + // Add to rolling window + agentHashes.add(hash); + if (agentHashes.size > MAX_LOG_HASH_WINDOW) { + // Remove oldest entry (first in Set iteration order) + const oldest = agentHashes.values().next().value; + if (oldest !== undefined) { + agentHashes.delete(oldest); + } + } + const payload = JSON.stringify({ type: 'output', agent: agentName, @@ -1765,8 +1848,39 @@ export async function startDashboard( return Array.from(onlineUsers.values()).map((state) => state.info); }; + // Heartbeat to detect dead connections (30 seconds) + const PRESENCE_HEARTBEAT_INTERVAL = 30000; + const presenceHealth = new WeakMap(); + + const 
presenceHeartbeat = setInterval(() => { + wssPresence.clients.forEach((ws) => { + const health = presenceHealth.get(ws); + if (!health) { + presenceHealth.set(ws, { isAlive: true }); + return; + } + if (!health.isAlive) { + ws.terminate(); + return; + } + health.isAlive = false; + ws.ping(); + }); + }, PRESENCE_HEARTBEAT_INTERVAL); + + wssPresence.on('close', () => { + clearInterval(presenceHeartbeat); + }); + wssPresence.on('connection', (ws) => { - console.log('[dashboard] Presence WebSocket client connected'); + // Initialize health tracking (no log - too noisy) + presenceHealth.set(ws, { isAlive: true }); + + ws.on('pong', () => { + const health = presenceHealth.get(ws); + if (health) health.isAlive = true; + }); + let clientUsername: string | undefined; ws.on('message', (data) => { @@ -1797,7 +1911,11 @@ export async function startDashboard( // Add this connection to existing user existing.connections.add(ws); existing.info.lastSeen = now; - console.log(`[dashboard] User ${username} opened new tab (${existing.connections.size} connections)`); + // Only log at milestones to reduce noise + const count = existing.connections.size; + if (count === 2 || count === 5 || count === 10 || count % 50 === 0) { + console.log(`[dashboard] User ${username} has ${count} connections`); + } } else { // New user - create presence state onlineUsers.set(username, { @@ -1990,6 +2108,189 @@ export async function startDashboard( }); }); + // ===== CLI Auth API (for workspace-based provider authentication) ===== + + /** + * POST /auth/cli/:provider/start - Start CLI auth flow + * Body: { useDeviceFlow?: boolean } + */ + app.post('/auth/cli/:provider/start', async (req, res) => { + const { provider } = req.params; + const { useDeviceFlow } = req.body || {}; + try { + const session = await startCLIAuth(provider, { useDeviceFlow }); + res.json({ + sessionId: session.id, + status: session.status, + authUrl: session.authUrl, + }); + } catch (err) { + res.status(400).json({ + error: err 
instanceof Error ? err.message : 'Failed to start CLI auth', + }); + } + }); + + /** + * GET /auth/cli/:provider/status/:sessionId - Get auth session status + */ + app.get('/auth/cli/:provider/status/:sessionId', (req, res) => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return res.status(404).json({ error: 'Session not found' }); + } + res.json({ + status: session.status, + authUrl: session.authUrl, + error: session.error, + }); + }); + + /** + * GET /auth/cli/:provider/creds/:sessionId - Get credentials from completed auth + */ + app.get('/auth/cli/:provider/creds/:sessionId', (req, res) => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return res.status(404).json({ error: 'Session not found' }); + } + if (session.status !== 'success') { + return res.status(400).json({ error: 'Auth not complete', status: session.status }); + } + res.json({ + token: session.token, + refreshToken: session.refreshToken, + expiresAt: session.tokenExpiresAt?.toISOString(), + }); + }); + + /** + * POST /auth/cli/:provider/cancel/:sessionId - Cancel auth session + */ + app.post('/auth/cli/:provider/cancel/:sessionId', (req, res) => { + const { sessionId } = req.params; + const cancelled = cancelAuthSession(sessionId); + if (!cancelled) { + return res.status(404).json({ error: 'Session not found' }); + } + res.json({ success: true }); + }); + + /** + * POST /auth/cli/:provider/code/:sessionId - Submit auth code to PTY + * Used when OAuth returns a code that must be pasted into the CLI + */ + app.post('/auth/cli/:provider/code/:sessionId', async (req, res) => { + const { provider, sessionId } = req.params; + const { code } = req.body; + + console.log('[cli-auth] Auth code submission received', { provider, sessionId, codeLength: code?.length }); + + if (!code || typeof code !== 'string') { + return res.status(400).json({ error: 'Auth code is required' }); + } + + try { + const 
result = await submitAuthCode(sessionId, code); + console.log('[cli-auth] Auth code submission result', { provider, sessionId, result }); + + if (!result.success) { + // Use 400 for all errors since they can be retried + return res.status(400).json({ + error: result.error || 'Session not found or process not running', + needsRestart: result.needsRestart ?? true, + }); + } + + // Wait a few seconds for CLI to process and write credentials + // The 1s delay in submitAuthCode + CLI processing time means credentials + // should be available within 3-5 seconds + let sessionStatus = 'waiting_auth'; + for (let i = 0; i < 10; i++) { + await new Promise(resolve => setTimeout(resolve, 500)); + const session = getAuthSession(sessionId); + if (session?.status === 'success') { + sessionStatus = 'success'; + console.log('[cli-auth] Credentials found after code submission', { provider, sessionId, attempt: i + 1 }); + break; + } + if (session?.status === 'error') { + sessionStatus = 'error'; + break; + } + } + + res.json({ + success: true, + message: 'Auth code submitted', + status: sessionStatus, + }); + } catch (err) { + console.error('[cli-auth] Auth code submission error', { provider, sessionId, error: String(err) }); + return res.status(500).json({ + error: 'Internal error submitting auth code. 
Please try again.', + needsRestart: true, + }); + } + }); + + /** + * POST /auth/cli/:provider/complete/:sessionId - Complete auth + * For providers like Claude: just polls for credentials + * For providers like Codex: accepts authCode (redirect URL) and extracts the code + */ + app.post('/auth/cli/:provider/complete/:sessionId', async (req, res) => { + const { sessionId } = req.params; + const { authCode } = req.body || {}; + + // If authCode provided, try to extract code and submit it + if (authCode && typeof authCode === 'string') { + let code = authCode; + + // If it's a URL, extract the code parameter + if (authCode.startsWith('http')) { + try { + const url = new URL(authCode); + const codeParam = url.searchParams.get('code'); + if (codeParam) { + code = codeParam; + } + } catch { + // Not a valid URL, use as-is + } + } + + // Submit the code to the CLI process + const submitResult = await submitAuthCode(sessionId, code); + if (!submitResult.success) { + return res.status(400).json({ + error: submitResult.error, + needsRestart: submitResult.needsRestart, + }); + } + + // Wait a moment for credentials to be written + await new Promise(resolve => setTimeout(resolve, 2000)); + } + + // Poll for credentials + const result = await completeAuthSession(sessionId); + if (!result.success) { + return res.status(400).json({ error: result.error }); + } + + res.json({ success: true, message: 'Authentication complete' }); + }); + + /** + * GET /auth/cli/providers - List supported providers + */ + app.get('/auth/cli/providers', (req, res) => { + res.json({ providers: getSupportedProviders() }); + }); + // ===== Metrics API ===== /** @@ -2986,6 +3287,140 @@ Start by greeting the project leads and asking for status updates.`; } }); + // ===== Settings API ===== + + /** + * GET /api/settings - Get all workspace settings with documentation + */ + app.get('/api/settings', async (_req, res) => { + try { + const { readRelayConfig, shouldStoreInRepo, 
getTrajectoriesStorageDescription } = await import('../trajectory/config.js'); + const config = readRelayConfig(); + + res.json({ + success: true, + settings: { + trajectories: { + storeInRepo: shouldStoreInRepo(), + storageLocation: getTrajectoriesStorageDescription(), + description: 'Trajectories record the journey of agent work using the PDERO paradigm (Plan, Design, Execute, Review, Observe). They capture decisions, phase transitions, and retrospectives.', + benefits: [ + 'Track why decisions were made, not just what was built', + 'Enable session recovery when agents crash or context is lost', + 'Provide learning data for future agents working on similar tasks', + 'Create an audit trail of agent work for review', + ], + learnMore: 'https://pdero.com', + optInReason: 'Enable "Store in repo" to version-control your trajectories alongside your code. This is useful for teams who want to review agent decision-making processes.', + }, + }, + config, + }); + } catch (err: any) { + console.error('[api] Settings error:', err); + res.status(500).json({ + success: false, + error: err.message, + }); + } + }); + + /** + * GET /api/settings/trajectory - Get trajectory storage settings + */ + app.get('/api/settings/trajectory', async (_req, res) => { + try { + const { readRelayConfig, shouldStoreInRepo, getTrajectoriesStorageDescription } = await import('../trajectory/config.js'); + const config = readRelayConfig(); + + res.json({ + success: true, + settings: { + storeInRepo: shouldStoreInRepo(), + storageLocation: getTrajectoriesStorageDescription(), + }, + config: config.trajectories || {}, + // Documentation for the UI + documentation: { + title: 'Trajectory Storage', + description: 'Trajectories record the journey of agent work using the PDERO paradigm (Plan, Design, Execute, Review, Observe).', + whatIsIt: 'A trajectory captures not just what an agent built, but WHY it made specific decisions. 
This includes phase transitions, key decisions with reasoning, and retrospective summaries.', + benefits: [ + 'Understand agent decision-making for code review', + 'Enable session recovery if agents crash', + 'Train future agents on your codebase patterns', + 'Create audit trails of AI work', + ], + storeInRepoExplanation: 'When enabled, trajectories are stored in .trajectories/ in your repo and can be committed to source control. When disabled (default), they are stored in your user directory (~/.config/agent-relay/trajectories/).', + learnMore: 'https://pdero.com', + }, + }); + } catch (err: any) { + console.error('[api] Settings trajectory error:', err); + res.status(500).json({ + success: false, + error: err.message, + }); + } + }); + + /** + * PUT /api/settings/trajectory - Update trajectory storage settings + * + * Body: { storeInRepo: boolean } + * + * This writes to .relay/config.json in the project root + */ + app.put('/api/settings/trajectory', async (req, res) => { + try { + const { storeInRepo } = req.body; + + if (typeof storeInRepo !== 'boolean') { + return res.status(400).json({ + success: false, + error: 'storeInRepo must be a boolean', + }); + } + + const { getRelayConfigPath, readRelayConfig } = await import('../trajectory/config.js'); + const { getProjectPaths } = await import('../utils/project-namespace.js'); + const { projectRoot: _projectRoot } = getProjectPaths(); + + // Read existing config + const config = readRelayConfig(); + + // Update trajectory settings + config.trajectories = { + ...config.trajectories, + storeInRepo, + }; + + // Ensure .relay directory exists + const configPath = getRelayConfigPath(); + const configDir = path.dirname(configPath); + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }); + } + + // Write updated config + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + res.json({ + success: true, + settings: { + storeInRepo, + storageLocation: storeInRepo ? 
'repo (.trajectories/)' : 'user (~/.config/agent-relay/trajectories/)', + }, + }); + } catch (err: any) { + console.error('[api] Settings trajectory update error:', err); + res.status(500).json({ + success: false, + error: err.message, + }); + } + }); + // ===== Decision Queue API ===== interface Decision { diff --git a/src/dashboard/app/app/page.tsx b/src/dashboard/app/app/page.tsx index f3b2dc16..e5b80f2b 100644 --- a/src/dashboard/app/app/page.tsx +++ b/src/dashboard/app/app/page.tsx @@ -1,13 +1,776 @@ /** * Dashboard V2 - Main App Page * - * Entry point for the dashboard application (after login). + * In cloud mode: Shows workspace selection and connects to selected workspace's dashboard. + * In local mode: Connects to local daemon WebSocket. */ 'use client'; +import React, { useState, useEffect, useCallback } from 'react'; import { App } from '../../react-components/App'; +import { CloudSessionProvider } from '../../react-components/CloudSessionProvider'; +import { LogoIcon } from '../../react-components/Logo'; +import { setActiveWorkspaceId } from '../../lib/api'; +import { ProviderAuthFlow } from '../../react-components/ProviderAuthFlow'; + +interface Workspace { + id: string; + name: string; + status: 'provisioning' | 'running' | 'stopped' | 'error'; + publicUrl?: string; + providers?: string[]; + repositories?: string[]; + createdAt: string; +} + +interface Repository { + id: string; + fullName: string; + isPrivate: boolean; + defaultBranch: string; + syncStatus: string; + hasNangoConnection: boolean; +} + +interface ProviderInfo { + id: string; + name: string; + displayName: string; + color: string; + cliCommand?: string; + /** Whether this provider supports device flow (code displayed on screen) */ + supportsDeviceFlow?: boolean; + /** Whether standard flow redirects to a URL the user must copy (shows "not found" page) */ + requiresUrlCopy?: boolean; +} + +// ProviderAuthState simplified - now using ProviderAuthFlow shared component + +type PageState = 
'loading' | 'local' | 'select-workspace' | 'no-workspaces' | 'connect-provider' | 'connecting' | 'connected' | 'error'; + +// Available AI providers +const AI_PROVIDERS: ProviderInfo[] = [ + { id: 'anthropic', name: 'Anthropic', displayName: 'Claude', color: '#D97757', cliCommand: 'claude' }, + { id: 'codex', name: 'OpenAI', displayName: 'Codex', color: '#10A37F', cliCommand: 'codex login', supportsDeviceFlow: true, requiresUrlCopy: true }, + { id: 'opencode', name: 'OpenCode', displayName: 'OpenCode', color: '#00D4AA', cliCommand: 'opencode' }, + { id: 'droid', name: 'Factory', displayName: 'Droid', color: '#6366F1', cliCommand: 'droid' }, +]; + +// Force cloud mode via env var - prevents silent fallback to local mode +const FORCE_CLOUD_MODE = process.env.NEXT_PUBLIC_FORCE_CLOUD_MODE === 'true'; export default function DashboardPage() { - return ; + const [state, setState] = useState('loading'); + const [workspaces, setWorkspaces] = useState([]); + const [repos, setRepos] = useState([]); + const [selectedWorkspace, setSelectedWorkspace] = useState(null); + const [wsUrl, setWsUrl] = useState(undefined); + const [error, setError] = useState(null); + // Track cloud mode for potential future use + const [_isCloudMode, setIsCloudMode] = useState(FORCE_CLOUD_MODE); + const [csrfToken, setCsrfToken] = useState(null); + const [connectingProvider, setConnectingProvider] = useState(null); + + // Check if we're in cloud mode and fetch data + useEffect(() => { + const init = async () => { + try { + // Check session to determine if we're in cloud mode + const sessionRes = await fetch('/api/auth/session', { credentials: 'include' }); + + // If session endpoint doesn't exist (404), we're in local mode + if (sessionRes.status === 404) { + if (FORCE_CLOUD_MODE) { + throw new Error('Cloud mode enforced but session endpoint returned 404. 
Is the cloud server running?'); + } + setIsCloudMode(false); + setState('local'); + return; + } + + // Capture CSRF token from response header + const token = sessionRes.headers.get('X-CSRF-Token'); + if (token) { + setCsrfToken(token); + } + + const session = await sessionRes.json(); + + if (!session.authenticated) { + // Cloud mode but not authenticated - redirect to login + window.location.href = '/login'; + return; + } + + // Cloud mode - fetch workspaces and repos + setIsCloudMode(true); + + const [workspacesRes, reposRes] = await Promise.all([ + fetch('/api/workspaces', { credentials: 'include' }), + fetch('/api/github-app/repos', { credentials: 'include' }), + ]); + + if (!workspacesRes.ok) { + if (workspacesRes.status === 401) { + window.location.href = '/login'; + return; + } + throw new Error('Failed to fetch workspaces'); + } + + const workspacesData = await workspacesRes.json(); + const reposData = reposRes.ok ? await reposRes.json() : { repositories: [] }; + + setWorkspaces(workspacesData.workspaces || []); + setRepos(reposData.repositories || []); + + // Determine next state based on workspace availability + const runningWorkspaces = (workspacesData.workspaces || []).filter( + (w: Workspace) => w.status === 'running' && w.publicUrl + ); + + if (runningWorkspaces.length === 1) { + // Auto-connect to the only running workspace + connectToWorkspace(runningWorkspaces[0]); + } else if (runningWorkspaces.length > 1) { + setState('select-workspace'); + } else if ((workspacesData.workspaces || []).length > 0) { + // Has workspaces but none running + setState('select-workspace'); + } else if ((reposData.repositories || []).length > 0) { + // Has repos but no workspaces - show create workspace + setState('no-workspaces'); + } else { + // No repos, no workspaces - redirect to connect repos + window.location.href = '/connect-repos'; + } + } catch (err) { + // If session check fails with network error, assume local mode (unless forced cloud) + if (err instanceof 
TypeError && err.message.includes('Failed to fetch')) { + if (FORCE_CLOUD_MODE) { + console.error('Cloud mode enforced but network request failed:', err); + setError('Cloud mode enforced but failed to connect to server. Is the cloud server running?'); + setState('error'); + return; + } + setIsCloudMode(false); + setState('local'); + return; + } + console.error('Init error:', err); + setError(err instanceof Error ? err.message : 'Failed to initialize'); + setState('error'); + } + }; + + init(); + }, []); + + const connectToWorkspace = useCallback((workspace: Workspace) => { + if (!workspace.publicUrl) { + setError('Workspace has no public URL'); + setState('error'); + return; + } + + setSelectedWorkspace(workspace); + setState('connecting'); + + // Set the active workspace ID for API proxying + setActiveWorkspaceId(workspace.id); + + // Derive WebSocket URL from public URL + // e.g., https://workspace-abc.agentrelay.dev -> wss://workspace-abc.agentrelay.dev/ws + const url = new URL(workspace.publicUrl); + const wsProtocol = url.protocol === 'https:' ? 
'wss:' : 'ws:'; + const derivedWsUrl = `${wsProtocol}//${url.host}/ws`; + + setWsUrl(derivedWsUrl); + setState('connected'); + }, []); + + const handleCreateWorkspace = useCallback(async (repoFullName: string) => { + setState('loading'); + setError(null); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + + const res = await fetch('/api/workspaces/quick', { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ repositoryFullName: repoFullName }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to create workspace'); + } + + // Poll for workspace to be ready + // Cloud deployments (Fly.io) can take 3-5 minutes for cold starts + const pollForReady = async (workspaceId: string) => { + const maxAttempts = 150; // 5 minutes with 2s interval + const pollIntervalMs = 2000; + let attempts = 0; + + while (attempts < maxAttempts) { + const statusRes = await fetch(`/api/workspaces/${workspaceId}/status`, { + credentials: 'include', + }); + const statusData = await statusRes.json(); + + if (statusData.status === 'running') { + // Fetch updated workspace info + const wsRes = await fetch(`/api/workspaces/${workspaceId}`, { + credentials: 'include', + }); + const wsData = await wsRes.json(); + if (wsData.publicUrl) { + // Store workspace and show provider connection screen + setSelectedWorkspace(wsData); + setState('connect-provider'); + return; + } + } else if (statusData.status === 'error') { + const errorMsg = statusData.errorMessage || 'Workspace provisioning failed'; + throw new Error(errorMsg); + } + + await new Promise(resolve => setTimeout(resolve, pollIntervalMs)); + attempts++; + + // Log progress every 30 seconds + if (attempts % 15 === 0) { + console.log(`[workspace] Still provisioning... 
(${Math.floor(attempts * pollIntervalMs / 1000)}s elapsed)`); + } + } + + throw new Error('Workspace provisioning timed out after 5 minutes. Please try again or contact support.'); + }; + + await pollForReady(data.workspaceId); + } catch (err) { + console.error('Create workspace error:', err); + setError(err instanceof Error ? err.message : 'Failed to create workspace'); + setState('no-workspaces'); + } + }, [connectToWorkspace, csrfToken]); + + // Handle connecting an AI provider - simplified with ProviderAuthFlow component + const handleConnectProvider = useCallback((provider: ProviderInfo) => { + if (!selectedWorkspace) return; + setConnectingProvider(provider.id); + }, [selectedWorkspace]); + + // Skip provider connection and continue to workspace + const handleSkipProvider = useCallback(() => { + if (selectedWorkspace) { + setConnectingProvider(null); + connectToWorkspace(selectedWorkspace); + } + }, [selectedWorkspace, connectToWorkspace]); + + // Connect another provider after successful auth + const handleConnectAnother = useCallback(() => { + setConnectingProvider(null); + // Stay on connect-provider screen + }, []); + + const handleStartWorkspace = useCallback(async (workspace: Workspace) => { + setState('loading'); + setError(null); + + try { + const headers: Record = {}; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + + const res = await fetch(`/api/workspaces/${workspace.id}/restart`, { + method: 'POST', + credentials: 'include', + headers, + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to start workspace'); + } + + // Poll for workspace to be ready + const maxAttempts = 60; + let attempts = 0; + + while (attempts < maxAttempts) { + const statusRes = await fetch(`/api/workspaces/${workspace.id}/status`, { + credentials: 'include', + }); + const statusData = await statusRes.json(); + + if (statusData.status === 'running') { + const wsRes = await fetch(`/api/workspaces/${workspace.id}`, { 
+ credentials: 'include', + }); + const wsData = await wsRes.json(); + if (wsData.publicUrl) { + connectToWorkspace({ ...workspace, ...wsData }); + return; + } + } + + await new Promise(resolve => setTimeout(resolve, 2000)); + attempts++; + } + + throw new Error('Workspace start timed out'); + } catch (err) { + console.error('Start workspace error:', err); + setError(err instanceof Error ? err.message : 'Failed to start workspace'); + setState('select-workspace'); + } + }, [connectToWorkspace, csrfToken]); + + // Loading state + if (state === 'loading') { + return ( +
+
+ + + + +

Loading...

+
+
+ ); + } + + // Local mode - just render the App component + if (state === 'local') { + return ; + } + + // Connected to workspace - render App with workspace's WebSocket + // Wrap in CloudSessionProvider so App has access to cloud session context + if (state === 'connected' && wsUrl) { + return ( + + + + ); + } + + // Connecting state + if (state === 'connecting') { + return ( +
+
+ + + + +

Connecting to {selectedWorkspace?.name}...

+

{selectedWorkspace?.publicUrl}

+
+
+ ); + } + + // Error state + if (state === 'error') { + return ( +
+
+
+ + + +
+

Something went wrong

+

{error}

+ +
+
+ ); + } + + // Connect provider state - show after workspace is ready + if (state === 'connect-provider' && selectedWorkspace) { + return ( +
+ {/* Background grid */} +
+
+
+ +
+ {/* Logo */} +
+ +

Connect AI Provider

+

+ Your workspace {selectedWorkspace.name} is ready! +
Connect an AI provider to start using agents. +

+
+ + {/* Provider auth flow - using shared component */} + {connectingProvider && (() => { + // Handle codex-device as codex with device flow + const isDeviceFlow = connectingProvider === 'codex-device'; + const providerId = isDeviceFlow ? 'codex' : connectingProvider; + const provider = AI_PROVIDERS.find(p => p.id === providerId); + if (!provider) return null; + return ( +
+ { + // Show success state briefly, then offer options + setConnectingProvider(null); + // Stay on connect-provider screen to allow connecting more providers + // User can click "Continue to Dashboard" or connect another + }} + onCancel={() => { + setConnectingProvider(null); + }} + onError={() => { + setConnectingProvider(null); + }} + /> + + {/* After success, show options to connect another or continue */} +
+ + +
+
+ ); + })()} + + {/* Provider list */} + {!connectingProvider && ( +
+

Choose an AI Provider

+
+ {AI_PROVIDERS.map((provider) => ( +
+ {/* Special expanded section for Codex with device flow option */} + {provider.id === 'codex' ? ( +
+
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.name}

+
+
+ + {/* Warning about localhost redirect */} +
+

⚠️ Heads up about the login flow

+

+ OpenAI's OAuth redirects to localhost after login, + which will show a "Page not found" or "This site can't be reached" error. + This is expected! You'll need to copy the URL from your browser and paste it back here. +

+
+ + {/* Two auth options */} +
+ + + +
+
+ ) : ( + /* Standard provider button */ + + )} +
+ ))} +
+
+ )} + + {/* Skip button */} +
+ +
+
+
+ ); + } + + // Workspace selection / no workspaces UI + return ( +
+ {/* Background grid */} +
+
+
+ +
+ {/* Logo */} +
+ +

Agent Relay

+

+ {state === 'no-workspaces' ? 'Create a workspace to get started' : 'Select a workspace'} +

+
+ + {error && ( +
+

{error}

+
+ )} + + {/* Workspaces list */} + {state === 'select-workspace' && workspaces.length > 0 && ( +
+

Your Workspaces

+
+ {workspaces.map((workspace) => ( +
+
+
+
+

{workspace.name}

+

+ {workspace.status === 'running' ? 'Running' : + workspace.status === 'provisioning' ? 'Starting...' : + workspace.status === 'stopped' ? 'Stopped' : 'Error'} +

+
+
+
+ {workspace.status === 'running' && workspace.publicUrl ? ( + + ) : workspace.status === 'stopped' ? ( + + ) : workspace.status === 'provisioning' ? ( + Starting... + ) : ( + Failed + )} +
+
+ ))} +
+ + {repos.length > 0 && ( +
+

Or create a new workspace:

+
+ {repos.slice(0, 3).map((repo) => ( + + ))} +
+
+ )} +
+ )} + + {/* No workspaces - create first one */} + {state === 'no-workspaces' && ( +
+

Create Your First Workspace

+

+ Select a repository to create a workspace where agents can work on your code. +

+ + {repos.length > 0 ? ( +
+ {repos.map((repo) => ( + + ))} +
+ ) : ( +
+

No repositories connected yet.

+ + + + + Connect GitHub + +
+ )} +
+ )} + + {/* Navigation */} +
+ + Manage Repositories + + · + +
+
+
+ ); } diff --git a/src/dashboard/app/connect-repos/page.tsx b/src/dashboard/app/connect-repos/page.tsx index ae1179b6..2944bd9d 100644 --- a/src/dashboard/app/connect-repos/page.tsx +++ b/src/dashboard/app/connect-repos/page.tsx @@ -1,19 +1,17 @@ /** * Connect Repos Page - GitHub App OAuth via Nango * - * Allows authenticated users to connect their GitHub repositories - * via the GitHub App OAuth flow (separate from login). + * Key: Initialize Nango on page load, not on click. + * This avoids popup blockers by ensuring openConnectUI is synchronous. */ 'use client'; -import React, { useState, useEffect, useCallback, useRef } from 'react'; -import Nango, { ConnectUI } from '@nangohq/frontend'; -import type { ConnectUIEvent } from '@nangohq/frontend'; -import { cloudApi } from '../../lib/cloudApi'; +import React, { useState, useEffect, useRef } from 'react'; +import Nango from '@nangohq/frontend'; import { LogoIcon } from '../../react-components/Logo'; -type ConnectState = 'checking' | 'idle' | 'loading' | 'connecting' | 'polling' | 'pending-approval' | 'success' | 'error'; +type ConnectState = 'checking' | 'ready' | 'connecting' | 'polling' | 'pending-approval' | 'success' | 'error'; interface ConnectedRepo { id: string; @@ -27,148 +25,198 @@ export default function ConnectReposPage() { const [error, setError] = useState(null); const [repos, setRepos] = useState([]); const [pendingMessage, setPendingMessage] = useState(null); - const pollIntervalRef = useRef(null); - const connectUIRef = useRef(null); + const [statusMessage, setStatusMessage] = useState(''); - // Check session on mount - useEffect(() => { - const checkSession = async () => { - const session = await cloudApi.checkSession(); - if (!session.authenticated) { - // Redirect to login - window.location.href = '/login'; - return; - } - setState('idle'); - }; - checkSession(); - }, []); + // Store Nango instance - initialized on mount + const nangoRef = useRef | null>(null); - // Cleanup on unmount + // Check 
session and initialize Nango on mount useEffect(() => { - return () => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - if (connectUIRef.current) { - connectUIRef.current.close(); + let mounted = true; + + const init = async () => { + try { + // Check if authenticated + const sessionRes = await fetch('/api/auth/session', { credentials: 'include' }); + const session = await sessionRes.json(); + if (!session.authenticated) { + window.location.href = '/login'; + return; + } + + if (!mounted) return; + + // Get Nango session token for repo connection + const nangoRes = await fetch('/api/auth/nango/repo-session', { + credentials: 'include', + }); + const nangoData = await nangoRes.json(); + + if (!mounted) return; + + if (!nangoRes.ok || !nangoData.sessionToken) { + if (nangoData?.sessionExpired || nangoData?.code === 'SESSION_EXPIRED') { + window.location.href = '/login'; + return; + } + setError('Failed to initialize. Please refresh the page.'); + setState('error'); + return; + } + + // Create Nango instance NOW, not on click + nangoRef.current = new Nango({ connectSessionToken: nangoData.sessionToken }); + setState('ready'); + } catch { + if (mounted) { + window.location.href = '/login'; + } } }; + + init(); + return () => { mounted = false; }; }, []); - // Poll for repo sync completion - const startPolling = useCallback((connId: string) => { - setState('polling'); + const checkRepoStatus = async (connectionId: string): Promise<{ + ready: boolean; + pendingApproval?: boolean; + message?: string; + repos?: ConnectedRepo[]; + }> => { + const response = await fetch(`/api/auth/nango/repo-status/${connectionId}`, { + credentials: 'include', + }); + if (!response.ok) { + throw new Error('Status not ready'); + } + return response.json(); + }; - pollIntervalRef.current = setInterval(async () => { - try { - const result = await cloudApi.checkNangoRepoStatus(connId); - if (result.success) { - if (result.data.pendingApproval) { - // Org 
approval pending - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } + const handleAuthSuccess = async (connectionId: string) => { + try { + setState('polling'); + setStatusMessage('Syncing repositories...'); + + const pollStartTime = Date.now(); + const maxPollTime = 5 * 60 * 1000; + const pollInterval = 2000; + + const pollForRepos = async (): Promise => { + const elapsed = Date.now() - pollStartTime; + + if (elapsed > maxPollTime) { + throw new Error('Connection timed out. Please try again.'); + } + + try { + const result = await checkRepoStatus(connectionId); + if (result.pendingApproval) { setState('pending-approval'); - setPendingMessage(result.data.message || 'Waiting for organization admin approval'); - } else if (result.data.ready && result.data.repos) { - // Repos synced successfully - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - setRepos(result.data.repos); + setPendingMessage(result.message || 'Waiting for organization admin approval'); + return; + } else if (result.ready && result.repos) { + setRepos(result.repos); setState('success'); + return; } + + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForRepos(); + } catch { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForRepos(); } - } catch (err) { - console.error('Polling error:', err); - } - }, 2000); + }; - // Timeout after 5 minutes - setTimeout(() => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - pollIntervalRef.current = null; - setState('error'); - setError('Connection timed out. Please try again.'); - } - }, 5 * 60 * 1000); - }, []); + await pollForRepos(); + } catch (err) { + console.error('[AUTH] Error:', err); + setError(err instanceof Error ? 
err.message : 'Connection failed'); + setState('error'); + setStatusMessage(''); + } + }; - // Handle connect button click - const handleConnect = useCallback(async () => { - setState('loading'); + // Use nango.auth() instead of openConnectUI to avoid popup blocker issues + const handleConnect = async () => { + if (!nangoRef.current) { + setError('Not ready. Please refresh the page.'); + return; + } + + setState('connecting'); setError(null); + setStatusMessage('Connecting to GitHub...'); try { - // Create Nango instance and open Connect UI first (shows loading state) - const nango = new Nango(); - - const handleEvent = (event: ConnectUIEvent) => { - if (event.type === 'connect') { - // Connection successful - start polling for repo sync - const connectionId = event.payload.connectionId; - startPolling(connectionId); - if (connectUIRef.current) { - connectUIRef.current.close(); - } - } else if (event.type === 'close') { - // User closed without connecting - setState('idle'); - } else if (event.type === 'error') { - setState('error'); - setError(event.payload.errorMessage || 'Connection failed'); - if (connectUIRef.current) { - connectUIRef.current.close(); - } - } - }; + // Use github-app-oauth for GitHub App installation + const result = await nangoRef.current.auth('github-app-oauth'); + if (result && 'connectionId' in result) { + await handleAuthSuccess(result.connectionId); + } else { + throw new Error('No connection ID returned'); + } + } catch (err: unknown) { + const error = err as Error & { type?: string }; + console.error('GitHub App auth error:', error); - // Open Connect UI (shows loading until token is set) - connectUIRef.current = nango.openConnectUI({ - onEvent: handleEvent, - }); - connectUIRef.current.open(); - setState('connecting'); - - // Get repo session token from backend and set it - const sessionResult = await cloudApi.getNangoRepoSession(); - if (!sessionResult.success) { - if (connectUIRef.current) { - connectUIRef.current.close(); - } - if 
(sessionResult.sessionExpired) { - window.location.href = '/login'; - return; - } - throw new Error(sessionResult.error || 'Failed to create session'); + // Don't show error for user-cancelled auth + if (error.type === 'user_cancelled' || error.message?.includes('closed')) { + setStatusMessage(''); + // Re-initialize for next attempt + fetch('/api/auth/nango/repo-session', { credentials: 'include' }) + .then(res => res.json()) + .then(data => { + if (data.sessionToken) { + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setState('ready'); + } + }); + return; } - // Set the session token - this enables the Connect UI - connectUIRef.current.setSessionToken(sessionResult.data.sessionToken); - } catch (err) { - console.error('Connect error:', err); + setError(error.message || 'Connection failed'); setState('error'); - setError(err instanceof Error ? err.message : 'Failed to connect'); + setStatusMessage(''); } - }, [startPolling]); + }; - // Handle retry - const handleRetry = useCallback(() => { - setState('idle'); + const handleRetry = async () => { setError(null); setRepos([]); setPendingMessage(null); - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); + setStatusMessage(''); + setState('checking'); + + // Re-initialize Nango for the retry + try { + const nangoRes = await fetch('/api/auth/nango/repo-session', { + credentials: 'include', + }); + const nangoData = await nangoRes.json(); + + if (!nangoRes.ok || !nangoData.sessionToken) { + if (nangoData?.sessionExpired || nangoData?.code === 'SESSION_EXPIRED') { + window.location.href = '/login'; + return; + } + setError('Failed to initialize. Please refresh the page.'); + setState('error'); + return; + } + + nangoRef.current = new Nango({ connectSessionToken: nangoData.sessionToken }); + setState('ready'); + } catch { + setError('Failed to initialize. 
Please refresh the page.'); + setState('error'); } - }, []); + }; - // Continue to dashboard - const handleContinue = useCallback(() => { + const handleContinue = () => { window.location.href = '/app'; - }, []); + }; if (state === 'checking') { return ( @@ -178,12 +226,15 @@ export default function ConnectReposPage() { -

Checking session...

+

Loading...

); } + const isConnecting = state === 'connecting' || state === 'polling'; + const isReady = state === 'ready'; + return (
{/* Background grid */} @@ -220,13 +271,9 @@ export default function ConnectReposPage() {

Repositories Connected!

- {/* Repo list */}
{repos.map((repo) => ( -
+
@@ -256,19 +303,12 @@ export default function ConnectReposPage() {

{pendingMessage}

An organization admin needs to approve the GitHub App installation. - You'll be able to connect once approved.

- -
@@ -282,10 +322,7 @@ export default function ConnectReposPage() {

Connection Failed

{error}

-
@@ -298,10 +335,16 @@ export default function ConnectReposPage() {

Syncing Repositories

-

Fetching your repositories...

+

{statusMessage || 'Fetching your repositories...'}

) : (
+ {error && ( +
+

{error}

+
+ )} +

What this enables:

    @@ -328,16 +371,16 @@ export default function ConnectReposPage() { -
)}
- {/* Back link */}
Back to dashboard diff --git a/src/dashboard/app/login/page.tsx b/src/dashboard/app/login/page.tsx index 51236f5c..40c1075b 100644 --- a/src/dashboard/app/login/page.tsx +++ b/src/dashboard/app/login/page.tsx @@ -1,135 +1,154 @@ /** * Login Page - GitHub OAuth via Nango * - * Uses Nango Connect UI for GitHub authentication with polling - * to detect when login completes. + * Key: Initialize Nango on page load, not on click. + * This avoids popup blockers by ensuring openConnectUI is synchronous. + * See: https://arveknudsen.com/posts/avoiding-popup-blocking-when-authing-with-google/ */ 'use client'; -import React, { useState, useEffect, useCallback, useRef } from 'react'; -import Nango, { ConnectUI } from '@nangohq/frontend'; -import type { ConnectUIEvent } from '@nangohq/frontend'; -import { cloudApi } from '../../lib/cloudApi'; +import React, { useState, useEffect, useRef } from 'react'; +import Nango from '@nangohq/frontend'; import { LogoIcon } from '../../react-components/Logo'; -type LoginState = 'idle' | 'loading' | 'connecting' | 'polling' | 'success' | 'error'; - export default function LoginPage() { - const [state, setState] = useState('idle'); - const [error, setError] = useState(null); - const pollIntervalRef = useRef(null); - const connectUIRef = useRef(null); + const [isReady, setIsReady] = useState(false); + const [isAuthenticating, setIsAuthenticating] = useState(false); + const [authStatus, setAuthStatus] = useState(''); + const [error, setError] = useState(''); - // Cleanup on unmount - useEffect(() => { - return () => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - if (connectUIRef.current) { - connectUIRef.current.close(); - } - }; - }, []); + // Store Nango instance and session token - initialized on mount + const nangoRef = useRef | null>(null); - // Poll for login completion - const startPolling = useCallback((connId: string) => { - setState('polling'); + // Initialize Nango with session token on page load + 
useEffect(() => { + let mounted = true; - // Poll every 1 second - pollIntervalRef.current = setInterval(async () => { + const init = async () => { try { - const result = await cloudApi.checkNangoLoginStatus(connId); - if (result.success && result.data.ready) { - // Login complete - stop polling and redirect - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - setState('success'); - // Redirect to dashboard after a brief success message - setTimeout(() => { - window.location.href = '/app'; - }, 1000); + const response = await fetch('/api/auth/nango/login-session', { + credentials: 'include', + }); + const data = await response.json(); + + if (!mounted) return; + + if (!response.ok || !data.sessionToken) { + setError('Failed to initialize. Please refresh the page.'); + return; } + + // Create Nango instance NOW, not on click + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); } catch (err) { - console.error('Polling error:', err); - } - }, 1000); - - // Timeout after 5 minutes - setTimeout(() => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - pollIntervalRef.current = null; - setState('error'); - setError('Login timed out. Please try again.'); + if (mounted) { + console.error('Init error:', err); + setError('Failed to initialize. 
Please refresh the page.'); + } } - }, 5 * 60 * 1000); + }; + + init(); + return () => { mounted = false; }; }, []); - // Handle login button click - const handleLogin = useCallback(async () => { - setState('loading'); - setError(null); + const checkAuthStatus = async (connectionId: string): Promise<{ ready: boolean; hasRepos?: boolean }> => { + const response = await fetch(`/api/auth/nango/login-status/${connectionId}`, { + credentials: 'include', + }); + if (!response.ok) { + throw new Error('Auth status not ready'); + } + return response.json(); + }; + const handleAuthSuccess = async (connectionId: string) => { try { - // Create Nango instance and open Connect UI first (shows loading state) - const nango = new Nango(); - - const handleEvent = (event: ConnectUIEvent) => { - if (event.type === 'connect') { - // Connection successful - start polling - const connectionId = event.payload.connectionId; - startPolling(connectionId); - if (connectUIRef.current) { - connectUIRef.current.close(); - } - } else if (event.type === 'close') { - // User closed without connecting - setState('idle'); - } else if (event.type === 'error') { - setState('error'); - setError(event.payload.errorMessage || 'Connection failed'); - if (connectUIRef.current) { - connectUIRef.current.close(); - } + setAuthStatus('Completing authentication...'); + + const pollStartTime = Date.now(); + const maxPollTime = 30000; + const pollInterval = 1000; + + const pollForAuth = async (): Promise => { + const elapsed = Date.now() - pollStartTime; + + if (elapsed > maxPollTime) { + throw new Error('Authentication timed out. 
Please try again.'); } - }; - // Open Connect UI (shows loading until token is set) - connectUIRef.current = nango.openConnectUI({ - onEvent: handleEvent, - }); - connectUIRef.current.open(); - setState('connecting'); - - // Get session token from backend and set it - const sessionResult = await cloudApi.getNangoLoginSession(); - if (!sessionResult.success) { - if (connectUIRef.current) { - connectUIRef.current.close(); + try { + const result = await checkAuthStatus(connectionId); + if (result && result.ready) { + // Redirect to connect-repos if no repos, otherwise to app + window.location.href = result.hasRepos ? '/app' : '/connect-repos'; + return; + } + + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); + } catch { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); } - throw new Error(sessionResult.error || 'Failed to create login session'); - } + }; - // Set the session token - this enables the Connect UI - connectUIRef.current.setSessionToken(sessionResult.data.sessionToken); + await pollForAuth(); } catch (err) { - console.error('Login error:', err); - setState('error'); - setError(err instanceof Error ? err.message : 'Login failed'); + console.error('[AUTH] Authentication error:', err); + setError(err instanceof Error ? err.message : 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); } - }, [startPolling]); - - // Retry login - const handleRetry = useCallback(() => { - setState('idle'); - setError(null); - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); + }; + + // Use nango.auth() instead of openConnectUI to avoid popup blocker issues + const handleGitHubAuth = async () => { + if (!nangoRef.current) { + setError('Not ready. 
Please refresh the page.'); + return; } - }, []); + + setIsAuthenticating(true); + setError(''); + setAuthStatus('Connecting to GitHub...'); + + try { + const result = await nangoRef.current.auth('github'); + if (result && 'connectionId' in result) { + await handleAuthSuccess(result.connectionId); + } else { + throw new Error('No connection ID returned'); + } + } catch (err: unknown) { + const error = err as Error & { type?: string }; + console.error('GitHub auth error:', error); + + // Don't show error for user-cancelled auth + if (error.type === 'user_cancelled' || error.message?.includes('closed')) { + setIsAuthenticating(false); + setAuthStatus(''); + // Re-initialize for next attempt + fetch('/api/auth/nango/login-session', { credentials: 'include' }) + .then(res => res.json()) + .then(data => { + if (data.sessionToken) { + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); + } + }); + return; + } + + setError(error.message || 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); + } + }; + + const isLoading = !isReady || isAuthenticating; return (
@@ -156,81 +175,67 @@ export default function LoginPage() { {/* Login Card */}
- {state === 'success' ? ( -
-
- - - -
-

Welcome!

-

Redirecting to dashboard...

-
- ) : state === 'error' ? ( -
- {/* Back to home */} + {/* Sign up link */} + + {/* Back to home */} + diff --git a/src/dashboard/app/metrics/page.tsx b/src/dashboard/app/metrics/page.tsx index c3016699..d187f741 100644 --- a/src/dashboard/app/metrics/page.tsx +++ b/src/dashboard/app/metrics/page.tsx @@ -9,6 +9,7 @@ import React, { useState, useEffect } from 'react'; import Link from 'next/link'; +import { getApiUrl, initializeWorkspaceId } from '../../lib/api'; interface AgentMetric { name: string; @@ -112,13 +113,39 @@ export default function MetricsPage() { const [memoryMetrics, setMemoryMetrics] = useState(null); const [error, setError] = useState(null); const [loading, setLoading] = useState(true); + const [_isCloudMode, setIsCloudMode] = useState(false); useEffect(() => { + // Initialize workspace ID from localStorage for cloud mode + const workspaceId = initializeWorkspaceId(); + + // Check if we're in cloud mode by checking for session endpoint + const checkCloudMode = async () => { + try { + const res = await fetch('/api/auth/session', { credentials: 'include' }); + if (res.status !== 404) { + setIsCloudMode(true); + // In cloud mode without workspace, redirect to app to select one + if (!workspaceId) { + window.location.href = '/app'; + return false; + } + } + return true; + } catch { + return true; // Network error = local mode + } + }; + const fetchMetrics = async () => { try { + // Check cloud mode first + const shouldContinue = await checkCloudMode(); + if (!shouldContinue) return; + const [metricsRes, memoryRes] = await Promise.all([ - fetch('/api/metrics'), - fetch('/api/metrics/agents'), + fetch(getApiUrl('/api/metrics'), { credentials: 'include' }), + fetch(getApiUrl('/api/metrics/agents'), { credentials: 'include' }), ]); if (!metricsRes.ok) throw new Error('Failed to fetch metrics'); diff --git a/src/dashboard/app/providers/page.tsx b/src/dashboard/app/providers/page.tsx new file mode 100644 index 00000000..442ce91b --- /dev/null +++ b/src/dashboard/app/providers/page.tsx @@ 
-0,0 +1,220 @@ +/** + * Providers Page + * + * Connect AI providers (Anthropic, OpenAI, etc.) to enable workspace creation. + */ + +'use client'; + +import React, { useState, useEffect } from 'react'; +import { LogoIcon } from '../../react-components/Logo'; + +interface Provider { + id: string; + name: string; + displayName: string; + description: string; + color: string; + isConnected: boolean; + connectedAs?: string; +} + +export default function ProvidersPage() { + const [providers, setProviders] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [connectingProvider, setConnectingProvider] = useState(null); + const [apiKey, setApiKey] = useState(''); + const [csrfToken, setCsrfToken] = useState(null); + + useEffect(() => { + const fetchProviders = async () => { + try { + const res = await fetch('/api/providers', { credentials: 'include' }); + + // Capture CSRF token + const token = res.headers.get('X-CSRF-Token'); + if (token) setCsrfToken(token); + + if (!res.ok) { + if (res.status === 401) { + window.location.href = '/login'; + return; + } + throw new Error('Failed to fetch providers'); + } + + const data = await res.json(); + setProviders(data.providers || []); + } catch (err) { + setError(err instanceof Error ? 
err.message : 'Failed to load providers'); + } finally { + setLoading(false); + } + }; + + fetchProviders(); + }, []); + + const handleConnect = async (providerId: string) => { + if (!apiKey.trim()) { + setError('Please enter an API key'); + return; + } + + setError(null); + setConnectingProvider(providerId); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/providers/${providerId}/api-key`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ apiKey: apiKey.trim() }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to connect provider'); + } + + // Update provider state + setProviders(prev => + prev.map(p => (p.id === providerId ? { ...p, isConnected: true } : p)) + ); + setApiKey(''); + setConnectingProvider(null); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to connect'); + setConnectingProvider(null); + } + }; + + const hasConnectedProvider = providers.some(p => p.isConnected && p.id !== 'github'); + + if (loading) { + return ( +
+
+ + + + +

Loading providers...

+
+
+ ); + } + + return ( +
+ {/* Background grid */} +
+
+
+ +
+ {/* Logo */} +
+ +

Connect AI Providers

+

+ Add your API keys to enable AI-powered coding assistants in your workspace. +

+
+ + {error && ( +
+

{error}

+
+ )} + + {/* Providers list */} +
+ {providers + .filter(p => p.id !== 'github') // Don't show GitHub here + .map(provider => ( +
+
+
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.description}

+
+
+ {provider.isConnected && ( + + Connected + + )} +
+ + {!provider.isConnected && ( +
+ { + setConnectingProvider(provider.id); + setApiKey(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 px-4 py-2 bg-bg-deep border border-border-subtle rounded-lg text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan/50" + /> + +
+ )} +
+ ))} +
+ + {/* Continue button */} +
+ {hasConnectedProvider ? ( + + Continue to Dashboard + + ) : ( +

+ Connect at least one AI provider to continue +

+ )} + + + Skip for now + +
+
+
+ ); +} diff --git a/src/dashboard/app/signup/page.tsx b/src/dashboard/app/signup/page.tsx new file mode 100644 index 00000000..92876ee3 --- /dev/null +++ b/src/dashboard/app/signup/page.tsx @@ -0,0 +1,343 @@ +/** + * Signup Page - GitHub OAuth via Nango + * + * Key: Initialize Nango on page load, not on click. + * This avoids popup blockers by ensuring openConnectUI is synchronous. + */ + +'use client'; + +import React, { useState, useEffect, useRef } from 'react'; +import Nango from '@nangohq/frontend'; +import { LogoIcon } from '../../react-components/Logo'; + +export default function SignupPage() { + const [isReady, setIsReady] = useState(false); + const [isAuthenticating, setIsAuthenticating] = useState(false); + const [authStatus, setAuthStatus] = useState(''); + const [error, setError] = useState(''); + const [redirectTarget, setRedirectTarget] = useState('/app'); + const [showSuccess, setShowSuccess] = useState(false); + + // Store Nango instance - initialized on mount + const nangoRef = useRef | null>(null); + + // Initialize Nango with session token on page load + useEffect(() => { + let mounted = true; + + const init = async () => { + // Check if already logged in + try { + const sessionRes = await fetch('/api/auth/session', { credentials: 'include' }); + const session = await sessionRes.json(); + if (session.authenticated) { + await handlePostAuthRedirect(); + return; + } + } catch { + // Not logged in, continue + } + + // Get Nango session token + try { + const response = await fetch('/api/auth/nango/login-session', { + credentials: 'include', + }); + const data = await response.json(); + + if (!mounted) return; + + if (!response.ok || !data.sessionToken) { + setError('Failed to initialize. 
Please refresh the page.'); + return; + } + + // Create Nango instance NOW, not on click + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); + } catch (err) { + if (mounted) { + console.error('Init error:', err); + setError('Failed to initialize. Please refresh the page.'); + } + } + }; + + init(); + return () => { mounted = false; }; + }, []); + + const handlePostAuthRedirect = async () => { + setAuthStatus('Setting up your account...'); + + try { + const response = await fetch('/api/github-app/repos', { credentials: 'include' }); + const data = await response.json(); + + if (data.repositories && data.repositories.length > 0) { + setRedirectTarget('/app'); + } else { + setRedirectTarget('/connect-repos'); + } + + setShowSuccess(true); + + setTimeout(() => { + window.location.href = data.repositories && data.repositories.length > 0 + ? '/app' + : '/connect-repos'; + }, 1500); + } catch (err) { + console.error('Error checking repos:', err); + setRedirectTarget('/connect-repos'); + setShowSuccess(true); + setTimeout(() => { + window.location.href = '/connect-repos'; + }, 1500); + } + }; + + const checkAuthStatus = async (connectionId: string): Promise<{ ready: boolean }> => { + const response = await fetch(`/api/auth/nango/login-status/${connectionId}`, { + credentials: 'include', + }); + if (!response.ok) { + throw new Error('Auth status not ready'); + } + return response.json(); + }; + + const handleAuthSuccess = async (connectionId: string) => { + try { + setAuthStatus('Completing authentication...'); + + const pollStartTime = Date.now(); + const maxPollTime = 30000; + const pollInterval = 1000; + + const pollForAuth = async (): Promise => { + const elapsed = Date.now() - pollStartTime; + + if (elapsed > maxPollTime) { + throw new Error('Authentication timed out. 
Please try again.'); + } + + try { + const result = await checkAuthStatus(connectionId); + if (result && result.ready) { + await handlePostAuthRedirect(); + return; + } + + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); + } catch { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); + } + }; + + await pollForAuth(); + } catch (err) { + console.error('[AUTH] Authentication error:', err); + setError(err instanceof Error ? err.message : 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); + } + }; + + // Use nango.auth() instead of openConnectUI to avoid popup blocker issues + const handleGitHubAuth = async () => { + if (!nangoRef.current) { + setError('Not ready. Please refresh the page.'); + return; + } + + setIsAuthenticating(true); + setError(''); + setAuthStatus('Connecting to GitHub...'); + + try { + const result = await nangoRef.current.auth('github'); + if (result && 'connectionId' in result) { + await handleAuthSuccess(result.connectionId); + } else { + throw new Error('No connection ID returned'); + } + } catch (err: unknown) { + const error = err as Error & { type?: string }; + console.error('GitHub auth error:', error); + + // Don't show error for user-cancelled auth + if (error.type === 'user_cancelled' || error.message?.includes('closed')) { + setIsAuthenticating(false); + setAuthStatus(''); + // Re-initialize for next attempt + fetch('/api/auth/nango/login-session', { credentials: 'include' }) + .then(res => res.json()) + .then(data => { + if (data.sessionToken) { + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); + } + }); + return; + } + + setError(error.message || 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); + } + }; + + const isLoading = !isReady || isAuthenticating; + + return ( +
+ {/* Background grid */} +
+
+
+ + {/* Glow orbs */} +
+
+
+
+ + {/* Content */} +
+ {/* Logo */} +
+ +

Get Started

+

+ Create your account and start orchestrating AI agents +

+
+ + {/* Signup Card */} +
+ {showSuccess ? ( +
+
+ + + +
+

Welcome to Agent Relay!

+

+ {redirectTarget === '/connect-repos' + ? "Let's connect your repositories..." + : 'Redirecting to dashboard...'} +

+
+ ) : isAuthenticating ? ( +
+
+ + + + +
+

Creating Account

+

{authStatus || 'Connecting to GitHub...'}

+
+ ) : ( +
+ {error && ( +
+

{error}

+
+ )} + + {/* Features list */} +
+
+
+ + + +
+ Deploy AI agents in seconds +
+
+
+ + + +
+ Real-time agent collaboration +
+
+
+ + + +
+ Secure credential management +
+
+ + + +

+ By signing up, you agree to our{' '} + Terms of Service + {' '}and{' '} + Privacy Policy +

+
+ )} +
+ + {/* Already have account */} +
+

+ Already have an account?{' '} + + Sign in + +

+
+ + {/* Back to home */} + +
+
+ ); +} diff --git a/src/dashboard/landing/LandingPage.tsx b/src/dashboard/landing/LandingPage.tsx index d6e35c6c..bee8ffc3 100644 --- a/src/dashboard/landing/LandingPage.tsx +++ b/src/dashboard/landing/LandingPage.tsx @@ -11,9 +11,11 @@ import { Logo, LogoIcon, LogoHero } from '../react-components/Logo'; // Agent providers with their signature colors const PROVIDERS = { - claude: { name: 'Claude', color: '#00D9FF', icon: '◈' }, - codex: { name: 'Codex', color: '#FF6B35', icon: '⬡' }, - gemini: { name: 'Gemini', color: '#00FFC8', icon: '◇' }, + claude: { name: 'Claude', color: '#D97757', icon: '◈' }, + codex: { name: 'Codex', color: '#10A37F', icon: '⬡' }, + gemini: { name: 'Gemini', color: '#4285F4', icon: '◇' }, + opencode: { name: 'OpenCode', color: '#00D4AA', icon: '◆' }, + droid: { name: 'Droid', color: '#6366F1', icon: '⬢' }, }; // Simulated agent messages for the live demo @@ -466,11 +468,6 @@ function ProvidersSection() {
Supported
))} -
-
-
More Coming
-
2025
-
); diff --git a/src/dashboard/landing/styles.css b/src/dashboard/landing/styles.css index 668603ac..8eed7a85 100644 --- a/src/dashboard/landing/styles.css +++ b/src/dashboard/landing/styles.css @@ -477,6 +477,7 @@ animation: fadeIn 1s ease forwards; animation-delay: 0.6s; opacity: 0; + overflow: hidden; } @keyframes fadeIn { @@ -489,6 +490,7 @@ aspect-ratio: 1; max-width: 500px; margin: 0 auto; + overflow: hidden; } .network-lines { @@ -569,6 +571,10 @@ color: var(--text-secondary); text-transform: uppercase; letter-spacing: 0.5px; + white-space: nowrap; + text-overflow: ellipsis; + max-width: 80px; + overflow: hidden; } .data-packet { @@ -1491,6 +1497,30 @@ section { } } +/* ============================================ + RESPONSIVE - Small Tablets Edge Case (850px) + Fixes agent network display at intermediate widths + ============================================ */ +@media (max-width: 850px) { + .hero-visual { + max-width: 320px; + } + + .agent-network { + max-width: 320px; + } + + .agent-icon { + width: 50px; + height: 50px; + font-size: 20px; + } + + .agent-label { + font-size: 11px; + } +} + /* ============================================ RESPONSIVE - Tablets (768px) ============================================ */ diff --git a/src/dashboard/lib/api.ts b/src/dashboard/lib/api.ts index f23ac4ad..6267c4a6 100644 --- a/src/dashboard/lib/api.ts +++ b/src/dashboard/lib/api.ts @@ -21,6 +21,131 @@ import type { // API base URL - relative in browser, can be configured for SSR const API_BASE = ''; +// Storage key for workspace ID persistence +const WORKSPACE_ID_KEY = 'agentrelay_workspace_id'; + +// Workspace ID for cloud mode proxying +let activeWorkspaceId: string | null = null; + +// CSRF token for cloud mode requests +let csrfToken: string | null = null; + +/** + * Set the CSRF token for API requests + */ +export function setCsrfToken(token: string | null): void { + csrfToken = token; +} + +/** + * Get the current CSRF token + */ +export function getCsrfToken(): 
string | null { + return csrfToken; +} + +/** + * Capture CSRF token from response headers + */ +function captureCsrfToken(response: Response): void { + const token = response.headers.get('X-CSRF-Token'); + if (token) { + csrfToken = token; + } +} + +/** + * Set the active workspace ID for API proxying in cloud mode. + * Also persists to localStorage so other pages can access it. + */ +export function setActiveWorkspaceId(workspaceId: string | null): void { + activeWorkspaceId = workspaceId; + // Persist to localStorage for cross-page access + if (typeof window !== 'undefined') { + if (workspaceId) { + localStorage.setItem(WORKSPACE_ID_KEY, workspaceId); + } else { + localStorage.removeItem(WORKSPACE_ID_KEY); + } + } +} + +/** + * Get the active workspace ID + */ +export function getActiveWorkspaceId(): string | null { + return activeWorkspaceId; +} + +/** + * Initialize workspace ID from localStorage if not already set. + * Call this on pages that need workspace context but aren't in the main app flow. + */ +export function initializeWorkspaceId(): string | null { + if (activeWorkspaceId) { + return activeWorkspaceId; + } + if (typeof window !== 'undefined') { + const stored = localStorage.getItem(WORKSPACE_ID_KEY); + if (stored) { + activeWorkspaceId = stored; + return stored; + } + } + return null; +} + +/** + * Get the API URL, accounting for cloud mode proxying + * @param path - API path like '/api/spawn' or '/api/send' + */ +export function getApiUrl(path: string): string { + if (activeWorkspaceId) { + // In cloud mode, proxy through the cloud server + // Strip /api/ prefix since the proxy endpoint adds it back + const proxyPath = path.startsWith('/api/') ? path.substring(5) : path.replace(/^\//, ''); + return `/api/workspaces/${activeWorkspaceId}/proxy/${proxyPath}`; + } + return `${API_BASE}${path}`; +} + +/** + * Wrapper for fetch that handles CSRF tokens and credentials + * All requests include credentials and capture CSRF tokens from responses. 
+ * Non-GET requests include the CSRF token in headers. + */ +async function apiFetch( + url: string, + options: RequestInit = {} +): Promise { + const method = options.method?.toUpperCase() || 'GET'; + const headers: Record = { + ...(options.headers as Record), + }; + + // Add CSRF token for state-changing requests + if (method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS') { + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + // Ensure Content-Type is set for requests with body + if (options.body && !headers['Content-Type']) { + headers['Content-Type'] = 'application/json'; + } + } + + const response = await fetch(url, { + ...options, + headers, + credentials: 'include', + }); + + // Always capture CSRF token from response + captureCsrfToken(response); + + return response; +} + /** * Dashboard data received from WebSocket */ @@ -174,9 +299,8 @@ export const api = { */ async sendMessage(request: SendMessageRequest): Promise> { try { - const response = await fetch(`${API_BASE}/api/send`, { + const response = await apiFetch(getApiUrl('/api/send'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -220,9 +344,8 @@ export const api = { data = file.data; } - const response = await fetch(`${API_BASE}/api/upload`, { + const response = await apiFetch(getApiUrl('/api/upload'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ filename, mimeType, data }), }); @@ -247,9 +370,8 @@ export const api = { */ async spawnAgent(request: SpawnAgentRequest): Promise { try { - const response = await fetch(`${API_BASE}/api/spawn`, { + const response = await apiFetch(getApiUrl('/api/spawn'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -264,7 +386,7 @@ export const api = { */ async getSpawnedAgents(): Promise }>> { try { - const response = await fetch(`${API_BASE}/api/spawned`); + const response = await 
apiFetch(getApiUrl('/api/spawned')); const result = await response.json() as { success?: boolean; agents?: Array<{ name: string; cli: string; startedAt: string }>; error?: string }; if (response.ok && result.success) { @@ -282,7 +404,7 @@ export const api = { */ async releaseAgent(name: string): Promise> { try { - const response = await fetch(`${API_BASE}/api/spawned/${encodeURIComponent(name)}`, { + const response = await apiFetch(getApiUrl(`/api/spawned/${encodeURIComponent(name)}`), { method: 'DELETE', }); @@ -303,7 +425,7 @@ export const api = { */ async getData(): Promise> { try { - const response = await fetch(`${API_BASE}/api/data`); + const response = await apiFetch(getApiUrl('/api/data')); const data = await response.json() as DashboardData; if (response.ok) { @@ -321,7 +443,7 @@ export const api = { */ async getBridgeData(): Promise> { try { - const response = await fetch(`${API_BASE}/api/bridge`); + const response = await apiFetch(getApiUrl('/api/bridge')); const data = await response.json() as FleetData; if (response.ok) { @@ -339,7 +461,7 @@ export const api = { */ async getMetrics(): Promise> { try { - const response = await fetch(`${API_BASE}/api/metrics`); + const response = await apiFetch(getApiUrl('/api/metrics')); const data = await response.json(); if (response.ok) { @@ -368,7 +490,7 @@ export const api = { if (params?.since) query.set('since', String(params.since)); if (params?.limit) query.set('limit', String(params.limit)); - const response = await fetch(`${API_BASE}/api/history/sessions?${query}`); + const response = await apiFetch(getApiUrl(`/api/history/sessions?${query}`)); const data = await response.json(); if (response.ok) { @@ -403,7 +525,7 @@ export const api = { if (params?.order) query.set('order', params.order); if (params?.search) query.set('search', params.search); - const response = await fetch(`${API_BASE}/api/history/messages?${query}`); + const response = await apiFetch(getApiUrl(`/api/history/messages?${query}`)); const 
data = await response.json(); if (response.ok) { @@ -421,7 +543,7 @@ export const api = { */ async getHistoryConversations(): Promise> { try { - const response = await fetch(`${API_BASE}/api/history/conversations`); + const response = await apiFetch(getApiUrl('/api/history/conversations')); const data = await response.json(); if (response.ok) { @@ -439,7 +561,7 @@ export const api = { */ async getHistoryMessage(id: string): Promise> { try { - const response = await fetch(`${API_BASE}/api/history/message/${encodeURIComponent(id)}`); + const response = await apiFetch(getApiUrl(`/api/history/message/${encodeURIComponent(id)}`)); const data = await response.json(); if (response.ok) { @@ -457,7 +579,7 @@ export const api = { */ async getHistoryStats(): Promise> { try { - const response = await fetch(`${API_BASE}/api/history/stats`); + const response = await apiFetch(getApiUrl('/api/history/stats')); const data = await response.json(); if (response.ok) { @@ -484,7 +606,7 @@ export const api = { if (params?.query) queryParams.set('q', params.query); if (params?.limit) queryParams.set('limit', String(params.limit)); - const response = await fetch(`${API_BASE}/api/files?${queryParams}`); + const response = await apiFetch(getApiUrl(`/api/files?${queryParams}`)); const data = await response.json(); if (response.ok) { @@ -504,7 +626,7 @@ export const api = { */ async getDecisions(): Promise> { try { - const response = await fetch(`${API_BASE}/api/decisions`); + const response = await apiFetch(getApiUrl('/api/decisions')); const data = await response.json(); if (response.ok && data.success) { @@ -522,9 +644,8 @@ export const api = { */ async approveDecision(id: string, optionId?: string, response?: string): Promise> { try { - const res = await fetch(`${API_BASE}/api/decisions/${encodeURIComponent(id)}/approve`, { + const res = await apiFetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/approve`), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: 
JSON.stringify({ optionId, response }), }); @@ -545,9 +666,8 @@ export const api = { */ async rejectDecision(id: string, reason?: string): Promise> { try { - const res = await fetch(`${API_BASE}/api/decisions/${encodeURIComponent(id)}/reject`, { + const res = await apiFetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/reject`), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ reason }), }); @@ -568,7 +688,7 @@ export const api = { */ async dismissDecision(id: string): Promise> { try { - const res = await fetch(`${API_BASE}/api/decisions/${encodeURIComponent(id)}`, { + const res = await apiFetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}`), { method: 'DELETE', }); @@ -591,7 +711,7 @@ export const api = { */ async getFleetServers(): Promise> { try { - const response = await fetch(`${API_BASE}/api/fleet/servers`); + const response = await apiFetch(getApiUrl('/api/fleet/servers')); const data = await response.json(); if (response.ok && data.success) { @@ -609,7 +729,7 @@ export const api = { */ async getFleetStats(): Promise> { try { - const response = await fetch(`${API_BASE}/api/fleet/stats`); + const response = await apiFetch(getApiUrl('/api/fleet/stats')); const data = await response.json(); if (response.ok && data.success) { @@ -636,7 +756,7 @@ export const api = { if (params?.status) queryParams.set('status', params.status); if (params?.agent) queryParams.set('agent', params.agent); - const response = await fetch(`${API_BASE}/api/tasks?${queryParams}`); + const response = await apiFetch(getApiUrl(`/api/tasks?${queryParams}`)); const data = await response.json(); if (response.ok && data.success) { @@ -659,9 +779,8 @@ export const api = { priority: 'low' | 'medium' | 'high' | 'critical'; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/tasks`, { + const response = await apiFetch(getApiUrl('/api/tasks'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: 
JSON.stringify(request), }); @@ -685,9 +804,8 @@ export const api = { result?: string; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/tasks/${encodeURIComponent(id)}`, { + const response = await apiFetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { method: 'PATCH', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(updates), }); @@ -708,7 +826,7 @@ export const api = { */ async cancelTask(id: string): Promise> { try { - const response = await fetch(`${API_BASE}/api/tasks/${encodeURIComponent(id)}`, { + const response = await apiFetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { method: 'DELETE', }); @@ -737,9 +855,8 @@ export const api = { description?: string; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/beads`, { + const response = await apiFetch(getApiUrl('/api/beads'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -764,9 +881,8 @@ export const api = { thread?: string; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/relay/send`, { + const response = await apiFetch(getApiUrl('/api/relay/send'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); diff --git a/src/dashboard/lib/cloudApi.ts b/src/dashboard/lib/cloudApi.ts index 3a327968..7be869e4 100644 --- a/src/dashboard/lib/cloudApi.ts +++ b/src/dashboard/lib/cloudApi.ts @@ -5,6 +5,8 @@ * Includes automatic session expiration detection and handling. 
*/ +import { setCsrfToken as setApiCsrfToken } from './api'; + // Session error codes from the backend export type SessionErrorCode = 'SESSION_EXPIRED' | 'USER_NOT_FOUND' | 'SESSION_ERROR'; @@ -47,6 +49,29 @@ export type SessionExpiredCallback = (error: SessionError) => void; // Global session expiration listeners const sessionExpiredListeners = new Set(); +// Global CSRF token storage +let csrfToken: string | null = null; + +/** + * Get the current CSRF token + */ +export function getCsrfToken(): string | null { + return csrfToken; +} + +/** + * Capture CSRF token from response headers + * Also syncs with the api.ts library for dashboard requests + */ +function captureCsrfToken(response: Response): void { + const token = response.headers.get('X-CSRF-Token'); + if (token) { + csrfToken = token; + // Sync with api.ts for dashboard-to-workspace requests + setApiCsrfToken(token); + } +} + /** * Register a callback for when session expires */ @@ -90,15 +115,26 @@ async function cloudFetch( options: RequestInit = {} ): Promise<{ success: true; data: T } | { success: false; error: string; sessionExpired?: boolean }> { try { + // Build headers, including CSRF token for non-GET requests + const headers: Record = { + 'Content-Type': 'application/json', + ...(options.headers as Record), + }; + + // Include CSRF token for state-changing requests + if (options.method && options.method !== 'GET' && csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + const response = await fetch(endpoint, { ...options, credentials: 'include', // Include cookies for session - headers: { - 'Content-Type': 'application/json', - ...options.headers, - }, + headers, }); + // Capture CSRF token from response + captureCsrfToken(response); + const data = await response.json(); if (isSessionError(response, data)) { @@ -175,6 +211,8 @@ export const cloudApi = { const response = await fetch('/api/auth/nango/login-session', { credentials: 'include', }); + // Capture CSRF token from response + 
captureCsrfToken(response); const data = await response.json(); if (!response.ok) { return { success: false, error: data.error || 'Failed to create login session' }; @@ -193,6 +231,8 @@ export const cloudApi = { const response = await fetch(`/api/auth/nango/login-status/${encodeURIComponent(connectionId)}`, { credentials: 'include', }); + // Capture CSRF token from response + captureCsrfToken(response); const data = await response.json(); if (!response.ok) { return { success: false, error: data.error || 'Failed to check login status' }; @@ -225,6 +265,8 @@ export const cloudApi = { const response = await fetch('/api/auth/session', { credentials: 'include', }); + // Capture CSRF token from response + captureCsrfToken(response); const data = await response.json(); return data as SessionStatus; } catch { @@ -248,9 +290,14 @@ export const cloudApi = { */ async logout(): Promise<{ success: boolean; error?: string }> { try { + const headers: Record = {}; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } const response = await fetch('/api/auth/logout', { method: 'POST', credentials: 'include', + headers, }); const data = await response.json(); return data as { success: boolean; error?: string }; @@ -298,6 +345,90 @@ export const cloudApi = { }); }, + /** + * Get primary workspace with live status + */ + async getPrimaryWorkspace() { + return cloudFetch<{ + exists: boolean; + message?: string; + workspace?: { + id: string; + name: string; + status: string; + publicUrl?: string; + isStopped: boolean; + isRunning: boolean; + isProvisioning: boolean; + hasError: boolean; + config: { + providers: string[]; + repositories: string[]; + }; + }; + statusMessage: string; + actionNeeded?: 'wakeup' | 'check_error' | null; + }>('/api/workspaces/primary'); + }, + + /** + * Get workspace summary (all workspaces with status) + */ + async getWorkspaceSummary() { + return cloudFetch<{ + workspaces: Array<{ + id: string; + name: string; + status: string; + publicUrl?: string; + 
isStopped: boolean; + isRunning: boolean; + isProvisioning: boolean; + hasError: boolean; + }>; + summary: { + total: number; + running: number; + stopped: number; + provisioning: number; + error: number; + }; + overallStatus: 'ready' | 'provisioning' | 'stopped' | 'none' | 'error'; + }>('/api/workspaces/summary'); + }, + + /** + * Get workspace status (live polling from compute provider) + */ + async getWorkspaceStatus(id: string) { + return cloudFetch<{ status: string }>(`/api/workspaces/${encodeURIComponent(id)}/status`); + }, + + /** + * Wake up a stopped workspace + */ + async wakeupWorkspace(id: string) { + return cloudFetch<{ + status: string; + wasRestarted: boolean; + message: string; + estimatedStartTime?: number; + publicUrl?: string; + }>(`/api/workspaces/${encodeURIComponent(id)}/wakeup`, { + method: 'POST', + }); + }, + + /** + * Restart a workspace + */ + async restartWorkspace(id: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(id)}/restart`, + { method: 'POST' } + ); + }, + // ===== Provider API ===== /** @@ -386,4 +517,294 @@ export const cloudApi = { { method: 'POST' } ); }, + + /** + * Update member role + */ + async updateMemberRole(workspaceId: string, memberId: string, role: string) { + return cloudFetch<{ success: boolean; role: string }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/members/${encodeURIComponent(memberId)}`, + { + method: 'PATCH', + body: JSON.stringify({ role }), + } + ); + }, + + /** + * Remove member from workspace + */ + async removeMember(workspaceId: string, memberId: string) { + return cloudFetch<{ success: boolean }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/members/${encodeURIComponent(memberId)}`, + { method: 'DELETE' } + ); + }, + + // ===== Billing API ===== + + /** + * Get all billing plans + */ + async getBillingPlans() { + return cloudFetch<{ + plans: Array<{ + tier: string; + name: string; + description: string; + price: { 
monthly: number; yearly: number }; + features: string[]; + limits: Record; + recommended?: boolean; + }>; + publishableKey: string; + }>('/api/billing/plans'); + }, + + /** + * Get current subscription status + */ + async getSubscription() { + return cloudFetch<{ + tier: string; + subscription: { + id: string; + tier: string; + status: string; + currentPeriodStart: string; + currentPeriodEnd: string; + cancelAtPeriodEnd: boolean; + interval: 'month' | 'year'; + } | null; + customer: { + id: string; + email: string; + name?: string; + paymentMethods: Array<{ + id: string; + type: string; + last4?: string; + brand?: string; + isDefault: boolean; + }>; + invoices: Array<{ + id: string; + number: string; + amount: number; + status: string; + date: string; + pdfUrl?: string; + }>; + } | null; + }>('/api/billing/subscription'); + }, + + /** + * Create checkout session for new subscription + */ + async createCheckoutSession(tier: string, interval: 'month' | 'year' = 'month') { + return cloudFetch<{ + sessionId: string; + checkoutUrl: string; + }>('/api/billing/checkout', { + method: 'POST', + body: JSON.stringify({ tier, interval }), + }); + }, + + /** + * Create billing portal session + */ + async createBillingPortal() { + return cloudFetch<{ + sessionId: string; + portalUrl: string; + }>('/api/billing/portal', { + method: 'POST', + }); + }, + + /** + * Change subscription tier + */ + async changeSubscription(tier: string, interval: 'month' | 'year' = 'month') { + return cloudFetch<{ + subscription: { + tier: string; + status: string; + }; + }>('/api/billing/change', { + method: 'POST', + body: JSON.stringify({ tier, interval }), + }); + }, + + /** + * Cancel subscription at period end + */ + async cancelSubscription() { + return cloudFetch<{ + subscription: { cancelAtPeriodEnd: boolean; currentPeriodEnd: string }; + message: string; + }>('/api/billing/cancel', { + method: 'POST', + }); + }, + + /** + * Resume cancelled subscription + */ + async resumeSubscription() { + 
return cloudFetch<{ + subscription: { cancelAtPeriodEnd: boolean }; + message: string; + }>('/api/billing/resume', { + method: 'POST', + }); + }, + + /** + * Get invoices + */ + async getInvoices() { + return cloudFetch<{ + invoices: Array<{ + id: string; + number: string; + amount: number; + status: string; + date: string; + pdfUrl?: string; + }>; + }>('/api/billing/invoices'); + }, + + // ===== Workspace Management API ===== + + /** + * Stop workspace + */ + async stopWorkspace(id: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(id)}/stop`, + { method: 'POST' } + ); + }, + + /** + * Delete workspace + */ + async deleteWorkspace(id: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(id)}`, + { method: 'DELETE' } + ); + }, + + /** + * Add repositories to workspace + */ + async addReposToWorkspace(workspaceId: string, repositoryIds: string[]) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/repos`, + { + method: 'POST', + body: JSON.stringify({ repositoryIds }), + } + ); + }, + + /** + * Set custom domain for workspace + */ + async setCustomDomain(workspaceId: string, domain: string) { + return cloudFetch<{ + success: boolean; + domain: string; + status: string; + instructions: { + type: string; + name: string; + value: string; + ttl: number; + }; + verifyEndpoint: string; + message: string; + }>(`/api/workspaces/${encodeURIComponent(workspaceId)}/domain`, { + method: 'POST', + body: JSON.stringify({ domain }), + }); + }, + + /** + * Verify custom domain + */ + async verifyCustomDomain(workspaceId: string) { + return cloudFetch<{ + success: boolean; + status: string; + domain?: string; + message?: string; + error?: string; + }>(`/api/workspaces/${encodeURIComponent(workspaceId)}/domain/verify`, { + method: 'POST', + }); + }, + + /** + * Remove custom domain + */ + async 
removeCustomDomain(workspaceId: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/domain`, + { method: 'DELETE' } + ); + }, + + /** + * Get detailed workspace info + */ + async getWorkspaceDetails(id: string) { + return cloudFetch<{ + id: string; + name: string; + status: string; + publicUrl?: string; + computeProvider: string; + config: { + providers: string[]; + repositories: string[]; + supervisorEnabled?: boolean; + maxAgents?: number; + }; + customDomain?: string; + customDomainStatus?: string; + errorMessage?: string; + repositories: Array<{ + id: string; + fullName: string; + syncStatus: string; + lastSyncedAt?: string; + }>; + createdAt: string; + updatedAt: string; + }>(`/api/workspaces/${encodeURIComponent(id)}`); + }, + + // ===== GitHub App API ===== + + /** + * Get user's connected repositories + */ + async getRepos() { + return cloudFetch<{ repositories: Array<{ + id: string; + fullName: string; + isPrivate: boolean; + defaultBranch: string; + syncStatus: string; + hasNangoConnection: boolean; + lastSyncedAt?: string; + }> }>('/api/github-app/repos'); + }, }; diff --git a/src/dashboard/next.config.js b/src/dashboard/next.config.js index 2cc9be8a..5e426c2c 100644 --- a/src/dashboard/next.config.js +++ b/src/dashboard/next.config.js @@ -2,7 +2,7 @@ const nextConfig = { // Static export - generates HTML/JS/CSS that can be served by any server output: 'export', - distDir: 'out', + // Export output goes to 'out/' by default with output: 'export' // Disable strict mode for now during development reactStrictMode: true, diff --git a/src/dashboard/react-components/AgentCard.tsx b/src/dashboard/react-components/AgentCard.tsx index 54135b7d..f06fe5f9 100644 --- a/src/dashboard/react-components/AgentCard.tsx +++ b/src/dashboard/react-components/AgentCard.tsx @@ -27,6 +27,7 @@ export interface AgentCardProps { onMessageClick?: (agent: Agent) => void; onReleaseClick?: (agent: Agent) => void; 
onLogsClick?: (agent: Agent) => void; + onProfileClick?: (agent: Agent) => void; } /** @@ -69,6 +70,7 @@ export function AgentCard({ onMessageClick, onReleaseClick, onLogsClick, + onProfileClick, }: AgentCardProps) { const colors = getAgentColor(agent.name); const initials = getAgentInitials(agent.name); @@ -98,6 +100,11 @@ export function AgentCard({ onLogsClick?.(agent); }; + const handleProfileClick = (e: React.MouseEvent) => { + e.stopPropagation(); + onProfileClick?.(agent); + }; + if (compact) { return (
+ {onProfileClick && ( + + )} {agent.isSpawned && onLogsClick && ( + )} {agent.isSpawned && onLogsClick && ( +
+ + {/* Agent Info */} +
+ {/* Large Avatar */} +
+
+ {initials} +
+ {/* Status indicator */} +
+
+ + {/* Name */} +

+ {displayName} +

+ + {/* Breadcrumb */} + {breadcrumb && ( + + {breadcrumb} + + )} + + {/* Title/Role */} + {profile?.title && ( + + {profile.title} + + )} + + {/* Status */} + +
+ {agent.status.charAt(0).toUpperCase() + agent.status.slice(1)} + {agent.isProcessing && ' - Thinking...'} + + + {/* Tags */} +
+ {agent.cli && ( + + {agent.cli} + + )} + {agent.isSpawned && ( + + Spawned + + )} + {agent.team && ( + + {agent.team} + + )} + {profile?.personaName && ( + + {profile.personaName} + + )} +
+
+ + {/* Details - Scrollable */} +
+
+ {/* Description */} + {profile?.description && ( +
+ +

+ {profile.description} +

+
+ )} + + {/* Current Task */} + {agent.currentTask && ( +
+ +

+ {agent.currentTask} +

+
+ )} + + {/* Spawn Prompt */} + {profile?.spawnPrompt && ( +
+ +
 200 ? 'line-clamp-4' : ''}`}>
+                  {profile.spawnPrompt}
+                
+
+ )} + + {/* Persona Prompt */} + {profile?.personaPrompt && ( +
+ +
 200 ? 'line-clamp-4' : ''}`}>
+                  {profile.personaPrompt}
+                
+
+ )} + + {/* Model */} + {profile?.model && ( +
+ +

+ {profile.model} +

+
+ )} + + {/* Working Directory */} + {profile?.workingDirectory && ( +
+ +

+ {profile.workingDirectory} +

+
+ )} + + {/* Agent ID */} + {agent.agentId && ( +
+ +

+ {agent.agentId} +

+
+ )} + + {/* Capabilities */} + {profile?.capabilities && profile.capabilities.length > 0 && ( +
+ +
+ {profile.capabilities.map((cap, i) => ( + + {cap} + + ))} +
+
+ )} + + {/* Last Seen */} + {agent.lastSeen && ( +
+ +

+ {formatDateTime(agent.lastSeen)} +

+
+ )} + + {/* First Seen */} + {profile?.firstSeen && ( +
+ +

+ {formatDateTime(profile.firstSeen)} +

+
+ )} + + {/* Message Count */} + {agent.messageCount !== undefined && agent.messageCount > 0 && ( +
+ +

+ {agent.messageCount} messages sent +

+
+ )} +
+
+ + {/* Actions */} +
+ {/* Message Button */} + {onMessage && ( + + )} + + {/* Logs Button */} + {agent.isSpawned && onLogs && ( + + )} + + {/* Release Button */} + {agent.isSpawned && onRelease && ( + + )} +
+
+ + ); +} + +/** + * Format a timestamp to a readable date/time + */ +function formatDateTime(timestamp: string): string { + const date = new Date(timestamp); + return date.toLocaleString([], { + month: 'short', + day: 'numeric', + year: 'numeric', + hour: '2-digit', + minute: '2-digit', + }); +} diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 42bdfc58..b7ad5a19 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -15,6 +15,7 @@ import { CommandPalette, type TaskCreateRequest, PRIORITY_CONFIG } from './Comma import { SpawnModal, type SpawnConfig } from './SpawnModal'; import { NewConversationModal } from './NewConversationModal'; import { SettingsPanel, defaultSettings, type Settings } from './SettingsPanel'; +import { SettingsPage } from './settings'; import { ConversationHistory } from './ConversationHistory'; import { MentionAutocomplete, getMentionQuery, completeMentionInValue, type HumanUser } from './MentionAutocomplete'; import { FileAutocomplete, getFileQuery, completeFileInValue } from './FileAutocomplete'; @@ -37,7 +38,9 @@ import { useTrajectory } from './hooks/useTrajectory'; import { useRecentRepos } from './hooks/useRecentRepos'; import { usePresence, type UserPresence } from './hooks/usePresence'; import { useCloudSessionOptional } from './CloudSessionProvider'; -import { api, convertApiDecision } from '../lib/api'; +import { WorkspaceProvider } from './WorkspaceContext'; +import { api, convertApiDecision, setActiveWorkspaceId as setApiWorkspaceId } from '../lib/api'; +import { cloudApi } from '../lib/cloudApi'; import type { CurrentUser } from './MessageList'; /** @@ -87,6 +90,83 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { } : undefined; + // Cloud workspaces state (for cloud mode) + const [cloudWorkspaces, setCloudWorkspaces] = useState>([]); + const [activeCloudWorkspaceId, setActiveCloudWorkspaceId] = useState(null); + const 
[isLoadingCloudWorkspaces, setIsLoadingCloudWorkspaces] = useState(false); + + // Fetch cloud workspaces when in cloud mode + useEffect(() => { + if (!cloudSession?.user) return; + + const fetchCloudWorkspaces = async () => { + setIsLoadingCloudWorkspaces(true); + try { + const result = await cloudApi.getWorkspaceSummary(); + if (result.success && result.data.workspaces) { + setCloudWorkspaces(result.data.workspaces); + // Auto-select first workspace if none selected + if (!activeCloudWorkspaceId && result.data.workspaces.length > 0) { + setActiveCloudWorkspaceId(result.data.workspaces[0].id); + } + } + } catch (err) { + console.error('Failed to fetch cloud workspaces:', err); + } finally { + setIsLoadingCloudWorkspaces(false); + } + }; + + fetchCloudWorkspaces(); + // Poll for updates every 30 seconds + const interval = setInterval(fetchCloudWorkspaces, 30000); + return () => clearInterval(interval); + }, [cloudSession?.user, activeCloudWorkspaceId]); + + // Determine which workspaces to use (cloud mode or orchestrator) + const isCloudMode = Boolean(cloudSession?.user); + const effectiveWorkspaces = useMemo(() => { + if (isCloudMode && cloudWorkspaces.length > 0) { + // Convert cloud workspaces to the format expected by WorkspaceSelector + return cloudWorkspaces.map(ws => ({ + id: ws.id, + name: ws.name, + path: ws.path || `/workspace/${ws.name}`, + status: ws.status === 'running' ? 'active' as const : 'inactive' as const, + provider: 'claude' as const, + lastActiveAt: new Date(), + })); + } + return workspaces; + }, [isCloudMode, cloudWorkspaces, workspaces]); + + const effectiveActiveWorkspaceId = isCloudMode ? activeCloudWorkspaceId : activeWorkspaceId; + const effectiveIsLoading = isCloudMode ? 
isLoadingCloudWorkspaces : isOrchestratorLoading; + + // Sync the active workspace ID with the api module for cloud mode proxying + useEffect(() => { + if (isCloudMode && activeCloudWorkspaceId) { + setApiWorkspaceId(activeCloudWorkspaceId); + } else if (!isCloudMode) { + // Clear the workspace ID when not in cloud mode + setApiWorkspaceId(null); + } + }, [isCloudMode, activeCloudWorkspaceId]); + + // Handle workspace selection (works for both cloud and orchestrator) + const handleEffectiveWorkspaceSelect = useCallback(async (workspace: { id: string; name: string }) => { + if (isCloudMode) { + setActiveCloudWorkspaceId(workspace.id); + } else { + await switchWorkspace(workspace.id); + } + }, [isCloudMode, switchWorkspace]); + // Presence tracking for online users and typing indicators const { onlineUsers, typingUsers, sendTyping, isConnected: isPresenceConnected } = usePresence({ currentUser: currentUser @@ -121,6 +201,10 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { const [isSettingsOpen, setIsSettingsOpen] = useState(false); const [settings, setSettings] = useState(defaultSettings); + // Full settings page state + const [isFullSettingsOpen, setIsFullSettingsOpen] = useState(false); + const [settingsInitialTab, setSettingsInitialTab] = useState<'dashboard' | 'workspace' | 'team' | 'billing'>('dashboard'); + // Conversation history panel state const [isHistoryOpen, setIsHistoryOpen] = useState(false); @@ -467,9 +551,16 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { setIsSpawnModalOpen(true); }, []); - // Handle settings click + // Handle settings click - opens full settings page const handleSettingsClick = useCallback(() => { - setIsSettingsOpen(true); + setSettingsInitialTab('dashboard'); + setIsFullSettingsOpen(true); + }, []); + + // Handle workspace settings click - opens settings to workspace tab + const handleWorkspaceSettingsClick = useCallback(() => { + setSettingsInitialTab('workspace'); + setIsFullSettingsOpen(true); }, 
[]); // Handle history click @@ -745,6 +836,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { setIsSpawnModalOpen(false); setIsNewConversationOpen(false); setIsTrajectoryOpen(false); + setIsFullSettingsOpen(false); } }; @@ -753,6 +845,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { }, [handleSpawnClick, handleNewConversationClick]); return ( +
{/* Mobile Sidebar Overlay */}
setIsAddWorkspaceOpen(true)} - isLoading={isOrchestratorLoading} + onWorkspaceSettings={handleWorkspaceSettingsClick} + isLoading={effectiveIsLoading} />
@@ -804,6 +898,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { onLogsClick={handleLogsClick} onThreadSelect={setCurrentThread} onClose={() => setIsSidebarOpen(false)} + onSettingsClick={handleSettingsClick} />
@@ -986,6 +1081,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { settings={settings} onSettingsChange={setSettings} onResetSettings={() => setSettings(defaultSettings)} + csrfToken={cloudSession?.csrfToken ?? undefined} /> {/* Add Workspace Modal */} @@ -1146,7 +1242,17 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { setIsCoordinatorOpen(false); }} /> + + {/* Full Settings Page */} + {isFullSettingsOpen && ( + setIsFullSettingsOpen(false)} + /> + )}
+ ); } diff --git a/src/dashboard/react-components/ProviderAuthFlow.tsx b/src/dashboard/react-components/ProviderAuthFlow.tsx new file mode 100644 index 00000000..60b3f259 --- /dev/null +++ b/src/dashboard/react-components/ProviderAuthFlow.tsx @@ -0,0 +1,548 @@ +/** + * Provider Auth Flow Component + * + * Shared component for AI provider OAuth authentication. + * Used by both the onboarding page and workspace settings. + * + * Handles different auth flows: + * - Claude/Anthropic: OAuth popup → "I've completed login" → poll for credentials + * - Codex/OpenAI: OAuth popup → copy localhost URL → paste code → submit + */ + +import React, { useState, useCallback, useRef, useEffect } from 'react'; + +export interface ProviderInfo { + id: string; + name: string; + displayName: string; + color: string; + cliCommand?: string; + /** Whether this provider's OAuth redirects to localhost (shows "site can't be reached") */ + requiresUrlCopy?: boolean; + /** Whether this provider supports device flow */ + supportsDeviceFlow?: boolean; +} + +export interface ProviderAuthFlowProps { + provider: ProviderInfo; + workspaceId: string; + csrfToken?: string; + onSuccess: () => void; + onCancel: () => void; + onError: (error: string) => void; + /** Whether to use device flow (for providers that support it) */ + useDeviceFlow?: boolean; +} + +type AuthStatus = 'idle' | 'starting' | 'waiting' | 'submitting' | 'success' | 'error'; + +// Provider ID mapping for backend +const PROVIDER_ID_MAP: Record = { + codex: 'openai', +}; + +export function ProviderAuthFlow({ + provider, + workspaceId, + csrfToken, + onSuccess, + onCancel, + onError, + useDeviceFlow = false, +}: ProviderAuthFlowProps) { + const [status, setStatus] = useState('idle'); + const [authUrl, setAuthUrl] = useState(null); + const [sessionId, setSessionId] = useState(null); + const [codeInput, setCodeInput] = useState(''); + const [errorMessage, setErrorMessage] = useState(null); + const popupOpenedRef = useRef(false); + const 
pollingRef = useRef(false); + + const backendProviderId = PROVIDER_ID_MAP[provider.id] || provider.id; + + // Start the OAuth flow + const startAuth = useCallback(async () => { + setStatus('starting'); + setErrorMessage(null); + popupOpenedRef.current = false; + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/start`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ workspaceId, useDeviceFlow }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start authentication'); + } + + if (data.status === 'success' || data.alreadyAuthenticated) { + setStatus('success'); + onSuccess(); + return; + } + + setSessionId(data.sessionId); + + if (data.authUrl) { + setAuthUrl(data.authUrl); + setStatus('waiting'); + openAuthPopup(data.authUrl); + startPolling(data.sessionId); + } else if (data.sessionId) { + // No URL yet, poll for it + startPolling(data.sessionId); + } + } catch (err) { + const msg = err instanceof Error ? 
err.message : 'Failed to start authentication'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }, [backendProviderId, workspaceId, csrfToken, useDeviceFlow, onSuccess, onError]); + + // Open OAuth popup + const openAuthPopup = useCallback((url: string) => { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + url, + `${provider.displayName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + popupOpenedRef.current = true; + }, [provider.displayName]); + + // Poll for auth status + const startPolling = useCallback((sid: string) => { + if (pollingRef.current) return; + pollingRef.current = true; + + const maxAttempts = 60; + let attempts = 0; + + const poll = async () => { + if (attempts >= maxAttempts) { + pollingRef.current = false; + setErrorMessage('Authentication timed out. Please try again.'); + setStatus('error'); + onError('Authentication timed out'); + return; + } + + try { + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/status/${sid}`, { + credentials: 'include', + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to check status'); + } + + if (data.status === 'success') { + pollingRef.current = false; + await handleComplete(sid); + return; + } else if (data.status === 'error') { + throw new Error(data.error || 'Authentication failed'); + } else if (data.status === 'waiting_auth' && data.authUrl) { + setAuthUrl(data.authUrl); + setStatus('waiting'); + if (!popupOpenedRef.current) { + openAuthPopup(data.authUrl); + } + } + + attempts++; + setTimeout(poll, 5000); + } catch (err) { + pollingRef.current = false; + const msg = err instanceof Error ? 
err.message : 'Auth check failed'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }; + + poll(); + }, [backendProviderId, openAuthPopup, onError]); + + // Complete auth by polling for credentials + const handleComplete = useCallback(async (sid?: string) => { + const targetSessionId = sid || sessionId; + if (!targetSessionId) return; + + setStatus('submitting'); + setErrorMessage(null); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/complete/${targetSessionId}`, { + method: 'POST', + credentials: 'include', + headers, + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to complete authentication'); + } + + setStatus('success'); + // Brief delay to show success message before parent unmounts component + setTimeout(() => onSuccess(), 1500); + } catch (err) { + const msg = err instanceof Error ? 
err.message : 'Failed to complete authentication'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }, [sessionId, backendProviderId, csrfToken, onSuccess, onError]); + + // Submit auth code (for providers like Codex that need it) + const handleSubmitCode = useCallback(async () => { + if (!sessionId || !codeInput.trim()) return; + + setStatus('submitting'); + setErrorMessage(null); + + // Extract code from URL if user pasted the full callback URL + let code = codeInput.trim(); + if (code.includes('code=')) { + try { + const url = new URL(code); + const extractedCode = url.searchParams.get('code'); + if (extractedCode) { + code = extractedCode; + } + } catch { + const match = code.match(/code=([^&\s]+)/); + if (match) { + code = match[1]; + } + } + } + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/code/${sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ code }), + }); + + const data = await res.json() as { success?: boolean; status?: string; error?: string; needsRestart?: boolean }; + + if (!res.ok) { + // If server indicates we need to restart, show helpful message + if (data.needsRestart) { + setErrorMessage('The authentication session timed out. Please click "Try Again" to restart.'); + setStatus('error'); + return; + } + throw new Error(data.error || 'Failed to submit auth code'); + } + + setCodeInput(''); + + // Backend returns { success: true } not { status: 'success' } + if (data.success) { + // Code was accepted, now complete the auth flow to store credentials + await handleComplete(); + } + // Otherwise continue polling + } catch (err) { + const msg = err instanceof Error ? 
err.message : 'Failed to submit auth code'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }, [sessionId, codeInput, backendProviderId, csrfToken, handleComplete, onError]); + + // Cancel auth flow + const handleCancel = useCallback(async () => { + pollingRef.current = false; + + if (sessionId) { + try { + await fetch(`/api/onboarding/cli/${backendProviderId}/cancel/${sessionId}`, { + method: 'POST', + credentials: 'include', + }); + } catch { + // Ignore cancel errors + } + } + + setStatus('idle'); + setAuthUrl(null); + setSessionId(null); + setCodeInput(''); + setErrorMessage(null); + onCancel(); + }, [sessionId, backendProviderId, onCancel]); + + // Start auth when component mounts (parent controls when to render this component) + useEffect(() => { + if (status === 'idle') { + startAuth(); + } + // Cleanup on unmount + return () => { + pollingRef.current = false; + }; + }, [startAuth, status]); + + // Determine which flow type to use based on provider + const isCodexFlow = provider.requiresUrlCopy || provider.id === 'codex' || backendProviderId === 'openai'; + const isClaudeFlow = provider.id === 'anthropic' || backendProviderId === 'anthropic'; + + return ( +
+ {/* Header */} +
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

+ {status === 'starting' && 'Starting authentication...'} + {status === 'waiting' && 'Complete authentication below'} + {status === 'submitting' && 'Verifying...'} + {status === 'success' && 'Connected!'} + {status === 'error' && (errorMessage || 'Authentication failed')} +

+
+
+ + {/* Starting state */} + {status === 'starting' && ( +
+ + + + + Preparing authentication... +
+ )} + + {/* Waiting state */} + {status === 'waiting' && authUrl && ( +
+ {/* Instructions - different for each provider */} +
+

Complete authentication:

+ {isCodexFlow ? ( + /* Codex/OpenAI: OAuth redirects to localhost which is unreachable */ +
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your {provider.displayName} account
  4. +
  5. + Important: After signing in, you'll see a "This site can't be reached" error - this is expected! +
  6. +
  7. Copy the entire URL from your browser's address bar (it starts with http://localhost...)
  8. +
  9. Paste it in the input below and click Submit
  10. +
+ ) : isClaudeFlow ? ( + /* Claude/Anthropic: Shows a code after OAuth completion */ +
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your Anthropic account
  4. +
  5. After signing in, Anthropic will display an authentication code
  6. +
  7. Copy that code and paste it in the input below
  8. +
  9. Click Submit to complete authentication
  10. +
+ ) : ( + /* Other providers: Try polling for credentials first */ +
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your {provider.displayName} account
  4. +
  5. If you receive a code, paste it below. Otherwise click "I've completed login"
  6. +
+ )} +
+ + {/* Auth URL button */} + + Open {provider.displayName} Login Page + + + {isCodexFlow ? ( + /* Codex: URL paste flow with warning about "site can't be reached" */ +
+
+

+ Expected behavior: After login, you'll see "This site can't be reached" - this is normal! + Copy the full URL from your browser's address bar and paste it below. +

+
+
+ setCodeInput(e.target.value)} + className="flex-1 px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors font-mono text-sm" + onKeyDown={(e) => { + if (e.key === 'Enter' && codeInput.trim()) { + handleSubmitCode(); + } + }} + /> + +
+
+ ) : isClaudeFlow ? ( + /* Claude: Code paste flow */ +
+
+

+ Look for the code: After signing in, Anthropic will show you an authentication code. + Copy it and paste it below. +

+
+
+ setCodeInput(e.target.value)} + className="flex-1 px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors font-mono text-sm" + onKeyDown={(e) => { + if (e.key === 'Enter' && codeInput.trim()) { + handleSubmitCode(); + } + }} + /> + +
+
+ ) : ( + /* Other providers: Code input with fallback button */ +
+
+ setCodeInput(e.target.value)} + className="flex-1 px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors font-mono text-sm" + onKeyDown={(e) => { + if (e.key === 'Enter' && codeInput.trim()) { + handleSubmitCode(); + } + }} + /> + +
+ +
+ )} + + {/* Cancel button */} + +
+ )} + + {/* Submitting state */} + {status === 'submitting' && ( +
+ + + + + Verifying authentication... +
+ )} + + {/* Success state */} + {status === 'success' && ( +
+
+ + + +
+ {provider.displayName} connected! +
+ )} + + {/* Error state */} + {status === 'error' && ( +
+
+ {errorMessage || 'Authentication failed. Please try again.'} +
+
+ + +
+
+ )} +
+ ); +} diff --git a/src/dashboard/react-components/SettingsPanel.tsx b/src/dashboard/react-components/SettingsPanel.tsx index 4a237af9..799ee566 100644 --- a/src/dashboard/react-components/SettingsPanel.tsx +++ b/src/dashboard/react-components/SettingsPanel.tsx @@ -49,12 +49,106 @@ export const defaultSettings: Settings = { }, }; +interface AIProvider { + id: string; + name: string; + displayName: string; + description: string; + color: string; + cliCommand: string; + apiKeyUrl?: string; // URL to get API key (fallback) + apiKeyName?: string; // How the API key is labeled on their site + supportsOAuth?: boolean; // Whether CLI-based OAuth is supported + isConnected?: boolean; +} + +const AI_PROVIDERS: AIProvider[] = [ + { + id: 'anthropic', + name: 'Anthropic', + displayName: 'Claude', + description: 'Claude Code - recommended for code tasks', + color: '#D97757', + cliCommand: 'claude', + apiKeyUrl: 'https://console.anthropic.com/settings/keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'openai', + name: 'OpenAI', + displayName: 'Codex', + description: 'Codex - OpenAI coding assistant', + color: '#10A37F', + cliCommand: 'codex login', + apiKeyUrl: 'https://platform.openai.com/api-keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'google', + name: 'Google', + displayName: 'Gemini', + description: 'Gemini - Google AI coding assistant', + color: '#4285F4', + cliCommand: 'gemini', + apiKeyUrl: 'https://aistudio.google.com/app/apikey', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'opencode', + name: 'OpenCode', + displayName: 'OpenCode', + description: 'OpenCode - AI coding assistant', + color: '#00D4AA', + cliCommand: 'opencode', + supportsOAuth: true, + }, + { + id: 'droid', + name: 'Factory', + displayName: 'Droid', + description: 'Droid - Factory AI coding agent', + color: '#6366F1', + cliCommand: 'droid', + supportsOAuth: true, + }, +]; + +// Auth session state for CLI-based OAuth +interface OAuthSession { 
+ providerId: string; + sessionId: string; + authUrl?: string; + status: 'starting' | 'waiting_auth' | 'success' | 'error'; + error?: string; +} + export interface SettingsPanelProps { isOpen: boolean; onClose: () => void; settings: Settings; onSettingsChange: (settings: Settings) => void; onResetSettings?: () => void; + workspaceId?: string; // For cloud mode provider connection + csrfToken?: string; // For cloud mode API calls +} + +// Trajectory settings state +interface TrajectorySettings { + storeInRepo: boolean; + storageLocation: string; + loading: boolean; + error: string | null; + documentation?: { + title: string; + description: string; + whatIsIt: string; + benefits: string[]; + storeInRepoExplanation: string; + learnMore: string; + }; } export function SettingsPanel({ @@ -63,8 +157,282 @@ export function SettingsPanel({ settings, onSettingsChange, onResetSettings, + workspaceId, + csrfToken, }: SettingsPanelProps) { - const [activeTab, setActiveTab] = useState<'appearance' | 'notifications' | 'connection'>('appearance'); + const [activeTab, setActiveTab] = useState<'appearance' | 'notifications' | 'connection' | 'providers' | 'trajectories'>('appearance'); + const [providerStatus, setProviderStatus] = useState>({}); + const [connectingProvider, setConnectingProvider] = useState(null); + const [apiKeyInput, setApiKeyInput] = useState(''); + const [providerError, setProviderError] = useState(null); + const [oauthSession, setOauthSession] = useState(null); + const [showApiKeyFallback, setShowApiKeyFallback] = useState>({}); + const [trajectorySettings, setTrajectorySettings] = useState({ + storeInRepo: false, + storageLocation: '', + loading: true, + error: null, + }); + + // Load trajectory settings on mount + React.useEffect(() => { + if (isOpen && activeTab === 'trajectories') { + fetchTrajectorySettings(); + } + }, [isOpen, activeTab]); + + const fetchTrajectorySettings = async () => { + try { + setTrajectorySettings(prev => ({ ...prev, loading: true, 
error: null })); + const res = await fetch('/api/settings/trajectory'); + if (!res.ok) throw new Error('Failed to load settings'); + const data = await res.json(); + setTrajectorySettings({ + storeInRepo: data.settings.storeInRepo, + storageLocation: data.settings.storageLocation, + loading: false, + error: null, + documentation: data.documentation, + }); + } catch (err) { + setTrajectorySettings(prev => ({ + ...prev, + loading: false, + error: err instanceof Error ? err.message : 'Failed to load settings', + })); + } + }; + + const updateTrajectorySettings = async (storeInRepo: boolean) => { + try { + setTrajectorySettings(prev => ({ ...prev, loading: true, error: null })); + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch('/api/settings/trajectory', { + method: 'PUT', + credentials: 'include', + headers, + body: JSON.stringify({ storeInRepo }), + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to update settings'); + } + + const data = await res.json(); + setTrajectorySettings(prev => ({ + ...prev, + storeInRepo: data.settings.storeInRepo, + storageLocation: data.settings.storageLocation, + loading: false, + error: null, + })); + } catch (err) { + setTrajectorySettings(prev => ({ + ...prev, + loading: false, + error: err instanceof Error ? 
err.message : 'Failed to update settings', + })); + } + }; + + // Start CLI-based OAuth flow for a provider + const startOAuthFlow = async (provider: AIProvider) => { + setProviderError(null); + setConnectingProvider(provider.id); + setOauthSession({ providerId: provider.id, sessionId: '', status: 'starting' }); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${provider.id}/start`, { + method: 'POST', + credentials: 'include', + headers, + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start authentication'); + } + + // Handle immediate success (already authenticated) + if (data.status === 'success' || data.alreadyAuthenticated) { + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + const session: OAuthSession = { + providerId: provider.id, + sessionId: data.sessionId, + authUrl: data.authUrl, + status: data.status || 'starting', + }; + setOauthSession(session); + + // If we have an auth URL, open it in a popup + if (data.authUrl) { + openAuthPopup(data.authUrl, provider.displayName); + // Start polling for completion + pollAuthStatus(provider.id, data.sessionId); + } else if (data.status === 'starting') { + // Still starting, poll for URL + pollAuthStatus(provider.id, data.sessionId); + } + } catch (err) { + setProviderError(err instanceof Error ? 
err.message : 'Failed to start OAuth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + // Open auth URL in a popup window + const openAuthPopup = (url: string, providerName: string) => { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + url, + `${providerName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + }; + + // Poll for OAuth session status + const pollAuthStatus = async (providerId: string, sessionId: string) => { + const maxAttempts = 60; // 5 minutes with 5-second intervals + let attempts = 0; + + const poll = async () => { + if (attempts >= maxAttempts) { + setProviderError('Authentication timed out. Please try again.'); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + try { + const res = await fetch(`/api/onboarding/cli/${providerId}/status/${sessionId}`, { + credentials: 'include', + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to check status'); + } + + if (data.status === 'success') { + // Complete the auth flow + await completeAuthFlow(providerId, sessionId); + return; + } else if (data.status === 'error') { + throw new Error(data.error || 'Authentication failed'); + } else if (data.status === 'waiting_auth' && data.authUrl && !oauthSession?.authUrl) { + // Got the auth URL, open popup + setOauthSession(prev => prev ? { ...prev, authUrl: data.authUrl, status: 'waiting_auth' } : null); + openAuthPopup(data.authUrl, AI_PROVIDERS.find(p => p.id === providerId)?.displayName || 'Provider'); + } + + // Continue polling + attempts++; + setTimeout(poll, 5000); + } catch (err) { + setProviderError(err instanceof Error ? 
err.message : 'Auth check failed'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + poll(); + }; + + // Complete OAuth flow + const completeAuthFlow = async (providerId: string, sessionId: string) => { + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${providerId}/complete/${sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to complete authentication'); + } + + // Success! + setProviderStatus(prev => ({ ...prev, [providerId]: true })); + setOauthSession(null); + setConnectingProvider(null); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to complete auth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + // Cancel OAuth flow + const cancelOAuthFlow = async () => { + if (oauthSession?.sessionId) { + try { + await fetch(`/api/onboarding/cli/${oauthSession.providerId}/cancel/${oauthSession.sessionId}`, { + method: 'POST', + credentials: 'include', + }); + } catch { + // Ignore cancel errors + } + } + setOauthSession(null); + setConnectingProvider(null); + }; + + // Submit API key (fallback flow) + const submitApiKey = async (provider: AIProvider) => { + if (!apiKeyInput.trim()) { + setProviderError('Please enter an API key'); + return; + } + + setProviderError(null); + setConnectingProvider(provider.id); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/token/${provider.id}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ token: apiKeyInput.trim() }), + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to connect'); + } + + setProviderStatus(prev => 
({ ...prev, [provider.id]: true })); + setApiKeyInput(''); + setConnectingProvider(null); + setShowApiKeyFallback(prev => ({ ...prev, [provider.id]: false })); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to connect'); + setConnectingProvider(null); + } + }; const updateSetting = useCallback( ( @@ -146,6 +514,28 @@ export function SettingsPanel({ Connection + +
@@ -282,6 +672,250 @@ export function SettingsPanel({
)} + + {activeTab === 'providers' && ( +
+
+ +

+ Connect AI providers to spawn agents. API keys are stored securely. +

+ + {providerError && ( +
+ {providerError} +
+ )} + +
+ {AI_PROVIDERS.map((provider) => ( +
+
+
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.description}

+
+
+ {providerStatus[provider.id] && ( + + Connected + + )} +
+ + {!providerStatus[provider.id] && ( +
+ {/* OAuth flow (primary) */} + {oauthSession?.providerId === provider.id ? ( +
+ {oauthSession.status === 'starting' && ( +
+ + Starting authentication... +
+ )} + {oauthSession.status === 'waiting_auth' && ( + <> +
+ 🔐 + Complete login in the popup window +
+ {oauthSession.authUrl && ( +
+ Popup didn't open?{' '} + +
+ )} + + )} + +
+ ) : showApiKeyFallback[provider.id] ? ( + /* API key fallback */ +
+
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 py-2 px-3 border border-border rounded-md text-sm bg-bg-tertiary text-text-primary placeholder-text-muted focus:outline-none focus:border-accent" + /> + +
+ {provider.apiKeyUrl && ( +
+ Get your API key from{' '} + + {new URL(provider.apiKeyUrl).hostname} + +
+ )} + +
+ ) : ( + /* Primary connect button */ +
+ + {provider.apiKeyUrl && ( + + )} +
+ )} +
+ )} + +
+ CLI: {provider.cliCommand} +
+
+ ))} +
+
+
+ )} + + {activeTab === 'trajectories' && ( +
+ {trajectorySettings.loading ? ( +
+
Loading settings...
+
+ ) : trajectorySettings.error ? ( +
+

{trajectorySettings.error}

+ +
+ ) : ( + <> + {/* Documentation section */} +
+

+ + What are Trajectories? +

+

+ {trajectorySettings.documentation?.description || + 'Trajectories record the journey of agent work using the PDERO paradigm (Plan, Design, Execute, Review, Observe). They capture decisions, phase transitions, and retrospectives.'} +

+
+

Benefits

+
    + {(trajectorySettings.documentation?.benefits || [ + 'Track why decisions were made, not just what was built', + 'Enable session recovery when agents crash', + 'Provide learning data for future agents', + 'Create audit trails of AI work', + ]).map((benefit, i) => ( +
  • + + {benefit} +
  • + ))} +
+
+ + Learn more about PDERO + + +
+ + {/* Settings */} +
+ + + updateTrajectorySettings(v)} + /> + +
+
+ Current storage location +
+ + {trajectorySettings.storageLocation || 'user (~/.config/agent-relay/trajectories/)'} + +
+
+ + {/* Why opt-in info */} +
+

Why opt-in to repo storage?

+

+ Teams who want to review agent decision-making processes can store trajectories + in the repo to version control them alongside code. This makes it easy to understand + why agents made specific choices during code review. +

+
+ + )} +
+ )}
@@ -414,3 +1048,31 @@ function MonitorIcon() { ); } + +function ProviderIcon() { + return ( + + + + + + ); +} + +function TrajectoryIcon() { + return ( + + + + ); +} + +function ExternalLinkIcon() { + return ( + + + + + + ); +} diff --git a/src/dashboard/react-components/SpawnModal.tsx b/src/dashboard/react-components/SpawnModal.tsx index 666a16de..d421bb70 100644 --- a/src/dashboard/react-components/SpawnModal.tsx +++ b/src/dashboard/react-components/SpawnModal.tsx @@ -24,7 +24,7 @@ export interface SpawnConfig { function deriveShadowMode(command: string): 'subagent' | 'process' { const base = command.trim().split(' ')[0].toLowerCase(); - if (base.startsWith('claude') || base === 'codex' || base === 'opencode') return 'subagent'; + if (base.startsWith('claude') || base === 'codex' || base === 'opencode' || base === 'gemini' || base === 'droid') return 'subagent'; return 'process'; } @@ -52,6 +52,27 @@ const AGENT_TEMPLATES = [ description: 'OpenAI Codex agent', icon: '⚡', }, + { + id: 'gemini', + name: 'Gemini', + command: 'gemini', + description: 'Google Gemini CLI agent', + icon: '💎', + }, + { + id: 'opencode', + name: 'OpenCode', + command: 'opencode', + description: 'OpenCode AI agent', + icon: '🔷', + }, + { + id: 'droid', + name: 'Droid', + command: 'droid', + description: 'Factory Droid agent', + icon: '🤖', + }, { id: 'custom', name: 'Custom', diff --git a/src/dashboard/react-components/WorkspaceContext.tsx b/src/dashboard/react-components/WorkspaceContext.tsx new file mode 100644 index 00000000..0a10ed0f --- /dev/null +++ b/src/dashboard/react-components/WorkspaceContext.tsx @@ -0,0 +1,107 @@ +/** + * Workspace Context + * + * Provides the current workspace's base URL for WebSocket connections. + * Used by LogViewer and other components that need to connect to workspace-specific endpoints. 
+ */ + +import React, { createContext, useContext, useMemo } from 'react'; + +interface WorkspaceContextValue { + /** Base WebSocket URL for the workspace (e.g., wss://workspace-abc.agentrelay.dev) */ + wsBaseUrl: string | null; + /** Whether we're in cloud mode (workspace URL is different from page host) */ + isCloudMode: boolean; +} + +const WorkspaceContext = createContext({ + wsBaseUrl: null, + isCloudMode: false, +}); + +export interface WorkspaceProviderProps { + children: React.ReactNode; + /** The workspace WebSocket URL (e.g., wss://workspace-abc.agentrelay.dev/ws) */ + wsUrl?: string; +} + +/** + * Extract base URL from a WebSocket URL + * e.g., wss://workspace-abc.agentrelay.dev/ws -> wss://workspace-abc.agentrelay.dev + */ +function getBaseUrl(wsUrl: string): string { + try { + const url = new URL(wsUrl); + return `${url.protocol}//${url.host}`; + } catch { + return wsUrl; + } +} + +export function WorkspaceProvider({ children, wsUrl }: WorkspaceProviderProps) { + const value = useMemo(() => { + if (!wsUrl) { + return { wsBaseUrl: null, isCloudMode: false }; + } + + const wsBaseUrl = getBaseUrl(wsUrl); + + // Check if we're in cloud mode by comparing the workspace URL host with the current page host + let isCloudMode = false; + if (typeof window !== 'undefined') { + try { + const wsHost = new URL(wsUrl).host; + isCloudMode = wsHost !== window.location.host; + } catch { + // Ignore parse errors + } + } + + return { wsBaseUrl, isCloudMode }; + }, [wsUrl]); + + return ( + + {children} + + ); +} + +/** + * Hook to access the workspace context + */ +export function useWorkspace(): WorkspaceContextValue { + return useContext(WorkspaceContext); +} + +/** + * Get the WebSocket URL for a specific path within the workspace + * Falls back to current host if not in a workspace context + */ +export function useWorkspaceWsUrl(path: string): string { + const { wsBaseUrl } = useWorkspace(); + + return useMemo(() => { + if (wsBaseUrl) { + return `${wsBaseUrl}${path}`; + 
} + + // Fallback to current host + if (typeof window === 'undefined') { + return `ws://localhost:3889${path}`; + } + + const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; + const isDev = process.env.NODE_ENV === 'development'; + const { hostname, port } = window.location; + + // Next.js dev runs on 3888, dashboard server on 3889 + if (isDev && port === '3888') { + return `${protocol}//${hostname || 'localhost'}:3889${path}`; + } + + return `${protocol}//${window.location.host}${path}`; + }, [wsBaseUrl, path]); +} + +export default WorkspaceContext; diff --git a/src/dashboard/react-components/WorkspaceSelector.tsx b/src/dashboard/react-components/WorkspaceSelector.tsx index b1cab61e..40d2fd5d 100644 --- a/src/dashboard/react-components/WorkspaceSelector.tsx +++ b/src/dashboard/react-components/WorkspaceSelector.tsx @@ -23,6 +23,7 @@ export interface WorkspaceSelectorProps { activeWorkspaceId?: string; onSelect: (workspace: Workspace) => void; onAddWorkspace: () => void; + onWorkspaceSettings?: () => void; isLoading?: boolean; } @@ -31,6 +32,7 @@ export function WorkspaceSelector({ activeWorkspaceId, onSelect, onAddWorkspace, + onWorkspaceSettings, isLoading = false, }: WorkspaceSelectorProps) { const [isOpen, setIsOpen] = useState(false); @@ -122,7 +124,19 @@ export function WorkspaceSelector({ )}
-
+
+ {onWorkspaceSettings && activeWorkspace && ( + + )} + )} + + {/* Toast notification */} + {showToast && ( +
+ {toastMessage} +
+ )} +
+ ); + } + + // Expanded view (for sidebar or dedicated panel) + return ( +
+
+
+ {config.icon} + + Workspace Status + +
+ {config.pulseColor && ( + + )} +
+ +
+
+ Name + + {workspace?.name || 'None'} + +
+ +
+ Status + + {config.label} + +
+ + {statusMessage && ( +

{statusMessage}

+ )} + + {/* Action buttons */} + {actionNeeded === 'wakeup' && !isWakingUp && ( + + )} + + {actionNeeded === 'check_error' && ( + + View error details + + )} +
+ + {/* Toast notification */} + {showToast && ( +
+ {toastMessage} +
+ )} +
+ ); +} + +// Icons +function RunningIcon() { + return ( + + + + ); +} + +function StoppedIcon() { + return ( + + + + ); +} + +function ProvisioningIcon() { + return ( + + + + ); +} + +function ErrorIcon() { + return ( + + + + + + ); +} + +function NoWorkspaceIcon() { + return ( + + + + + + ); +} + +function LoadingIcon() { + return ( + + + + + ); +} diff --git a/src/dashboard/react-components/XTermLogViewer.tsx b/src/dashboard/react-components/XTermLogViewer.tsx index af9ea23a..666f8157 100644 --- a/src/dashboard/react-components/XTermLogViewer.tsx +++ b/src/dashboard/react-components/XTermLogViewer.tsx @@ -10,6 +10,7 @@ import { Terminal } from '@xterm/xterm'; import { FitAddon } from '@xterm/addon-fit'; import { SearchAddon } from '@xterm/addon-search'; import { getAgentColor } from '../lib/colors'; +import { useWorkspaceWsUrl } from './WorkspaceContext'; export interface XTermLogViewerProps { /** Agent name to stream logs from */ @@ -50,27 +51,7 @@ const TERMINAL_THEME = { brightWhite: '#ffffff', }; -/** - * Get WebSocket URL for agent log streaming - */ -function getLogStreamUrl(agentName: string): string { - const path = `/ws/logs/${encodeURIComponent(agentName)}`; - const isDev = process.env.NODE_ENV === 'development'; - - if (typeof window === 'undefined') { - return `ws://localhost:3889${path}`; - } - - const protocol = window.location.protocol === 'https:' ? 
'wss:' : 'ws:'; - const { hostname, port } = window.location; - - if (isDev && port === '3888') { - const host = hostname || 'localhost'; - return `${protocol}//${host}:3889${path}`; - } - - return `${protocol}//${window.location.host}${path}`; -} +// getLogStreamUrl removed - now using useWorkspaceWsUrl hook export function XTermLogViewer({ agentName, @@ -97,6 +78,9 @@ export function XTermLogViewer({ const searchInputRef = useRef(null); const colors = getAgentColor(agentName); + // Get WebSocket URL from workspace context (handles cloud vs local mode) + const logStreamUrl = useWorkspaceWsUrl(`/ws/logs/${encodeURIComponent(agentName)}`); + // Initialize terminal useEffect(() => { if (!containerRef.current) return; @@ -152,8 +136,7 @@ export function XTermLogViewer({ setIsConnecting(true); setError(null); - const url = getLogStreamUrl(agentName); - const ws = new WebSocket(url); + const ws = new WebSocket(logStreamUrl); wsRef.current = ws; ws.onopen = () => { @@ -254,7 +237,7 @@ export function XTermLogViewer({ } } }; - }, [agentName]); + }, [logStreamUrl, agentName]); // Disconnect from WebSocket const disconnect = useCallback(() => { diff --git a/src/dashboard/react-components/hooks/index.ts b/src/dashboard/react-components/hooks/index.ts index cb77c719..549b64a8 100644 --- a/src/dashboard/react-components/hooks/index.ts +++ b/src/dashboard/react-components/hooks/index.ts @@ -27,3 +27,9 @@ export { type UseRecentReposReturn, type RecentRepo, } from './useRecentRepos'; +export { + useWorkspaceStatus, + type UseWorkspaceStatusOptions, + type UseWorkspaceStatusReturn, + type WorkspaceStatus, +} from './useWorkspaceStatus'; diff --git a/src/dashboard/react-components/hooks/useMessages.ts b/src/dashboard/react-components/hooks/useMessages.ts index e2f46056..9f775334 100644 --- a/src/dashboard/react-components/hooks/useMessages.ts +++ b/src/dashboard/react-components/hooks/useMessages.ts @@ -7,6 +7,7 @@ import { useState, useMemo, useCallback, useEffect } from 'react'; 
import type { Message, SendMessageRequest } from '../../types'; +import { api } from '../../lib/api'; export interface UseMessagesOptions { messages: Message[]; @@ -261,15 +262,13 @@ export function useMessages({ request.from = senderName; } - const response = await fetch('/api/send', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(request), - }); + // Use api.sendMessage which handles: + // - Workspace proxy routing (in cloud mode) + // - CSRF token headers + // - Credentials + const result = await api.sendMessage(request); - const result = await response.json() as { success?: boolean; error?: string }; - - if (response.ok && result.success) { + if (result.success) { // Success! The optimistic message will be cleaned up when // the real message arrives via WebSocket return true; diff --git a/src/dashboard/react-components/hooks/usePresence.ts b/src/dashboard/react-components/hooks/usePresence.ts index 71396e8a..efeac8ee 100644 --- a/src/dashboard/react-components/hooks/usePresence.ts +++ b/src/dashboard/react-components/hooks/usePresence.ts @@ -87,6 +87,7 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn const wsRef = useRef(null); const reconnectTimeoutRef = useRef(null); const typingTimeoutRef = useRef(null); + const isConnectingRef = useRef(false); // Prevent race conditions // Clear stale typing indicators (after 3 seconds of no update) useEffect(() => { @@ -103,13 +104,16 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn const connect = useCallback(() => { if (!currentUser) return; // Don't connect without user info if (wsRef.current?.readyState === WebSocket.OPEN) return; + if (isConnectingRef.current) return; // Prevent concurrent connect attempts + isConnectingRef.current = true; const url = wsUrl || getPresenceUrl(); try { const ws = new WebSocket(url); ws.onopen = () => { + isConnectingRef.current = false; setIsConnected(true); // Announce 
presence @@ -124,13 +128,16 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn }; ws.onclose = () => { + isConnectingRef.current = false; setIsConnected(false); wsRef.current = null; - // Reconnect after 2 seconds - reconnectTimeoutRef.current = setTimeout(() => { - connect(); - }, 2000); + // Reconnect after 2 seconds (only if not intentionally disconnected) + if (currentUser) { + reconnectTimeoutRef.current = setTimeout(() => { + connect(); + }, 2000); + } }; ws.onerror = (event) => { @@ -200,21 +207,30 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn }, [currentUser, wsUrl]); const disconnect = useCallback(() => { + // Clear reconnect timeout first if (reconnectTimeoutRef.current) { clearTimeout(reconnectTimeoutRef.current); reconnectTimeoutRef.current = null; } + // Reset connecting flag + isConnectingRef.current = false; + if (wsRef.current) { + // Prevent auto-reconnect by removing onclose handler before closing + const ws = wsRef.current; + ws.onclose = null; + ws.onerror = null; + // Send leave message before closing - if (wsRef.current.readyState === WebSocket.OPEN && currentUser) { - wsRef.current.send(JSON.stringify({ + if (ws.readyState === WebSocket.OPEN && currentUser) { + ws.send(JSON.stringify({ type: 'presence', action: 'leave', username: currentUser.username, })); } - wsRef.current.close(); + ws.close(); wsRef.current = null; } @@ -249,15 +265,25 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn }, [currentUser]); // Connect when user is available + // Use refs to avoid effect re-running on function reference changes + const currentUserRef = useRef(currentUser); + currentUserRef.current = currentUser; + useEffect(() => { - if (autoConnect && currentUser) { - connect(); + if (!autoConnect || !currentUserRef.current) return; + + // Prevent connecting if already connected or connecting + if (wsRef.current && wsRef.current.readyState !== 
WebSocket.CLOSED) { + return; } + connect(); + return () => { disconnect(); }; - }, [autoConnect, currentUser, connect, disconnect]); + // Only re-run when autoConnect or currentUser identity changes + }, [autoConnect, currentUser?.username, connect, disconnect]); // Send leave on page unload useEffect(() => { diff --git a/src/dashboard/react-components/hooks/useSession.ts b/src/dashboard/react-components/hooks/useSession.ts index a83aa9a2..7e75a09e 100644 --- a/src/dashboard/react-components/hooks/useSession.ts +++ b/src/dashboard/react-components/hooks/useSession.ts @@ -9,6 +9,7 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { cloudApi, onSessionExpired, + getCsrfToken, type CloudUser, type SessionError, type SessionStatus, @@ -34,6 +35,8 @@ export interface UseSessionReturn { isExpired: boolean; /** Session error if any */ error: SessionError | null; + /** CSRF token for API requests */ + csrfToken: string | null; /** Manually check session status */ checkSession: () => Promise; /** Clear the expired state (e.g., after dismissing modal) */ @@ -195,6 +198,7 @@ export function useSession(options: UseSessionOptions = {}): UseSessionReturn { isAuthenticated: user !== null, isExpired, error, + csrfToken: getCsrfToken(), checkSession, clearExpired, redirectToLogin, diff --git a/src/dashboard/react-components/hooks/useTrajectory.ts b/src/dashboard/react-components/hooks/useTrajectory.ts index ca61dd3e..184c5326 100644 --- a/src/dashboard/react-components/hooks/useTrajectory.ts +++ b/src/dashboard/react-components/hooks/useTrajectory.ts @@ -7,6 +7,7 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import type { TrajectoryStep } from '../TrajectoryViewer'; +import { getApiUrl } from '../../lib/api'; interface TrajectoryStatus { active: boolean; @@ -67,7 +68,11 @@ export function useTrajectory(options: UseTrajectoryOptions = {}): UseTrajectory // Fetch trajectory status const fetchStatus = useCallback(async () => { try { - 
const response = await fetch(`${apiBaseUrl}/api/trajectory`); + // Use apiBaseUrl if provided, otherwise use getApiUrl for cloud mode routing + const url = apiBaseUrl + ? `${apiBaseUrl}/api/trajectory` + : getApiUrl('/api/trajectory'); + const response = await fetch(url, { credentials: 'include' }); const data = await response.json(); if (data.success !== false) { @@ -86,7 +91,10 @@ export function useTrajectory(options: UseTrajectoryOptions = {}): UseTrajectory // Fetch trajectory history const fetchHistory = useCallback(async () => { try { - const response = await fetch(`${apiBaseUrl}/api/trajectory/history`); + const url = apiBaseUrl + ? `${apiBaseUrl}/api/trajectory/history` + : getApiUrl('/api/trajectory/history'); + const response = await fetch(url, { credentials: 'include' }); const data = await response.json(); if (data.success) { @@ -101,11 +109,14 @@ export function useTrajectory(options: UseTrajectoryOptions = {}): UseTrajectory const fetchSteps = useCallback(async () => { try { const trajectoryId = selectedTrajectoryId; - const url = trajectoryId - ? `${apiBaseUrl}/api/trajectory/steps?trajectoryId=${encodeURIComponent(trajectoryId)}` - : `${apiBaseUrl}/api/trajectory/steps`; - - const response = await fetch(url); + const basePath = trajectoryId + ? `/api/trajectory/steps?trajectoryId=${encodeURIComponent(trajectoryId)}` + : '/api/trajectory/steps'; + const url = apiBaseUrl + ? 
`${apiBaseUrl}${basePath}` + : getApiUrl(basePath); + + const response = await fetch(url, { credentials: 'include' }); const data = await response.json(); if (data.success) { diff --git a/src/dashboard/react-components/hooks/useWebSocket.ts b/src/dashboard/react-components/hooks/useWebSocket.ts index 664fd3a7..8e076902 100644 --- a/src/dashboard/react-components/hooks/useWebSocket.ts +++ b/src/dashboard/react-components/hooks/useWebSocket.ts @@ -10,6 +10,7 @@ import type { Agent, Message, Session, AgentSummary, FleetData } from '../../typ export interface DashboardData { agents: Agent[]; + users?: Agent[]; // Human users (cli === 'dashboard') messages: Message[]; sessions?: Session[]; summaries?: AgentSummary[]; diff --git a/src/dashboard/react-components/hooks/useWorkspaceStatus.ts b/src/dashboard/react-components/hooks/useWorkspaceStatus.ts new file mode 100644 index 00000000..066df7f5 --- /dev/null +++ b/src/dashboard/react-components/hooks/useWorkspaceStatus.ts @@ -0,0 +1,237 @@ +/** + * useWorkspaceStatus Hook + * + * React hook for monitoring workspace status with auto-wakeup capability. + * Polls for status updates and can automatically restart stopped workspaces. 
+ */ + +import { useState, useEffect, useCallback, useRef } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface WorkspaceStatus { + id: string; + name: string; + status: string; + publicUrl?: string; + isStopped: boolean; + isRunning: boolean; + isProvisioning: boolean; + hasError: boolean; + config: { + providers: string[]; + repositories: string[]; + }; +} + +export interface UseWorkspaceStatusOptions { + /** Poll for status updates (default: true) */ + autoRefresh?: boolean; + /** Interval to poll for status in ms (default: 30000) */ + refreshInterval?: number; + /** Auto-wakeup when workspace is stopped (default: false) */ + autoWakeup?: boolean; + /** Callback when workspace status changes */ + onStatusChange?: (status: string, wasRestarted: boolean) => void; +} + +export interface UseWorkspaceStatusReturn { + /** Current workspace data (null if no workspace) */ + workspace: WorkspaceStatus | null; + /** Whether workspace exists */ + exists: boolean; + /** Whether the status check is in progress */ + isLoading: boolean; + /** Whether a wakeup is in progress */ + isWakingUp: boolean; + /** Status message for display */ + statusMessage: string; + /** Action needed (wakeup, check_error, etc) */ + actionNeeded: 'wakeup' | 'check_error' | null; + /** Error if any */ + error: string | null; + /** Manually refresh status */ + refresh: () => Promise; + /** Manually wake up workspace */ + wakeup: () => Promise<{ success: boolean; message: string }>; +} + +const DEFAULT_OPTIONS: Required = { + autoRefresh: true, + refreshInterval: 30000, // 30 seconds + autoWakeup: false, + onStatusChange: () => {}, +}; + +export function useWorkspaceStatus( + options: UseWorkspaceStatusOptions = {} +): UseWorkspaceStatusReturn { + // Stabilize options to prevent infinite re-renders + // Use refs for callbacks and useMemo for primitive values + const autoRefresh = options.autoRefresh ?? 
DEFAULT_OPTIONS.autoRefresh; + const refreshInterval = options.refreshInterval ?? DEFAULT_OPTIONS.refreshInterval; + const autoWakeup = options.autoWakeup ?? DEFAULT_OPTIONS.autoWakeup; + + // Store callback in ref to avoid recreating refresh on every render + const onStatusChangeRef = useRef(options.onStatusChange ?? DEFAULT_OPTIONS.onStatusChange); + onStatusChangeRef.current = options.onStatusChange ?? DEFAULT_OPTIONS.onStatusChange; + + const [workspace, setWorkspace] = useState(null); + const [exists, setExists] = useState(false); + const [isLoading, setIsLoading] = useState(true); + const [isWakingUp, setIsWakingUp] = useState(false); + const [statusMessage, setStatusMessage] = useState(''); + const [actionNeeded, setActionNeeded] = useState<'wakeup' | 'check_error' | null>(null); + const [error, setError] = useState(null); + + const intervalRef = useRef(null); + const mountedRef = useRef(true); + const previousStatusRef = useRef(null); + + // Fetch workspace status + const refresh = useCallback(async () => { + try { + setIsLoading(true); + setError(null); + + const result = await cloudApi.getPrimaryWorkspace(); + + if (!mountedRef.current) return; + + if (result.success) { + setExists(result.data.exists); + setStatusMessage(result.data.statusMessage); + setActionNeeded(result.data.actionNeeded || null); + + if (result.data.workspace) { + const ws = result.data.workspace; + setWorkspace(ws); + + // Check for status change + if (previousStatusRef.current && previousStatusRef.current !== ws.status) { + onStatusChangeRef.current(ws.status, false); + } + previousStatusRef.current = ws.status; + } else { + setWorkspace(null); + } + } else { + setError(result.error); + } + } catch (_e) { + if (mountedRef.current) { + setError('Failed to fetch workspace status'); + } + } finally { + if (mountedRef.current) { + setIsLoading(false); + } + } + }, []); // No dependencies - uses refs for callbacks + + // Store refresh interval in ref for wakeup callback + const 
refreshIntervalRef = useRef(refreshInterval); + refreshIntervalRef.current = refreshInterval; + const autoRefreshRef = useRef(autoRefresh); + autoRefreshRef.current = autoRefresh; + + // Wake up workspace + const wakeup = useCallback(async (): Promise<{ success: boolean; message: string }> => { + if (!workspace?.id) { + return { success: false, message: 'No workspace to wake up' }; + } + + try { + setIsWakingUp(true); + setError(null); + + const result = await cloudApi.wakeupWorkspace(workspace.id); + + if (!mountedRef.current) { + return { success: false, message: 'Component unmounted' }; + } + + if (result.success) { + // Update local state + if (result.data.wasRestarted) { + setStatusMessage(result.data.message); + setActionNeeded(null); + onStatusChangeRef.current('starting', true); + + // Start more frequent polling to catch when workspace is ready + if (intervalRef.current) { + clearInterval(intervalRef.current); + } + intervalRef.current = setInterval(refresh, 5000); // Poll every 5s during startup + + // Reset to normal interval after 2 minutes + setTimeout(() => { + if (mountedRef.current && intervalRef.current) { + clearInterval(intervalRef.current); + if (autoRefreshRef.current) { + intervalRef.current = setInterval(refresh, refreshIntervalRef.current); + } + } + }, 120000); + } + + return { success: true, message: result.data.message }; + } else { + setError(result.error); + return { success: false, message: result.error }; + } + } catch (e) { + const message = e instanceof Error ? 
e.message : 'Failed to wake up workspace'; + if (mountedRef.current) { + setError(message); + } + return { success: false, message }; + } finally { + if (mountedRef.current) { + setIsWakingUp(false); + } + } + }, [workspace?.id, refresh]); + + // Initial fetch + useEffect(() => { + mountedRef.current = true; + refresh(); + + return () => { + mountedRef.current = false; + }; + }, [refresh]); + + // Auto-refresh polling + useEffect(() => { + if (!autoRefresh) return; + + intervalRef.current = setInterval(refresh, refreshInterval); + + return () => { + if (intervalRef.current) { + clearInterval(intervalRef.current); + intervalRef.current = null; + } + }; + }, [autoRefresh, refreshInterval, refresh]); + + // Auto-wakeup when workspace is stopped + useEffect(() => { + if (autoWakeup && workspace?.isStopped && !isWakingUp) { + wakeup(); + } + }, [autoWakeup, workspace?.isStopped, isWakingUp, wakeup]); + + return { + workspace, + exists, + isLoading, + isWakingUp, + statusMessage, + actionNeeded, + error, + refresh, + wakeup, + }; +} diff --git a/src/dashboard/react-components/index.ts b/src/dashboard/react-components/index.ts index df3dddcb..ebdbd453 100644 --- a/src/dashboard/react-components/index.ts +++ b/src/dashboard/react-components/index.ts @@ -38,6 +38,12 @@ export { useCloudSessionOptional, type CloudSessionProviderProps, } from './CloudSessionProvider'; +export { + WorkspaceProvider, + useWorkspace, + useWorkspaceWsUrl, + type WorkspaceProviderProps, +} from './WorkspaceContext'; // Layout Components export { Sidebar, type SidebarProps } from './layout/Sidebar'; diff --git a/src/dashboard/react-components/layout/Header.tsx b/src/dashboard/react-components/layout/Header.tsx index 6d27247c..001a6737 100644 --- a/src/dashboard/react-components/layout/Header.tsx +++ b/src/dashboard/react-components/layout/Header.tsx @@ -10,6 +10,7 @@ import type { Agent, Project } from '../../types'; import { getAgentColor, getAgentInitials } from '../../lib/colors'; import { 
getAgentBreadcrumb } from '../../lib/hierarchy'; import { RepoContextHeader } from './RepoContextHeader'; +import { WorkspaceStatusIndicator } from '../WorkspaceStatusIndicator'; export interface HeaderProps { currentChannel: string; @@ -95,6 +96,12 @@ export function Header({
)} + {/* Workspace Status Indicator */} + + + {/* Divider after workspace status */} +
+
{isGeneral ? ( <> diff --git a/src/dashboard/react-components/layout/Sidebar.tsx b/src/dashboard/react-components/layout/Sidebar.tsx index 14d11c0d..138902bd 100644 --- a/src/dashboard/react-components/layout/Sidebar.tsx +++ b/src/dashboard/react-components/layout/Sidebar.tsx @@ -14,6 +14,9 @@ import { ThreadList } from '../ThreadList'; import { LogoIcon } from '../Logo'; const THREADS_COLLAPSED_KEY = 'agent-relay-threads-collapsed'; +const SIDEBAR_TAB_KEY = 'agent-relay-sidebar-tab'; + +export type SidebarTab = 'agents' | 'team'; export interface SidebarProps { agents: Agent[]; @@ -42,6 +45,8 @@ export interface SidebarProps { onThreadSelect?: (threadId: string) => void; /** Mobile: close sidebar handler */ onClose?: () => void; + /** Handler for opening settings */ + onSettingsClick?: () => void; } export function Sidebar({ @@ -65,8 +70,18 @@ export function Sidebar({ onLogsClick, onThreadSelect, onClose, + onSettingsClick, }: SidebarProps) { const [searchQuery, setSearchQuery] = useState(''); + const [activeTab, setActiveTab] = useState(() => { + // Initialize from localStorage + try { + const stored = localStorage.getItem(SIDEBAR_TAB_KEY); + return (stored === 'team' ? 
'team' : 'agents') as SidebarTab; + } catch { + return 'agents'; + } + }); const [isThreadsCollapsed, setIsThreadsCollapsed] = useState(() => { // Initialize from localStorage try { @@ -77,6 +92,15 @@ export function Sidebar({ } }); + // Persist tab state to localStorage + useEffect(() => { + try { + localStorage.setItem(SIDEBAR_TAB_KEY, activeTab); + } catch { + // localStorage not available + } + }, [activeTab]); + // Persist collapsed state to localStorage useEffect(() => { try { @@ -86,6 +110,10 @@ export function Sidebar({ } }, [isThreadsCollapsed]); + // Separate AI agents from human team members + const aiAgents = agents.filter(a => !a.isHuman); + const humanMembers = agents.filter(a => a.isHuman); + // Determine if we should show unified project view const hasProjects = projects.length > 0; @@ -138,12 +166,48 @@ export function Sidebar({ )}
+ {/* Agents/Team Tabs */} + {humanMembers.length > 0 && ( +
+ + +
+ )} + {/* Search */}
setSearchQuery(e.target.value)} className="flex-1 bg-transparent border-none text-text-primary text-sm outline-none placeholder:text-text-muted" @@ -174,10 +238,51 @@ export function Sidebar({ {/* Agent/Project List */}
- {hasProjects ? ( + {activeTab === 'team' && humanMembers.length > 0 ? ( + /* Team Members List */ +
+ {humanMembers + .filter(m => !searchQuery || m.name.toLowerCase().includes(searchQuery.toLowerCase())) + .map((member) => ( + + ))} + {humanMembers.filter(m => !searchQuery || m.name.toLowerCase().includes(searchQuery.toLowerCase())).length === 0 && ( +
+ +

No team members match "{searchQuery}"

+
+ )} +
+ ) : hasProjects ? ( ) : ( onAgentSelect?.(agent)} @@ -203,7 +308,7 @@ export function Sidebar({
{/* Footer Actions */} -
+
+
); @@ -268,3 +380,35 @@ function CloseIcon() { ); } + +function SettingsIcon() { + return ( + + + + + ); +} + +function RobotIcon() { + return ( + + + + + + + + ); +} + +function UsersIcon() { + return ( + + + + + + + ); +} diff --git a/src/dashboard/react-components/settings/BillingSettingsPanel.tsx b/src/dashboard/react-components/settings/BillingSettingsPanel.tsx new file mode 100644 index 00000000..6b950a69 --- /dev/null +++ b/src/dashboard/react-components/settings/BillingSettingsPanel.tsx @@ -0,0 +1,542 @@ +/** + * Billing Settings Panel + * + * Manage subscription, view plans, and access billing portal. + */ + +import React, { useState, useEffect, useCallback } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface BillingSettingsPanelProps { + onUpgrade?: () => void; +} + +interface Plan { + tier: string; + name: string; + description: string; + price: { monthly: number; yearly: number }; + features: string[]; + limits: Record; + recommended?: boolean; +} + +interface Subscription { + id: string; + tier: string; + status: string; + currentPeriodStart: string; + currentPeriodEnd: string; + cancelAtPeriodEnd: boolean; + interval: 'month' | 'year'; +} + +interface Invoice { + id: string; + number: string; + amount: number; + status: string; + date: string; + pdfUrl?: string; +} + +const TIER_COLORS: Record = { + free: 'bg-bg-tertiary border-border-subtle text-text-muted', + pro: 'bg-accent-cyan/10 border-accent-cyan/30 text-accent-cyan', + team: 'bg-accent-purple/10 border-accent-purple/30 text-accent-purple', + enterprise: 'bg-amber-400/10 border-amber-400/30 text-amber-400', +}; + +export function BillingSettingsPanel({ onUpgrade }: BillingSettingsPanelProps) { + const [plans, setPlans] = useState([]); + const [currentTier, setCurrentTier] = useState('free'); + const [subscription, setSubscription] = useState(null); + const [invoices, setInvoices] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, 
setError] = useState(null); + const [successMessage, setSuccessMessage] = useState(null); + + // Billing interval toggle + const [billingInterval, setBillingInterval] = useState<'month' | 'year'>('month'); + + // Action loading states + const [checkoutLoading, setCheckoutLoading] = useState(null); + const [portalLoading, setPortalLoading] = useState(false); + const [cancelLoading, setCancelLoading] = useState(false); + const [resumeLoading, setResumeLoading] = useState(false); + + // Load billing data + useEffect(() => { + async function loadBillingData() { + setIsLoading(true); + setError(null); + + const [plansResult, subscriptionResult, invoicesResult] = await Promise.all([ + cloudApi.getBillingPlans(), + cloudApi.getSubscription(), + cloudApi.getInvoices(), + ]); + + if (plansResult.success) { + setPlans(plansResult.data.plans); + } + + if (subscriptionResult.success) { + setCurrentTier(subscriptionResult.data.tier); + setSubscription(subscriptionResult.data.subscription); + if (subscriptionResult.data.subscription?.interval) { + setBillingInterval(subscriptionResult.data.subscription.interval); + } + } + + if (invoicesResult.success) { + setInvoices(invoicesResult.data.invoices); + } + + if (!plansResult.success) { + setError(plansResult.error); + } + + setIsLoading(false); + } + + loadBillingData(); + }, []); + + // Start checkout for plan upgrade + const handleCheckout = useCallback(async (tier: string) => { + setCheckoutLoading(tier); + + const result = await cloudApi.createCheckoutSession(tier, billingInterval); + + if (result.success && result.data.checkoutUrl) { + // Redirect to Stripe checkout + window.location.href = result.data.checkoutUrl; + } else if (!result.success) { + setError(result.error); + setCheckoutLoading(null); + } + }, [billingInterval]); + + // Open billing portal + const handleOpenPortal = useCallback(async () => { + setPortalLoading(true); + + const result = await cloudApi.createBillingPortal(); + + if (result.success && 
result.data.portalUrl) { + window.location.href = result.data.portalUrl; + } else if (!result.success) { + setError(result.error); + } + + setPortalLoading(false); + }, []); + + // Cancel subscription + const handleCancel = useCallback(async () => { + const confirmed = window.confirm( + 'Are you sure you want to cancel your subscription? You will retain access until the end of your billing period.' + ); + if (!confirmed) return; + + setCancelLoading(true); + + const result = await cloudApi.cancelSubscription(); + + if (result.success) { + setSubscription((prev) => + prev ? { ...prev, cancelAtPeriodEnd: true } : null + ); + setSuccessMessage(result.data.message); + setTimeout(() => setSuccessMessage(null), 5000); + } else { + setError(result.error); + } + + setCancelLoading(false); + }, []); + + // Resume subscription + const handleResume = useCallback(async () => { + setResumeLoading(true); + + const result = await cloudApi.resumeSubscription(); + + if (result.success) { + setSubscription((prev) => + prev ? { ...prev, cancelAtPeriodEnd: false } : null + ); + setSuccessMessage(result.data.message); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setError(result.error); + } + + setResumeLoading(false); + }, []); + + if (isLoading) { + return ( +
+ + Loading billing information... +
+ ); + } + + return ( +
+ {/* Messages */} + {error && ( +
+ {error} + +
+ )} + + {successMessage && ( +
+ {successMessage} +
+ )} + + {/* Current Plan */} +
+

+ Current Plan +

+
+
+
+

+ {currentTier} Plan +

+ {subscription ? ( +

+ {subscription.cancelAtPeriodEnd ? ( + + Cancels on {new Date(subscription.currentPeriodEnd).toLocaleDateString()} + + ) : ( + <> + Renews on {new Date(subscription.currentPeriodEnd).toLocaleDateString()} + + ({subscription.interval === 'year' ? 'Yearly' : 'Monthly'}) + + + )} +

+ ) : ( +

+ Free tier - upgrade to unlock more features +

+ )} +
+ +
+ {subscription && !subscription.cancelAtPeriodEnd && ( + + )} + {subscription?.cancelAtPeriodEnd && ( + + )} + {subscription && ( + + )} +
+
+
+
+ + {/* Billing Interval Toggle */} +
+ + Monthly + + + + Yearly + (Save 20%) + +
+ + {/* Available Plans */} +
+

+ Available Plans +

+
+ {plans + .filter((p) => p.tier !== 'free') + .map((plan) => ( +
+ {plan.recommended && ( +
+ Most Popular +
+ )} + +

{plan.name}

+

{plan.description}

+ +
+ + ${billingInterval === 'year' ? plan.price.yearly : plan.price.monthly} + + + /{billingInterval === 'year' ? 'year' : 'month'} + +
+ +
    + {plan.features.slice(0, 5).map((feature, i) => ( +
  • + + {feature} +
  • + ))} +
+ + {currentTier === plan.tier ? ( + + ) : ( + + )} +
+ ))} +
+
+ + {/* Enterprise CTA */} +
+
+
+

Enterprise

+

+ Custom solutions for large teams with dedicated support, SLA, and custom integrations. +

+
+ + Contact Sales + +
+
+ + {/* Invoices */} + {invoices.length > 0 && ( +
+

+ Billing History +

+ + {/* Desktop Table */} +
+ + + + + + + + + + + + {invoices.map((invoice) => ( + + + + + + + + ))} + +
+ Invoice + + Date + + Amount + + Status + + +
+ {invoice.number} + + {new Date(invoice.date).toLocaleDateString()} + + ${(invoice.amount / 100).toFixed(2)} + + + {invoice.status} + + + {invoice.pdfUrl && ( + + Download + + )} +
+
+ + {/* Mobile Card Layout */} +
+ {invoices.map((invoice) => ( +
+
+ {invoice.number} + + {invoice.status} + +
+
+ {new Date(invoice.date).toLocaleDateString()} + ${(invoice.amount / 100).toFixed(2)} +
+ {invoice.pdfUrl && ( + + Download PDF + + )} +
+ ))} +
+
+ )} +
+ ); +} + +// Icons +function LoadingSpinner() { + return ( + + + + ); +} + +function CheckIcon({ className = '' }: { className?: string }) { + return ( + + + + ); +} diff --git a/src/dashboard/react-components/settings/SettingsPage.tsx b/src/dashboard/react-components/settings/SettingsPage.tsx new file mode 100644 index 00000000..5524b8ef --- /dev/null +++ b/src/dashboard/react-components/settings/SettingsPage.tsx @@ -0,0 +1,588 @@ +/** + * Unified Settings Page + * + * Full-page settings view with tabbed navigation for: + * - Dashboard Settings (personal preferences) + * - Workspace Settings (repos, providers, domains) + * - Team Settings (members, invitations) + * - Billing Settings (subscription, plans) + * + * Design: Mission Control theme - deep space aesthetic with cyan/purple accents + */ + +import React, { useState, useEffect } from 'react'; +import { cloudApi, getCsrfToken } from '../../lib/cloudApi'; +import { WorkspaceSettingsPanel } from './WorkspaceSettingsPanel'; +import { TeamSettingsPanel } from './TeamSettingsPanel'; +import { BillingSettingsPanel } from './BillingSettingsPanel'; + +export interface SettingsPageProps { + /** Current user ID for team membership checks */ + currentUserId?: string; + /** Initial tab to show */ + initialTab?: 'dashboard' | 'workspace' | 'team' | 'billing'; + /** Callback when settings page is closed */ + onClose?: () => void; +} + +interface WorkspaceSummary { + id: string; + name: string; + status: string; +} + +interface DashboardSettings { + theme: 'dark' | 'light' | 'system'; + compactMode: boolean; + showTimestamps: boolean; + soundEnabled: boolean; + notificationsEnabled: boolean; + autoScrollMessages: boolean; +} + +const DEFAULT_DASHBOARD_SETTINGS: DashboardSettings = { + theme: 'dark', + compactMode: false, + showTimestamps: true, + soundEnabled: true, + notificationsEnabled: true, + autoScrollMessages: true, +}; + +export function SettingsPage({ + currentUserId, + initialTab = 'dashboard', + onClose, +}: 
SettingsPageProps) { + const [activeTab, setActiveTab] = useState<'dashboard' | 'workspace' | 'team' | 'billing'>(initialTab); + const [workspaces, setWorkspaces] = useState([]); + const [selectedWorkspaceId, setSelectedWorkspaceId] = useState(null); + const [isLoadingWorkspaces, setIsLoadingWorkspaces] = useState(true); + const [dashboardSettings, setDashboardSettings] = useState(DEFAULT_DASHBOARD_SETTINGS); + + // Load workspaces + useEffect(() => { + async function loadWorkspaces() { + setIsLoadingWorkspaces(true); + const result = await cloudApi.getWorkspaceSummary(); + if (result.success && result.data.workspaces.length > 0) { + setWorkspaces(result.data.workspaces); + setSelectedWorkspaceId(result.data.workspaces[0].id); + } + setIsLoadingWorkspaces(false); + } + loadWorkspaces(); + }, []); + + // Load dashboard settings from localStorage + useEffect(() => { + const saved = localStorage.getItem('dashboard-settings'); + if (saved) { + try { + setDashboardSettings({ ...DEFAULT_DASHBOARD_SETTINGS, ...JSON.parse(saved) }); + } catch { + // Use defaults + } + } + }, []); + + // Save dashboard settings + const updateDashboardSetting = ( + key: K, + value: DashboardSettings[K] + ) => { + const newSettings = { ...dashboardSettings, [key]: value }; + setDashboardSettings(newSettings); + localStorage.setItem('dashboard-settings', JSON.stringify(newSettings)); + + // Apply theme immediately + if (key === 'theme') { + document.documentElement.setAttribute('data-theme', value as string); + } + }; + + const tabs = [ + { id: 'dashboard', label: 'Dashboard', icon: }, + { id: 'workspace', label: 'Workspace', icon: }, + { id: 'team', label: 'Team', icon: }, + { id: 'billing', label: 'Billing', icon: }, + ] as const; + + return ( +
+ {/* Background Pattern */} +
+
+
+
+ +
+ {/* Header */} +
+
+
+ +
+
+

Settings

+

Manage your workspace and preferences

+
+
+ + +
+ + {/* Mobile Tab Navigation */} +
+ {tabs.map((tab) => ( + + ))} +
+ + {/* Mobile Workspace Selector */} + {(activeTab === 'workspace' || activeTab === 'team') && workspaces.length > 0 && ( +
+
+
ws.id === selectedWorkspaceId)?.status === 'running' + ? 'bg-success' + : workspaces.find(ws => ws.id === selectedWorkspaceId)?.status === 'stopped' + ? 'bg-amber-400' + : 'bg-text-muted' + }`} + /> + {workspaces.length === 1 ? ( + {workspaces[0].name} + ) : ( + + )} +
+
+ )} + + {/* Content */} +
+ {/* Desktop Sidebar Navigation */} + + + {/* Main Content */} +
+
+ {/* Dashboard Settings */} + {activeTab === 'dashboard' && ( +
+ + + {/* Appearance */} + }> + + + + + + updateDashboardSetting('compactMode', v)} + /> + + + + updateDashboardSetting('showTimestamps', v)} + /> + + + + {/* Notifications */} + }> + + updateDashboardSetting('soundEnabled', v)} + /> + + + + updateDashboardSetting('notificationsEnabled', v)} + /> + + + + {/* Behavior */} + }> + + updateDashboardSetting('autoScrollMessages', v)} + /> + + +
+ )} + + {/* Workspace Settings */} + {activeTab === 'workspace' && ( + <> + {isLoadingWorkspaces ? ( +
+
+
+
+ Loading workspaces... +
+ ) : selectedWorkspaceId ? ( + + ) : ( + } + title="No Workspace" + description="Create a workspace to get started with Agent Relay." + action={ + + } + /> + )} + + )} + + {/* Team Settings */} + {activeTab === 'team' && ( + <> + {selectedWorkspaceId ? ( +
+ + +
+ ) : ( + } + title="No Workspace Selected" + description="Select a workspace to manage team members." + /> + )} + + )} + + {/* Billing Settings */} + {activeTab === 'billing' && ( +
+ + +
+ )} +
+
+
+
+
+ ); +} + +// Utility Components +function PageHeader({ title, subtitle }: { title: string; subtitle: string }) { + return ( +
+

{title}

+

{subtitle}

+
+ ); +} + +function SettingsSection({ + title, + icon, + children, +}: { + title: string; + icon: React.ReactNode; + children: React.ReactNode; +}) { + return ( +
+
+ {icon} +

{title}

+
+
{children}
+
+ ); +} + +function SettingRow({ + label, + description, + children, +}: { + label: string; + description: string; + children: React.ReactNode; +}) { + return ( +
+
+

{label}

+

{description}

+
+ {children} +
+ ); +} + +function Toggle({ + checked, + onChange, +}: { + checked: boolean; + onChange: (value: boolean) => void; +}) { + return ( + + ); +} + +function EmptyState({ + icon, + title, + description, + action, +}: { + icon: React.ReactNode; + title: string; + description: string; + action?: React.ReactNode; +}) { + return ( +
+
+ {icon} +
+

{title}

+

{description}

+ {action} +
+ ); +} + +// Icons +function SettingsIcon({ className = '' }: { className?: string }) { + return ( + + + + + ); +} + +function DashboardIcon() { + return ( + + + + + + + ); +} + +function WorkspaceIcon() { + return ( + + + + ); +} + +function TeamIcon() { + return ( + + + + + + + ); +} + +function BillingIcon() { + return ( + + + + + ); +} + +function CloseIcon() { + return ( + + + + + ); +} + +function PaletteIcon() { + return ( + + + + + + + + ); +} + +function BellIcon() { + return ( + + + + + ); +} diff --git a/src/dashboard/react-components/settings/TeamSettingsPanel.tsx b/src/dashboard/react-components/settings/TeamSettingsPanel.tsx new file mode 100644 index 00000000..a75a4021 --- /dev/null +++ b/src/dashboard/react-components/settings/TeamSettingsPanel.tsx @@ -0,0 +1,460 @@ +/** + * Team Settings Panel + * + * Manage workspace team members, invitations, and roles. + */ + +import React, { useState, useEffect, useCallback } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface TeamSettingsPanelProps { + workspaceId: string; + currentUserId?: string; +} + +interface Member { + id: string; + userId: string; + role: 'owner' | 'admin' | 'member' | 'viewer'; + isPending: boolean; + user?: { + githubUsername: string; + email?: string; + avatarUrl?: string; + }; +} + +interface PendingInvite { + id: string; + workspaceId: string; + workspaceName: string; + role: string; + invitedAt: string; + invitedBy: string; +} + +const ROLE_COLORS: Record = { + owner: 'bg-accent-purple/20 text-accent-purple', + admin: 'bg-accent-cyan/20 text-accent-cyan', + member: 'bg-success/20 text-success', + viewer: 'bg-bg-hover text-text-muted', +}; + +const ROLE_DESCRIPTIONS: Record = { + owner: 'Full access, can delete workspace and transfer ownership', + admin: 'Can manage members, settings, and all workspace features', + member: 'Can use workspace, spawn agents, and send messages', + viewer: 'Read-only access to workspace activity', +}; + +export function 
TeamSettingsPanel({ + workspaceId, + currentUserId, +}: TeamSettingsPanelProps) { + const [members, setMembers] = useState([]); + const [pendingInvites, setPendingInvites] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + const [successMessage, setSuccessMessage] = useState(null); + + // Invite form + const [showInviteForm, setShowInviteForm] = useState(false); + const [inviteUsername, setInviteUsername] = useState(''); + const [inviteRole, setInviteRole] = useState<'admin' | 'member' | 'viewer'>('member'); + const [inviteLoading, setInviteLoading] = useState(false); + const [inviteError, setInviteError] = useState(null); + + // Role change + const [changingRoleFor, setChangingRoleFor] = useState(null); + + // Load members + useEffect(() => { + async function loadMembers() { + setIsLoading(true); + setError(null); + + const [membersResult, invitesResult] = await Promise.all([ + cloudApi.getWorkspaceMembers(workspaceId), + cloudApi.getPendingInvites(), + ]); + + if (membersResult.success) { + setMembers(membersResult.data.members as Member[]); + } else { + setError(membersResult.error); + } + + if (invitesResult.success) { + // Filter to invites for this workspace + setPendingInvites( + invitesResult.data.invites.filter((i) => i.workspaceId === workspaceId) + ); + } + + setIsLoading(false); + } + + loadMembers(); + }, [workspaceId]); + + // Invite member + const handleInvite = useCallback(async () => { + if (!inviteUsername.trim()) { + setInviteError('Please enter a GitHub username'); + return; + } + + setInviteLoading(true); + setInviteError(null); + + const result = await cloudApi.inviteMember(workspaceId, inviteUsername.trim(), inviteRole); + + if (result.success) { + // Refresh members + const membersResult = await cloudApi.getWorkspaceMembers(workspaceId); + if (membersResult.success) { + setMembers(membersResult.data.members as Member[]); + } + setInviteUsername(''); + setShowInviteForm(false); + 
setSuccessMessage(`Invitation sent to ${inviteUsername}`); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setInviteError(result.error); + } + + setInviteLoading(false); + }, [workspaceId, inviteUsername, inviteRole]); + + // Update member role + const handleUpdateRole = useCallback(async (memberId: string, newRole: string) => { + setChangingRoleFor(memberId); + + const result = await cloudApi.updateMemberRole(workspaceId, memberId, newRole); + + if (result.success) { + setMembers((prev) => + prev.map((m) => (m.id === memberId ? { ...m, role: newRole as Member['role'] } : m)) + ); + setSuccessMessage('Role updated successfully'); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setError(result.error); + } + + setChangingRoleFor(null); + }, [workspaceId]); + + // Remove member + const handleRemoveMember = useCallback(async (member: Member) => { + const confirmed = window.confirm( + `Are you sure you want to remove ${member.user?.githubUsername || 'this member'} from the workspace?` + ); + if (!confirmed) return; + + const result = await cloudApi.removeMember(workspaceId, member.id); + + if (result.success) { + setMembers((prev) => prev.filter((m) => m.id !== member.id)); + setSuccessMessage('Member removed successfully'); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setError(result.error); + } + }, [workspaceId]); + + // Get current user's role + const currentUserRole = members.find((m) => m.userId === currentUserId)?.role; + const canManageMembers = currentUserRole === 'owner' || currentUserRole === 'admin'; + + if (isLoading) { + return ( +
+ + Loading team members... +
+ ); + } + + return ( +
+ {/* Header */} +
+
+

+ Team Members +

+

+ {members.length} member{members.length !== 1 ? 's' : ''} +

+
+ {canManageMembers && ( + + )} +
+ + {/* Messages */} + {error && ( +
+ {error} + +
+ )} + + {successMessage && ( +
+ {successMessage} +
+ )} + + {/* Invite Form */} + {showInviteForm && ( +
+

Invite New Member

+ + {inviteError && ( +
+ {inviteError} +
+ )} + +
+
+ + setInviteUsername(e.target.value)} + placeholder="username" + className="w-full px-3 py-2 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan" + /> +
+
+ + +
+
+ +

+ {ROLE_DESCRIPTIONS[inviteRole]} +

+ +
+ + +
+
+ )} + + {/* Members List */} +
+ {members.map((member) => ( +
+
+ {member.user?.avatarUrl ? ( + {member.user.githubUsername} + ) : ( +
+ {member.user?.githubUsername?.[0]?.toUpperCase() || '?'} +
+ )} +
+
+

+ {member.user?.githubUsername || 'Unknown User'} +

+ {member.isPending && ( + + Pending + + )} + {member.userId === currentUserId && ( + (you) + )} +
+ {member.user?.email && ( +

{member.user.email}

+ )} +
+
+ +
+ {canManageMembers && member.role !== 'owner' && member.userId !== currentUserId ? ( + + ) : ( + + {member.role.charAt(0).toUpperCase() + member.role.slice(1)} + + )} + + {canManageMembers && member.role !== 'owner' && member.userId !== currentUserId && ( + + )} +
+
+ ))} +
+ + {/* Pending Invites for Current User */} + {pendingInvites.length > 0 && ( +
+

+ Your Pending Invitations +

+
+ {pendingInvites.map((invite) => ( +
+
+

+ {invite.workspaceName} +

+

+ Invited by {invite.invitedBy} as {invite.role} +

+
+
+ + +
+
+ ))} +
+
+ )} + + {/* Role Permissions Info */} +
+

+ Role Permissions +

+
+ {Object.entries(ROLE_DESCRIPTIONS).map(([role, description]) => ( +
+ + {role.charAt(0).toUpperCase() + role.slice(1)} + +

{description}

+
+ ))} +
+
+
+ ); +} + +// Icons +function LoadingSpinner() { + return ( + + + + ); +} + +function PlusIcon() { + return ( + + + + + ); +} + +function TrashIcon() { + return ( + + + + + ); +} diff --git a/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx new file mode 100644 index 00000000..fee7eaad --- /dev/null +++ b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx @@ -0,0 +1,1365 @@ +/** + * Workspace Settings Panel + * + * Manage workspace configuration including repositories, + * AI providers, custom domains, and agent policies. + * + * Design: Mission Control theme with deep space aesthetic + */ + +import React, { useState, useEffect, useCallback, useRef } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; +import { ProviderAuthFlow } from '../ProviderAuthFlow'; + +export interface WorkspaceSettingsPanelProps { + workspaceId: string; + csrfToken?: string; + onClose?: () => void; +} + +interface WorkspaceDetails { + id: string; + name: string; + status: string; + publicUrl?: string; + computeProvider: string; + config: { + providers: string[]; + repositories: string[]; + supervisorEnabled?: boolean; + maxAgents?: number; + }; + customDomain?: string; + customDomainStatus?: string; + errorMessage?: string; + repositories: Array<{ + id: string; + fullName: string; + syncStatus: string; + lastSyncedAt?: string; + }>; + createdAt: string; + updatedAt: string; +} + +interface AvailableRepo { + id: string; + fullName: string; + isPrivate: boolean; + defaultBranch: string; + syncStatus: string; + hasNangoConnection: boolean; + lastSyncedAt?: string; +} + +interface AIProvider { + id: string; + name: string; + displayName: string; + description: string; + color: string; + cliCommand: string; + apiKeyUrl?: string; + apiKeyName?: string; + supportsOAuth?: boolean; + supportsDeviceFlow?: boolean; // Provider supports device flow (easier for headless environments) + 
preferApiKey?: boolean; // Show API key input by default (simpler for mobile/containers) + isConnected?: boolean; +} + +const AI_PROVIDERS: AIProvider[] = [ + { + id: 'anthropic', + name: 'Anthropic', + displayName: 'Claude', + description: 'Claude Code - recommended for code tasks', + color: '#D97757', + cliCommand: 'claude', + apiKeyUrl: 'https://console.anthropic.com/settings/keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'openai', + name: 'OpenAI', + displayName: 'Codex', + description: 'Codex - OpenAI coding assistant', + color: '#10A37F', + cliCommand: 'codex login', + apiKeyUrl: 'https://platform.openai.com/api-keys', + apiKeyName: 'API key', + supportsOAuth: true, + supportsDeviceFlow: true, // Codex supports --device-auth for headless environments + }, + { + id: 'google', + name: 'Google', + displayName: 'Gemini', + description: 'Gemini - Google AI coding assistant', + color: '#4285F4', + cliCommand: 'gemini', + apiKeyUrl: 'https://aistudio.google.com/app/apikey', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'opencode', + name: 'OpenCode', + displayName: 'OpenCode', + description: 'OpenCode - AI coding assistant', + color: '#00D4AA', + cliCommand: 'opencode', + supportsOAuth: true, + }, + { + id: 'droid', + name: 'Factory', + displayName: 'Droid', + description: 'Droid - Factory AI coding agent', + color: '#6366F1', + cliCommand: 'droid', + supportsOAuth: true, + }, +]; + +interface OAuthSession { + providerId: string; + sessionId: string; + authUrl?: string; + status: 'starting' | 'waiting_auth' | 'success' | 'error'; + error?: string; +} + +export function WorkspaceSettingsPanel({ + workspaceId, + csrfToken, + onClose, +}: WorkspaceSettingsPanelProps) { + const [workspace, setWorkspace] = useState(null); + const [availableRepos, setAvailableRepos] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + const [activeSection, setActiveSection] = 
useState<'general' | 'providers' | 'repos' | 'domain' | 'danger'>('general'); + + // Provider connection state + const [providerStatus, setProviderStatus] = useState>({}); + const [connectingProvider, setConnectingProvider] = useState(null); + const [apiKeyInput, setApiKeyInput] = useState(''); + const [authCodeInput, setAuthCodeInput] = useState(''); + const [providerError, setProviderError] = useState(null); + const [oauthSession, setOauthSession] = useState(null); + const [showApiKeyFallback, setShowApiKeyFallback] = useState>({}); + // Track whether popup has been opened for current session (avoids stale closure issues) + const popupOpenedRef = useRef(null); + // Device flow preference for providers that support it + const [useDeviceFlow, setUseDeviceFlow] = useState>({}); + + // Custom domain form + const [customDomain, setCustomDomain] = useState(''); + const [domainLoading, setDomainLoading] = useState(false); + const [domainError, setDomainError] = useState(null); + const [domainInstructions, setDomainInstructions] = useState<{ + type: string; + name: string; + value: string; + ttl: number; + } | null>(null); + + // Load workspace details + useEffect(() => { + async function loadWorkspace() { + setIsLoading(true); + setError(null); + + const [wsResult, reposResult] = await Promise.all([ + cloudApi.getWorkspaceDetails(workspaceId), + cloudApi.getRepos(), + ]); + + if (wsResult.success) { + setWorkspace(wsResult.data); + if (wsResult.data.customDomain) { + setCustomDomain(wsResult.data.customDomain); + } + // Mark connected providers + const connected: Record = {}; + wsResult.data.config.providers.forEach((p) => { + connected[p] = true; + }); + setProviderStatus(connected); + } else { + setError(wsResult.error); + } + + if (reposResult.success) { + setAvailableRepos(reposResult.data.repositories); + } + + setIsLoading(false); + } + + loadWorkspace(); + }, [workspaceId]); + + // Start CLI-based OAuth flow for a provider + const startOAuthFlow = async 
(provider: AIProvider) => { + setProviderError(null); + setConnectingProvider(provider.id); + setOauthSession({ providerId: provider.id, sessionId: '', status: 'starting' }); + // Reset popup tracking for new session + popupOpenedRef.current = null; + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${provider.id}/start`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ + workspaceId, + useDeviceFlow: useDeviceFlow[provider.id] || false, + }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start authentication'); + } + + if (data.status === 'success' || data.alreadyAuthenticated) { + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + const session: OAuthSession = { + providerId: provider.id, + sessionId: data.sessionId, + authUrl: data.authUrl, + // If we have an authUrl, immediately show waiting_auth status so auth code input appears + status: data.authUrl ? 'waiting_auth' : (data.status || 'starting'), + }; + setOauthSession(session); + + if (data.authUrl) { + // Track that popup was opened for this session + popupOpenedRef.current = data.sessionId; + openAuthPopup(data.authUrl, provider.displayName); + pollAuthStatus(provider.id, data.sessionId); + } else if (data.status === 'starting') { + pollAuthStatus(provider.id, data.sessionId); + } + } catch (err) { + setProviderError(err instanceof Error ? 
err.message : 'Failed to start OAuth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + const openAuthPopup = (url: string, providerName: string) => { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + url, + `${providerName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + }; + + const pollAuthStatus = async (providerId: string, sessionId: string) => { + const maxAttempts = 60; + let attempts = 0; + + const poll = async () => { + if (attempts >= maxAttempts) { + setProviderError('Authentication timed out. Please try again.'); + setOauthSession(null); + setConnectingProvider(null); + popupOpenedRef.current = null; + return; + } + + try { + const res = await fetch(`/api/onboarding/cli/${providerId}/status/${sessionId}`, { + credentials: 'include', + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to check status'); + } + + if (data.status === 'success') { + await completeAuthFlow(providerId, sessionId); + return; + } else if (data.status === 'error') { + throw new Error(data.error || 'Authentication failed'); + } else if (data.status === 'waiting_auth' && data.authUrl && popupOpenedRef.current !== sessionId) { + // Use ref to prevent multiple popups (avoids stale closure issue) + popupOpenedRef.current = sessionId; + setOauthSession(prev => prev ? { ...prev, authUrl: data.authUrl, status: 'waiting_auth' } : null); + openAuthPopup(data.authUrl, AI_PROVIDERS.find(p => p.id === providerId)?.displayName || 'Provider'); + } + + attempts++; + setTimeout(poll, 5000); + } catch (err) { + setProviderError(err instanceof Error ? 
err.message : 'Auth check failed'); + setOauthSession(null); + setConnectingProvider(null); + popupOpenedRef.current = null; + } + }; + + poll(); + }; + + const completeAuthFlow = async (providerId: string, sessionId: string) => { + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${providerId}/complete/${sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to complete authentication'); + } + + setProviderStatus(prev => ({ ...prev, [providerId]: true })); + setOauthSession(null); + setConnectingProvider(null); + popupOpenedRef.current = null; + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to complete auth'); + setOauthSession(null); + setConnectingProvider(null); + popupOpenedRef.current = null; + } + }; + + const cancelOAuthFlow = async () => { + if (oauthSession?.sessionId) { + try { + await fetch(`/api/onboarding/cli/${oauthSession.providerId}/cancel/${oauthSession.sessionId}`, { + method: 'POST', + credentials: 'include', + }); + } catch { + // Ignore cancel errors + } + } + setOauthSession(null); + setConnectingProvider(null); + setAuthCodeInput(''); + popupOpenedRef.current = null; + }; + + const submitAuthCodeToSession = async () => { + if (!oauthSession?.sessionId || !authCodeInput.trim()) { + return; + } + + setProviderError(null); + + // Extract code from URL if user pasted the full callback URL + let code = authCodeInput.trim(); + if (code.includes('code=')) { + try { + const url = new URL(code); + const extractedCode = url.searchParams.get('code'); + if (extractedCode) { + code = extractedCode; + } + } catch { + // Not a valid URL, try to extract code parameter manually + const match = code.match(/code=([^&\s]+)/); + if (match) { + code = match[1]; + } + } + } + + try { + const 
headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${oauthSession.providerId}/code/${oauthSession.sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ code }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to submit auth code'); + } + + // Clear the input and continue polling - the CLI should now complete + setAuthCodeInput(''); + + // If immediate success, complete the flow + if (data.status === 'success') { + await completeAuthFlow(oauthSession.providerId, oauthSession.sessionId); + } + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to submit auth code'); + } + }; + + const submitApiKey = async (provider: AIProvider) => { + if (!apiKeyInput.trim()) { + setProviderError('Please enter an API key'); + return; + } + + setProviderError(null); + setConnectingProvider(provider.id); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/token/${provider.id}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ token: apiKeyInput.trim() }), + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to connect'); + } + + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setApiKeyInput(''); + setConnectingProvider(null); + setShowApiKeyFallback(prev => ({ ...prev, [provider.id]: false })); + } catch (err) { + setProviderError(err instanceof Error ? 
err.message : 'Failed to connect'); + setConnectingProvider(null); + } + }; + + // Restart workspace + const handleRestart = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm('Are you sure you want to restart this workspace?'); + if (!confirmed) return; + + const result = await cloudApi.restartWorkspace(workspace.id); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setError(result.error); + } + }, [workspace, workspaceId]); + + // Stop workspace + const handleStop = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm('Are you sure you want to stop this workspace?'); + if (!confirmed) return; + + const result = await cloudApi.stopWorkspace(workspace.id); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setError(result.error); + } + }, [workspace, workspaceId]); + + // Add repository to workspace + const handleAddRepo = useCallback(async (repoId: string) => { + if (!workspace) return; + + const result = await cloudApi.addReposToWorkspace(workspace.id, [repoId]); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setError(result.error); + } + }, [workspace, workspaceId]); + + // Set custom domain + const handleSetDomain = useCallback(async () => { + if (!workspace || !customDomain.trim()) return; + + setDomainLoading(true); + setDomainError(null); + setDomainInstructions(null); + + const result = await cloudApi.setCustomDomain(workspace.id, customDomain.trim()); + if (result.success) { + setDomainInstructions(result.data.instructions); + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + 
setWorkspace(wsResult.data); + } + } else { + setDomainError(result.error); + } + + setDomainLoading(false); + }, [workspace, customDomain, workspaceId]); + + // Verify custom domain + const handleVerifyDomain = useCallback(async () => { + if (!workspace) return; + + setDomainLoading(true); + setDomainError(null); + + const result = await cloudApi.verifyCustomDomain(workspace.id); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + if (result.data.status === 'active') { + setDomainInstructions(null); + } + } else { + setDomainError(result.error); + } + + setDomainLoading(false); + }, [workspace, workspaceId]); + + // Remove custom domain + const handleRemoveDomain = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm('Are you sure you want to remove the custom domain?'); + if (!confirmed) return; + + setDomainLoading(true); + const result = await cloudApi.removeCustomDomain(workspace.id); + if (result.success) { + setCustomDomain(''); + setDomainInstructions(null); + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setDomainError(result.error); + } + setDomainLoading(false); + }, [workspace, workspaceId]); + + // Delete workspace + const handleDelete = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm( + `Are you sure you want to delete "${workspace.name}"? This action cannot be undone.` + ); + if (!confirmed) return; + + const doubleConfirm = window.confirm( + 'This will permanently delete all workspace data. Are you absolutely sure?' + ); + if (!doubleConfirm) return; + + const result = await cloudApi.deleteWorkspace(workspace.id); + if (result.success) { + onClose?.(); + } else { + setError(result.error); + } + }, [workspace, onClose]); + + if (isLoading) { + return ( +
+
+
+
+
+
+
+ + LOADING WORKSPACE CONFIG... + +
+ ); + } + + if (error && !workspace) { + return ( +
+
+ + {error} +
+
+ ); + } + + if (!workspace) { + return null; + } + + const unassignedRepos = availableRepos.filter( + (r) => !workspace.repositories.some((wr) => wr.id === r.id) + ); + + const sections = [ + { id: 'general', label: 'General', icon: }, + { id: 'providers', label: 'AI Providers', icon: }, + { id: 'repos', label: 'Repositories', icon: }, + { id: 'domain', label: 'Domain', icon: }, + { id: 'danger', label: 'Danger', icon: }, + ]; + + return ( +
+ {/* Section Navigation */} +
+ {sections.map((section) => ( + + ))} +
+ + {/* Content */} +
+ {error && ( +
+ + {error} + +
+ )} + + {/* General Section */} + {activeSection === 'general' && ( +
+ + +
+ + + + +
+ +
+ +
+ {workspace.status === 'running' && ( + } + > + Stop Workspace + + )} + } + > + Restart Workspace + +
+
+
+ )} + + {/* AI Providers Section */} + {activeSection === 'providers' && ( +
+ + + {providerError && ( +
+ + {providerError} +
+ )} + +
+ {AI_PROVIDERS.map((provider) => ( +
+
+
+
+ {provider.displayName[0]} +
+
+

+ {provider.displayName} +

+

{provider.description}

+
+
+ + {providerStatus[provider.id] ? ( +
+
+ Connected +
+ ) : null} +
+ + {!providerStatus[provider.id] && ( +
+ {connectingProvider === provider.id ? ( + { + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setConnectingProvider(null); + setOauthSession(null); + }} + onCancel={() => { + setConnectingProvider(null); + setOauthSession(null); + }} + onError={(err) => { + setProviderError(err); + setConnectingProvider(null); + setOauthSession(null); + }} + /> + ) : showApiKeyFallback[provider.id] ? ( +
+
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 px-4 py-3 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all" + /> + +
+ {provider.apiKeyUrl && ( +

+ Get your API key from{' '} + + {new URL(provider.apiKeyUrl).hostname} + +

+ )} + {provider.supportsOAuth && ( + + )} +
+ ) : provider.supportsOAuth ? ( +
+ {/* Device flow toggle for providers that support it */} + {provider.supportsDeviceFlow && ( + + )} + + {provider.apiKeyUrl && ( + + )} +
+ ) : ( + /* Provider doesn't support OAuth - show API key input directly */ +
+
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 px-4 py-3 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all" + /> + +
+ {provider.apiKeyUrl && ( +

+ Get your API key from{' '} + + {new URL(provider.apiKeyUrl).hostname} + +

+ )} +

+ OAuth not available for {provider.displayName} in container environments +

+
+ )} +
+ )} + +
+

+ CLI: {provider.cliCommand} +

+
+
+ ))} +
+
+ )} + + {/* Repositories Section */} + {activeSection === 'repos' && ( +
+ + +
+ {workspace.repositories.length > 0 ? ( + workspace.repositories.map((repo) => ( +
+
+
+ +
+
+

{repo.fullName}

+

+ {repo.lastSyncedAt + ? `Synced ${new Date(repo.lastSyncedAt).toLocaleDateString()}` + : 'Not synced'} +

+
+
+ +
+ )) + ) : ( +
+ +

No repositories connected

+
+ )} +
+ + {unassignedRepos.length > 0 && ( + <> + +
+ {unassignedRepos.map((repo) => ( +
+
+
+ +
+
+

{repo.fullName}

+

+ {repo.isPrivate ? 'Private' : 'Public'} +

+
+
+ +
+ ))} +
+ + )} +
+ )} + + {/* Custom Domain Section */} + {activeSection === 'domain' && ( +
+ + +
+
+
+ +
+
+

Premium Feature

+

Requires Team or Enterprise plan

+
+
+
+ + {workspace.customDomain ? ( +
+
+
+ + Current Domain + + +
+

{workspace.customDomain}

+
+ + {workspace.customDomainStatus === 'pending' && ( + } + fullWidth + > + {domainLoading ? 'Verifying...' : 'Verify DNS Configuration'} + + )} + + } + fullWidth + > + Remove Custom Domain + +
+ ) : ( +
+
+ + setCustomDomain(e.target.value)} + placeholder="workspace.yourdomain.com" + className="w-full px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-lg text-sm text-text-primary font-mono placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all" + /> +
+ + } + fullWidth + > + {domainLoading ? 'Setting up...' : 'Set Custom Domain'} + +
+ )} + + {domainError && ( +
+ {domainError} +
+ )} + + {domainInstructions && ( +
+

+ + DNS Configuration Required +

+

+ Add the following DNS record to your domain provider: +

+
+ + + +
+
+ )} +
+ )} + + {/* Danger Zone Section */} + {activeSection === 'danger' && ( +
+
+
+
+ +
+
+

Danger Zone

+

+ These actions are destructive and cannot be undone +

+
+
+ +
+
+
+

Delete Workspace

+

+ Permanently delete this workspace and all its data +

+
+ +
+
+
+
+ )} +
+
+ ); +} + +// Utility Components +function SectionHeader({ title, subtitle }: { title: string; subtitle: string }) { + return ( +
+

{title}

+

{subtitle}

+
+ ); +} + +function InfoCard({ + label, + value, + valueColor = 'text-text-primary', + mono = false, + indicator = false, +}: { + label: string; + value: string; + valueColor?: string; + mono?: boolean; + indicator?: boolean; +}) { + return ( +
+ +
+ {indicator &&
} +

+ {value} +

+
+
+ ); +} + +function ActionButton({ + children, + onClick, + disabled, + variant, + icon, + fullWidth, +}: { + children: React.ReactNode; + onClick: () => void; + disabled?: boolean; + variant: 'primary' | 'warning' | 'danger'; + icon?: React.ReactNode; + fullWidth?: boolean; +}) { + const variants = { + primary: 'bg-accent-cyan/10 border-accent-cyan/30 text-accent-cyan hover:bg-accent-cyan/20', + warning: 'bg-amber-400/10 border-amber-400/30 text-amber-400 hover:bg-amber-400/20', + danger: 'bg-error/10 border-error/30 text-error hover:bg-error/20', + }; + + return ( + + ); +} + +function StatusBadge({ status }: { status: string }) { + const styles: Record = { + synced: 'bg-success/15 text-success border-success/30', + active: 'bg-success/15 text-success border-success/30', + syncing: 'bg-accent-cyan/15 text-accent-cyan border-accent-cyan/30', + verifying: 'bg-accent-cyan/15 text-accent-cyan border-accent-cyan/30', + pending: 'bg-amber-400/15 text-amber-400 border-amber-400/30', + error: 'bg-error/15 text-error border-error/30', + }; + + return ( + + {status} + + ); +} + +function DNSField({ label, value }: { label: string; value: string }) { + return ( +
+ +

{value}

+
+ ); +} + +// Icons +function SettingsGearIcon() { + return ( + + + + + ); +} + +function ProviderIcon() { + return ( + + + + + + ); +} + +function RepoIcon({ className = '' }: { className?: string }) { + return ( + + + + ); +} + +function GlobeIcon({ className = '' }: { className?: string }) { + return ( + + + + + + ); +} + +function AlertIcon({ className = '' }: { className?: string }) { + return ( + + + + + + ); +} + +function LockIcon() { + return ( + + + + + ); +} + +function StopIcon() { + return ( + + + + ); +} + +function RestartIcon() { + return ( + + + + + ); +} + +function CheckIcon() { + return ( + + + + ); +} + +function TrashIcon() { + return ( + + + + + ); +} + +function CloseIcon() { + return ( + + + + + ); +} + +function InfoIcon() { + return ( + + + + + + ); +} diff --git a/src/dashboard/react-components/settings/index.ts b/src/dashboard/react-components/settings/index.ts new file mode 100644 index 00000000..cc1d45be --- /dev/null +++ b/src/dashboard/react-components/settings/index.ts @@ -0,0 +1,10 @@ +/** + * Settings Components + * + * Unified settings UI for dashboard, workspace, team, and billing. 
+ */ + +export { SettingsPage, type SettingsPageProps } from './SettingsPage'; +export { WorkspaceSettingsPanel, type WorkspaceSettingsPanelProps } from './WorkspaceSettingsPanel'; +export { TeamSettingsPanel, type TeamSettingsPanelProps } from './TeamSettingsPanel'; +export { BillingSettingsPanel, type BillingSettingsPanelProps } from './BillingSettingsPanel'; diff --git a/src/dashboard/types/index.ts b/src/dashboard/types/index.ts index f7494a1c..7dfaf856 100644 --- a/src/dashboard/types/index.ts +++ b/src/dashboard/types/index.ts @@ -24,6 +24,36 @@ export interface Agent { lastMessageReceivedAt?: number; // Timestamp when agent last received a message lastOutputAt?: number; // Timestamp when agent last produced output isStuck?: boolean; // True when agent received message but hasn't responded within threshold + isHuman?: boolean; // True if this is a human user, not an AI agent + avatarUrl?: string; // Avatar URL for human users + // Profile fields for understanding agent behavior + profile?: AgentProfile; +} + +/** + * Agent profile information - helps users understand agent behavior + */ +export interface AgentProfile { + /** Display title/role (e.g., "Lead Developer", "Code Reviewer") */ + title?: string; + /** Short description of what this agent does */ + description?: string; + /** The prompt/task the agent was spawned with */ + spawnPrompt?: string; + /** Agent profile/persona prompt (e.g., lead agent instructions) */ + personaPrompt?: string; + /** Name of the persona preset used (e.g., "lead", "reviewer", "shadow-auditor") */ + personaName?: string; + /** Model being used (e.g., "claude-3-opus", "gpt-4") */ + model?: string; + /** Working directory */ + workingDirectory?: string; + /** When the agent was first seen */ + firstSeen?: string; + /** Capabilities or tools available to the agent */ + capabilities?: string[]; + /** Tags for categorization */ + tags?: string[]; } export interface AgentSummary { diff --git a/src/hooks/trajectory-hooks.ts 
b/src/hooks/trajectory-hooks.ts index 5ca15fa8..9699fa08 100644 --- a/src/hooks/trajectory-hooks.ts +++ b/src/hooks/trajectory-hooks.ts @@ -18,6 +18,8 @@ import { TrajectoryIntegration, getTrajectoryIntegration, detectPhaseFromContent, + detectToolCalls, + detectErrors, getCompactTrailInstructions, type PDEROPhase, } from '../trajectory/integration.js'; @@ -32,6 +34,10 @@ export interface TrajectoryHooksOptions { agentName: string; /** Whether to auto-detect phase transitions */ autoDetectPhase?: boolean; + /** Whether to detect and record tool calls */ + detectTools?: boolean; + /** Whether to detect and record errors */ + detectErrors?: boolean; /** Whether to inject trail instructions on session start */ injectInstructions?: boolean; /** Whether to prompt for retrospective on session end */ @@ -44,6 +50,10 @@ export interface TrajectoryHooksOptions { interface TrajectoryHooksState { trajectory: TrajectoryIntegration; lastDetectedPhase?: PDEROPhase; + /** Set of tool calls already recorded to avoid duplicates */ + seenTools: Set; + /** Set of errors already recorded to avoid duplicates */ + seenErrors: Set; options: TrajectoryHooksOptions; } @@ -63,8 +73,12 @@ interface TrajectoryHooksState { export function createTrajectoryHooks(options: TrajectoryHooksOptions): LifecycleHooks { const state: TrajectoryHooksState = { trajectory: getTrajectoryIntegration(options.projectId, options.agentName), + seenTools: new Set(), + seenErrors: new Set(), options: { autoDetectPhase: true, + detectTools: true, + detectErrors: true, injectInstructions: true, promptRetrospective: true, ...options, @@ -149,21 +163,49 @@ Or if you need to document learnings: } /** - * Output hook - auto-detects PDERO phase transitions + * Output hook - auto-detects PDERO phase transitions, tool calls, and errors */ function createOutputHook(state: TrajectoryHooksState) { return async (ctx: OutputContext): Promise => { const { trajectory, options } = state; - if (!options.autoDetectPhase) { - return; 
+ // Detect and record phase transitions + if (options.autoDetectPhase) { + const detectedPhase = detectPhaseFromContent(ctx.content); + + if (detectedPhase && detectedPhase !== state.lastDetectedPhase) { + state.lastDetectedPhase = detectedPhase; + await trajectory.transition(detectedPhase, 'Auto-detected from output'); + } } - const detectedPhase = detectPhaseFromContent(ctx.content); + // Detect and record tool calls + // Note: We deduplicate by tool+status to record each unique tool type once per session + // (e.g., "Read" started, "Read" completed). This provides a summary of tools used + // without flooding the trajectory with every individual invocation. + if (options.detectTools) { + const tools = detectToolCalls(ctx.content); + for (const tool of tools) { + const key = `${tool.tool}:${tool.status || 'started'}`; + if (!state.seenTools.has(key)) { + state.seenTools.add(key); + const statusLabel = tool.status === 'completed' ? ' (completed)' : ''; + await trajectory.event(`Tool: ${tool.tool}${statusLabel}`, 'tool_call'); + } + } + } - if (detectedPhase && detectedPhase !== state.lastDetectedPhase) { - state.lastDetectedPhase = detectedPhase; - await trajectory.transition(detectedPhase, 'Auto-detected from output'); + // Detect and record errors + if (options.detectErrors) { + const errors = detectErrors(ctx.content); + for (const error of errors) { + // Deduplicate by message content + if (!state.seenErrors.has(error.message)) { + state.seenErrors.add(error.message); + const prefix = error.type === 'warning' ? 'Warning' : 'Error'; + await trajectory.event(`${prefix}: ${error.message}`, 'error'); + } + } } }; } diff --git a/src/policy/agent-policy.ts b/src/policy/agent-policy.ts new file mode 100644 index 00000000..538aa564 --- /dev/null +++ b/src/policy/agent-policy.ts @@ -0,0 +1,866 @@ +/** + * Agent Policy Service + * + * Manages agent permissions and rules with multi-level fallback: + * 1. Repo-level policy (.claude/agents/*.md) + * 2. 
Workspace-level policy (from cloud API) + * 3. Built-in safe defaults + * + * Provides spawn authorization, tool permission checks, and audit logging. + */ + +import fs from 'node:fs'; +import path from 'node:path'; +import { findAgentConfig, type AgentConfig } from '../utils/agent-config.js'; + +import os from 'node:os'; + +/** + * PRPM-style policy file format (YAML or JSON) + * + * Policy files are loaded from (in order of precedence): + * 1. User-level: ~/.config/agent-relay/policies/*.yaml (NOT in source control) + * 2. Cloud: Workspace config from dashboard (stored in database) + * + * PRPM packages install to the user-level location to avoid polluting repos. + * Install via: prpm install @org/strict-agent-rules --global + * + * Example policy file (~/.config/agent-relay/policies/strict-rules.yaml): + * ```yaml + * name: strict-spawn-rules + * version: 1.0.0 + * description: Restrict agent spawning to leads only + * + * agents: + * - name: Lead + * canSpawn: ["*"] + * canMessage: ["*"] + * - name: Worker* + * canSpawn: [] + * canMessage: ["Lead", "Coordinator"] + * + * settings: + * requireExplicitAgents: false + * auditEnabled: true + * ``` + */ + +/** + * Agent policy definition + */ +export interface AgentPolicy { + /** Agent name pattern (supports wildcards: "Lead", "Worker*", "*") */ + name: string; + /** Allowed tools (empty = all allowed, ["none"] = no tools) */ + allowedTools?: string[]; + /** Agents this agent can spawn (empty = can spawn any) */ + canSpawn?: string[]; + /** Agents this agent can message (empty = can message any) */ + canMessage?: string[]; + /** Maximum concurrent spawns allowed */ + maxSpawns?: number; + /** Rate limit: messages per minute */ + rateLimit?: number; + /** Whether this agent can be spawned by others */ + canBeSpawned?: boolean; + /** Custom metadata */ + metadata?: Record; +} + +/** + * Workspace-level policy configuration + */ +export interface WorkspacePolicy { + /** Default policy for agents without explicit config 
*/ + defaultPolicy: AgentPolicy; + /** Named agent policies */ + agents: AgentPolicy[]; + /** Global settings */ + settings: { + /** Require explicit agent definitions (reject unknown agents) */ + requireExplicitAgents: boolean; + /** Enable audit logging */ + auditEnabled: boolean; + /** Maximum total agents */ + maxTotalAgents: number; + }; +} + +/** + * Policy decision with reasoning + */ +export interface PolicyDecision { + allowed: boolean; + reason: string; + policySource: 'repo' | 'local' | 'workspace' | 'default'; + matchedPolicy?: AgentPolicy; +} + +/** + * Audit log entry + */ +export interface AuditEntry { + timestamp: number; + action: 'spawn' | 'message' | 'tool' | 'release'; + actor: string; + target?: string; + decision: PolicyDecision; + context?: Record; +} + +/** Built-in safe defaults when no policy exists */ +const DEFAULT_POLICY: AgentPolicy = { + name: '*', + allowedTools: undefined, // All tools allowed by default + canSpawn: undefined, // Can spawn any agent + canMessage: undefined, // Can message any agent + maxSpawns: 10, + rateLimit: 60, // 60 messages per minute + canBeSpawned: true, +}; + +/** Restrictive defaults for unknown agents in strict mode */ +const STRICT_DEFAULT_POLICY: AgentPolicy = { + name: '*', + allowedTools: ['Read', 'Grep', 'Glob'], // Read-only by default + canSpawn: [], // Cannot spawn + canMessage: ['Lead', 'Coordinator'], // Can only message leads + maxSpawns: 0, + rateLimit: 10, + canBeSpawned: false, +}; + +/** + * Cloud policy fetcher interface + * Implement this to fetch workspace policies from cloud API + */ +export interface CloudPolicyFetcher { + getWorkspacePolicy(workspaceId: string): Promise; +} + +export class AgentPolicyService { + private projectRoot: string; + private workspaceId?: string; + private cloudFetcher?: CloudPolicyFetcher; + private cachedWorkspacePolicy?: WorkspacePolicy; + private cachedLocalPolicy?: WorkspacePolicy; + private policyCacheExpiry = 0; + private localPolicyCacheExpiry = 0; + 
private auditLog: AuditEntry[] = []; + private strictMode: boolean; + + /** Cache TTL in milliseconds (5 minutes) */ + private static readonly CACHE_TTL_MS = 5 * 60 * 1000; + /** Local policy cache TTL (1 minute - files can change) */ + private static readonly LOCAL_CACHE_TTL_MS = 60 * 1000; + /** Maximum audit log entries to keep in memory */ + private static readonly MAX_AUDIT_ENTRIES = 1000; + + constructor(options: { + projectRoot: string; + workspaceId?: string; + cloudFetcher?: CloudPolicyFetcher; + strictMode?: boolean; + }) { + this.projectRoot = options.projectRoot; + this.workspaceId = options.workspaceId; + this.cloudFetcher = options.cloudFetcher; + this.strictMode = options.strictMode ?? false; + } + + /** + * Get the user-level policies directory + * Uses ~/.config/agent-relay/policies/ (not in source control) + */ + private getUserPoliciesDir(): string { + const configDir = process.env.AGENT_RELAY_CONFIG_DIR ?? + path.join(os.homedir(), '.config', 'agent-relay'); + return path.join(configDir, 'policies'); + } + + /** + * Load policies from user-level directory (PRPM-installable) + * Files are YAML/JSON with agent policy definitions + * Location: ~/.config/agent-relay/policies/*.yaml + */ + private loadLocalPolicies(): WorkspacePolicy | null { + // Check cache + if (this.cachedLocalPolicy && Date.now() < this.localPolicyCacheExpiry) { + return this.cachedLocalPolicy; + } + + const policiesDir = this.getUserPoliciesDir(); + if (!fs.existsSync(policiesDir)) { + return null; + } + + try { + const files = fs.readdirSync(policiesDir).filter(f => + f.endsWith('.yaml') || f.endsWith('.yml') || f.endsWith('.json') + ); + + if (files.length === 0) { + return null; + } + + // Merge all policy files + const mergedAgents: AgentPolicy[] = []; + let mergedSettings: WorkspacePolicy['settings'] = { + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }; + let mergedDefault: AgentPolicy = { ...DEFAULT_POLICY }; + + for (const file of files) { 
+ const filePath = path.join(policiesDir, file); + const content = fs.readFileSync(filePath, 'utf-8'); + + let parsed: Record; + if (file.endsWith('.json')) { + parsed = JSON.parse(content); + } else { + // Simple YAML parsing for policy files + parsed = this.parseSimpleYaml(content); + } + + // Merge agents + if (Array.isArray(parsed.agents)) { + for (const agent of parsed.agents) { + if (agent && typeof agent === 'object' && 'name' in agent) { + mergedAgents.push(agent as AgentPolicy); + } + } + } + + // Merge settings (later files override) + if (parsed.settings && typeof parsed.settings === 'object') { + mergedSettings = { ...mergedSettings, ...parsed.settings as Record }; + } + + // Merge default policy + if (parsed.defaultPolicy && typeof parsed.defaultPolicy === 'object') { + mergedDefault = { ...mergedDefault, ...parsed.defaultPolicy as AgentPolicy }; + } + } + + const policy: WorkspacePolicy = { + defaultPolicy: mergedDefault, + agents: mergedAgents, + settings: mergedSettings, + }; + + this.cachedLocalPolicy = policy; + this.localPolicyCacheExpiry = Date.now() + AgentPolicyService.LOCAL_CACHE_TTL_MS; + + return policy; + } catch (err) { + console.error('[policy] Failed to load local policies:', err); + return null; + } + } + + /** + * Simple YAML parser for policy files + * Handles basic key: value and arrays + */ + private parseSimpleYaml(content: string): Record { + const result: Record = {}; + const lines = content.split('\n'); + let _currentKey = ''; + let currentArray: unknown[] | null = null; + let currentObject: Record | null = null; + let indent = 0; + + for (const line of lines) { + const trimmed = line.trim(); + + // Skip comments and empty lines + if (!trimmed || trimmed.startsWith('#')) continue; + + // Calculate indentation + const lineIndent = line.length - line.trimStart().length; + + // Array item + if (trimmed.startsWith('- ')) { + const value = trimmed.slice(2).trim(); + + // Object in array (e.g., "- name: Worker") + if 
(value.includes(':')) { + const [key, val] = value.split(':').map(s => s.trim()); + currentObject = { [key]: this.parseValue(val) }; + if (currentArray) { + currentArray.push(currentObject); + } + } else { + // Simple array value + if (currentArray) { + currentArray.push(this.parseValue(value)); + } + } + continue; + } + + // Key: value pair + const colonIdx = trimmed.indexOf(':'); + if (colonIdx > 0) { + const key = trimmed.slice(0, colonIdx).trim(); + const value = trimmed.slice(colonIdx + 1).trim(); + + // If we're inside an object in an array + if (currentObject && lineIndent > indent) { + currentObject[key] = this.parseValue(value); + continue; + } + + // Top-level or section key + if (value === '' || value === '|' || value === '>') { + // Start of array or nested object + _currentKey = key; + currentArray = []; + currentObject = null; + indent = lineIndent; + result[key] = currentArray; + } else { + // Simple key: value + if (lineIndent === 0) { + result[key] = this.parseValue(value); + _currentKey = ''; + currentArray = null; + currentObject = null; + } else if (currentObject) { + currentObject[key] = this.parseValue(value); + } + } + } + } + + return result; + } + + /** + * Parse a YAML value string + */ + private parseValue(value: string): unknown { + if (!value || value === '~' || value === 'null') return null; + if (value === 'true') return true; + if (value === 'false') return false; + + // Array notation [a, b, c] + if (value.startsWith('[') && value.endsWith(']')) { + const inner = value.slice(1, -1); + if (!inner.trim()) return []; + return inner.split(',').map(s => { + const trimmed = s.trim().replace(/^["']|["']$/g, ''); + return trimmed; + }); + } + + // Number + if (/^-?\d+(\.\d+)?$/.test(value)) { + return parseFloat(value); + } + + // String (remove quotes if present) + return value.replace(/^["']|["']$/g, ''); + } + + /** + * Check if an agent can spawn another agent + */ + async canSpawn( + spawnerName: string, + targetName: string, + 
targetCli: string + ): Promise { + const spawnerPolicy = await this.getAgentPolicy(spawnerName); + const targetPolicy = await this.getAgentPolicy(targetName); + + // Check if target can be spawned + if (targetPolicy.matchedPolicy?.canBeSpawned === false) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${targetName}" is not allowed to be spawned`, + policySource: targetPolicy.policySource, + matchedPolicy: targetPolicy.matchedPolicy, + }; + this.audit('spawn', spawnerName, targetName, decision, { cli: targetCli }); + return decision; + } + + // Check if spawner can spawn + const canSpawnList = spawnerPolicy.matchedPolicy?.canSpawn; + if (canSpawnList !== undefined && canSpawnList.length > 0) { + const canSpawn = this.matchesPattern(targetName, canSpawnList); + if (!canSpawn) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${spawnerName}" is not allowed to spawn "${targetName}"`, + policySource: spawnerPolicy.policySource, + matchedPolicy: spawnerPolicy.matchedPolicy, + }; + this.audit('spawn', spawnerName, targetName, decision, { cli: targetCli }); + return decision; + } + } + + // Check max spawns (would need spawn count tracking - placeholder) + const decision: PolicyDecision = { + allowed: true, + reason: 'Spawn permitted by policy', + policySource: spawnerPolicy.policySource, + matchedPolicy: spawnerPolicy.matchedPolicy, + }; + this.audit('spawn', spawnerName, targetName, decision, { cli: targetCli }); + return decision; + } + + /** + * Check if an agent can send a message to another agent + */ + async canMessage( + senderName: string, + recipientName: string + ): Promise { + const senderPolicy = await this.getAgentPolicy(senderName); + + const canMessageList = senderPolicy.matchedPolicy?.canMessage; + if (canMessageList !== undefined && canMessageList.length > 0) { + const canMessage = this.matchesPattern(recipientName, canMessageList); + if (!canMessage) { + const decision: PolicyDecision = { + allowed: 
false, + reason: `Agent "${senderName}" is not allowed to message "${recipientName}"`, + policySource: senderPolicy.policySource, + matchedPolicy: senderPolicy.matchedPolicy, + }; + this.audit('message', senderName, recipientName, decision); + return decision; + } + } + + const decision: PolicyDecision = { + allowed: true, + reason: 'Message permitted by policy', + policySource: senderPolicy.policySource, + matchedPolicy: senderPolicy.matchedPolicy, + }; + this.audit('message', senderName, recipientName, decision); + return decision; + } + + /** + * Check if an agent can use a specific tool + */ + async canUseTool(agentName: string, toolName: string): Promise { + const policy = await this.getAgentPolicy(agentName); + + const allowedTools = policy.matchedPolicy?.allowedTools; + if (allowedTools !== undefined) { + // ["none"] means no tools allowed + if (allowedTools.length === 1 && allowedTools[0] === 'none') { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${agentName}" is not allowed to use any tools`, + policySource: policy.policySource, + matchedPolicy: policy.matchedPolicy, + }; + this.audit('tool', agentName, toolName, decision); + return decision; + } + + // Check if tool is in allowed list + const allowed = this.matchesPattern(toolName, allowedTools); + if (!allowed) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${agentName}" is not allowed to use tool "${toolName}"`, + policySource: policy.policySource, + matchedPolicy: policy.matchedPolicy, + }; + this.audit('tool', agentName, toolName, decision); + return decision; + } + } + + const decision: PolicyDecision = { + allowed: true, + reason: 'Tool usage permitted by policy', + policySource: policy.policySource, + matchedPolicy: policy.matchedPolicy, + }; + this.audit('tool', agentName, toolName, decision); + return decision; + } + + /** + * Get the effective policy for an agent + * Fallback chain: repo config → user PRPM policies → cloud workspace → 
defaults + */ + async getAgentPolicy(agentName: string): Promise<{ + matchedPolicy: AgentPolicy; + policySource: 'repo' | 'local' | 'workspace' | 'default'; + }> { + // 1. Try repo-level config (.claude/agents/*.md) + const repoConfig = findAgentConfig(agentName, this.projectRoot); + if (repoConfig) { + return { + matchedPolicy: this.configToPolicy(repoConfig), + policySource: 'repo', + }; + } + + // 2. Try user-level PRPM policies (~/.config/agent-relay/policies/*.yaml) + const localPolicy = this.loadLocalPolicies(); + if (localPolicy) { + // Check for strict mode in local policy + if (localPolicy.settings?.requireExplicitAgents) { + const matchedPolicy = this.findMatchingPolicy(agentName, localPolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'local' }; + } + // Unknown agent in strict mode + return { + matchedPolicy: { ...STRICT_DEFAULT_POLICY, name: agentName }, + policySource: 'local', + }; + } + + // Find matching policy + const matchedPolicy = this.findMatchingPolicy(agentName, localPolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'local' }; + } + + // Use local default + if (localPolicy.defaultPolicy) { + return { + matchedPolicy: { ...localPolicy.defaultPolicy, name: agentName }, + policySource: 'local', + }; + } + } + + // 3. 
Try workspace-level policy from cloud + const workspacePolicy = await this.getWorkspacePolicy(); + if (workspacePolicy) { + // Check for strict mode + if (workspacePolicy.settings?.requireExplicitAgents) { + // In strict mode, unknown agents get restrictive defaults + const matchedPolicy = this.findMatchingPolicy(agentName, workspacePolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'workspace' }; + } + // Unknown agent in strict mode + return { + matchedPolicy: { ...STRICT_DEFAULT_POLICY, name: agentName }, + policySource: 'workspace', + }; + } + + // Find matching policy + const matchedPolicy = this.findMatchingPolicy(agentName, workspacePolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'workspace' }; + } + + // Use workspace default + if (workspacePolicy.defaultPolicy) { + return { + matchedPolicy: { ...workspacePolicy.defaultPolicy, name: agentName }, + policySource: 'workspace', + }; + } + } + + // 4. Fall back to built-in defaults + const defaultPolicy = this.strictMode ? STRICT_DEFAULT_POLICY : DEFAULT_POLICY; + return { + matchedPolicy: { ...defaultPolicy, name: agentName }, + policySource: 'default', + }; + } + + /** + * Get workspace policy from cloud (with caching) + */ + private async getWorkspacePolicy(): Promise { + if (!this.workspaceId || !this.cloudFetcher) { + return null; + } + + // Check cache + if (this.cachedWorkspacePolicy && Date.now() < this.policyCacheExpiry) { + return this.cachedWorkspacePolicy; + } + + try { + const policy = await this.cloudFetcher.getWorkspacePolicy(this.workspaceId); + if (policy) { + this.cachedWorkspacePolicy = policy; + this.policyCacheExpiry = Date.now() + AgentPolicyService.CACHE_TTL_MS; + } + return policy; + } catch (err) { + console.error('[policy] Failed to fetch workspace policy:', err); + // Return cached policy if available, even if expired + return this.cachedWorkspacePolicy ?? 
null; + } + } + + /** + * Find matching policy from a list (supports wildcards) + */ + private findMatchingPolicy(agentName: string, policies: AgentPolicy[]): AgentPolicy | null { + // First try exact match + const exactMatch = policies.find(p => p.name.toLowerCase() === agentName.toLowerCase()); + if (exactMatch) return exactMatch; + + // Then try pattern match + for (const policy of policies) { + if (this.matchesPattern(agentName, [policy.name])) { + return policy; + } + } + + return null; + } + + /** + * Check if a name matches any pattern in the list + * Supports: exact match, prefix* match, *suffix match, * (all) + */ + private matchesPattern(name: string, patterns: string[]): boolean { + const lowerName = name.toLowerCase(); + for (const pattern of patterns) { + const lowerPattern = pattern.toLowerCase(); + + // Wildcard all + if (lowerPattern === '*') return true; + + // Exact match + if (lowerPattern === lowerName) return true; + + // Prefix match (e.g., "Worker*" matches "WorkerA") + if (lowerPattern.endsWith('*')) { + const prefix = lowerPattern.slice(0, -1); + if (lowerName.startsWith(prefix)) return true; + } + + // Suffix match (e.g., "*Lead" matches "TeamLead") + if (lowerPattern.startsWith('*')) { + const suffix = lowerPattern.slice(1); + if (lowerName.endsWith(suffix)) return true; + } + } + return false; + } + + /** + * Convert AgentConfig to AgentPolicy + */ + private configToPolicy(config: AgentConfig): AgentPolicy { + return { + name: config.name, + allowedTools: config.allowedTools, + // Other fields come from defaults since repo config doesn't specify them + canSpawn: undefined, + canMessage: undefined, + maxSpawns: 10, + rateLimit: 60, + canBeSpawned: true, + }; + } + + /** + * Record an audit entry + */ + private audit( + action: AuditEntry['action'], + actor: string, + target: string | undefined, + decision: PolicyDecision, + context?: Record + ): void { + const entry: AuditEntry = { + timestamp: Date.now(), + action, + actor, + target, + 
decision, + context, + }; + + this.auditLog.push(entry); + + // Trim log if too large + if (this.auditLog.length > AgentPolicyService.MAX_AUDIT_ENTRIES) { + this.auditLog = this.auditLog.slice(-AgentPolicyService.MAX_AUDIT_ENTRIES / 2); + } + + // Log denied actions + if (!decision.allowed) { + console.warn(`[policy] DENIED: ${action} by ${actor}${target ? ` -> ${target}` : ''}: ${decision.reason}`); + } + } + + /** + * Get audit log entries + */ + getAuditLog(options?: { + limit?: number; + action?: AuditEntry['action']; + actor?: string; + deniedOnly?: boolean; + }): AuditEntry[] { + let entries = [...this.auditLog]; + + if (options?.action) { + entries = entries.filter(e => e.action === options.action); + } + if (options?.actor) { + entries = entries.filter(e => e.actor === options.actor); + } + if (options?.deniedOnly) { + entries = entries.filter(e => !e.decision.allowed); + } + if (options?.limit) { + entries = entries.slice(-options.limit); + } + + return entries; + } + + /** + * Clear audit log + */ + clearAuditLog(): void { + this.auditLog = []; + } + + /** + * Invalidate cached workspace policy + */ + invalidateCache(): void { + this.cachedWorkspacePolicy = undefined; + this.policyCacheExpiry = 0; + } + + /** + * Get a human-readable policy summary for an agent + * This can be injected into agent prompts to inform them of their permissions + */ + async getPolicySummary(agentName: string): Promise { + const { matchedPolicy, policySource } = await this.getAgentPolicy(agentName); + + const lines: string[] = [ + `# Agent Policy for ${agentName}`, + `Source: ${policySource}`, + '', + ]; + + // Tools + if (matchedPolicy.allowedTools) { + if (matchedPolicy.allowedTools.length === 1 && matchedPolicy.allowedTools[0] === 'none') { + lines.push('**Tools**: No tools allowed'); + } else { + lines.push(`**Allowed Tools**: ${matchedPolicy.allowedTools.join(', ')}`); + } + } else { + lines.push('**Tools**: All tools allowed'); + } + + // Spawning + if 
(matchedPolicy.canSpawn) { + if (matchedPolicy.canSpawn.length === 0) { + lines.push('**Spawning**: Cannot spawn other agents'); + } else { + lines.push(`**Can Spawn**: ${matchedPolicy.canSpawn.join(', ')}`); + } + } else { + lines.push('**Spawning**: Can spawn any agent'); + } + + // Messaging + if (matchedPolicy.canMessage) { + if (matchedPolicy.canMessage.length === 0) { + lines.push('**Messaging**: Cannot message other agents'); + } else { + lines.push(`**Can Message**: ${matchedPolicy.canMessage.join(', ')}`); + } + } else { + lines.push('**Messaging**: Can message any agent'); + } + + // Limits + if (matchedPolicy.maxSpawns !== undefined) { + lines.push(`**Max Spawns**: ${matchedPolicy.maxSpawns}`); + } + if (matchedPolicy.rateLimit !== undefined) { + lines.push(`**Rate Limit**: ${matchedPolicy.rateLimit} messages/min`); + } + + return lines.join('\n'); + } + + /** + * Get a concise policy instruction for injection into agent prompts + */ + async getPolicyInstruction(agentName: string): Promise { + const { matchedPolicy, policySource: _policySource } = await this.getAgentPolicy(agentName); + + // Only generate instructions if there are restrictions + const hasRestrictions = + matchedPolicy.allowedTools !== undefined || + matchedPolicy.canSpawn !== undefined || + matchedPolicy.canMessage !== undefined; + + if (!hasRestrictions) { + return null; // No restrictions, no need to inform agent + } + + const restrictions: string[] = []; + + if (matchedPolicy.allowedTools) { + if (matchedPolicy.allowedTools.length === 1 && matchedPolicy.allowedTools[0] === 'none') { + restrictions.push('You are not allowed to use any tools.'); + } else { + restrictions.push(`You may only use these tools: ${matchedPolicy.allowedTools.join(', ')}.`); + } + } + + if (matchedPolicy.canSpawn) { + if (matchedPolicy.canSpawn.length === 0) { + restrictions.push('You are not allowed to spawn other agents.'); + } else { + restrictions.push(`You may only spawn these agents: 
${matchedPolicy.canSpawn.join(', ')}.`); + } + } + + if (matchedPolicy.canMessage) { + if (matchedPolicy.canMessage.length === 0) { + restrictions.push('You are not allowed to message other agents.'); + } else { + restrictions.push(`You may only message these agents: ${matchedPolicy.canMessage.join(', ')}.`); + } + } + + if (restrictions.length === 0) { + return null; + } + + return `[Policy Restrictions]\n${restrictions.join('\n')}`; + } +} + +/** + * Create a policy service for a project + */ +export function createPolicyService(options: { + projectRoot: string; + workspaceId?: string; + cloudFetcher?: CloudPolicyFetcher; + strictMode?: boolean; +}): AgentPolicyService { + return new AgentPolicyService(options); +} diff --git a/src/policy/cloud-policy-fetcher.ts b/src/policy/cloud-policy-fetcher.ts new file mode 100644 index 00000000..9b8be6e7 --- /dev/null +++ b/src/policy/cloud-policy-fetcher.ts @@ -0,0 +1,78 @@ +/** + * Cloud Policy Fetcher + * + * Fetches workspace agent policies from the cloud API. + * Used by workspace containers to get their policy configuration. 
+ */ + +import type { CloudPolicyFetcher, WorkspacePolicy, AgentPolicy } from './agent-policy.js'; + +/** + * Create a cloud policy fetcher for workspace containers + */ +export function createCloudPolicyFetcher(): CloudPolicyFetcher | null { + const cloudApiUrl = process.env.CLOUD_API_URL; + const workspaceId = process.env.WORKSPACE_ID; + const workspaceToken = process.env.WORKSPACE_TOKEN; + + if (!cloudApiUrl || !workspaceId) { + return null; + } + + return { + async getWorkspacePolicy(requestedWorkspaceId: string): Promise { + // Only allow fetching policy for this workspace + if (requestedWorkspaceId !== workspaceId) { + console.warn(`[policy-fetcher] Attempted to fetch policy for different workspace: ${requestedWorkspaceId}`); + return null; + } + + try { + const url = `${cloudApiUrl}/api/policy/${workspaceId}/internal`; + const headers: Record = { + 'Content-Type': 'application/json', + }; + + // Add auth header if we have a workspace token + if (workspaceToken) { + headers['Authorization'] = `Bearer ${workspaceToken}`; + } + + const response = await fetch(url, { headers }); + + if (!response.ok) { + console.error(`[policy-fetcher] Failed to fetch policy: ${response.status} ${response.statusText}`); + return null; + } + + const data = await response.json() as { + defaultPolicy?: AgentPolicy; + agents?: AgentPolicy[]; + settings?: WorkspacePolicy['settings']; + }; + + // Convert API response to WorkspacePolicy + const policy: WorkspacePolicy = { + defaultPolicy: data.defaultPolicy ?? { + name: '*', + maxSpawns: 10, + rateLimit: 60, + canBeSpawned: true, + }, + agents: data.agents ?? [], + settings: data.settings ?? 
{ + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }, + }; + + console.log(`[policy-fetcher] Fetched policy for workspace ${workspaceId}: ${policy.agents.length} agent rules`); + return policy; + } catch (error) { + console.error('[policy-fetcher] Error fetching policy:', error); + return null; + } + }, + }; +} diff --git a/src/resiliency/crash-insights.test.ts b/src/resiliency/crash-insights.test.ts index 36487432..ae0e5650 100644 --- a/src/resiliency/crash-insights.test.ts +++ b/src/resiliency/crash-insights.test.ts @@ -4,13 +4,9 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import * as fs from 'fs'; -import * as path from 'path'; -import * as os from 'os'; import { CrashInsightsService, getCrashInsights, - type CrashRecord, - type CrashAnalysis, } from './crash-insights.js'; import type { AgentMemoryMonitor, CrashMemoryContext } from './memory-monitor.js'; diff --git a/src/resiliency/crash-insights.ts b/src/resiliency/crash-insights.ts index 6fe45e6b..b068cb1c 100644 --- a/src/resiliency/crash-insights.ts +++ b/src/resiliency/crash-insights.ts @@ -15,7 +15,6 @@ import * as os from 'os'; import { AgentMemoryMonitor, CrashMemoryContext, - MemorySnapshot, formatBytes, } from './memory-monitor.js'; diff --git a/src/resiliency/memory-monitor.test.ts b/src/resiliency/memory-monitor.test.ts index 33cd5fa2..043f6af0 100644 --- a/src/resiliency/memory-monitor.test.ts +++ b/src/resiliency/memory-monitor.test.ts @@ -8,7 +8,6 @@ import { getMemoryMonitor, formatBytes, type MemorySnapshot, - type AgentMemoryMetrics, type MemoryAlert, } from './memory-monitor.js'; @@ -115,7 +114,7 @@ describe('AgentMemoryMonitor', () => { it('should reset metrics on PID update', () => { monitor.register('test-agent', 12345); - const metrics = monitor.get('test-agent'); + const _metrics = monitor.get('test-agent'); monitor.updatePid('test-agent', 54321); diff --git a/src/resiliency/memory-monitor.ts b/src/resiliency/memory-monitor.ts 
index 5aba218e..7db8b76c 100644 --- a/src/resiliency/memory-monitor.ts +++ b/src/resiliency/memory-monitor.ts @@ -391,7 +391,7 @@ export class AgentMemoryMonitor extends EventEmitter { if (!metrics) return; const { thresholds } = this.config; - const previousRss = metrics.current.rssBytes; + const _previousRss = metrics.current.rssBytes; const previousAlertLevel = metrics.alertLevel; // Update current snapshot diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts new file mode 100644 index 00000000..670018fc --- /dev/null +++ b/src/shared/cli-auth-config.ts @@ -0,0 +1,327 @@ +/** + * Shared CLI Auth Configuration + * + * Provider-specific CLI commands and patterns for OAuth authentication. + * Used by both the cloud API and workspace daemon. + */ + +/** + * Interactive prompt handler configuration + * Defines patterns to detect prompts and responses to send + */ +export interface PromptHandler { + /** Pattern to detect in CLI output (case-insensitive) */ + pattern: RegExp; + /** Response to send (e.g., '\r' for enter, 'y\r' for yes+enter) */ + response: string; + /** Delay before sending response (ms) */ + delay?: number; + /** Description for logging/debugging */ + description: string; +} + +/** + * CLI auth configuration for each provider + */ +export interface CLIAuthConfig { + /** CLI command to run */ + command: string; + /** Arguments to pass */ + args: string[]; + /** Alternative args for device flow (if supported) */ + deviceFlowArgs?: string[]; + /** Pattern to extract auth URL from output */ + urlPattern: RegExp; + /** Path to credentials file (for reading after auth) */ + credentialPath?: string; + /** Display name for UI */ + displayName: string; + /** Interactive prompts to auto-respond to */ + prompts: PromptHandler[]; + /** Success indicators in output */ + successPatterns: RegExp[]; + /** How long to wait for URL to appear (ms) */ + waitTimeout: number; + /** Whether this provider supports device flow */ + supportsDeviceFlow?: 
boolean; +} + +/** + * CLI commands and URL patterns for each provider + * + * Each CLI tool outputs an OAuth URL when run without credentials. + * We capture stdout/stderr and extract the URL using regex patterns. + * + * IMPORTANT: These CLIs are interactive - they output the auth URL then wait + * for the user to complete OAuth in their browser. We capture the URL and + * display it in a popup for the user. + */ +export const CLI_AUTH_CONFIG: Record = { + anthropic: { + command: 'claude', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.claude/.credentials.json', + displayName: 'Claude', + waitTimeout: 30000, // Claude can take a while to show the auth URL + prompts: [ + { + // Claude Code version selection - accept default (recommended) + pattern: /which\s*version|claude\s*code\s*version|select.*version/i, + response: '\r', + delay: 100, + description: 'Version selection prompt', + }, + { + pattern: /dark\s*(mode|theme)/i, + response: '\r', // Press enter to accept default + delay: 100, + description: 'Dark mode prompt', + }, + { + // Login method selection - "Select login method:" with Claude account or Console options + pattern: /select\s*login\s*method|how\s*would\s*you\s*like\s*to\s*authenticate|choose.*auth.*method|select.*auth|subscription\s*or.*api\s*key/i, + response: '\r', // Press enter for first option (Claude account with subscription) + delay: 100, + description: 'Login method selection', + }, + { + // Login success - press enter to continue + pattern: /login\s*successful|logged\s*in.*press\s*enter|press\s*enter\s*to\s*continue/i, + response: '\r', + delay: 200, + description: 'Login success prompt', + }, + { + // Trust directory - matches various trust prompts including: + // "Quick safety check: Is this a project you created or one you trust?" + // "Yes, I trust this folder" + // "Do you trust the files in this folder?" 
+ pattern: /trust\s*(this|the)?\s*(files|directory|folder|workspace)|do\s*you\s*trust|safety\s*check|yes,?\s*i\s*trust/i, + response: '\r', // Press enter for first option (Yes, proceed) + delay: 300, // Slightly longer delay for menu to render + description: 'Trust directory prompt', + }, + { + // "Ready to code here?" permission prompt - asks for file access permission + // Shows "Yes, continue" / "No, exit" options with "Enter to confirm" + // This is different from trust directory - it's about granting file permissions + pattern: /ready\s*to\s*code\s*here|permission\s*to\s*work\s*with\s*your\s*files|yes,?\s*continue/i, + response: '\r', // Press enter to accept "Yes, continue" (already selected) + delay: 300, + description: 'Ready to code permission prompt', + }, + { + // Fallback: Any "press enter" or "enter to confirm/continue" prompt + // Keep this LAST so more specific handlers match first + pattern: /press\s*enter|enter\s*to\s*(confirm|continue|proceed)|hit\s*enter/i, + response: '\r', + delay: 300, + description: 'Generic enter prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i, /you.*(?:are|now).*logged/i], + }, + openai: { + command: 'codex', + args: ['login'], // Standard OAuth flow + deviceFlowArgs: ['login', '--device-auth'], // Device auth for headless/container environments + supportsDeviceFlow: true, + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.codex/auth.json', + displayName: 'Codex', + waitTimeout: 30000, + prompts: [ + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + google: { + command: 'gemini', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Gemini', + waitTimeout: 30000, + prompts: [ + { + pattern: /login\s*with\s*google|google\s*account|choose.*auth/i, + response: '\r', // Select first option (Login with 
Google) + delay: 200, + description: 'Auth method selection', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + opencode: { + command: 'opencode', + args: ['auth', 'login'], + // OpenCode redirects to provider OAuth pages (Anthropic, OpenAI, Google) + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.local/share/opencode/auth.json', + displayName: 'OpenCode', + waitTimeout: 30000, + prompts: [ + { + pattern: /select.*provider|choose.*provider|which.*provider/i, + response: '\r', // Select first provider (OpenCode Zen - recommended) + delay: 300, + description: 'Provider selection', + }, + { + pattern: /opencode\s*zen|recommended/i, + response: '\r', // Confirm provider selection + delay: 200, + description: 'Confirm provider', + }, + ], + // Success patterns include credential added and existing credentials list + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i, /credential\s*added/i, /\d+\s*credentials?/i], + }, + droid: { + command: 'droid', + args: ['--login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Droid', + waitTimeout: 30000, + prompts: [ + { + pattern: /sign\s*in|log\s*in|authenticate/i, + response: '\r', + delay: 200, + description: 'Login prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, +}; + +/** + * Strip ANSI escape codes from text + */ +export function stripAnsiCodes(text: string): string { + // eslint-disable-next-line no-control-regex + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +/** + * Check if text matches any success pattern + */ +export function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { + const cleanText = stripAnsiCodes(text).toLowerCase(); + return patterns.some((p) => p.test(cleanText)); +} + +/** + * Find matching prompt handler that hasn't been responded to yet + */ +export function findMatchingPrompt( + text: string, + prompts: PromptHandler[], + respondedPrompts: Set +): PromptHandler | null { 
+ const cleanText = stripAnsiCodes(text); + for (const prompt of prompts) { + if (respondedPrompts.has(prompt.description)) continue; + if (prompt.pattern.test(cleanText)) { + return prompt; + } + } + return null; +} + +/** + * Get list of supported provider IDs + */ +export function getSupportedProviderIds(): string[] { + return Object.keys(CLI_AUTH_CONFIG); +} + +/** + * Get list of supported providers with details + */ +export function getSupportedProviders(): { id: string; displayName: string; command: string }[] { + return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ + id, + displayName: config.displayName, + command: config.command, + })); +} + +/** + * Validate a provider's CLI auth configuration + * Returns null if valid, or an error message if invalid + */ +export function validateProviderConfig( + providerId: string, + config: CLIAuthConfig +): string | null { + if (!config.command || typeof config.command !== 'string') { + return `${providerId}: missing or invalid 'command'`; + } + + if (!Array.isArray(config.args)) { + return `${providerId}: 'args' must be an array`; + } + + if (!(config.urlPattern instanceof RegExp)) { + return `${providerId}: 'urlPattern' must be a RegExp`; + } + + // Check urlPattern has a capture group + const testUrl = 'https://example.com/test'; + const match = testUrl.match(config.urlPattern); + if (!match || !match[1]) { + return `${providerId}: 'urlPattern' must have a capture group - got ${config.urlPattern}`; + } + + if (!config.displayName || typeof config.displayName !== 'string') { + return `${providerId}: missing or invalid 'displayName'`; + } + + if (typeof config.waitTimeout !== 'number' || config.waitTimeout <= 0) { + return `${providerId}: 'waitTimeout' must be a positive number`; + } + + if (!Array.isArray(config.prompts)) { + return `${providerId}: 'prompts' must be an array`; + } + + for (let i = 0; i < config.prompts.length; i++) { + const prompt = config.prompts[i]; + if (!(prompt.pattern instanceof 
RegExp)) { + return `${providerId}: prompt[${i}].pattern must be a RegExp`; + } + if (typeof prompt.response !== 'string') { + return `${providerId}: prompt[${i}].response must be a string`; + } + if (!prompt.description) { + return `${providerId}: prompt[${i}].description is required`; + } + } + + if (!Array.isArray(config.successPatterns)) { + return `${providerId}: 'successPatterns' must be an array`; + } + + return null; +} + +/** + * Validate all provider configurations + * Returns array of error messages (empty if all valid) + */ +export function validateAllProviderConfigs(): string[] { + const errors: string[] = []; + for (const [id, config] of Object.entries(CLI_AUTH_CONFIG)) { + const error = validateProviderConfig(id, config); + if (error) { + errors.push(error); + } + } + return errors; +} diff --git a/src/trajectory/config.ts b/src/trajectory/config.ts new file mode 100644 index 00000000..607b016a --- /dev/null +++ b/src/trajectory/config.ts @@ -0,0 +1,195 @@ +/** + * Trajectory Configuration + * + * Handles repo-level opt-in/opt-out for trajectory storage. + * When trajectories are opt-out (not in source control), they're stored + * in the user's home directory instead of the repo. + * + * DECISIONS: + * 1. Default behavior: trajectories are OPT-OUT (stored outside repo) + * - Reasoning: Most repos won't want trajectory files in source control + * - Users must explicitly opt-in to store in repo + * + * 2. Setting location: .relay/config.json in repo root + * - Reasoning: Keeps relay config separate from .claude/ which may have other uses + * - Alternative considered: .claude/settings.json - rejected to avoid conflicts + * + * 3. 
User-level storage: ~/.config/agent-relay/trajectories// + * - Reasoning: XDG-compliant, project-isolated, survives repo deletion + */ + +import { existsSync, readFileSync, mkdirSync, statSync } from 'node:fs'; +import { join } from 'node:path'; +import { homedir } from 'node:os'; +import { createHash } from 'node:crypto'; +import { getProjectPaths } from '../utils/project-namespace.js'; + +/** + * Relay config structure + */ +export interface RelayConfig { + /** Trajectory settings */ + trajectories?: { + /** + * Whether to store trajectories in the repo (.trajectories/) + * Default: false (stored in ~/.config/agent-relay/trajectories/) + */ + storeInRepo?: boolean; + }; +} + +/** + * Cache for config to avoid repeated file reads + */ +let configCache: { path: string; config: RelayConfig; mtime: number } | null = null; + +/** + * Get the relay config file path + */ +export function getRelayConfigPath(projectRoot?: string): string { + const root = projectRoot ?? getProjectPaths().projectRoot; + return join(root, '.relay', 'config.json'); +} + +/** + * Read the relay config from the repo + */ +export function readRelayConfig(projectRoot?: string): RelayConfig { + const configPath = getRelayConfigPath(projectRoot); + + // Check cache + if (configCache && configCache.path === configPath) { + try { + const stat = statSync(configPath); + if (stat.mtimeMs === configCache.mtime) { + return configCache.config; + } + } catch { + // File may not exist or be readable + } + } + + try { + if (!existsSync(configPath)) { + return {}; + } + + const content = readFileSync(configPath, 'utf-8'); + const config = JSON.parse(content) as RelayConfig; + + // Update cache + try { + const stat = statSync(configPath); + configCache = { path: configPath, config, mtime: stat.mtimeMs }; + } catch { + // Ignore cache update failures + } + + return config; + } catch (err) { + console.warn('[trajectory-config] Failed to read config:', err); + return {}; + } +} + +/** + * Check if trajectories 
should be stored in the repo + */ +export function shouldStoreInRepo(projectRoot?: string): boolean { + const config = readRelayConfig(projectRoot); + // Default to false - trajectories are stored outside repo by default + return config.trajectories?.storeInRepo === true; +} + +/** + * Get a hash of the project path for user-level storage isolation + */ +export function getProjectHash(projectRoot?: string): string { + const root = projectRoot ?? getProjectPaths().projectRoot; + return createHash('sha256').update(root).digest('hex').slice(0, 16); +} + +/** + * Get the user-level trajectories directory + */ +export function getUserTrajectoriesDir(projectRoot?: string): string { + const projectHash = getProjectHash(projectRoot); + const configDir = process.env.XDG_CONFIG_HOME || join(homedir(), '.config'); + return join(configDir, 'agent-relay', 'trajectories', projectHash); +} + +/** + * Get the repo-level trajectories directory + */ +export function getRepoTrajectoriesDir(projectRoot?: string): string { + const root = projectRoot ?? 
getProjectPaths().projectRoot; + return join(root, '.trajectories'); +} + +/** + * Get the primary trajectories directory based on config + * This is where new trajectories will be written + */ +export function getPrimaryTrajectoriesDir(projectRoot?: string): string { + if (shouldStoreInRepo(projectRoot)) { + return getRepoTrajectoriesDir(projectRoot); + } + return getUserTrajectoriesDir(projectRoot); +} + +/** + * Get all trajectories directories (for reading) + * Returns both repo and user-level if they exist + */ +export function getAllTrajectoriesDirs(projectRoot?: string): string[] { + const dirs: string[] = []; + + const repoDir = getRepoTrajectoriesDir(projectRoot); + if (existsSync(repoDir)) { + dirs.push(repoDir); + } + + const userDir = getUserTrajectoriesDir(projectRoot); + if (existsSync(userDir)) { + dirs.push(userDir); + } + + return dirs; +} + +/** + * Ensure the primary trajectories directory exists + */ +export function ensureTrajectoriesDir(projectRoot?: string): string { + const dir = getPrimaryTrajectoriesDir(projectRoot); + mkdirSync(dir, { recursive: true }); + return dir; +} + +/** + * Get trajectory environment variables for trail CLI + * Sets TRAJECTORIES_DATA_DIR to the appropriate location + */ +export function getTrajectoryEnvVars(projectRoot?: string): Record { + const dataDir = getPrimaryTrajectoriesDir(projectRoot); + return { + TRAJECTORIES_DATA_DIR: dataDir, + }; +} + +/** + * Check if project has opted into repo-level trajectory storage + */ +export function isTrajectoryOptedIn(projectRoot?: string): boolean { + return shouldStoreInRepo(projectRoot); +} + +/** + * Get a human-readable description of where trajectories are stored + */ +export function getTrajectoriesStorageDescription(projectRoot?: string): string { + if (shouldStoreInRepo(projectRoot)) { + return `repo (.trajectories/)`; + } + return `user (~/.config/agent-relay/trajectories/)`; +} diff --git a/src/trajectory/detection.test.ts b/src/trajectory/detection.test.ts new 
file mode 100644 index 00000000..e4b06f9b --- /dev/null +++ b/src/trajectory/detection.test.ts @@ -0,0 +1,151 @@ +/** + * Tests for trajectory detection functions + */ + +import { describe, it, expect } from 'vitest'; +import { detectToolCalls, detectErrors } from './integration.js'; + +describe('detectToolCalls', () => { + it('detects tool completion markers', () => { + const output = ` +✓ Read file.ts +✔ Bash completed +`; + const tools = detectToolCalls(output); + expect(tools).toHaveLength(2); + expect(tools[0].tool).toBe('Read'); + expect(tools[0].status).toBe('completed'); + expect(tools[1].tool).toBe('Bash'); + expect(tools[1].status).toBe('completed'); + }); + + it('detects tool invocation patterns', () => { + const output = ` +Using tool Read to read the file +Calling Bash command +`; + const tools = detectToolCalls(output); + expect(tools.length).toBeGreaterThan(0); + expect(tools.some(t => t.tool === 'Read' || t.tool === 'Bash')).toBe(true); + }); + + it('deduplicates tools by position', () => { + const output = ` +✓ Read file.ts +✓ Read file.ts +`; + const tools = detectToolCalls(output); + // Should detect both as they're at different positions + expect(tools).toHaveLength(2); + }); + + it('handles empty output', () => { + const tools = detectToolCalls(''); + expect(tools).toHaveLength(0); + }); + + it('handles output with no tools', () => { + const tools = detectToolCalls('Just some regular text without any tools.'); + expect(tools).toHaveLength(0); + }); + + it('detects newer tools like Skill and TaskOutput', () => { + const output = ` +✓ Skill invoked +TaskOutput({"task_id": "123"}) +`; + const tools = detectToolCalls(output); + expect(tools.some(t => t.tool === 'Skill')).toBe(true); + expect(tools.some(t => t.tool === 'TaskOutput')).toBe(true); + }); +}); + +describe('detectErrors', () => { + it('detects JavaScript/TypeScript errors', () => { + const output = ` +TypeError: Cannot read property 'foo' of undefined + at Object. 
(test.ts:10:5) +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + expect(errors.some(e => e.message.includes('TypeError'))).toBe(true); + expect(errors[0].type).toBe('error'); + }); + + it('detects test failures', () => { + const output = ` +FAIL src/test.ts +✗ Test case failed +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + expect(errors.some(e => e.type === 'error')).toBe(true); + }); + + it('detects warnings', () => { + const output = ` +warning: Package is deprecated +WARN: Something might be wrong +`; + const errors = detectErrors(output); + expect(errors.some(e => e.type === 'warning')).toBe(true); + }); + + it('detects command failures', () => { + const output = ` +Command failed with exit code 1 +Exit code: 127 +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + }); + + it('deduplicates errors by message', () => { + const output = ` +Error: Something went wrong +Error: Something went wrong +`; + const errors = detectErrors(output); + // The detection may find two different patterns matching (generic "Error:" prefix) + // but should deduplicate if the exact same message is found multiple times + expect(errors.length).toBeGreaterThan(0); + // Count unique messages about "Something went wrong" + const wrongMessages = errors.filter(e => e.message.includes('Something went wrong')); + // At least one should be found + expect(wrongMessages.length).toBeGreaterThanOrEqual(1); + }); + + it('handles empty output', () => { + const errors = detectErrors(''); + expect(errors).toHaveLength(0); + }); + + it('handles output with no errors', () => { + const errors = detectErrors('Everything is working fine. 
Success!'); + expect(errors).toHaveLength(0); + }); + + it('truncates long error messages', () => { + const longMessage = 'Error: ' + 'x'.repeat(500); + const errors = detectErrors(longMessage); + expect(errors.length).toBeGreaterThan(0); + expect(errors[0].message.length).toBeLessThanOrEqual(200); + }); + + it('detects TypeScript compilation errors', () => { + const output = ` +error TS2339: Property 'foo' does not exist on type 'Bar'. +error[E0001]: Some rust error +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + expect(errors.some(e => e.message.includes('TS2339'))).toBe(true); + }); + + it('does not match "error handling" as an error', () => { + const output = 'Implementing error handling for the API.'; + const errors = detectErrors(output); + // Should not detect "error handling" as an error + expect(errors).toHaveLength(0); + }); +}); diff --git a/src/trajectory/integration.ts b/src/trajectory/integration.ts index fb49a3cd..7d4c1c0f 100644 --- a/src/trajectory/integration.ts +++ b/src/trajectory/integration.ts @@ -18,6 +18,11 @@ import { spawn, execSync } from 'node:child_process'; import { readFileSync, existsSync } from 'node:fs'; import { join } from 'node:path'; import { getProjectPaths } from '../utils/project-namespace.js'; +import { + getPrimaryTrajectoriesDir, + getAllTrajectoriesDirs, + getTrajectoryEnvVars, +} from './config.js'; /** * Trajectory index file structure @@ -72,19 +77,11 @@ interface TrajectoryFile { } /** - * Get the trajectories directory path + * Read a single trajectory index file from a directory */ -function getTrajectoriesDir(): string { - const { projectRoot } = getProjectPaths(); - return join(projectRoot, '.trajectories'); -} - -/** - * Read the trajectory index file directly from filesystem - */ -function readTrajectoryIndex(): TrajectoryIndex | null { +function readSingleTrajectoryIndex(trajectoriesDir: string): TrajectoryIndex | null { try { - const indexPath = 
join(getTrajectoriesDir(), 'index.json'); + const indexPath = join(trajectoriesDir, 'index.json'); if (!existsSync(indexPath)) { return null; } @@ -95,6 +92,51 @@ function readTrajectoryIndex(): TrajectoryIndex | null { } } +/** + * Read and merge trajectory indexes from all locations (repo + user-level) + * This allows reading trajectories from both places + */ +function readTrajectoryIndex(): TrajectoryIndex | null { + const dirs = getAllTrajectoriesDirs(); + + if (dirs.length === 0) { + return null; + } + + // Read and merge all indexes + let mergedIndex: TrajectoryIndex | null = null; + + for (const dir of dirs) { + const index = readSingleTrajectoryIndex(dir); + if (!index) continue; + + if (!mergedIndex) { + mergedIndex = index; + } else { + // Merge trajectories, preferring more recent entries + for (const [id, entry] of Object.entries(index.trajectories)) { + const existing = mergedIndex.trajectories[id]; + if (!existing) { + mergedIndex.trajectories[id] = entry; + } else { + // Keep the more recently updated one + const existingTime = new Date(existing.completedAt || existing.startedAt).getTime(); + const newTime = new Date(entry.completedAt || entry.startedAt).getTime(); + if (newTime > existingTime) { + mergedIndex.trajectories[id] = entry; + } + } + } + // Update lastUpdated to most recent + if (new Date(index.lastUpdated) > new Date(mergedIndex.lastUpdated)) { + mergedIndex.lastUpdated = index.lastUpdated; + } + } + } + + return mergedIndex; +} + /** * Read a specific trajectory file directly from filesystem */ @@ -149,12 +191,16 @@ export interface DecisionOptions { /** * Run a trail CLI command + * Uses config-based environment to control trajectory storage location */ async function runTrail(args: string[]): Promise<{ success: boolean; output: string; error?: string }> { return new Promise((resolve) => { + // Get trajectory env vars to set correct storage location + const trajectoryEnv = getTrajectoryEnvVars(); + const proc = spawn('trail', args, { 
cwd: getProjectPaths().projectRoot, - env: process.env, + env: { ...process.env, ...trajectoryEnv }, stdio: ['pipe', 'pipe', 'pipe'], }); @@ -588,6 +634,168 @@ export function detectPhaseFromContent(content: string): PDEROPhase | undefined return undefined; } +/** + * Detected tool call information + */ +export interface DetectedToolCall { + tool: string; + args?: string; + status?: 'started' | 'completed' | 'failed'; +} + +/** + * Detected error information + */ +export interface DetectedError { + type: 'error' | 'warning' | 'failure'; + message: string; + stack?: string; +} + +/** + * All known Claude Code tool names + */ +const TOOL_NAMES = [ + 'Read', 'Write', 'Edit', 'Bash', 'Glob', 'Grep', 'Task', 'TaskOutput', + 'WebFetch', 'WebSearch', 'NotebookEdit', 'TodoWrite', 'AskUserQuestion', + 'KillShell', 'EnterPlanMode', 'ExitPlanMode', 'Skill', 'SlashCommand', +]; + +const TOOL_NAME_PATTERN = TOOL_NAMES.join('|'); + +/** + * Tool call patterns for Claude Code and similar AI CLIs + */ +const TOOL_PATTERNS = [ + // Claude Code tool invocations (displayed in output with parenthesis/braces) + new RegExp(`(?:^|\\n)\\s*(?:${TOOL_NAME_PATTERN})\\s*[({]`, 'i'), + // Tool completion markers (checkmarks, spinners) + new RegExp(`(?:^|\\n)\\s*(?:✓|✔|⠋|⠙|⠹|⠸|⠼|⠴|⠦|⠧|⠇|⠏)\\s*(${TOOL_NAME_PATTERN})`, 'i'), + // Function call patterns (explicit mentions) + new RegExp(`(?:^|\\n)\\s*(?:Calling|Using|Invoking)\\s+(?:tool\\s+)?['"]?(${TOOL_NAME_PATTERN})['"]?`, 'i'), + // Tool result patterns + new RegExp(`(?:^|\\n)\\s*(?:Tool result|Result from)\\s*:?\\s*(${TOOL_NAME_PATTERN})`, 'i'), +]; + +/** + * Error patterns for detecting failures in output + * Note: Patterns are ordered from most specific to least specific + */ +const ERROR_PATTERNS = [ + // JavaScript/TypeScript runtime errors (most specific) + /(?:^|\n)((?:TypeError|ReferenceError|SyntaxError|RangeError|EvalError|URIError):\s*.+)/i, + // Named Error with message (e.g., "Error: Something went wrong") + 
/(?:^|\n)(Error:\s+.+)/, + // Failed assertions + /(?:^|\n)\s*(AssertionError:\s*.+)/i, + // Test failures (Vitest, Jest patterns) + /(?:^|\n)\s*(FAIL\s+\S+\.(?:ts|js|tsx|jsx))/i, + /(?:^|\n)\s*(✗|✘|×)\s+(.+)/, + // Command/process failures + /(?:^|\n)\s*(Command failed[^\n]+)/i, + /(?:^|\n)\s*((?:Exit|exit)\s+code[:\s]+[1-9]\d*)/i, + /(?:^|\n)\s*(exited with (?:code\s+)?[1-9]\d*)/i, + // Node.js/system errors + /(?:^|\n)\s*(EACCES|EPERM|ENOENT|ECONNREFUSED|ETIMEDOUT|ENOTFOUND)(?::\s*.+)?/, + // Build/compile errors (webpack, tsc, etc.) + /(?:^|\n)\s*(error TS\d+:\s*.+)/i, + /(?:^|\n)\s*(error\[\S+\]:\s*.+)/i, +]; + +/** + * Warning patterns for detecting potential issues + */ +const WARNING_PATTERNS = [ + /(?:^|\n)\s*(?:warning|WARN|⚠️?)\s*[:\[]?\s*(.+)/i, + /(?:^|\n)\s*(?:deprecated|DEPRECATED):\s*(.+)/i, +]; + +/** + * Detect tool calls from agent output + * + * @example + * ```typescript + * const tools = detectToolCalls(output); + * // Returns: [{ tool: 'Read', args: 'file.ts' }, { tool: 'Bash', status: 'completed' }] + * ``` + */ +export function detectToolCalls(content: string): DetectedToolCall[] { + const detected: DetectedToolCall[] = []; + const seenTools = new Set(); + const toolNameExtractor = new RegExp(`\\b(${TOOL_NAME_PATTERN})\\b`, 'i'); + + for (const pattern of TOOL_PATTERNS) { + const matches = content.matchAll(new RegExp(pattern.source, 'gi')); + for (const match of matches) { + // Extract tool name from the match + const fullMatch = match[0]; + const toolNameMatch = fullMatch.match(toolNameExtractor); + if (toolNameMatch) { + const tool = toolNameMatch[1]; + // Avoid duplicates by position (same tool at same position) + const key = `${tool}:${match.index}`; + if (!seenTools.has(key)) { + seenTools.add(key); + detected.push({ + tool, + status: fullMatch.includes('✓') || fullMatch.includes('✔') ? 
'completed' : 'started', + }); + } + } + } + } + + return detected; +} + +/** + * Detect errors from agent output + * + * @example + * ```typescript + * const errors = detectErrors(output); + * // Returns: [{ type: 'error', message: 'TypeError: Cannot read property...' }] + * ``` + */ +export function detectErrors(content: string): DetectedError[] { + const detected: DetectedError[] = []; + const seenMessages = new Set(); + + // Check for error patterns + for (const pattern of ERROR_PATTERNS) { + const matches = content.matchAll(new RegExp(pattern, 'gi')); + for (const match of matches) { + const message = match[1] || match[0]; + const cleanMessage = message.trim().slice(0, 200); // Limit length + if (!seenMessages.has(cleanMessage)) { + seenMessages.add(cleanMessage); + detected.push({ + type: 'error', + message: cleanMessage, + }); + } + } + } + + // Check for warning patterns + for (const pattern of WARNING_PATTERNS) { + const matches = content.matchAll(new RegExp(pattern, 'gi')); + for (const match of matches) { + const message = match[1] || match[0]; + const cleanMessage = message.trim().slice(0, 200); + if (!seenMessages.has(cleanMessage)) { + seenMessages.add(cleanMessage); + detected.push({ + type: 'warning', + message: cleanMessage, + }); + } + } + } + + return detected; +} + /** * TrajectoryIntegration class for managing trajectory state * @@ -863,11 +1071,15 @@ export function getCompactTrailInstructions(): string { /** * Get environment variables for trail CLI + * If dataDir is not provided, uses config-based storage location */ -export function getTrailEnvVars(projectId: string, agentName: string, dataDir: string): Record { +export function getTrailEnvVars(projectId: string, agentName: string, dataDir?: string): Record { + // Use config-based path if dataDir not explicitly provided + const effectiveDataDir = dataDir ?? 
getPrimaryTrajectoriesDir(); + return { TRAJECTORIES_PROJECT: projectId, - TRAJECTORIES_DATA_DIR: dataDir, + TRAJECTORIES_DATA_DIR: effectiveDataDir, TRAJECTORIES_AGENT: agentName, TRAIL_AUTO_PHASE: '1', // Enable auto phase detection }; diff --git a/src/utils/project-namespace.ts b/src/utils/project-namespace.ts index dcc9e5c4..e706af1f 100644 --- a/src/utils/project-namespace.ts +++ b/src/utils/project-namespace.ts @@ -168,3 +168,96 @@ export function listProjects(): Array<{ projectId: string; projectRoot: string; return projects; } + +/** + * Detect the actual workspace directory for cloud deployments. + * + * In cloud workspaces, repos are cloned to /workspace/{repo-name}. + * This function finds the correct working directory: + * + * Priority: + * 1. WORKSPACE_CWD env var (explicit override) + * 2. If baseDir itself is a git repo, use it + * 3. Scan baseDir for cloned repos - use the first one found (alphabetically) + * 4. Fall back to baseDir + * + * @param baseDir - The base workspace directory (e.g., /workspace) + * @returns The actual workspace path to use + */ +export function detectWorkspacePath(baseDir: string): string { + // 1. Explicit override + if (process.env.WORKSPACE_CWD) { + return process.env.WORKSPACE_CWD; + } + + // 2. Check if baseDir itself is a git repo + if (fs.existsSync(path.join(baseDir, '.git'))) { + return baseDir; + } + + // 3. 
Scan for cloned repos (directories with .git) + try { + const entries = fs.readdirSync(baseDir, { withFileTypes: true }); + const repos: string[] = []; + + for (const entry of entries) { + if (entry.isDirectory()) { + const repoPath = path.join(baseDir, entry.name); + const gitPath = path.join(repoPath, '.git'); + if (fs.existsSync(gitPath)) { + repos.push(repoPath); + } + } + } + + // Sort alphabetically for consistent behavior + repos.sort(); + + // Use the first repo found + if (repos.length > 0) { + if (repos.length > 1) { + console.log(`[workspace] Multiple repos found, using first: ${repos[0]} (others: ${repos.slice(1).join(', ')})`); + } else { + console.log(`[workspace] Detected repo: ${repos[0]}`); + } + return repos[0]; + } + } catch (err) { + // Failed to scan, fall back + console.warn(`[workspace] Failed to scan ${baseDir}:`, err); + } + + // 4. Fall back to baseDir + return baseDir; +} + +/** + * List all git repos in a workspace directory. + * Useful for allowing users to select which repo to work in. + * + * @param baseDir - The base workspace directory + * @returns Array of repo paths + */ +export function listWorkspaceRepos(baseDir: string): string[] { + const repos: string[] = []; + + try { + const entries = fs.readdirSync(baseDir, { withFileTypes: true }); + + for (const entry of entries) { + if (entry.isDirectory()) { + const repoPath = path.join(baseDir, entry.name); + const gitPath = path.join(repoPath, '.git'); + if (fs.existsSync(gitPath)) { + repos.push(repoPath); + } + } + } + + repos.sort(); + } catch { + // Failed to scan + } + + return repos; +} diff --git a/src/wrapper/parser.ts b/src/wrapper/parser.ts index b7c4b646..cf122638 100644 --- a/src/wrapper/parser.ts +++ b/src/wrapper/parser.ts @@ -126,6 +126,7 @@ const PLACEHOLDER_TARGETS = new Set([ 'someagent', 'otheragent', 'worker', // Too generic, often used in examples + // NOTE: Don't add 'agent', 'name', 'lead', 'developer', etc. - these can be valid agent names! 
]); /** @@ -140,7 +141,7 @@ function isInstructionalText(body: string): boolean { * Check if a target name is a placeholder commonly used in documentation/examples. * These should not be treated as real message targets. */ -function isPlaceholderTarget(target: string): boolean { +export function isPlaceholderTarget(target: string): boolean { return PLACEHOLDER_TARGETS.has(target.toLowerCase()); } diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index e3cd3d5d..e292883a 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -12,7 +12,7 @@ import path from 'node:path'; import { EventEmitter } from 'node:events'; import { RelayClient } from './client.js'; import type { ParsedCommand, ParsedSummary, SessionEndMarker } from './parser.js'; -import { parseSummaryWithDetails, parseSessionEndFromOutput } from './parser.js'; +import { parseSummaryWithDetails, parseSessionEndFromOutput, isPlaceholderTarget } from './parser.js'; import type { SendPayload, SendMeta, SpeakOnTrigger } from '../protocol/types.js'; import { getProjectPaths } from '../utils/project-namespace.js'; import { getTrailEnvVars } from '../trajectory/integration.js'; @@ -125,6 +125,7 @@ export class PtyWrapper extends EventEmitter { private relayPrefix: string; private cliType: CliType; private sentMessageHashes: Set = new Set(); + private receivedMessageIds: Set = new Set(); // Dedup incoming messages private processedSpawnCommands: Set = new Set(); private processedReleaseCommands: Set = new Set(); private pendingFencedSpawn: { name: string; cli: string; taskLines: string[] } | null = null; @@ -135,7 +136,7 @@ export class PtyWrapper extends EventEmitter { private injectionMetrics: InjectionMetrics = createInjectionMetrics(); private logFilePath?: string; private logStream?: fs.WriteStream; - private hasAcceptedPrompt = false; + private acceptedPrompts: Set = new Set(); // Track which prompts have been accepted private hookRegistry: HookRegistry; private 
sessionStartTime = Date.now(); private continuity?: ContinuityManager; @@ -148,6 +149,13 @@ export class PtyWrapper extends EventEmitter { private outputsSinceSummary = 0; // Count outputs since last summary private detectedTask?: string; // Auto-detected task from agent config private sessionEndData?: SessionEndMarker; // Store SESSION_END data for handoff + private instructionsInjected = false; // Track if init instructions have been injected + private continuityInjected = false; // Track if continuity context has been injected + private recentLogChunks: Map = new Map(); // Dedup log streaming (hash -> timestamp) + private static readonly LOG_DEDUP_WINDOW_MS = 500; // Window for considering logs as duplicates + private static readonly LOG_DEDUP_MAX_SIZE = 100; // Max entries in dedup map + private lastParsedLength = 0; // Track last parsed position to avoid re-parsing entire buffer + private lastContinuityParsedLength = 0; // Same for continuity commands constructor(config: PtyWrapperConfig) { super(); @@ -372,8 +380,20 @@ export class PtyWrapper extends EventEmitter { private async injectContinuityContext(): Promise { if (!this.continuity || !this.running) return; + // Guard: Only inject once per session + if (this.continuityInjected) { + console.log(`[pty:${this.config.name}] Continuity context already injected, skipping`); + return; + } + this.continuityInjected = true; + try { const context = await this.continuity.getStartupContext(this.config.name); + // Skip if no meaningful context (empty ledger or just boilerplate) + if (!context?.formatted || context.formatted.length < 50) { + console.log(`[pty:${this.config.name}] Skipping continuity injection (no meaningful context)`); + return; + } if (context?.formatted) { // Build context notification similar to TmuxWrapper const taskInfo = context.ledger?.currentTask @@ -469,9 +489,10 @@ export class PtyWrapper extends EventEmitter { // Stream to daemon for dashboard log viewing (if connected) // Filter out 
Claude's extended thinking blocks before streaming + // Also deduplicate to prevent terminal redraws from causing duplicate log entries if (this.config.streamLogs !== false && this.client.state === 'READY') { const filteredData = this.filterThinkingBlocks(data); - if (filteredData) { + if (filteredData && !this.isDuplicateLogChunk(filteredData)) { this.client.sendLog(filteredData); } } @@ -514,9 +535,15 @@ export class PtyWrapper extends EventEmitter { // Parse for continuity commands (->continuity:save, ->continuity:load, etc.) // Use rawBuffer (accumulated content) not immediate chunk, since multi-line // fenced commands like ->continuity:save <<<...>>> span multiple output events - this.parseContinuityCommands(cleanContent).catch(err => { - console.error(`[pty:${this.config.name}] Continuity command parsing error:`, err); - }); + // Optimization: Only parse new content with lookback for incomplete fenced commands + if (cleanContent.length > this.lastContinuityParsedLength) { + const lookbackStart = Math.max(0, this.lastContinuityParsedLength - 500); + const contentToParse = cleanContent.substring(lookbackStart); + this.parseContinuityCommands(contentToParse).catch(err => { + console.error(`[pty:${this.config.name}] Continuity command parsing error:`, err); + }); + this.lastContinuityParsedLength = cleanContent.length; + } // Track outputs and potentially remind about summaries this.trackOutputAndRemind(data); @@ -574,25 +601,111 @@ export class PtyWrapper extends EventEmitter { } /** - * Auto-accept Claude's first-run prompts for --dangerously-skip-permissions - * Detects the acceptance prompt and sends "2" to select "Yes, I accept" + * Check if a log chunk is a duplicate (recently streamed). + * Prevents terminal redraws from causing duplicate log entries in the dashboard. 
+ * + * Uses content normalization and time-based deduplication: + * - Strips whitespace and normalizes content for comparison + * - Considers chunks with same normalized content within LOG_DEDUP_WINDOW_MS as duplicates + * - Cleans up old entries to prevent memory growth + */ + private isDuplicateLogChunk(data: string): boolean { + // Normalize: strip excessive whitespace, limit to first 200 chars for hash + // This helps catch redraws that might have slight formatting differences + const normalized = stripAnsi(data).replace(/\s+/g, ' ').trim().substring(0, 200); + + // Very short chunks (likely control chars or partial output) - allow through + if (normalized.length < 10) { + return false; + } + + // Simple hash using string as key + const hash = normalized; + const now = Date.now(); + + // Check if this chunk was recently streamed + const lastSeen = this.recentLogChunks.get(hash); + if (lastSeen && (now - lastSeen) < PtyWrapper.LOG_DEDUP_WINDOW_MS) { + return true; // Duplicate + } + + // Record this chunk + this.recentLogChunks.set(hash, now); + + // Cleanup: remove old entries if map is getting large + if (this.recentLogChunks.size > PtyWrapper.LOG_DEDUP_MAX_SIZE) { + const cutoff = now - PtyWrapper.LOG_DEDUP_WINDOW_MS * 2; + for (const [key, timestamp] of this.recentLogChunks) { + if (timestamp < cutoff) { + this.recentLogChunks.delete(key); + } + } + } + + return false; // Not a duplicate + } + + /** + * Auto-accept Claude's first-run prompts + * Handles multiple prompts in sequence: + * 1. --dangerously-skip-permissions acceptance ("Yes, I accept") + * 2. Trust directory prompt ("Yes, I trust this folder") + * 3. "Ready to code here?" permission prompt ("Yes, continue") + * + * Uses a Set to track which prompts have been accepted, allowing + * multiple different prompts to be handled in sequence. 
*/ private handleAutoAcceptPrompts(data: string): void { - if (this.hasAcceptedPrompt) return; if (!this.ptyProcess || !this.running) return; - // Check for the permission acceptance prompt - // Pattern: "2. Yes, I accept" in the output const cleanData = stripAnsi(data); - if (cleanData.includes('Yes, I accept') && cleanData.includes('No, exit')) { + + // Check for the permission acceptance prompt (--dangerously-skip-permissions) + // Pattern: "2. Yes, I accept" in the output + if (!this.acceptedPrompts.has('permission') && + cleanData.includes('Yes, I accept') && cleanData.includes('No, exit')) { console.log(`[pty:${this.config.name}] Detected permission prompt, auto-accepting...`); - this.hasAcceptedPrompt = true; + this.acceptedPrompts.add('permission'); // Send "2" to select "Yes, I accept" and Enter to confirm setTimeout(() => { if (this.ptyProcess && this.running) { this.ptyProcess.write('2'); } }, 100); + return; + } + + // Check for the trust directory prompt + // Pattern: "1. Yes, I trust this folder" with "No, exit" + if (!this.acceptedPrompts.has('trust') && + (cleanData.includes('trust this folder') || cleanData.includes('safety check')) + && cleanData.includes('No, exit')) { + console.log(`[pty:${this.config.name}] Detected trust directory prompt, auto-accepting...`); + this.acceptedPrompts.add('trust'); + // Send Enter to accept first option (already selected) + setTimeout(() => { + if (this.ptyProcess && this.running) { + this.ptyProcess.write('\r'); + } + }, 300); + return; + } + + // Check for "Ready to code here?" permission prompt + // Pattern: "Yes, continue" with "No, exit" and "Ready to code here?" 
+ // This prompt asks for permission to work with files in the workspace + if (!this.acceptedPrompts.has('ready-to-code') && + cleanData.includes('Yes, continue') && cleanData.includes('No, exit') + && (cleanData.includes('Ready to code here') || cleanData.includes('permission to work with your files'))) { + console.log(`[pty:${this.config.name}] Detected "Ready to code here?" prompt, auto-accepting...`); + this.acceptedPrompts.add('ready-to-code'); + // Send Enter to accept first option (already selected with ❯) + setTimeout(() => { + if (this.ptyProcess && this.running) { + this.ptyProcess.write('\r'); + } + }, 300); + return; } } @@ -630,18 +743,32 @@ export class PtyWrapper extends EventEmitter { * Parse relay commands from output. * Handles both single-line and multi-line (fenced) formats. * Deduplication via sentMessageHashes. + * + * Optimization: Only parses new content since last parse to avoid O(n²) behavior. + * Uses lookback buffer for incomplete fenced messages that span output chunks. 
*/ private parseRelayCommands(): void { const cleanContent = stripAnsi(this.rawBuffer); + // Skip if no new content + if (cleanContent.length <= this.lastParsedLength) return; + + // For fenced messages, need some lookback for incomplete fences that span chunks + // 500 chars is enough to capture most relay message headers + const lookbackStart = Math.max(0, this.lastParsedLength - 500); + const contentToParse = cleanContent.substring(lookbackStart); + // First, try to find fenced multi-line messages: ->relay:Target <<<\n...\n>>> - this.parseFencedMessages(cleanContent); + this.parseFencedMessages(contentToParse); // Then parse single-line messages - this.parseSingleLineMessages(cleanContent); + this.parseSingleLineMessages(contentToParse); // Parse spawn/release commands - this.parseSpawnReleaseCommands(cleanContent); + this.parseSpawnReleaseCommands(contentToParse); + + // Update parsed position + this.lastParsedLength = cleanContent.length; } /** @@ -668,6 +795,11 @@ export class PtyWrapper extends EventEmitter { continue; } + // Skip placeholder targets (documentation examples like "AgentName", "Lead", etc.) 
+ if (isPlaceholderTarget(target)) { + continue; + } + // Find the closing >>> const endIdx = content.indexOf('>>>', startIdx); if (endIdx === -1) continue; @@ -685,6 +817,11 @@ export class PtyWrapper extends EventEmitter { to = target.substring(colonIdx + 1); } + // Skip placeholder targets after parsing cross-project syntax + if (isPlaceholderTarget(to)) { + continue; + } + this.sendRelayCommand({ to, kind: 'message', @@ -732,6 +869,9 @@ export class PtyWrapper extends EventEmitter { const [, target, body] = simpleMatch; if (!body) continue; + // Skip placeholder targets (documentation examples) + if (isPlaceholderTarget(target)) continue; + // Parse target for cross-project syntax const colonIdx = target.indexOf(':'); let to = target; @@ -741,6 +881,9 @@ export class PtyWrapper extends EventEmitter { to = target.substring(colonIdx + 1); } + // Skip placeholder targets after parsing cross-project syntax + if (isPlaceholderTarget(to)) continue; + this.sendRelayCommand({ to, kind: 'message', @@ -754,6 +897,9 @@ export class PtyWrapper extends EventEmitter { const [, target, threadProject, threadId, body] = targetMatch; if (!body) continue; + // Skip placeholder targets (documentation examples) + if (isPlaceholderTarget(target)) continue; + // Parse target for cross-project syntax const colonIdx = target.indexOf(':'); let to = target; @@ -763,6 +909,9 @@ export class PtyWrapper extends EventEmitter { to = target.substring(colonIdx + 1); } + // Skip placeholder targets after parsing cross-project syntax + if (isPlaceholderTarget(to)) continue; + this.sendRelayCommand({ to, kind: 'message', @@ -782,14 +931,17 @@ export class PtyWrapper extends EventEmitter { const msgHash = `${cmd.to}:${cmd.body}`; if (this.sentMessageHashes.has(msgHash)) { + console.log(`[pty:${this.config.name}] Skipping duplicate message to ${cmd.to}`); return; } if (this.client.state !== 'READY') { + console.log(`[pty:${this.config.name}] Cannot send to ${cmd.to} - relay not ready (state: 
${this.client.state})`); return; } const success = this.client.sendMessage(cmd.to, cmd.body, cmd.kind, cmd.data, cmd.thread); + console.log(`[pty:${this.config.name}] Sent message to ${cmd.to}: ${success ? 'success' : 'failed'}`); if (success) { this.sentMessageHashes.add(msgHash); @@ -838,6 +990,24 @@ export class PtyWrapper extends EventEmitter { const spawnAllowed = this.config.allowSpawn !== false; const canSpawn = spawnAllowed && (this.config.dashboardPort || this.config.onSpawn); const canRelease = this.config.dashboardPort || this.config.onRelease; + + // Debug: always log spawn detection for debugging + if (content.includes('->relay:spawn')) { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Spawn pattern detected in content`); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] canSpawn=${canSpawn} (allowSpawn=${spawnAllowed}, dashboardPort=${this.config.dashboardPort}, hasOnSpawn=${!!this.config.onSpawn})`); + // Log the actual lines containing spawn + const spawnLines = content.split('\n').filter(l => l.includes('->relay:spawn')); + spawnLines.forEach((line, i) => { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Line ${i}: "${line.substring(0, 100)}"`); + }); + } + + // Debug: always log release detection for debugging + if (content.includes('->relay:release')) { + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] Release pattern detected in content`); + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] canRelease=${canRelease} (dashboardPort=${this.config.dashboardPort}, hasOnRelease=${!!this.config.onRelease})`); + } + if (!canSpawn && !canRelease) return; const lines = content.split('\n'); @@ -845,7 +1015,21 @@ export class PtyWrapper extends EventEmitter { const releasePrefix = '->relay:release'; for (const line of lines) { - const trimmed = line.trim(); + let trimmed = line.trim(); + + // Strip bullet/prompt prefixes but PRESERVE the ->relay: pattern + // Look for ->relay: in the line and only strip what comes before it + 
const relayIdx = trimmed.indexOf('->relay:'); + if (relayIdx > 0) { + // There's content before ->relay: - check if it's just prefix chars + const beforeRelay = trimmed.substring(0, relayIdx); + // Only strip if the prefix is just bullets/prompts/whitespace + if (/^[\s●•◦‣⁃⏺◆◇○□■│┃┆┇┊┋╎╏✦→➜›»$%#*]+$/.test(beforeRelay)) { + const originalTrimmed = trimmed; + trimmed = trimmed.substring(relayIdx); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Stripped prefix: "${originalTrimmed.substring(0, 60)}" -> "${trimmed.substring(0, 60)}"`); + } + } // Skip escaped commands: \->relay:spawn should not trigger if (trimmed.includes('\\->relay:')) { @@ -885,9 +1069,11 @@ export class PtyWrapper extends EventEmitter { // STRICT: Must be at start of line (after whitespace) if (canSpawn && trimmed.startsWith(spawnPrefix)) { const afterSpawn = trimmed.substring(spawnPrefix.length).trim(); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Detected spawn prefix, afterSpawn: "${afterSpawn.substring(0, 60)}"`); // Check for fenced format: Name [cli] <<< (CLI optional, defaults to 'claude') const fencedMatch = afterSpawn.match(/^(\S+)(?:\s+(\S+))?\s+<<<(.*)$/); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Fenced match result: ${fencedMatch ? 
'MATCHED' : 'NO MATCH'}`); if (fencedMatch) { const [, name, cliOrUndefined, inlineContent] = fencedMatch; let cli = cliOrUndefined || 'claude'; @@ -915,7 +1101,12 @@ export class PtyWrapper extends EventEmitter { this.executeSpawn(name, cli, taskStr); } } else { - // Start multi-line fenced mode + // Start multi-line fenced mode - but only if not already processed + const spawnKey = `${name}:${cli}`; + if (this.processedSpawnCommands.has(spawnKey)) { + // Already processed this spawn, skip the fenced capture + continue; + } this.pendingFencedSpawn = { name, cli, @@ -967,14 +1158,18 @@ export class PtyWrapper extends EventEmitter { // Check for release command // STRICT: Must be at start of line (after whitespace) - if (canRelease && trimmed.startsWith(releasePrefix)) { - const afterRelease = trimmed.substring(releasePrefix.length).trim(); - const name = afterRelease.split(/\s+/)[0]; - - // STRICT: Validate agent name format - if (name && this.isValidAgentName(name) && !this.processedReleaseCommands.has(name)) { - this.processedReleaseCommands.add(name); - this.executeRelease(name); + if (trimmed.startsWith(releasePrefix)) { + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] Release prefix detected, canRelease=${canRelease}`); + if (canRelease) { + const afterRelease = trimmed.substring(releasePrefix.length).trim(); + const name = afterRelease.split(/\s+/)[0]; + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] Parsed name: ${name}, isValidName=${name ? 
this.isValidAgentName(name) : false}, alreadyProcessed=${this.processedReleaseCommands.has(name)}`); + + // STRICT: Validate agent name format + if (name && this.isValidAgentName(name) && !this.processedReleaseCommands.has(name)) { + this.processedReleaseCommands.add(name); + this.executeRelease(name); + } } } } @@ -984,6 +1179,9 @@ export class PtyWrapper extends EventEmitter { * Execute spawn via API or callback */ private async executeSpawn(name: string, cli: string, task: string): Promise { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] executeSpawn called: name=${name}, cli=${cli}, task="${task.substring(0, 50)}..."`); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] dashboardPort=${this.config.dashboardPort}, hasOnSpawn=${!!this.config.onSpawn}`); + if (this.config.dashboardPort) { // Use dashboard API for spawning (works from spawned agents) try { @@ -1018,7 +1216,7 @@ export class PtyWrapper extends EventEmitter { if (this.config.dashboardPort) { // Use dashboard API for releasing try { - const response = await fetch(`http://localhost:${this.config.dashboardPort}/api/spawned/${name}`, { + const response = await fetch(`http://localhost:${this.config.dashboardPort}/api/spawned/${encodeURIComponent(name)}`, { method: 'DELETE', }); const result = await response.json() as { success: boolean; error?: string }; @@ -1045,6 +1243,19 @@ export class PtyWrapper extends EventEmitter { * @param originalTo - The original 'to' field from sender. '*' indicates this was a broadcast message. 
*/ private handleIncomingMessage(from: string, payload: SendPayload, messageId: string, meta?: SendMeta, originalTo?: string): void { + // Deduplicate: skip if we've already received this message + if (this.receivedMessageIds.has(messageId)) { + console.log(`[pty:${this.config.name}] Skipping duplicate message: ${messageId.substring(0, 8)}`); + return; + } + this.receivedMessageIds.add(messageId); + + // Limit dedup set size to prevent memory leak + if (this.receivedMessageIds.size > 1000) { + const oldest = this.receivedMessageIds.values().next().value; + if (oldest) this.receivedMessageIds.delete(oldest); + } + this.messageQueue.push({ from, body: payload.body, messageId, thread: payload.thread, importance: meta?.importance, data: payload.data, originalTo }); this.processMessageQueue(); @@ -1172,6 +1383,9 @@ export class PtyWrapper extends EventEmitter { log: (message: string) => console.log(`[pty:${this.config.name}] ${message}`), logError: (message: string) => console.error(`[pty:${this.config.name}] ${message}`), getMetrics: () => this.injectionMetrics, + // Skip verification for PTY-based injection - CLIs don't echo input back + // so verification will always fail. Trust that pty.write() succeeds. + skipVerification: true, }; // Inject with retry and verification using shared logic @@ -1204,27 +1418,31 @@ export class PtyWrapper extends EventEmitter { } /** - * Inject usage instructions including persistence protocol + * Queue minimal agent identity notification as the first message. + * + * Full protocol instructions are in ~/.claude/CLAUDE.md (set up by entrypoint.sh). + * We only inject a brief identity message here to let the agent know its name + * and that it's connected to the relay. 
*/ private injectInstructions(): void { - if (!this.running || !this.ptyProcess) return; + if (!this.running) return; - const escapedPrefix = '\\' + this.relayPrefix; - const instructions = [ - `[Agent Relay] You are "${this.config.name}" - connected for real-time messaging.`, - `SEND: ${escapedPrefix}AgentName message`, - `PROTOCOL: (1) ACK receipt (2) Work (3) Send "DONE: summary"`, - `PERSIST: Output [[SUMMARY]]{"currentTask":"...","context":"..."}[[/SUMMARY]] after major work.`, - `END: Output [[SESSION_END]]{"summary":"..."}[[/SESSION_END]] when session complete.`, - ].join(' | '); + // Guard: Only inject once per session + if (this.instructionsInjected) { + console.log(`[pty:${this.config.name}] Init instructions already injected, skipping`); + return; + } + this.instructionsInjected = true; - // Note: Trail instructions are injected via hooks (trajectory-hooks.ts) + // Minimal notification - full protocol is in ~/.claude/CLAUDE.md + const notification = `You are agent "${this.config.name}" connected to Agent Relay. See CLAUDE.md for the messaging protocol. ACK messages, do work, send DONE when complete.`; - try { - this.ptyProcess.write(instructions + '\r'); - } catch { - // Silent fail - } + // Queue as first message from "system" - will be injected when CLI is ready + this.messageQueue.unshift({ + from: 'system', + body: notification, + messageId: `init-${Date.now()}`, + }); } /** @@ -1391,12 +1609,17 @@ export class PtyWrapper extends EventEmitter { this.outputsSinceSummary = 0; // Inject reminder as a relay-style message - const reminder = `\n[Agent Relay] It's been ${Math.round(minutesSinceSummary)} minutes. Please output a [[SUMMARY]] block to checkpoint your progress:\n[[SUMMARY]]\n{"currentTask": "...", "completedTasks": [...], "context": "..."}\n[[/SUMMARY]]\n`; + // IMPORTANT: Must be single-line - embedded newlines cause the message to span + // multiple lines in the CLI input buffer, and the final Enter only submits + // the last (empty) line. 
Regular relay messages are also single-line (see buildInjectionString). + const reminder = `[Agent Relay] It's been ${Math.round(minutesSinceSummary)} minutes. Please output a [[SUMMARY]] block to checkpoint your progress: [[SUMMARY]]{"currentTask": "...", "completedTasks": [...], "context": "..."}[[/SUMMARY]]`; - // Delay slightly to not interrupt current output - setTimeout(() => { + // Delay slightly to not interrupt current output, then write + Enter + setTimeout(async () => { if (this.ptyProcess && this.running) { - this.ptyProcess.write(reminder + '\r'); + this.ptyProcess.write(reminder); + await sleep(INJECTION_CONSTANTS.ENTER_DELAY_MS); + this.ptyProcess.write('\r'); } }, 1000); } diff --git a/src/wrapper/shared.ts b/src/wrapper/shared.ts index 8e14f8df..9d49ece7 100644 --- a/src/wrapper/shared.ts +++ b/src/wrapper/shared.ts @@ -41,7 +41,7 @@ export interface InjectionMetrics { /** * CLI types for special handling */ -export type CliType = 'claude' | 'codex' | 'gemini' | 'droid' | 'spawned' | 'other'; +export type CliType = 'claude' | 'codex' | 'gemini' | 'droid' | 'opencode' | 'spawned' | 'other'; /** * Injection timing constants @@ -167,6 +167,7 @@ export function detectCliType(command: string): CliType { if (cmdLower.includes('codex')) return 'codex'; if (cmdLower.includes('claude')) return 'claude'; if (cmdLower.includes('droid')) return 'droid'; + if (cmdLower.includes('opencode')) return 'opencode'; return 'other'; } @@ -186,7 +187,7 @@ export const CLI_QUIRKS = { * Others may interpret the escape sequences literally. 
*/ supportsBracketedPaste: (cli: CliType): boolean => { - return cli === 'claude' || cli === 'codex' || cli === 'gemini'; + return cli === 'claude' || cli === 'codex' || cli === 'gemini' || cli === 'opencode'; }, /** @@ -207,6 +208,7 @@ export const CLI_QUIRKS = { gemini: /^[>›»]\s*$/, codex: /^[>›»]\s*$/, droid: /^[>›»]\s*$/, + opencode: /^[>›»]\s*$/, spawned: /^[>›»]\s*$/, other: /^[>$%#➜›»]\s*$/, }; @@ -239,6 +241,12 @@ export interface InjectionCallbacks { logError: (message: string) => void; /** Get the injection metrics object to update */ getMetrics: () => InjectionMetrics; + /** + * Skip verification and trust that write succeeded. + * Set to true for PTY-based injection where CLIs don't echo input. + * When true, injection succeeds on first attempt without verification. + */ + skipVerification?: boolean; } /** @@ -298,6 +306,20 @@ export async function injectWithRetry( const metrics = callbacks.getMetrics(); metrics.total++; + // Skip verification mode: trust that write() succeeds without checking output + // Used for PTY-based injection where CLIs don't echo input back + if (callbacks.skipVerification) { + try { + await callbacks.performInjection(injection); + metrics.successFirstTry++; + return { success: true, attempts: 1 }; + } catch (err: any) { + callbacks.logError(`Injection error: ${err?.message || err}`); + metrics.failed++; + return { success: false, attempts: 1 }; + } + } + for (let attempt = 0; attempt < INJECTION_CONSTANTS.MAX_RETRIES; attempt++) { try { // On retry attempts, first check if message already exists (race condition fix) diff --git a/src/wrapper/tmux-wrapper.ts b/src/wrapper/tmux-wrapper.ts index 352246b3..ac12e0a7 100644 --- a/src/wrapper/tmux-wrapper.ts +++ b/src/wrapper/tmux-wrapper.ts @@ -27,6 +27,8 @@ import { TrajectoryIntegration, getTrajectoryIntegration, detectPhaseFromContent, + detectToolCalls, + detectErrors, getCompactTrailInstructions, getTrailEnvVars, type PDEROPhase, @@ -102,7 +104,7 @@ export interface 
TmuxWrapperConfig { /** Polling interval when waiting for clear input (ms) */ inputWaitPollMs?: number; /** CLI type for special handling (auto-detected from command if not set) */ - cliType?: 'claude' | 'codex' | 'gemini' | 'droid' | 'other'; + cliType?: 'claude' | 'codex' | 'gemini' | 'droid' | 'opencode' | 'other'; /** Enable tmux mouse mode for scroll passthrough (default: true) */ mouseMode?: boolean; /** Relay prefix pattern (default: '->relay:') */ @@ -173,6 +175,8 @@ export class TmuxWrapper { private tmuxPath: string; // Resolved path to tmux binary (system or bundled) private trajectory?: TrajectoryIntegration; // Trajectory tracking via trail private lastDetectedPhase?: PDEROPhase; // Track last auto-detected PDERO phase + private seenToolCalls: Set = new Set(); // Dedup tool call trajectory events + private seenErrors: Set = new Set(); // Dedup error trajectory events private continuity?: ContinuityManager; // Session continuity management private processedContinuityCommands: Set = new Set(); // Dedup continuity commands private agentId?: string; // Unique agent ID for resume functionality @@ -207,6 +211,8 @@ export class TmuxWrapper { this.cliType = 'claude'; } else if (cmdLower.includes('droid')) { this.cliType = 'droid'; + } else if (cmdLower.includes('opencode')) { + this.cliType = 'opencode'; } else { this.cliType = 'other'; } @@ -306,11 +312,13 @@ export class TmuxWrapper { } /** - * Detect PDERO phase from output content and auto-transition if needed + * Detect PDERO phase from output content and auto-transition if needed. + * Also detects tool calls and errors, recording them to the trajectory. 
*/ private detectAndTransitionPhase(content: string): void { if (!this.trajectory) return; + // Detect phase transitions const detectedPhase = detectPhaseFromContent(content); if (detectedPhase && detectedPhase !== this.lastDetectedPhase) { const currentPhase = this.trajectory.getPhase(); @@ -320,6 +328,30 @@ export class TmuxWrapper { this.logStderr(`Phase transition: ${currentPhase || 'none'} → ${detectedPhase}`); } } + + // Detect and record tool calls + // Note: We deduplicate by tool+status to record each unique tool type once per session + // (e.g., "Read" started, "Read" completed). This provides a summary of tools used + // without flooding the trajectory with every individual invocation. + const tools = detectToolCalls(content); + for (const tool of tools) { + const key = `${tool.tool}:${tool.status || 'started'}`; + if (!this.seenToolCalls.has(key)) { + this.seenToolCalls.add(key); + const statusLabel = tool.status === 'completed' ? ' (completed)' : ''; + this.trajectory.event(`Tool: ${tool.tool}${statusLabel}`, 'tool_call'); + } + } + + // Detect and record errors + const errors = detectErrors(content); + for (const error of errors) { + if (!this.seenErrors.has(error.message)) { + this.seenErrors.add(error.message); + const prefix = error.type === 'warning' ? 'Warning' : 'Error'; + this.trajectory.event(`${prefix}: ${error.message}`, 'error'); + } + } } /** @@ -1233,8 +1265,15 @@ export class TmuxWrapper { const lines = content.split('\n'); + // Pattern to strip common line prefixes (bullets, prompts, etc.) 
+ // Must include ● (U+25CF BLACK CIRCLE) used by Claude's TUI + const linePrefixPattern = /^(?:[>$%#→➜›»●•◦‣⁃\-*⏺◆◇○□■│┃┆┇┊┋╎╏✦]\s*)+/; + for (const line of lines) { - const trimmed = line.trim(); + let trimmed = line.trim(); + + // Strip common line prefixes (bullets, prompts) before checking for commands + trimmed = trimmed.replace(linePrefixPattern, ''); // If we're in fenced spawn mode, accumulate lines until we see >>> if (this.pendingFencedSpawn) { @@ -1271,7 +1310,8 @@ export class TmuxWrapper { } // Check for fenced spawn start: ->relay:spawn Name [cli] <<< (CLI optional, defaults to 'claude') - const fencedSpawnMatch = trimmed.match(/^(?:[•\-*]\s*)?->relay:spawn\s+(\S+)(?:\s+(\S+))?\s+<<<(.*)$/); + // Prefixes are stripped above, so we just look for the command at start of line + const fencedSpawnMatch = trimmed.match(/^->relay:spawn\s+(\S+)(?:\s+(\S+))?\s+<<<(.*)$/); if (fencedSpawnMatch && canSpawn) { const [, name, cliOrUndefined, inlineContent] = fencedSpawnMatch; const cli = cliOrUndefined || 'claude'; @@ -1312,7 +1352,8 @@ export class TmuxWrapper { // Match single-line spawn: ->relay:spawn WorkerName [cli] ["task"] // CLI is optional - defaults to 'claude'. Task is also optional. 
- const spawnMatch = trimmed.match(/^(?:[•\-*]\s*)?->relay:spawn\s+(\S+)(?:\s+(\S+))?(?:\s+["'](.+?)["'])?\s*$/); + // Prefixes are stripped above, so we just look for the command at start of line + const spawnMatch = trimmed.match(/^->relay:spawn\s+(\S+)(?:\s+(\S+))?(?:\s+["'](.+?)["'])?\s*$/); if (spawnMatch && canSpawn) { const [, name, cliOrUndefined, task] = spawnMatch; const cli = cliOrUndefined || 'claude'; @@ -1343,7 +1384,8 @@ export class TmuxWrapper { } // Match ->relay:release WorkerName - const releaseMatch = trimmed.match(/^(?:[•\-*]\s*)?->relay:release\s+(\S+)\s*$/); + // Prefixes are stripped above, so we just look for the command at start of line + const releaseMatch = trimmed.match(/^->relay:release\s+(\S+)\s*$/); if (releaseMatch && canRelease) { const [, name] = releaseMatch; @@ -1586,7 +1628,7 @@ export class TmuxWrapper { // Set tmux buffer then paste // Skip bracketed paste (-p) for CLIs that don't handle it properly (droid, other) await execAsync(`"${this.tmuxPath}" set-buffer -- "${escaped}"`); - const useBracketedPaste = this.cliType === 'claude' || this.cliType === 'codex' || this.cliType === 'gemini'; + const useBracketedPaste = this.cliType === 'claude' || this.cliType === 'codex' || this.cliType === 'gemini' || this.cliType === 'opencode'; if (useBracketedPaste) { await execAsync(`"${this.tmuxPath}" paste-buffer -t ${this.sessionName} -p`); } else { diff --git a/test_parser.js b/test_parser.js deleted file mode 100644 index b67a3f01..00000000 --- a/test_parser.js +++ /dev/null @@ -1,31 +0,0 @@ -const { OutputParser } = require('./dist/wrapper/parser.js'); - -const parser = new OutputParser({ prefix: '->relay:' }); - -// Test case 1: Simple multi-line with blank lines -const test1 = `->relay:Dashboard Line 1 - -Line 3 -Line 4`; - -console.log('Test 1: Message with blank line'); -const result1 = parser.parse(test1 + '\n'); -if (result1.commands.length > 0) { - console.log('Body:', JSON.stringify(result1.commands[0].body)); -} else { - 
console.log('No command parsed'); -} - -// Test case 2: Without blank lines -const test2 = `->relay:Dashboard Line 1 -Line 2 -Line 3`; - -console.log('\nTest 2: Message without blank lines'); -parser.reset(); -const result2 = parser.parse(test2 + '\n'); -if (result2.commands.length > 0) { - console.log('Body:', JSON.stringify(result2.commands[0].body)); -} else { - console.log('No command parsed'); -} diff --git a/test_parser.mjs b/test_parser.mjs deleted file mode 100644 index 93af1f13..00000000 --- a/test_parser.mjs +++ /dev/null @@ -1,49 +0,0 @@ -import { OutputParser } from './dist/wrapper/parser.js'; - -const parser = new OutputParser({ prefix: '->relay:' }); - -// Test case 1: Simple multi-line with blank lines -const test1 = `->relay:Dashboard Line 1 - -Line 3 -Line 4`; - -console.log('Test 1: Message with blank line'); -const result1 = parser.parse(test1 + '\n'); -if (result1.commands.length > 0) { - console.log('Body:', JSON.stringify(result1.commands[0].body)); - console.log('Lines:', result1.commands[0].body.split('\n').length); -} else { - console.log('No command parsed'); -} - -// Test case 2: Without blank lines -const test2 = `->relay:Dashboard Line 1 -Line 2 -Line 3`; - -console.log('\nTest 2: Message without blank lines'); -parser.reset(); -const result2 = parser.parse(test2 + '\n'); -if (result2.commands.length > 0) { - console.log('Body:', JSON.stringify(result2.commands[0].body)); - console.log('Lines:', result2.commands[0].body.split('\n').length); -} else { - console.log('No command parsed'); -} - -// Test case 3: Multi-line with blank lines in middle -const test3 = `->relay:Dashboard TASK DETAILS: Something - -More details here -Final line`; - -console.log('\nTest 3: Real-world task message'); -parser.reset(); -const result3 = parser.parse(test3 + '\n'); -if (result3.commands.length > 0) { - console.log('Body:', JSON.stringify(result3.commands[0].body)); - console.log('Lines:', result3.commands[0].body.split('\n').length); -} else { - 
console.log('No command parsed'); -}