From 3e89fefe8d55117decaa38adfd1765b90bddbf90 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 01:26:11 +0100 Subject: [PATCH 001/103] e2e fixes --- .beads/issues.jsonl | 1 + .claude/rules/migrations.md | 74 ++ .github/workflows/docker.yml | 3 + .gitignore | 3 + .trajectories/active/traj_7ludwvz45veh.json | 58 ++ .../completed/2026-01/traj_1k5if5snst2e.json | 65 ++ .../completed/2026-01/traj_1k5if5snst2e.md | 37 + .../completed/2026-01/traj_ajs7zqfux4wc.json | 49 ++ .../completed/2026-01/traj_ajs7zqfux4wc.md | 23 + .../completed/2026-01/traj_cxofprm2m2en.json | 49 ++ .../completed/2026-01/traj_cxofprm2m2en.md | 31 + .trajectories/index.json | 29 +- deploy/init-db.sql | 6 +- deploy/workspace/Dockerfile | 55 +- docker-compose.dev.yml | 16 +- package.json | 7 +- scripts/cloud-setup.sh | 96 +++ src/cloud/api/github-app.ts | 83 +- src/cloud/api/nango-auth.ts | 146 +++- src/cloud/api/providers.ts | 194 ++++- src/cloud/api/test-helpers.ts | 487 +++++++++++- src/cloud/api/workspaces.ts | 161 +++- src/cloud/config.ts | 4 +- .../db/migrations/0003_nango_user_columns.sql | 15 + .../0004_repositories_nango_columns.sql | 10 + .../migrations/0005_github_installations.sql | 35 + src/cloud/db/migrations/meta/_journal.json | 21 + src/cloud/provisioner/index.ts | 26 +- src/cloud/server.ts | 28 +- src/cloud/services/nango.ts | 184 ++++- src/dashboard/app/app/page.tsx | 721 +++++++++++++++++- src/dashboard/app/connect-repos/page.tsx | 335 ++++---- src/dashboard/app/login/page.tsx | 355 ++++----- src/dashboard/app/providers/page.tsx | 220 ++++++ src/dashboard/app/signup/page.tsx | 343 +++++++++ src/dashboard/landing/styles.css | 30 + src/dashboard/lib/api.ts | 76 +- src/dashboard/lib/cloudApi.ts | 17 + src/dashboard/next.config.js | 2 +- src/dashboard/package-lock.json | 2 +- .../react-components/SettingsPanel.tsx | 19 + src/hooks/trajectory-hooks.ts | 56 +- src/trajectory/detection.test.ts | 151 ++++ src/trajectory/integration.ts | 162 ++++ src/wrapper/shared.ts | 6 +- src/wrapper/tmux-wrapper.ts | 38 +- 46 files changed, 3970 insertions(+), 559 deletions(-) create mode 100644 .claude/rules/migrations.md create mode 100644 .trajectories/active/traj_7ludwvz45veh.json create mode 100644 .trajectories/completed/2026-01/traj_1k5if5snst2e.json create mode 100644 .trajectories/completed/2026-01/traj_1k5if5snst2e.md create mode 100644 .trajectories/completed/2026-01/traj_ajs7zqfux4wc.json create mode 100644 .trajectories/completed/2026-01/traj_ajs7zqfux4wc.md create mode 100644 .trajectories/completed/2026-01/traj_cxofprm2m2en.json create mode 100644 .trajectories/completed/2026-01/traj_cxofprm2m2en.md create mode 100755 scripts/cloud-setup.sh create mode 100644 src/cloud/db/migrations/0003_nango_user_columns.sql create mode 100644 src/cloud/db/migrations/0004_repositories_nango_columns.sql create mode 100644 src/cloud/db/migrations/0005_github_installations.sql create mode 100644 src/dashboard/app/providers/page.tsx create mode 100644 src/dashboard/app/signup/page.tsx create mode 100644 src/trajectory/detection.test.ts diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index 1fb8e6d5..eaf28bb8 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -115,6 +115,7 @@ {"id":"agent-relay-451","title":"Fix empty continuity handoff files - parse SESSION_END content","status":"closed","priority":2,"issue_type":"bug","created_at":"2026-01-03T14:27:49.747598+01:00","updated_at":"2026-01-03T14:33:27.122823+01:00","closed_at":"2026-01-03T14:33:21.048043+01:00"} {"id":"agent-relay-452","title":"Trajectories should populate agents array with agent who started it","description":"When trail start is called, the trajectory's agents array is empty. It should automatically associate the agent who started the trajectory.","status":"completed","priority":2,"issue_type":"bug","created_at":"2026-01-03T14:28:39.57+01:00","updated_at":"2026-01-03T15:56:25.663159+01:00"} {"id":"agent-relay-453","title":"BUG: Spawn command fails silently when CLI not specified","description":"Users can send `-\u003erelay:spawn WorkerName` without a CLI type, but the parser silently ignores it because it requires both name AND cli. \n\nParse code at pty-wrapper.ts:931 checks `parts.length \u003e= 2` which fails for commands like:\n- `-\u003erelay:spawn Investigator`\n\nShould either:\n1. Make CLI optional with sensible default (claude)\n2. Provide error feedback when CLI is missing\n\nThis blocks relay spawn/release functionality entirely.","status":"closed","priority":0,"issue_type":"bug","assignee":"Backend","created_at":"2026-01-03T16:43:37.927258+01:00","updated_at":"2026-01-03T16:50:11.02666+01:00","closed_at":"2026-01-03T16:50:11.02666+01:00"} +{"id":"agent-relay-454","title":"OpenCode headless mode integration","description":"Integrate OpenCode's headless mode (opencode run) with Agent Relay. Options: 1) Create MCP server adapter for agent-relay that OpenCode can use, 2) Document OpenCode config to work with relay. See: https://github.com/anomalyco/opencode/issues/953","status":"open","priority":3,"issue_type":"feature","created_at":"2026-01-04T01:01:55.715466+01:00","updated_at":"2026-01-04T01:01:55.715466+01:00"} {"id":"agent-relay-47z","title":"Express 5 may have breaking changes from Express 4 patterns","description":"package.json uses express@5.2.1 which is a major version with breaking changes from Express 4. Verify: (1) Error handling middleware patterns, (2) Router behavior, (3) Body parsing (express.json vs body-parser).","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-20T00:18:49.269841+01:00","updated_at":"2025-12-20T00:18:49.269841+01:00"} {"id":"agent-relay-4e0","title":"Fix message truncation - messages cut off at source","description":"Root cause found: parser.ts:40 inline regex only captures single line. Multi-line messages are split by parsePassThrough() at line 206. Fix options: (1) Allow continuation lines in inline format, (2) Use block format for multi-line, (3) Add heuristic to join lines until next @relay pattern.","status":"closed","priority":2,"issue_type":"bug","assignee":"MistyShelter","created_at":"2025-12-19T23:40:35.082717+01:00","updated_at":"2025-12-20T00:03:54.806087+01:00","closed_at":"2025-12-20T00:03:54.806087+01:00"} {"id":"agent-relay-4ft","title":"Merge project info into status command","status":"closed","priority":2,"issue_type":"task","assignee":"Pruner","created_at":"2025-12-19T21:59:52.685495+01:00","updated_at":"2025-12-19T22:06:44.276187+01:00","closed_at":"2025-12-19T22:06:44.276187+01:00"} diff --git a/.claude/rules/migrations.md b/.claude/rules/migrations.md new file mode 100644 index 00000000..a77d154c --- /dev/null +++ b/.claude/rules/migrations.md @@ -0,0 +1,74 @@ +--- +paths: + - "src/cloud/db/**/*.ts" + - "src/cloud/db/migrations/**/*.sql" + - "drizzle.config.ts" +--- + +# Database Migration Conventions + +## Drizzle ORM Migration Workflow + +This project uses Drizzle ORM with PostgreSQL. Migrations run automatically on server startup via `runMigrations()`. + +## When Schema Changes + +After modifying `src/cloud/db/schema.ts`: + +1. **Generate migration**: `npm run db:generate` +2. **Review the generated SQL** in `src/cloud/db/migrations/` +3. **Verify it's incremental** - should only contain ALTER/CREATE statements for changes, NOT recreate entire schema +4. **Test locally**: Restart server or run `npm run db:migrate` + +## Common Issues + +### Full Schema Recreation Instead of Incremental + +If `db:generate` creates a migration that recreates all tables: + +1. **Delete the bad migration file** from `migrations/` +2. **Remove its entry** from `migrations/meta/_journal.json` +3. **Delete any corrupt snapshot** in `migrations/meta/` +4. **Create incremental migration manually** using `ALTER TABLE ... ADD COLUMN IF NOT EXISTS` + +### Migration Not Applied + +If schema has columns that aren't in the database: + +1. Check if migration file exists in `migrations/` +2. Check if entry exists in `migrations/meta/_journal.json` +3. Verify migration ran: check `__drizzle_migrations` table in database + +## Writing Safe Migrations + +```sql +-- Use IF NOT EXISTS for idempotent migrations +ALTER TABLE users ADD COLUMN IF NOT EXISTS new_column VARCHAR(255); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_users_new_column ON users(new_column); +``` + +## Commands Reference + +```bash +npm run db:generate # Generate migration from schema diff +npm run db:migrate # Run pending migrations +npm run db:push # Push schema directly (dev only, can lose data) +npm run db:studio # Open Drizzle Studio GUI +``` + +## Production Safety + +- Always use `IF NOT EXISTS` / `IF EXISTS` for idempotent migrations +- Never use `db:push` in production - it can drop columns +- Test migrations on a copy of production data before deploying +- Migrations run on server startup - ensure they're fast and safe + +## Migration File Naming + +Files are named `NNNN_description.sql` where NNNN is sequential: +- `0001_initial.sql` +- `0002_add_feature.sql` +- `0003_nango_user_columns.sql` + +The `_journal.json` tracks which migrations have been applied. diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 0c1ca6b5..c2006537 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,6 +1,9 @@ name: Docker on: + push: + branches: + - main release: types: [published] workflow_dispatch: diff --git a/.gitignore b/.gitignore index 870230b0..795cbb3d 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,6 @@ coverage/ .next src/dashboard/out + +.env.local +.env diff --git a/.trajectories/active/traj_7ludwvz45veh.json b/.trajectories/active/traj_7ludwvz45veh.json new file mode 100644 index 00000000..bd93a8c0 --- /dev/null +++ b/.trajectories/active/traj_7ludwvz45veh.json @@ -0,0 +1,58 @@ +{ + "id": "traj_7ludwvz45veh", + "version": 1, + "task": { + "title": "Provider CLI auth flow for cloud workspaces", + "source": { + "system": "plain", + "id": "pre-launch-fixes" + } + }, + "status": "active", + "startedAt": "2026-01-04T00:05:43.304Z", + "agents": [ + { + "name": "khaliqgant", + "role": "lead", + "joinedAt": "2026-01-04T00:05:43.304Z" + } + ], + "chapters": [ + { + "id": "chap_esd2ffqy9f0f", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-04T00:05:51.196Z", + "events": [ + { + "ts": 1767485151196, + "type": "decision", + "content": "Pre-seed Claude CLI config to skip interactive setup: Pre-seed Claude CLI config to skip interactive setup", + "raw": { + "question": "Pre-seed Claude CLI config to skip interactive setup", + "chosen": "Pre-seed Claude CLI config to skip interactive setup", + "alternatives": [], + "reasoning": "Claude CLI has interactive first-run (theme selection, etc). Alternative was web terminal (xterm.js) which is more flexible but complex. Pre-seeding config is simpler for MVP. May revisit for web terminal if other CLIs have similar issues." + }, + "significance": "high" + }, + { + "ts": 1767485196636, + "type": "decision", + "content": "Add settings page for CLI provider management: Add settings page for CLI provider management", + "raw": { + "question": "Add settings page for CLI provider management", + "chosen": "Add settings page for CLI provider management", + "alternatives": [], + "reasoning": "Users should be able to connect additional AI providers after initial setup. Settings page in workspace dashboard will allow connecting Claude, Codex, OpenCode, Droid at any time, not just during initial workspace setup." + }, + "significance": "high" + } + ] + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [] +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_1k5if5snst2e.json b/.trajectories/completed/2026-01/traj_1k5if5snst2e.json new file mode 100644 index 00000000..0a63e8cd --- /dev/null +++ b/.trajectories/completed/2026-01/traj_1k5if5snst2e.json @@ -0,0 +1,65 @@ +{ + "id": "traj_1k5if5snst2e", + "version": 1, + "task": { + "title": "Fix 404 errors on auth endpoints", + "source": { + "system": "plain", + "id": "api-auth-session-404" + } + }, + "status": "completed", + "startedAt": "2026-01-03T19:55:20.964Z", + "agents": [ + { + "name": "Backend", + "role": "lead", + "joinedAt": "2026-01-03T19:55:20.965Z" + } + ], + "chapters": [ + { + "id": "chap_baircdnx9e02", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-03T21:14:10.404Z", + "events": [ + { + "ts": 1767474850405, + "type": "decision", + "content": "Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking: Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking", + "raw": { + "question": "Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking", + "chosen": "Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking", + "alternatives": [], + "reasoning": "Using Nango Proxy instead of direct token fetches provides automatic token refresh and cleaner code. Database schema was missing nango_connection_id, incoming_connection_id, and pending_installation_request columns needed for the two-connection OAuth pattern." + }, + "significance": "high" + }, + { + "ts": 1767474871478, + "type": "decision", + "content": "Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch: Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch", + "raw": { + "question": "Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch", + "chosen": "Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch", + "alternatives": [], + "reasoning": "Browser popup blockers require window.open() to be called synchronously within the user's click event handler. Awaiting the session token first broke the gesture chain. Solution: open popup immediately (shows loading), then fetch token async, then set token to enable the UI." + }, + "significance": "high" + } + ], + "endedAt": "2026-01-03T21:14:38.934Z" + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [], + "completedAt": "2026-01-03T21:14:38.934Z", + "retrospective": { + "summary": "Fixed Nango OAuth popup blocker issue in login and signup pages by reordering operations to open popup synchronously before async token fetch", + "approach": "Standard approach", + "confidence": 0.9 + } +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_1k5if5snst2e.md b/.trajectories/completed/2026-01/traj_1k5if5snst2e.md new file mode 100644 index 00000000..787372a8 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_1k5if5snst2e.md @@ -0,0 +1,37 @@ +# Trajectory: Fix 404 errors on auth endpoints + +> **Status:** โœ… Completed +> **Task:** api-auth-session-404 +> **Confidence:** 90% +> **Started:** January 3, 2026 at 08:55 PM +> **Completed:** January 3, 2026 at 10:14 PM + +--- + +## Summary + +Fixed Nango OAuth popup blocker issue in login and signup pages by reordering operations to open popup synchronously before async token fetch + +**Approach:** Standard approach + +--- + +## Key Decisions + +### Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking +- **Chose:** Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking +- **Reasoning:** Using Nango Proxy instead of direct token fetches provides automatic token refresh and cleaner code. Database schema was missing nango_connection_id, incoming_connection_id, and pending_installation_request columns needed for the two-connection OAuth pattern. + +### Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch +- **Chose:** Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch +- **Reasoning:** Browser popup blockers require window.open() to be called synchronously within the user's click event handler. Awaiting the session token first broke the gesture chain. Solution: open popup immediately (shows loading), then fetch token async, then set token to enable the UI. + +--- + +## Chapters + +### 1. Work +*Agent: default* + +- Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking: Fixed Nango integration: updated to use Nango Proxy for GitHub API calls, fixed popup blocking in OAuth flow, added missing database columns for user connection tracking +- Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch: Fixed popup:blocked_by_browser error by opening Nango Connect UI synchronously before async session fetch diff --git a/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json new file mode 100644 index 00000000..d8ece9f1 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json @@ -0,0 +1,49 @@ +{ + "id": "traj_ajs7zqfux4wc", + "version": 1, + "task": { + "title": "Fix Nango popup blocked - match my-senior-dev pattern exactly" + }, + "status": "completed", + "startedAt": "2026-01-03T21:22:52.243Z", + "agents": [ + { + "name": "khaliqgant", + "role": "lead", + "joinedAt": "2026-01-03T21:22:52.243Z" + } + ], + "chapters": [ + { + "id": "chap_8tdna5ynwc1z", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-03T21:22:59.275Z", + "events": [ + { + "ts": 1767475379276, + "type": "decision", + "content": "Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking: Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking", + "raw": { + "question": "Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking", + "chosen": "Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking", + "alternatives": [], + "reasoning": "" + }, + "significance": "high" + } + ], + "endedAt": "2026-01-03T21:23:07.802Z" + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [], + "completedAt": "2026-01-03T21:23:07.802Z", + "retrospective": { + "summary": "Rewrote login, signup, and connect-repos pages to exactly match my-senior-dev Nango pattern. Key changes: removed ConnectUI ref and .close() calls, added authSucceededRef to track auth state, use 'connectionId' in event.payload type guard.", + "approach": "Standard approach", + "confidence": 0.85 + } +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.md b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.md new file mode 100644 index 00000000..a4d96c11 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.md @@ -0,0 +1,23 @@ +# Trajectory: Fix Nango popup blocked - match my-senior-dev pattern exactly + +> **Status:** โœ… Completed +> **Confidence:** 85% +> **Started:** January 3, 2026 at 10:22 PM +> **Completed:** January 3, 2026 at 10:23 PM + +--- + +## Summary + +Rewrote login, signup, and connect-repos pages to exactly match my-senior-dev Nango pattern. Key changes: removed ConnectUI ref and .close() calls, added authSucceededRef to track auth state, use 'connectionId' in event.payload type guard. + +**Approach:** Standard approach + +--- + +## Chapters + +### 1. Work +*Agent: default* + +- Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking: Rewrote all auth pages to exactly match my-senior-dev pattern: no ConnectUI ref, no .close() calls, use authSucceededRef for tracking diff --git a/.trajectories/completed/2026-01/traj_cxofprm2m2en.json b/.trajectories/completed/2026-01/traj_cxofprm2m2en.json new file mode 100644 index 00000000..ddcd9b4f --- /dev/null +++ b/.trajectories/completed/2026-01/traj_cxofprm2m2en.json @@ -0,0 +1,49 @@ +{ + "id": "traj_cxofprm2m2en", + "version": 1, + "task": { + "title": "Fix Nango popup blocked by browser - use constructor pattern" + }, + "status": "completed", + "startedAt": "2026-01-03T21:18:15.384Z", + "agents": [ + { + "name": "khaliqgant", + "role": "lead", + "joinedAt": "2026-01-03T21:18:15.384Z" + } + ], + "chapters": [ + { + "id": "chap_5n4ibkpf4je1", + "title": "Work", + "agentName": "default", + "startedAt": "2026-01-03T21:18:25.048Z", + "events": [ + { + "ts": 1767475105049, + "type": "decision", + "content": "Pass connectSessionToken to Nango constructor instead of using setSessionToken(): Pass connectSessionToken to Nango constructor instead of using setSessionToken()", + "raw": { + "question": "Pass connectSessionToken to Nango constructor instead of using setSessionToken()", + "chosen": "Pass connectSessionToken to Nango constructor instead of using setSessionToken()", + "alternatives": [], + "reasoning": "The prpm app pattern works: new Nango({ connectSessionToken }) followed by openConnectUI(). This differs from our broken approach of new Nango() + setSessionToken() + open(). When the token is passed via constructor, Nango internally handles the popup differently and avoids browser popup blockers." + }, + "significance": "high" + } + ], + "endedAt": "2026-01-03T21:18:33.901Z" + } + ], + "commits": [], + "filesChanged": [], + "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay", + "tags": [], + "completedAt": "2026-01-03T21:18:33.901Z", + "retrospective": { + "summary": "Fixed popup:blocked_by_browser error by using Nango constructor pattern: new Nango({ connectSessionToken }) instead of setSessionToken(). Updated login, signup, and connect-repos pages to match prpm app pattern.", + "approach": "Standard approach", + "confidence": 0.9 + } +} \ No newline at end of file diff --git a/.trajectories/completed/2026-01/traj_cxofprm2m2en.md b/.trajectories/completed/2026-01/traj_cxofprm2m2en.md new file mode 100644 index 00000000..ff523159 --- /dev/null +++ b/.trajectories/completed/2026-01/traj_cxofprm2m2en.md @@ -0,0 +1,31 @@ +# Trajectory: Fix Nango popup blocked by browser - use constructor pattern + +> **Status:** โœ… Completed +> **Confidence:** 90% +> **Started:** January 3, 2026 at 10:18 PM +> **Completed:** January 3, 2026 at 10:18 PM + +--- + +## Summary + +Fixed popup:blocked_by_browser error by using Nango constructor pattern: new Nango({ connectSessionToken }) instead of setSessionToken(). Updated login, signup, and connect-repos pages to match prpm app pattern. + +**Approach:** Standard approach + +--- + +## Key Decisions + +### Pass connectSessionToken to Nango constructor instead of using setSessionToken() +- **Chose:** Pass connectSessionToken to Nango constructor instead of using setSessionToken() +- **Reasoning:** The prpm app pattern works: new Nango({ connectSessionToken }) followed by openConnectUI(). This differs from our broken approach of new Nango() + setSessionToken() + open(). When the token is passed via constructor, Nango internally handles the popup differently and avoids browser popup blockers. + +--- + +## Chapters + +### 1. Work +*Agent: default* + +- Pass connectSessionToken to Nango constructor instead of using setSessionToken(): Pass connectSessionToken to Nango constructor instead of using setSessionToken() diff --git a/.trajectories/index.json b/.trajectories/index.json index d7be6e71..ff2d8b06 100644 --- a/.trajectories/index.json +++ b/.trajectories/index.json @@ -1,6 +1,6 @@ { "version": 1, - "lastUpdated": "2026-01-03T19:22:22.783Z", + "lastUpdated": "2026-01-04T00:06:36.637Z", "trajectories": { "traj_ozd98si6a7ns": { "title": "Fix thinking indicator showing on all messages", @@ -253,6 +253,33 @@ "startedAt": "2026-01-03T19:17:32.797Z", "completedAt": "2026-01-03T19:22:22.762Z", "path": "/home/user/relay/.trajectories/completed/2026-01/traj_yvdadtvdgnz3.json" + }, + "traj_1k5if5snst2e": { + "title": "Fix 404 errors on auth endpoints", + "status": "completed", + "startedAt": "2026-01-03T19:55:20.964Z", + "completedAt": "2026-01-03T21:14:38.934Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_1k5if5snst2e.json" + }, + "traj_cxofprm2m2en": { + "title": "Fix Nango popup blocked by browser - use constructor pattern", + "status": "completed", + "startedAt": "2026-01-03T21:18:15.384Z", + "completedAt": "2026-01-03T21:18:33.901Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_cxofprm2m2en.json" + }, + "traj_ajs7zqfux4wc": { + "title": "Fix Nango popup blocked - match my-senior-dev pattern exactly", + "status": "completed", + "startedAt": "2026-01-03T21:22:52.243Z", + "completedAt": "2026-01-03T21:23:07.802Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_ajs7zqfux4wc.json" + }, + "traj_7ludwvz45veh": { + "title": "Provider CLI auth flow for cloud workspaces", + "status": "active", + "startedAt": "2026-01-04T00:05:43.304Z", + "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/active/traj_7ludwvz45veh.json" } } } \ No newline at end of file diff --git a/deploy/init-db.sql b/deploy/init-db.sql index 8c977bdd..66d21d31 100644 --- a/deploy/init-db.sql +++ b/deploy/init-db.sql @@ -1,5 +1,5 @@ -- Agent Relay Cloud - Database bootstrap --- Deprecated: use migrations in src/cloud/db/migrations/0001_initial.sql --- This file is kept as a convenience wrapper for local psql usage. +-- Migrations are handled by Drizzle ORM at server startup. +-- This file is kept for Docker entrypoint compatibility. -\\i ../src/cloud/db/migrations/0001_initial.sql +-- No-op: Drizzle migrations run automatically diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index b2a6113c..0e09d8a6 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -1,11 +1,36 @@ # Agent Relay Workspace # Runs a user's workspace with the relay daemon and agent orchestration +FROM node:20-slim AS builder + +WORKDIR /app + +# Install build dependencies for native modules (node-pty, better-sqlite3) +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Copy package files and scripts (needed for postinstall) +COPY package*.json ./ +COPY scripts ./scripts + +# Install dependencies (production only, skip tmux in CI) +ENV CI=true +RUN npm ci --omit=dev + +# Copy pre-built dist (build before docker build) +COPY dist ./dist + +# --- + FROM node:20-slim WORKDIR /app -# Install system dependencies for AI CLIs +# Install system dependencies for AI CLIs and git +# Note: tmux not needed - daemon uses node-pty directly RUN apt-get update && apt-get install -y \ bash \ ca-certificates \ @@ -14,29 +39,41 @@ RUN apt-get update && apt-get install -y \ python3 \ && rm -rf /var/lib/apt/lists/* -# Install Claude CLI (if available) -# RUN npm install -g @anthropic-ai/claude-code - -# Copy pre-built agent-relay -COPY --from=ghcr.io/khaliqgant/agent-relay:latest /app/dist ./dist -COPY --from=ghcr.io/khaliqgant/agent-relay:latest /app/node_modules ./node_modules -COPY --from=ghcr.io/khaliqgant/agent-relay:latest /app/package*.json ./ +# Copy from builder +COPY --from=builder /app/dist ./dist +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package*.json ./ COPY deploy/workspace/entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh +# Install Codex globally as root (requires write to /usr/local) +RUN npm install -g @openai/codex + # Create workspace directory -RUN mkdir -p /workspace /data +RUN mkdir -p /workspace/repos /data # Create non-root user RUN useradd -m -u 1001 workspace RUN chown -R workspace:workspace /app /workspace /data USER workspace +# Install AI CLIs as workspace user (they install to ~/.local/bin) +# Claude +RUN curl -fsSL https://claude.ai/install.sh | bash +# Pre-seed Claude config to skip interactive onboarding +RUN mkdir -p /home/workspace/.claude && \ + echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/workspace/.claude/settings.local.json +# OpenCode +RUN curl -fsSL https://opencode.ai/install | bash +# Droid +RUN curl -fsSL https://app.factory.ai/cli | sh + # Environment ENV NODE_ENV=production ENV PORT=3888 ENV AGENT_RELAY_DATA_DIR=/data ENV AGENT_RELAY_DASHBOARD_PORT=3888 +ENV PATH="/home/workspace/.local/bin:$PATH" # Expose ports # 3888 - Dashboard/API diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 16140e1e..f593273d 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -8,9 +8,9 @@ # - Example workspace (optional) # # After starting, access: -# - Landing page: http://localhost:3000 -# - Dashboard: http://localhost:3000/dashboard -# - API: http://localhost:3000/api +# - Landing page: http://localhost:4567 +# - Dashboard: http://localhost:4567/dashboard +# - API: http://localhost:4567/api version: '3.8' @@ -26,7 +26,7 @@ services: - postgres_data:/var/lib/postgresql/data - ./deploy/init-db.sql:/docker-entrypoint-initdb.d/init.sql:ro ports: - - "5432:5432" + - "5433:5432" healthcheck: test: ["CMD-SHELL", "pg_isready -U agent_relay"] interval: 5s @@ -52,11 +52,11 @@ services: context: . dockerfile: Dockerfile ports: - - "3000:3000" + - "4567:4567" environment: NODE_ENV: development - PORT: 3000 - PUBLIC_URL: http://localhost:3000 + PORT: 4567 + PUBLIC_URL: http://localhost:4567 # Database DATABASE_URL: postgres://agent_relay:dev_password@postgres:5432/agent_relay @@ -95,7 +95,7 @@ services: # Mount docker socket for local workspace provisioning - /var/run/docker.sock:/var/run/docker.sock healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:3000/health"] + test: ["CMD", "curl", "-f", "http://localhost:4567/health"] interval: 10s timeout: 5s retries: 3 diff --git a/package.json b/package.json index ad6ee446..eca9f49d 100644 --- a/package.json +++ b/package.json @@ -55,9 +55,10 @@ "services:up": "docker compose -f docker-compose.dev.yml up -d postgres redis && echo 'โœ“ Postgres and Redis running'", "services:down": "docker compose -f docker-compose.dev.yml down", "services:logs": "docker compose -f docker-compose.dev.yml logs -f postgres redis", - "cloud:api": "node dist/cloud/index.js", - "precloud": "npm run clean && tsc && chmod +x dist/cli/index.js && npm run services:up", - "cloud": "concurrently -n api,daemon,dashboard -c cyan,blue,magenta \"npm run cloud:api\" \"npm run dev:daemon\" \"npm run dev:dashboard\"" + "cloud:setup": "./scripts/cloud-setup.sh", + "cloud:api": "node -r dotenv/config dist/cloud/index.js", + "precloud": "./scripts/cloud-setup.sh --skip-data", + "cloud": "concurrently -n api,daemon,dashboard -c cyan,blue,magenta \"npm run cloud:api\" \"npm run dev:daemon\" \"npm run dev:next\"" }, "keywords": [ "agent", diff --git a/scripts/cloud-setup.sh b/scripts/cloud-setup.sh new file mode 100755 index 00000000..47228b6c --- /dev/null +++ b/scripts/cloud-setup.sh @@ -0,0 +1,96 @@ +#!/bin/bash +# Cloud local development setup script +# Usage: ./scripts/cloud-setup.sh [--skip-docker] [--skip-migrate] [--skip-data] + +set -e + +SKIP_DOCKER=false +SKIP_MIGRATE=false +SKIP_DATA=false + +# Parse arguments +for arg in "$@"; do + case $arg in + --skip-docker) SKIP_DOCKER=true ;; + --skip-migrate) SKIP_MIGRATE=true ;; + --skip-data) SKIP_DATA=true ;; + esac +done + +echo "๐Ÿš€ Setting up Agent Relay Cloud (local dev)" +echo "" + +# Step 1: Start Docker services +if [ "$SKIP_DOCKER" = false ]; then + echo "๐Ÿ“ฆ Starting Docker services (Postgres + Redis)..." + docker compose -f docker-compose.dev.yml up -d postgres redis + + # Wait for Postgres to be ready + echo "โณ Waiting for Postgres to be ready..." + until docker compose -f docker-compose.dev.yml exec -T postgres pg_isready -U postgres > /dev/null 2>&1; do + sleep 1 + done + echo "โœ“ Postgres is ready" +else + echo "โญ๏ธ Skipping Docker setup" +fi + +# Step 2: Build TypeScript +echo "" +echo "๐Ÿ”จ Building TypeScript..." +npm run build > /dev/null 2>&1 +echo "โœ“ Build complete" + +# Step 3: Run migrations +if [ "$SKIP_MIGRATE" = false ]; then + echo "" + echo "๐Ÿ“Š Running database migrations..." + npm run db:migrate 2>&1 | grep -E "(Applied|already applied|Error)" || true + echo "โœ“ Migrations complete" +else + echo "โญ๏ธ Skipping migrations" +fi + +# Step 4: Set up test data (only if server is running) +if [ "$SKIP_DATA" = false ]; then + echo "" + echo "๐Ÿงช Setting up test data..." + + # Check if cloud server is running, if not start it temporarily + if ! curl -s http://localhost:4567/api/health > /dev/null 2>&1; then + echo " Starting cloud server temporarily for setup..." + node dist/cloud/index.js & + SERVER_PID=$! + sleep 3 + STARTED_SERVER=true + fi + + # Create test data + RESPONSE=$(curl -s -X POST http://localhost:4567/api/test/setup-local-cloud \ + -H "Content-Type: application/json" \ + -c /tmp/relay-cookies.txt \ + -d '{"repoName": "test-org/test-repo", "workspaceName": "Local Dev"}' 2>&1 || echo '{"error": "Failed to connect"}') + + if echo "$RESPONSE" | grep -q '"success":true'; then + echo "โœ“ Test data created" + echo " Cookie saved to /tmp/relay-cookies.txt" + else + echo "โš ๏ธ Could not create test data (server may need to be running)" + fi + + # Stop temp server if we started it + if [ "$STARTED_SERVER" = true ]; then + kill $SERVER_PID 2>/dev/null || true + fi +else + echo "โญ๏ธ Skipping test data setup" +fi + +echo "" +echo "โœ… Setup complete!" +echo "" +echo "To start the cloud server:" +echo " npm run cloud" +echo "" +echo "Then open: http://localhost:4567/app" +echo "" diff --git a/src/cloud/api/github-app.ts b/src/cloud/api/github-app.ts index dc20ff70..526f0f0e 100644 --- a/src/cloud/api/github-app.ts +++ b/src/cloud/api/github-app.ts @@ -125,30 +125,16 @@ githubAppRouter.post('/repos/:id/issues', async (req: Request, res: Response) => return res.status(400).json({ error: 'Repository not connected via Nango' }); } - // Get token and create issue via GitHub API - const token = await nangoService.getGithubAppToken(repository.nangoConnectionId); + // Create issue via Nango Proxy (handles token injection automatically) const [owner, repo] = repository.githubFullName.split('/'); - - const response = await fetch(`https://api.github.com/repos/${owner}/${repo}/issues`, { - method: 'POST', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ title, body: body || '', labels }), - }); - - if (!response.ok) { - const error = await response.text(); - throw new Error(`Failed to create issue: ${response.status} ${error}`); - } - - const issue = await response.json() as { id: number; number: number; html_url: string }; + const issue = await nangoService.createGithubIssue( + repository.nangoConnectionId, + owner, + repo, + { title, body: body || '', labels } + ); res.json({ - id: issue.id, number: issue.number, url: issue.html_url, }); @@ -182,29 +168,16 @@ githubAppRouter.post('/repos/:id/pulls', async (req: Request, res: Response) => return res.status(400).json({ error: 'Repository not connected via Nango' }); } - const token = await nangoService.getGithubAppToken(repository.nangoConnectionId); + // Create PR via Nango Proxy (handles token injection automatically) const [owner, repo] = repository.githubFullName.split('/'); - - const response = await fetch(`https://api.github.com/repos/${owner}/${repo}/pulls`, { - method: 'POST', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ title, body: body || '', head, base }), - }); - - if (!response.ok) { - const error = await response.text(); - throw new Error(`Failed to create PR: ${response.status} ${error}`); - } - - const pr = await response.json() as { id: number; number: number; html_url: string }; + const pr = await nangoService.createGithubPullRequest( + repository.nangoConnectionId, + owner, + repo, + { title, body: body || '', head, base } + ); res.json({ - id: pr.id, number: pr.number, url: pr.html_url, }); @@ -237,30 +210,16 @@ githubAppRouter.post('/repos/:id/comments', async (req: Request, res: Response) return res.status(400).json({ error: 'Repository not connected via Nango' }); } - const token = await nangoService.getGithubAppToken(repository.nangoConnectionId); + // Add comment via Nango Proxy (handles token injection automatically) const [owner, repo] = repository.githubFullName.split('/'); - - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}/comments`, - { - method: 'POST', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ body }), - } + const comment = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + issueNumber, + body ); - if (!response.ok) { - const error = await response.text(); - throw new Error(`Failed to add comment: ${response.status} ${error}`); - } - - const comment = await response.json() as { id: number; html_url: string }; - res.json({ id: comment.id, url: comment.html_url, diff --git a/src/cloud/api/nango-auth.ts b/src/cloud/api/nango-auth.ts index 32987796..c50e93fd 100644 --- a/src/cloud/api/nango-auth.ts +++ b/src/cloud/api/nango-auth.ts @@ -71,8 +71,13 @@ nangoAuthRouter.get('/login-status/:connectionId', async (req: Request, res: Res // Clear incoming connection ID await db.users.clearIncomingConnectionId(user.id); + // Check if user has any repos connected + const repos = await db.repositories.findByUserId(user.id); + const hasRepos = repos.length > 0; + res.json({ ready: true, + hasRepos, user: { id: user.id, githubUsername: user.githubUsername, @@ -169,17 +174,28 @@ nangoAuthRouter.get('/repo-status/:connectionId', requireAuth, async (req: Reque * Handle Nango webhooks for auth and sync events */ nangoAuthRouter.post('/webhook', async (req: Request, res: Response) => { - const signature = req.headers['x-nango-signature'] as string | undefined; - const rawBody = JSON.stringify(req.body); + // Use the preserved raw body from express.json verify callback + const rawBody = (req as Request & { rawBody?: string }).rawBody || JSON.stringify(req.body); + + // Verify signature using the new verifyIncomingWebhookRequest method + const hasSignature = req.headers['x-nango-signature'] || req.headers['x-nango-hmac-sha256']; + const isDev = process.env.NODE_ENV !== 'production'; - // Verify signature - if (!nangoService.verifyWebhookSignature(rawBody, signature)) { - console.error('[nango-webhook] Invalid signature'); - return res.status(401).json({ error: 'Invalid signature' }); + if (hasSignature) { + if (!nangoService.verifyWebhookSignature(rawBody, req.headers as Record)) { + console.error('[nango-webhook] Invalid signature'); + return res.status(401).json({ error: 'Invalid signature' }); + } + console.log('[nango-webhook] Signature verified'); + } else if (!isDev) { + console.error('[nango-webhook] Missing signature in production'); + return res.status(401).json({ error: 'Missing signature' }); + } else { + console.warn('[nango-webhook] Skipping signature verification in development (no signature)'); } const payload = req.body; - console.log(`[nango-webhook] Received ${payload.type} event`); + console.log(`[nango-webhook] Received ${payload.type} event`, JSON.stringify(payload, null, 2)); try { switch (payload.type) { @@ -191,6 +207,11 @@ nangoAuthRouter.post('/webhook', async (req: Request, res: Response) => { console.log('[nango-webhook] Sync event received'); break; + case 'forward': + // Nango forwards events from providers - typically not needed for our flow + console.log('[nango-webhook] Forward event from provider (ignored)'); + break; + default: console.log(`[nango-webhook] Unhandled event type: ${payload.type}`); } @@ -224,6 +245,11 @@ async function handleAuthWebhook(payload: { /** * Handle GitHub login webhook + * + * Three scenarios: + * 1. New user - Create user record, keep connection as permanent + * 2. Returning user with existing connection - Store incoming ID for polling, delete temp connection + * 3. Existing user, first connection - Set connection ID as permanent */ async function handleLoginWebhook( connectionId: string, @@ -231,21 +257,15 @@ async function handleLoginWebhook( ): Promise { // Get GitHub user info via Nango proxy const githubUser = await nangoService.getGithubUser(connectionId); + const githubId = String(githubUser.id); // Check if user already exists - const existingUser = await db.users.findByGithubId(String(githubUser.id)); - - if (existingUser) { - // Returning user - store temp connection for polling - await db.users.update(existingUser.id, { - incomingConnectionId: connectionId, - }); + const existingUser = await db.users.findByGithubId(githubId); - console.log(`[nango-webhook] Returning user login: ${githubUser.login}`); - } else { - // New user - create record + // SCENARIO 1: New user + if (!existingUser) { const newUser = await db.users.upsert({ - githubId: String(githubUser.id), + githubId, githubUsername: githubUser.login, email: githubUser.email || null, avatarUrl: githubUser.avatar_url || null, @@ -260,7 +280,49 @@ async function handleLoginWebhook( }); console.log(`[nango-webhook] New user created: ${githubUser.login}`); + return; + } + + // SCENARIO 2: Returning user with existing connection - delete temp connection + if (existingUser.nangoConnectionId && existingUser.nangoConnectionId !== connectionId) { + console.log(`[nango-webhook] Returning user: ${githubUser.login}`, { + permanentConnectionId: existingUser.nangoConnectionId, + incomingConnectionId: connectionId, + }); + + // Store incoming connection ID for polling + await db.users.update(existingUser.id, { + incomingConnectionId: connectionId, + githubUsername: githubUser.login, + avatarUrl: githubUser.avatar_url || null, + }); + + // Delete the temporary connection from Nango to prevent duplicates + try { + await nangoService.deleteConnection(connectionId, NANGO_INTEGRATIONS.GITHUB_USER); + console.log(`[nango-webhook] Deleted temp connection for returning user`); + } catch (error) { + console.error(`[nango-webhook] Failed to delete temp connection:`, error); + // Non-fatal - continue anyway + } + + return; } + + // SCENARIO 3: Existing user, first connection (or same connection) + console.log(`[nango-webhook] First/same connection for existing user: ${githubUser.login}`); + await db.users.update(existingUser.id, { + nangoConnectionId: connectionId, + incomingConnectionId: connectionId, + githubUsername: githubUser.login, + avatarUrl: githubUser.avatar_url || null, + }); + + // Update connection with user ID + await nangoService.updateEndUser(connectionId, NANGO_INTEGRATIONS.GITHUB_USER, { + id: existingUser.id, + email: existingUser.email || undefined, + }); } /** @@ -270,9 +332,22 @@ async function handleRepoAuthWebhook( connectionId: string, endUser?: { id?: string; email?: string } ): Promise { - const userId = endUser?.id; + let userId = endUser?.id; + + // Fallback: If endUser.id not in webhook, fetch connection metadata from Nango if (!userId) { - console.error('[nango-webhook] No user ID in repo auth webhook'); + console.log('[nango-webhook] No user ID in webhook payload, fetching from connection metadata...'); + try { + const connection = await nangoService.getConnection(connectionId, NANGO_INTEGRATIONS.GITHUB_APP); + userId = connection.end_user?.id; + console.log(`[nango-webhook] Got user ID from connection: ${userId || 'not found'}`); + } catch (err) { + console.error('[nango-webhook] Failed to fetch connection metadata:', err); + } + } + + if (!userId) { + console.error('[nango-webhook] No user ID found - cannot sync repos'); return; } @@ -283,6 +358,34 @@ async function handleRepoAuthWebhook( } try { + // Get the GitHub App installation ID + const githubInstallationId = await nangoService.getGithubAppInstallationId(connectionId); + let installationUuid: string | null = null; + + if (githubInstallationId) { + // Find or create the github_installations record + let installation = await db.githubInstallations.findByInstallationId(String(githubInstallationId)); + + if (!installation) { + // Create a new installation record + // We need to get more info about the installation - for now use user info + installation = await db.githubInstallations.upsert({ + installationId: String(githubInstallationId), + accountType: 'user', // Could be 'organization' - we'd need to detect this + accountLogin: user.githubUsername || 'unknown', + accountId: user.githubId || 'unknown', + installedById: user.id, + permissions: {}, + events: [], + }); + console.log(`[nango-webhook] Created installation record for ${githubInstallationId}`); + } + + installationUuid = installation.id; + } else { + console.warn('[nango-webhook] Could not get installation ID from Nango connection'); + } + // Fetch repos the user has access to const { repositories: repos } = await nangoService.listGithubAppRepos(connectionId); @@ -295,6 +398,7 @@ async function handleRepoAuthWebhook( isPrivate: repo.private, defaultBranch: repo.default_branch, nangoConnectionId: connectionId, + installationId: installationUuid, syncStatus: 'synced', lastSyncedAt: new Date(), }); @@ -303,7 +407,7 @@ async function handleRepoAuthWebhook( // Clear any pending installation request await db.users.clearPendingInstallationRequest(user.id); - console.log(`[nango-webhook] Synced ${repos.length} repos for ${user.githubUsername}`); + console.log(`[nango-webhook] Synced ${repos.length} repos for ${user.githubUsername} (installation: ${githubInstallationId || 'unknown'})`); } catch (error: unknown) { const err = error as { message?: string }; diff --git a/src/cloud/api/providers.ts b/src/cloud/api/providers.ts index 4e695dfd..aec90154 100644 --- a/src/cloud/api/providers.ts +++ b/src/cloud/api/providers.ts @@ -27,43 +27,74 @@ providersRouter.use(requireAuth); * * When providers add OAuth support, we can switch to device flow. */ -const PROVIDERS = { +// Base provider properties +interface BaseProvider { + name: string; + displayName: string; + description: string; + color: string; +} + +// CLI-based auth provider (Claude, OpenCode, Droid) +interface CliProvider extends BaseProvider { + authStrategy: 'cli'; + cliCommand: string; + credentialPath: string; +} + +// Device flow OAuth provider (Google) +interface DeviceFlowProvider extends BaseProvider { + authStrategy: 'device_flow'; + deviceCodeUrl: string; + tokenUrl: string; + userInfoUrl: string; + scopes: string[]; +} + +type Provider = CliProvider | DeviceFlowProvider; + +const PROVIDERS: Record = { anthropic: { name: 'Anthropic', displayName: 'Claude', description: 'Claude Code - recommended for code tasks', - // Auth strategy: CLI-based until Anthropic adds OAuth - authStrategy: 'cli' as const, - cliCommand: 'claude login', - credentialPath: '~/.claude/credentials.json', // Where Claude stores tokens - // Future OAuth endpoints (hypothetical - for when Anthropic implements) - deviceCodeUrl: 'https://api.anthropic.com/oauth/device/code', - tokenUrl: 'https://api.anthropic.com/oauth/token', - userInfoUrl: 'https://api.anthropic.com/v1/user', - scopes: ['claude-code:execute', 'user:read'], + authStrategy: 'cli', + cliCommand: 'claude', + credentialPath: '~/.claude/credentials.json', color: '#D97757', }, - openai: { + codex: { name: 'OpenAI', displayName: 'Codex', - description: 'Codex CLI for AI-assisted coding', - // Auth strategy: CLI-based until OpenAI adds OAuth - authStrategy: 'cli' as const, - cliCommand: 'codex auth', + description: 'Codex - OpenAI coding assistant', + authStrategy: 'cli', + cliCommand: 'codex login', credentialPath: '~/.codex/credentials.json', - // Future OAuth endpoints (hypothetical) - deviceCodeUrl: 'https://auth.openai.com/device/code', - tokenUrl: 'https://auth.openai.com/oauth/token', - userInfoUrl: 'https://api.openai.com/v1/user', - scopes: ['openid', 'profile', 'email', 'codex:execute'], color: '#10A37F', }, + opencode: { + name: 'OpenCode', + displayName: 'OpenCode', + description: 'OpenCode - AI coding assistant', + authStrategy: 'cli', + cliCommand: 'opencode', + credentialPath: '~/.opencode/credentials.json', + color: '#00D4AA', + }, + droid: { + name: 'Factory', + displayName: 'Droid', + description: 'Droid - Factory AI coding agent', + authStrategy: 'cli', + cliCommand: 'droid', + credentialPath: '~/.factory/credentials.json', + color: '#6366F1', + }, google: { name: 'Google', displayName: 'Gemini', description: 'Gemini - multi-modal capabilities', - // Auth strategy: Real OAuth device flow (works today!) - authStrategy: 'device_flow' as const, + authStrategy: 'device_flow', deviceCodeUrl: 'https://oauth2.googleapis.com/device/code', tokenUrl: 'https://oauth2.googleapis.com/token', userInfoUrl: 'https://www.googleapis.com/oauth2/v2/userinfo', @@ -72,6 +103,16 @@ const PROVIDERS = { }, }; +// Type guard for device flow providers +function isDeviceFlowProvider(provider: Provider): provider is DeviceFlowProvider { + return provider.authStrategy === 'device_flow'; +} + +// Type guard for CLI providers +function isCliProvider(provider: Provider): provider is CliProvider { + return provider.authStrategy === 'cli'; +} + type ProviderType = keyof typeof PROVIDERS; // In-memory store for active device flows (use Redis in production) @@ -206,7 +247,13 @@ providersRouter.post('/:provider/connect', async (req: Request, res: Response) = } // Device flow auth (Google) - start OAuth device flow - const clientConfig = config.providers[provider]; + // At this point, we know it's a device flow provider (CLI was handled above) + if (!isDeviceFlowProvider(providerConfig)) { + return res.status(400).json({ error: 'Provider does not support device flow' }); + } + + // Only google is configured for device flow in config + const clientConfig = provider === 'google' ? config.providers.google : undefined; if (!clientConfig) { return res.status(400).json({ error: `Provider ${provider} not configured` }); } @@ -221,7 +268,7 @@ providersRouter.post('/:provider/connect', async (req: Request, res: Response) = body: new URLSearchParams({ client_id: clientConfig.clientId, scope: providerConfig.scopes.join(' '), - ...((provider === 'google') && { client_secret: (clientConfig as any).clientSecret }), + ...((provider === 'google') && { client_secret: clientConfig.clientSecret }), }), }); @@ -298,7 +345,7 @@ providersRouter.post('/:provider/verify', async (req: Request, res: Response) => userId, provider, accessToken: 'cli-authenticated', // Placeholder - real token from CLI - scopes: providerConfig.scopes, + scopes: [], // CLI auth doesn't use scopes providerAccountEmail: req.body.email, // User can optionally provide }); @@ -313,6 +360,73 @@ providersRouter.post('/:provider/verify', async (req: Request, res: Response) => } }); +/** + * POST /api/providers/:provider/api-key + * Connect a provider using an API key (for cloud-hosted workspaces) + */ +providersRouter.post('/:provider/api-key', async (req: Request, res: Response) => { + const { provider } = req.params as { provider: ProviderType }; + const userId = req.session.userId!; + const { apiKey } = req.body; + + if (!apiKey || typeof apiKey !== 'string') { + return res.status(400).json({ error: 'API key is required' }); + } + + const providerConfig = PROVIDERS[provider]; + if (!providerConfig) { + return res.status(404).json({ error: 'Unknown provider' }); + } + + // Validate the API key by making a test request + try { + let isValid = false; + + if (provider === 'anthropic') { + // Test Anthropic API key + const testRes = await fetch('https://api.anthropic.com/v1/messages', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': apiKey, + 'anthropic-version': '2023-06-01', + }, + body: JSON.stringify({ + model: 'claude-3-haiku-20240307', + max_tokens: 1, + messages: [{ role: 'user', content: 'hi' }], + }), + }); + // 200 = valid, 401 = invalid key, 400/other = might still be valid key + isValid = testRes.status !== 401; + } else { + // For other providers, just accept the key + isValid = true; + } + + if (!isValid) { + return res.status(400).json({ error: 'Invalid API key' }); + } + + // Store the API key - use scopes from device flow providers, empty for CLI providers + const scopes = isDeviceFlowProvider(providerConfig) ? providerConfig.scopes : []; + await vault.storeCredential({ + userId, + provider, + accessToken: apiKey, + scopes, + }); + + res.json({ + success: true, + message: `${providerConfig.displayName} connected`, + }); + } catch (error) { + console.error(`Error connecting ${provider} with API key:`, error); + res.status(500).json({ error: 'Failed to connect provider' }); + } +}); + /** * GET /api/providers/:provider/status/:flowId * Check status of device flow @@ -393,6 +507,12 @@ providersRouter.delete('/:provider/flow/:flowId', (req: Request, res: Response) async function pollForToken(flowId: string, provider: ProviderType, clientId: string) { const providerConfig = PROVIDERS[provider]; + // Only device flow providers can poll for tokens + if (!isDeviceFlowProvider(providerConfig)) { + console.error(`Provider ${provider} does not support device flow polling`); + return; + } + const poll = async (intervalMs: number) => { const current = await loadFlow(flowId); if (!current || current.status !== 'pending') return; @@ -491,19 +611,21 @@ async function storeProviderTokens( ) { const providerConfig = PROVIDERS[provider]; - // Fetch user info from provider + // Fetch user info from provider (only device flow providers have userInfoUrl) let userInfo: { id?: string; email?: string } = {}; - try { - const response = await fetch(providerConfig.userInfoUrl, { - headers: { - Authorization: `Bearer ${tokens.accessToken}`, - }, - }); - if (response.ok) { - userInfo = await response.json() as { id?: string; email?: string }; + if (isDeviceFlowProvider(providerConfig)) { + try { + const response = await fetch(providerConfig.userInfoUrl, { + headers: { + Authorization: `Bearer ${tokens.accessToken}`, + }, + }); + if (response.ok) { + userInfo = await response.json() as { id?: string; email?: string }; + } + } catch (error) { + console.error('Error fetching user info:', error); } - } catch (error) { - console.error('Error fetching user info:', error); } // Encrypt and store diff --git a/src/cloud/api/test-helpers.ts b/src/cloud/api/test-helpers.ts index ffc3e4e7..9b205969 100644 --- a/src/cloud/api/test-helpers.ts +++ b/src/cloud/api/test-helpers.ts @@ -10,7 +10,10 @@ import { Router, Request, Response } from 'express'; import { randomUUID, createHash, randomBytes } from 'crypto'; import { getDb } from '../db/drizzle.js'; -import { users, linkedDaemons } from '../db/schema.js'; +import { users, linkedDaemons, workspaces, repositories } from '../db/schema.js'; +import { getProvisioner } from '../provisioner/index.js'; +import { db } from '../db/index.js'; +import { nangoService } from '../services/nango.js'; export const testHelpersRouter = Router(); @@ -157,3 +160,485 @@ testHelpersRouter.get('/status', (req: Request, res: Response) => { timestamp: new Date().toISOString(), }); }); + +/** + * POST /api/test/create-mock-workspace + * Creates a mock workspace pointing to a local dashboard server + * + * Use this to test the cloud flow locally without real provisioning. + * The workspace will have publicUrl pointing to localhost:3889. + */ +testHelpersRouter.post('/create-mock-workspace', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { name, publicUrl } = req.body; + const userId = req.session.userId; + + if (!userId) { + return res.status(401).json({ error: 'Must be logged in. Use /api/test/create-user first or log in via OAuth.' }); + } + + const db = getDb(); + + // Create a mock workspace with local publicUrl + const [workspace] = await db.insert(workspaces).values({ + userId, + name: name || 'Local Test Workspace', + status: 'running', + publicUrl: publicUrl || 'http://localhost:3889', + computeProvider: 'docker', + computeId: `mock-${randomUUID().slice(0, 8)}`, + config: { + providers: ['anthropic'], + repositories: [], + supervisorEnabled: true, + maxAgents: 10, + }, + }).returning(); + + res.json({ + workspaceId: workspace.id, + name: workspace.name, + status: workspace.status, + publicUrl: workspace.publicUrl, + message: 'Mock workspace created. Start agent-relay locally and navigate to /app.', + }); + } catch (error) { + console.error('Error creating mock workspace:', error); + res.status(500).json({ error: 'Failed to create mock workspace' }); + } +}); + +/** + * POST /api/test/create-mock-repo + * Creates a mock repository for the current user + * + * Use this to test the cloud flow without connecting real GitHub repos. + */ +testHelpersRouter.post('/create-mock-repo', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { fullName, isPrivate } = req.body; + const userId = req.session.userId; + + if (!userId) { + return res.status(401).json({ error: 'Must be logged in. Use /api/test/create-user first or log in via OAuth.' }); + } + + if (!fullName) { + return res.status(400).json({ error: 'fullName is required (e.g., "owner/repo")' }); + } + + const db = getDb(); + + // Create a mock repository + const [repo] = await db.insert(repositories).values({ + userId, + githubId: Math.floor(Math.random() * 1000000), + githubFullName: fullName, + isPrivate: isPrivate ?? false, + defaultBranch: 'main', + syncStatus: 'synced', + nangoConnectionId: `mock-connection-${randomUUID().slice(0, 8)}`, + lastSyncedAt: new Date(), + }).returning(); + + res.json({ + repoId: repo.id, + fullName: repo.githubFullName, + isPrivate: repo.isPrivate, + message: 'Mock repository created.', + }); + } catch (error) { + console.error('Error creating mock repo:', error); + res.status(500).json({ error: 'Failed to create mock repo' }); + } +}); + +/** + * POST /api/test/login-as + * Quick login for testing - creates session for existing or new test user + */ +testHelpersRouter.post('/login-as', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { username } = req.body; + const db = getDb(); + + // Find or create user + let user; + const existingUsers = await db.select().from(users).limit(1); + + if (existingUsers.length > 0 && !username) { + user = existingUsers[0]; + } else { + const testId = `test-${randomUUID()}`; + const [newUser] = await db.insert(users).values({ + email: `${username || testId}@test.local`, + githubId: testId, + githubUsername: username || 'test-user', + avatarUrl: null, + plan: 'free', + }).returning(); + user = newUser; + } + + // Set session + req.session.userId = user.id; + + res.json({ + success: true, + userId: user.id, + username: user.githubUsername, + message: 'Logged in. You can now access /app and other authenticated routes.', + }); + } catch (error) { + console.error('Error in login-as:', error); + res.status(500).json({ error: 'Failed to login' }); + } +}); + +/** + * GET /api/test/setup-local-cloud + * One-shot setup: creates user, mock repo, and mock workspace + * + * After calling this, start agent-relay locally and go to /app + */ +testHelpersRouter.post('/setup-local-cloud', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + try { + const { repoName, workspaceName } = req.body; + const db = getDb(); + + // 1. Create or get test user + const testId = `test-${randomUUID().slice(0, 8)}`; + const [user] = await db.insert(users).values({ + email: `${testId}@test.local`, + githubId: testId, + githubUsername: 'local-tester', + avatarUrl: null, + plan: 'free', + }).returning(); + + // Set session + req.session.userId = user.id; + + // 2. Create mock repository + const [repo] = await db.insert(repositories).values({ + userId: user.id, + githubId: Math.floor(Math.random() * 1000000), + githubFullName: repoName || 'test-org/test-repo', + isPrivate: false, + defaultBranch: 'main', + syncStatus: 'synced', + nangoConnectionId: `mock-${randomUUID().slice(0, 8)}`, + lastSyncedAt: new Date(), + }).returning(); + + // 3. Create mock workspace pointing to local dashboard + const [workspace] = await db.insert(workspaces).values({ + userId: user.id, + name: workspaceName || 'Local Development', + status: 'running', + publicUrl: 'http://localhost:3889', + computeProvider: 'docker', + computeId: `mock-${randomUUID().slice(0, 8)}`, + config: { + providers: ['anthropic'], + repositories: [repo.githubFullName], + supervisorEnabled: true, + maxAgents: 10, + }, + }).returning(); + + res.json({ + success: true, + user: { + id: user.id, + username: user.githubUsername, + }, + repo: { + id: repo.id, + fullName: repo.githubFullName, + }, + workspace: { + id: workspace.id, + name: workspace.name, + publicUrl: workspace.publicUrl, + }, + instructions: [ + '1. Start agent-relay daemon: npm run dev (or agent-relay daemon)', + '2. Go to http://localhost:4567/app', + '3. The app should auto-connect to the local workspace', + '4. The WebSocket will connect to ws://localhost:3889/ws', + ], + }); + } catch (error) { + console.error('Error in setup-local-cloud:', error); + res.status(500).json({ error: 'Failed to setup local cloud' }); + } +}); + +/** + * POST /api/test/provision-real-workspace + * Provision a REAL Docker container using your Nango GitHub App connection. + * + * This tests the full flow including: + * - Fetching GitHub App token from Nango + * - Spinning up a Docker container + * - Cloning your actual repositories + * + * Prerequisites: + * - Must be logged in (via real OAuth or /api/test/login-as) + * - Must have connected repos via /connect-repos (real Nango GitHub App OAuth) + * - Docker must be running locally + * - COMPUTE_PROVIDER must be 'docker' (default for dev) + */ +testHelpersRouter.post('/provision-real-workspace', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ + error: 'Must be logged in. Use real OAuth or /api/test/login-as first.', + }); + } + + try { + const { name, repositoryFullName, providers, githubToken } = req.body; + + // Get user's connected repositories + const userRepos = await db.repositories.findByUserId(userId); + const reposWithNango = userRepos.filter(r => r.nangoConnectionId); + + if (reposWithNango.length === 0) { + return res.status(400).json({ + error: 'No repositories with Nango connection found. Complete /connect-repos first with real GitHub OAuth.', + hint: 'Go to http://localhost:4567/connect-repos and connect your GitHub App, or pass githubToken directly', + }); + } + + // Determine which repo to use + let targetRepo = reposWithNango[0]; + if (repositoryFullName) { + const found = reposWithNango.find(r => r.githubFullName === repositoryFullName); + if (!found) { + return res.status(400).json({ + error: `Repository ${repositoryFullName} not found or not connected via Nango`, + availableRepos: reposWithNango.map(r => r.githubFullName), + }); + } + targetRepo = found; + } + + // Use the real provisioner (Docker in dev mode) + const provisioner = getProvisioner(); + + const result = await provisioner.provision({ + userId, + name: name || `Test Workspace - ${targetRepo.githubFullName}`, + providers: providers || ['anthropic'], // Default to anthropic if not specified + repositories: [targetRepo.githubFullName], + supervisorEnabled: true, + maxAgents: 10, + // Allow passing GitHub token directly for local testing + githubToken: githubToken || undefined, + }); + + if (result.status === 'error') { + return res.status(500).json({ + error: 'Provisioning failed', + details: result.error, + }); + } + + res.json({ + success: true, + workspace: { + id: result.workspaceId, + status: result.status, + publicUrl: result.publicUrl, + }, + repository: targetRepo.githubFullName, + instructions: [ + `1. Workspace is running at ${result.publicUrl}`, + `2. Repository ${targetRepo.githubFullName} should be cloned`, + `3. Go to http://localhost:4567/app to connect`, + `4. Check container: docker logs ar-${result.workspaceId.substring(0, 8)}`, + `5. Verify clone: docker exec ar-${result.workspaceId.substring(0, 8)} ls /workspace/repos`, + ], + }); + } catch (error) { + console.error('Error provisioning real workspace:', error); + res.status(500).json({ + error: 'Failed to provision workspace', + details: error instanceof Error ? error.message : 'Unknown error', + }); + } +}); + +/** + * GET /api/test/my-repos + * List current user's connected repositories (for debugging) + */ +testHelpersRouter.get('/my-repos', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const repos = await db.repositories.findByUserId(userId); + + res.json({ + userId, + repositories: repos.map(r => ({ + id: r.id, + fullName: r.githubFullName, + isPrivate: r.isPrivate, + hasNangoConnection: !!r.nangoConnectionId, + nangoConnectionId: r.nangoConnectionId, // For debugging + syncStatus: r.syncStatus, + })), + }); + } catch (error) { + console.error('Error fetching repos:', error); + res.status(500).json({ error: 'Failed to fetch repositories' }); + } +}); + +/** + * GET /api/test/my-workspaces + * List current user's workspaces (for debugging) + */ +testHelpersRouter.get('/my-workspaces', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const userWorkspaces = await db.workspaces.findByUserId(userId); + + res.json({ + userId, + workspaces: userWorkspaces.map(w => ({ + id: w.id, + name: w.name, + status: w.status, + publicUrl: w.publicUrl, + computeProvider: w.computeProvider, + computeId: w.computeId, + config: w.config, + })), + }); + } catch (error) { + console.error('Error fetching workspaces:', error); + res.status(500).json({ error: 'Failed to fetch workspaces' }); + } +}); + +/** + * GET /api/test/nango-token + * Test fetching GitHub App token from Nango (for debugging) + */ +testHelpersRouter.get('/nango-token', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const repos = await db.repositories.findByUserId(userId); + const repoWithConnection = repos.find(r => r.nangoConnectionId); + + if (!repoWithConnection?.nangoConnectionId) { + return res.status(400).json({ + error: 'No Nango connection found', + repos: repos.map(r => ({ fullName: r.githubFullName, nangoConnectionId: r.nangoConnectionId })), + }); + } + + console.log('[test] Fetching token for connection:', repoWithConnection.nangoConnectionId); + + const token = await nangoService.getGithubAppToken(repoWithConnection.nangoConnectionId); + + res.json({ + success: true, + connectionId: repoWithConnection.nangoConnectionId, + tokenLength: token.length, + tokenPrefix: token.substring(0, 10) + '...', + }); + } catch (error) { + console.error('[test] Nango token fetch error:', error); + res.status(500).json({ + error: 'Failed to fetch token', + details: error instanceof Error ? error.message : 'Unknown error', + }); + } +}); + +/** + * DELETE /api/test/workspace/:id + * Delete/deprovision a workspace (for cleanup) + */ +testHelpersRouter.delete('/workspace/:id', async (req: Request, res: Response) => { + if (!isTestMode) { + return res.status(403).json({ error: 'Test endpoints disabled in production' }); + } + + const userId = req.session.userId; + if (!userId) { + return res.status(401).json({ error: 'Not logged in' }); + } + + try { + const { id } = req.params; + const workspace = await db.workspaces.findById(id); + + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + if (workspace.userId !== userId) { + return res.status(403).json({ error: 'Not your workspace' }); + } + + const provisioner = getProvisioner(); + await provisioner.deprovision(id); + + res.json({ + success: true, + message: `Workspace ${id} deleted`, + }); + } catch (error) { + console.error('Error deleting workspace:', error); + res.status(500).json({ error: 'Failed to delete workspace' }); + } +}); diff --git a/src/cloud/api/workspaces.ts b/src/cloud/api/workspaces.ts index 65e3adcb..b2fd0256 100644 --- a/src/cloud/api/workspaces.ts +++ b/src/cloud/api/workspaces.ts @@ -530,9 +530,155 @@ async function removeDomainFromCompute(workspace: Workspace): Promise { // Railway and Docker: similar cleanup } +/** + * POST /api/workspaces/:id/connect-provider + * Trigger CLI login flow for a provider (claude, codex, opencode, droid) + * Returns the OAuth URL for the user to complete authentication + */ +const PROVIDER_CLI_COMMANDS: Record = { + anthropic: { command: 'claude', displayName: 'Claude' }, + codex: { command: 'codex login', displayName: 'Codex' }, + opencode: { command: 'opencode', displayName: 'OpenCode' }, + droid: { command: 'droid', displayName: 'Droid' }, +}; + +workspacesRouter.post('/:id/connect-provider', async (req: Request, res: Response) => { + const userId = req.session.userId!; + const { id } = req.params; + const { provider } = req.body; + + const providerConfig = PROVIDER_CLI_COMMANDS[provider]; + if (!provider || !providerConfig) { + return res.status(400).json({ + error: 'Valid provider is required', + validProviders: Object.keys(PROVIDER_CLI_COMMANDS), + }); + } + + try { + const workspace = await db.workspaces.findById(id); + + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + if (workspace.userId !== userId) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + if (workspace.status !== 'running') { + return res.status(400).json({ error: 'Workspace must be running to connect providers' }); + } + + const containerName = workspace.computeId; + + if (!containerName) { + return res.status(400).json({ error: 'Workspace has no compute instance' }); + } + + // Run the CLI login command in the container and capture output + const { execSync } = await import('child_process'); + + try { + // For Docker containers, run the command and capture the OAuth URL + // The CLI typically outputs something like: + // "Please visit https://... to authenticate" + const output = execSync( + `docker exec ${containerName} timeout 10 ${providerConfig.command} 2>&1 || true`, + { encoding: 'utf-8', timeout: 15000 } + ); + + // Parse OAuth URL from output + const urlMatch = output.match(/https:\/\/[^\s]+/); + + if (urlMatch) { + res.json({ + success: true, + provider, + authUrl: urlMatch[0], + message: `Visit the URL to authenticate with ${providerConfig.displayName}`, + instructions: [ + '1. Click the authentication URL below', + '2. Complete the login in your browser', + '3. Return here - your workspace will automatically detect the credentials', + ], + }); + } else { + // CLI might already be authenticated or returned different output + res.json({ + success: false, + provider, + output: output.substring(0, 500), // First 500 chars for debugging + message: 'Could not extract authentication URL. The provider may already be connected.', + }); + } + } catch (execError) { + const errorMsg = execError instanceof Error ? execError.message : 'Unknown error'; + console.error(`[workspace] CLI login error for ${provider}:`, errorMsg); + + res.status(500).json({ + error: 'Failed to start authentication flow', + details: errorMsg, + }); + } + } catch (error) { + console.error('Error connecting provider:', error); + res.status(500).json({ error: 'Failed to connect provider' }); + } +}); + +/** + * POST /api/workspaces/:id/proxy/* + * Proxy API requests to the workspace container + * This allows the dashboard to make REST calls through the cloud server + */ +workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Response) => { + const userId = req.session.userId!; + const { id, proxyPath } = req.params; + + try { + const workspace = await db.workspaces.findById(id); + + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + if (workspace.userId !== userId) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + if (workspace.status !== 'running' || !workspace.publicUrl) { + return res.status(400).json({ error: 'Workspace is not running' }); + } + + // Forward the request to the workspace + const targetUrl = `${workspace.publicUrl}/api/${proxyPath}`; + + const fetchOptions: RequestInit = { + method: req.method, + headers: { + 'Content-Type': 'application/json', + }, + }; + + if (req.method !== 'GET' && req.method !== 'HEAD') { + fetchOptions.body = JSON.stringify(req.body); + } + + const proxyRes = await fetch(targetUrl, fetchOptions); + const data = await proxyRes.json(); + + res.status(proxyRes.status).json(data); + } catch (error) { + console.error('[workspace-proxy] Error:', error); + res.status(500).json({ error: 'Failed to proxy request to workspace' }); + } +}); + /** * POST /api/workspaces/quick * Quick provision: one-click with defaults + * Providers are optional - can be connected after workspace creation via CLI login */ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: Response) => { const userId = req.session.userId!; @@ -543,18 +689,12 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R } try { - // Get user's connected providers + // Get user's connected providers (optional now) const credentials = await db.credentials.findByUserId(userId); const providers = credentials .filter((c) => c.provider !== 'github') .map((c) => c.provider); - if (providers.length === 0) { - return res.status(400).json({ - error: 'No AI providers connected. Please connect at least one provider.', - }); - } - // Create workspace with defaults const provisioner = getProvisioner(); const workspaceName = name || `Workspace for ${repositoryFullName}`; @@ -562,7 +702,7 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R const result = await provisioner.provision({ userId, name: workspaceName, - providers, + providers: providers.length > 0 ? providers : [], // Empty is OK now repositories: [repositoryFullName], supervisorEnabled: true, maxAgents: 10, @@ -579,7 +719,10 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R workspaceId: result.workspaceId, status: result.status, publicUrl: result.publicUrl, - message: 'Workspace provisioned successfully!', + providersConnected: providers.length > 0, + message: providers.length > 0 + ? 'Workspace provisioned successfully!' + : 'Workspace provisioned! Connect an AI provider to start using agents.', }); } catch (error) { console.error('Error quick provisioning:', error); diff --git a/src/cloud/config.ts b/src/cloud/config.ts index b2dcadcc..9173b2d7 100644 --- a/src/cloud/config.ts +++ b/src/cloud/config.ts @@ -85,8 +85,8 @@ function optionalEnv(name: string): string | undefined { export function loadConfig(): CloudConfig { return { - port: parseInt(process.env.PORT || '3000', 10), - publicUrl: process.env.PUBLIC_URL || 'http://localhost:3000', + port: parseInt(process.env.PORT || '4567', 10), + publicUrl: process.env.PUBLIC_URL || 'http://localhost:4567', sessionSecret: requireEnv('SESSION_SECRET'), databaseUrl: requireEnv('DATABASE_URL'), diff --git a/src/cloud/db/migrations/0003_nango_user_columns.sql b/src/cloud/db/migrations/0003_nango_user_columns.sql new file mode 100644 index 00000000..1f0313ea --- /dev/null +++ b/src/cloud/db/migrations/0003_nango_user_columns.sql @@ -0,0 +1,15 @@ +-- Add Nango OAuth connection columns to users table +-- These columns support the two-connection pattern: +-- - nango_connection_id: Permanent login connection +-- - incoming_connection_id: Temp connection for polling during login +-- - pending_installation_request: Tracks org approval wait state + +ALTER TABLE users ADD COLUMN IF NOT EXISTS nango_connection_id VARCHAR(255); +--> statement-breakpoint +ALTER TABLE users ADD COLUMN IF NOT EXISTS incoming_connection_id VARCHAR(255); +--> statement-breakpoint +ALTER TABLE users ADD COLUMN IF NOT EXISTS pending_installation_request TIMESTAMP; +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_users_nango_connection ON users(nango_connection_id); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_users_incoming_connection ON users(incoming_connection_id); diff --git a/src/cloud/db/migrations/0004_repositories_nango_columns.sql b/src/cloud/db/migrations/0004_repositories_nango_columns.sql new file mode 100644 index 00000000..e4baafc2 --- /dev/null +++ b/src/cloud/db/migrations/0004_repositories_nango_columns.sql @@ -0,0 +1,10 @@ +-- Add Nango connection columns to repositories table +-- These columns support GitHub App OAuth via Nango + +ALTER TABLE repositories ADD COLUMN IF NOT EXISTS installation_id UUID; +--> statement-breakpoint +ALTER TABLE repositories ADD COLUMN IF NOT EXISTS nango_connection_id VARCHAR(255); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_repositories_installation_id ON repositories(installation_id); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_repositories_nango_connection ON repositories(nango_connection_id); diff --git a/src/cloud/db/migrations/0005_github_installations.sql b/src/cloud/db/migrations/0005_github_installations.sql new file mode 100644 index 00000000..7137d068 --- /dev/null +++ b/src/cloud/db/migrations/0005_github_installations.sql @@ -0,0 +1,35 @@ +-- Create github_installations table and add foreign key to repositories +-- This table tracks GitHub App installations for accessing repos + +CREATE TABLE IF NOT EXISTS github_installations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + installation_id VARCHAR(255) UNIQUE NOT NULL, + account_type VARCHAR(50) NOT NULL, + account_login VARCHAR(255) NOT NULL, + account_id VARCHAR(255) NOT NULL, + installed_by_id UUID REFERENCES users(id) ON DELETE SET NULL, + permissions JSONB DEFAULT '{}', + events TEXT[], + suspended BOOLEAN NOT NULL DEFAULT false, + suspended_at TIMESTAMP, + suspended_by VARCHAR(255), + created_at TIMESTAMP DEFAULT NOW() NOT NULL, + updated_at TIMESTAMP DEFAULT NOW() NOT NULL +); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_github_installations_account_login ON github_installations(account_login); +--> statement-breakpoint +CREATE INDEX IF NOT EXISTS idx_github_installations_installed_by ON github_installations(installed_by_id); +--> statement-breakpoint +-- Add foreign key constraint to repositories.installation_id +ALTER TABLE repositories + ADD CONSTRAINT fk_repositories_installation + FOREIGN KEY (installation_id) + REFERENCES github_installations(id) + ON DELETE SET NULL; +--> statement-breakpoint +-- Add updated_at trigger for github_installations +DROP TRIGGER IF EXISTS trg_github_installations_updated_at ON github_installations; +CREATE TRIGGER trg_github_installations_updated_at + BEFORE UPDATE ON github_installations + FOR EACH ROW EXECUTE FUNCTION touch_updated_at(); diff --git a/src/cloud/db/migrations/meta/_journal.json b/src/cloud/db/migrations/meta/_journal.json index 7e7e1959..bf418cfc 100644 --- a/src/cloud/db/migrations/meta/_journal.json +++ b/src/cloud/db/migrations/meta/_journal.json @@ -15,6 +15,27 @@ "when": 1735776000000, "tag": "0002_agent_sessions", "breakpoints": true + }, + { + "idx": 2, + "version": "5", + "when": 1735862400000, + "tag": "0003_nango_user_columns", + "breakpoints": true + }, + { + "idx": 3, + "version": "5", + "when": 1735948800000, + "tag": "0004_repositories_nango_columns", + "breakpoints": true + }, + { + "idx": 4, + "version": "5", + "when": 1736035200000, + "tag": "0005_github_installations", + "breakpoints": true } ] } diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index 183205a8..2d1b76e9 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -11,6 +11,7 @@ import { nangoService } from '../services/nango.js'; const WORKSPACE_PORT = 3888; const FETCH_TIMEOUT_MS = 10_000; +const WORKSPACE_IMAGE = process.env.WORKSPACE_IMAGE || 'ghcr.io/agentworkforce/relay-workspace:latest'; /** * Get a fresh GitHub App installation token from Nango. @@ -104,6 +105,8 @@ export interface ProvisionConfig { repositories: string[]; supervisorEnabled?: boolean; maxAgents?: number; + /** Direct GitHub token for testing (bypasses Nango lookup) */ + githubToken?: string; } export interface ProvisionResult { @@ -231,7 +234,7 @@ class FlyProvisioner implements ComputeProvisioner { body: JSON.stringify({ region: this.region, config: { - image: 'ghcr.io/khaliqgant/agent-relay-workspace:latest', + image: WORKSPACE_IMAGE, env: { WORKSPACE_ID: workspace.id, SUPERVISOR_ENABLED: String(workspace.config.supervisorEnabled ?? false), @@ -539,7 +542,7 @@ class RailwayProvisioner implements ComputeProvisioner { projectId, name: 'workspace', source: { - image: 'ghcr.io/khaliqgant/agent-relay-workspace:latest', + image: WORKSPACE_IMAGE, }, }, }, @@ -751,7 +754,7 @@ class DockerProvisioner implements ComputeProvisioner { try { execSync( - `docker run -d --name ${containerName} -p ${hostPort}:${WORKSPACE_PORT} ${envArgs.join(' ')} ghcr.io/khaliqgant/agent-relay-workspace:latest`, + `docker run -d --name ${containerName} -p ${hostPort}:${WORKSPACE_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, { stdio: 'pipe' } ); @@ -863,13 +866,20 @@ export class WorkspaceProvisioner { } // GitHub token is required for cloning repositories - // Use Nango GitHub App token (fresh installation token, not from vault) + // Use direct token if provided (for testing), otherwise get from Nango if (config.repositories.length > 0) { - const githubToken = await getGithubAppTokenForUser(config.userId); - if (githubToken) { - credentials.set('github', githubToken); + if (config.githubToken) { + // Direct token provided (for testing) + credentials.set('github', config.githubToken); + console.log('[provisioner] Using provided GitHub token'); } else { - console.warn(`[provisioner] No GitHub App token for user ${config.userId}; repository cloning may fail.`); + // Get fresh installation token from Nango GitHub App + const githubToken = await getGithubAppTokenForUser(config.userId); + if (githubToken) { + credentials.set('github', githubToken); + } else { + console.warn(`[provisioner] No GitHub App token for user ${config.userId}; repository cloning may fail.`); + } } } diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 52c2ed9e..a9db80f1 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -64,16 +64,19 @@ export async function createServer(): Promise { await redisClient.connect(); // Middleware - // Configure helmet to allow Next.js inline scripts + // Configure helmet to allow Next.js inline scripts and Nango Connect UI app.use(helmet({ contentSecurityPolicy: { directives: { defaultSrc: ["'self'"], - scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'"], - styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"], - fontSrc: ["'self'", "https://fonts.gstatic.com"], + scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'", "https://connect.nango.dev"], + styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com", "https://connect.nango.dev"], + fontSrc: ["'self'", "https://fonts.gstatic.com", "data:"], imgSrc: ["'self'", "data:", "https:"], - connectSrc: ["'self'", "wss:", "ws:", "https:"], + connectSrc: ["'self'", "wss:", "ws:", "https:", "https://api.nango.dev", "https://connect.nango.dev"], + frameSrc: ["'self'", "https://connect.nango.dev", "https://github.com"], + childSrc: ["'self'", "https://connect.nango.dev", "blob:"], + workerSrc: ["'self'", "blob:"], }, }, })); @@ -83,7 +86,13 @@ export async function createServer(): Promise { credentials: true, }) ); - app.use(express.json()); + // Custom JSON parser that preserves raw body for webhook signature verification + app.use(express.json({ + verify: (req: Request, _res, buf) => { + // Store raw body for webhook signature verification + (req as Request & { rawBody?: string }).rawBody = buf.toString(); + }, + })); // Session middleware app.use( @@ -218,9 +227,12 @@ export async function createServer(): Promise { // Serve static dashboard files (Next.js static export) // Path: dist/cloud/server.js -> ../../src/dashboard/out const dashboardPath = path.join(__dirname, '../../src/dashboard/out'); - app.use(express.static(dashboardPath)); - // SPA fallback - serve index.html for all non-API routes + // Serve static files with .html extension fallback for clean URLs + // e.g., /signup will try /signup.html + app.use(express.static(dashboardPath, { extensions: ['html'] })); + + // SPA fallback - serve index.html for all non-API routes that don't match static files // Express 5 requires named wildcard params instead of bare '*' app.get('/{*splat}', (req, res, next) => { // Don't serve index.html for API routes diff --git a/src/cloud/services/nango.ts b/src/cloud/services/nango.ts index 64ef4181..11d0d556 100644 --- a/src/cloud/services/nango.ts +++ b/src/cloud/services/nango.ts @@ -1,6 +1,6 @@ -import crypto from 'crypto'; import { Nango } from '@nangohq/node'; import type { AxiosResponse } from 'axios'; +import crypto from 'node:crypto'; import { getConfig } from '../config.js'; export const NANGO_INTEGRATIONS = { @@ -56,7 +56,8 @@ class NangoService { /** * Retrieve an installation access token from a GitHub App connection. - * Nango will refresh the token when refreshGithubAppJwtToken=true. + * Use this ONLY when you need the raw token (e.g., for git clone URLs). + * For API calls, use the proxy methods instead. */ async getGithubAppToken(connectionId: string): Promise { const token = await this.client.getToken( @@ -65,29 +66,115 @@ class NangoService { false, true ); - if (typeof token !== 'string') { - throw new Error('Expected GitHub App token to be a string'); + + // Handle different return formats from Nango + if (typeof token === 'string') { + return token; + } + + // Nango may return an object with access_token + if (token && typeof token === 'object') { + const tokenObj = token as { access_token?: string; token?: string }; + if (tokenObj.access_token) { + return tokenObj.access_token; + } + if (tokenObj.token) { + return tokenObj.token; + } } - return token; + + console.error('[nango] Unexpected token format:', typeof token, token); + throw new Error('Expected GitHub App token to be a string'); } /** - * List repositories available to a GitHub App installation using the Nango connection. + * List repositories available to a GitHub App installation using the Nango Proxy. + * The proxy automatically handles token injection and refresh. + * @see https://nango.dev/docs/implementation-guides/requests-proxy/implement-requests-proxy */ async listGithubAppRepos(connectionId: string): Promise<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }> { - const token = await this.getGithubAppToken(connectionId); - const response = await fetch('https://api.github.com/installation/repositories?per_page=100', { - method: 'GET', - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github+json', - }, - }); - if (!response.ok) { - const text = await response.text(); - throw new Error(`Failed to list installation repositories: ${text}`); + const response = await this.client.get<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: '/installation/repositories', + params: { per_page: '100' }, + }) as AxiosResponse<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }>; + return response.data; + } + + /** + * Get the GitHub App installation ID from a connection. + * The installation ID is stored in connection_config.installation_id + */ + async getGithubAppInstallationId(connectionId: string): Promise { + try { + const connection = await this.client.getConnection(NANGO_INTEGRATIONS.GITHUB_APP, connectionId); + // Extract installation_id from connection_config (where Nango stores it for GitHub App OAuth) + const connectionConfig = (connection as { connection_config?: Record }).connection_config; + if (connectionConfig?.installation_id) { + return Number(connectionConfig.installation_id); + } + console.warn('[nango] No installation_id in connection_config'); + return null; + } catch (err) { + console.error('[nango] Failed to get installation ID:', err); + return null; } - return response.json() as Promise<{ repositories: Array<{ id: number; full_name: string; private: boolean; default_branch: string }> }>; + } + + /** + * Create an issue via Nango Proxy. + */ + async createGithubIssue( + connectionId: string, + owner: string, + repo: string, + data: { title: string; body?: string; labels?: string[] } + ): Promise<{ number: number; html_url: string }> { + const response = await this.client.post<{ number: number; html_url: string }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: `/repos/${owner}/${repo}/issues`, + data, + }) as AxiosResponse<{ number: number; html_url: string }>; + return response.data; + } + + /** + * Create a pull request via Nango Proxy. + */ + async createGithubPullRequest( + connectionId: string, + owner: string, + repo: string, + data: { title: string; body?: string; head: string; base: string } + ): Promise<{ number: number; html_url: string }> { + const response = await this.client.post<{ number: number; html_url: string }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: `/repos/${owner}/${repo}/pulls`, + data, + }) as AxiosResponse<{ number: number; html_url: string }>; + return response.data; + } + + /** + * Add a comment to an issue via Nango Proxy. + */ + async addGithubIssueComment( + connectionId: string, + owner: string, + repo: string, + issueNumber: number, + body: string + ): Promise<{ id: number; html_url: string }> { + const response = await this.client.post<{ id: number; html_url: string }>({ + connectionId, + providerConfigKey: NANGO_INTEGRATIONS.GITHUB_APP, + endpoint: `/repos/${owner}/${repo}/issues/${issueNumber}/comments`, + data: { body }, + }) as AxiosResponse<{ id: number; html_url: string }>; + return response.data; } /** @@ -101,12 +188,63 @@ class NangoService { } /** - * Verify webhook signature sent by Nango using HMAC SHA256 with the secret key. + * Delete a connection from Nango. + * + * Used to remove temporary session connections for returning users + * to prevent duplicate connections in Nango. In the two-connection pattern, + * new users get a permanent connection but returning users authenticate + * with a temporary one that gets deleted. + * + * @param connectionId - Nango connection ID to delete + * @param providerConfigKey - The integration key (e.g., 'github') */ - verifyWebhookSignature(rawBody: string, signature?: string | string[] | null): boolean { - if (!signature || typeof signature !== 'string') return false; - const expected = crypto.createHmac('sha256', this.secret).update(rawBody).digest('hex'); - return signature === expected; + async deleteConnection(connectionId: string, providerConfigKey: string): Promise { + await this.client.deleteConnection(providerConfigKey, connectionId); + } + + /** + * Get connection metadata including end_user info. + * Useful when webhook doesn't include end_user data. + */ + async getConnection(connectionId: string, providerConfigKey: string): Promise<{ + id: number; + connection_id: string; + provider_config_key: string; + end_user?: { id?: string; email?: string }; + metadata?: Record; + }> { + const connection = await this.client.getConnection(providerConfigKey, connectionId); + return connection as unknown as { + id: number; + connection_id: string; + provider_config_key: string; + end_user?: { id?: string; email?: string }; + metadata?: Record; + }; + } + + /** + * Verify webhook signature sent by Nango. + * Uses the new verifyIncomingWebhookRequest method. + * @see https://nango.dev/docs/reference/sdks/node#verify-webhook-signature + */ + verifyWebhookSignature(rawBody: string, headers: Record): boolean { + try { + // Use the new method: verifyIncomingWebhookRequest(body, headers) + return this.client.verifyIncomingWebhookRequest(rawBody, headers as Record); + } catch (err) { + console.error('[nango] verifyIncomingWebhookRequest error:', err); + // Fall back to manual HMAC verification using the secret key + const signature = headers['x-nango-signature'] as string | undefined; + const hmacSha256 = headers['x-nango-hmac-sha256'] as string | undefined; + if (!signature && !hmacSha256) return false; + + const expectedSignature = crypto + .createHmac('sha256', this.secret) + .update(rawBody) + .digest('hex'); + return signature === expectedSignature || hmacSha256 === expectedSignature; + } } } diff --git a/src/dashboard/app/app/page.tsx b/src/dashboard/app/app/page.tsx index f3b2dc16..f8452d34 100644 --- a/src/dashboard/app/app/page.tsx +++ b/src/dashboard/app/app/page.tsx @@ -1,13 +1,730 @@ /** * Dashboard V2 - Main App Page * - * Entry point for the dashboard application (after login). + * In cloud mode: Shows workspace selection and connects to selected workspace's dashboard. + * In local mode: Connects to local daemon WebSocket. */ 'use client'; +import React, { useState, useEffect, useCallback } from 'react'; import { App } from '../../react-components/App'; +import { LogoIcon } from '../../react-components/Logo'; +import { setActiveWorkspaceId } from '../../lib/api'; + +interface Workspace { + id: string; + name: string; + status: 'provisioning' | 'running' | 'stopped' | 'error'; + publicUrl?: string; + providers?: string[]; + repositories?: string[]; + createdAt: string; +} + +interface Repository { + id: string; + fullName: string; + isPrivate: boolean; + defaultBranch: string; + syncStatus: string; + hasNangoConnection: boolean; +} + +interface ProviderInfo { + id: string; + name: string; + displayName: string; + color: string; + cliCommand?: string; +} + +interface ProviderAuthState { + provider: ProviderInfo; + authUrl?: string; + status: 'starting' | 'waiting' | 'success' | 'error'; + error?: string; +} + +type PageState = 'loading' | 'local' | 'select-workspace' | 'no-workspaces' | 'connect-provider' | 'connecting' | 'connected' | 'error'; + +// Available AI providers +const AI_PROVIDERS: ProviderInfo[] = [ + { id: 'anthropic', name: 'Anthropic', displayName: 'Claude', color: '#D97757', cliCommand: 'claude' }, + { id: 'codex', name: 'OpenAI', displayName: 'Codex', color: '#10A37F', cliCommand: 'codex login' }, + { id: 'opencode', name: 'OpenCode', displayName: 'OpenCode', color: '#00D4AA', cliCommand: 'opencode' }, + { id: 'droid', name: 'Factory', displayName: 'Droid', color: '#6366F1', cliCommand: 'droid' }, +]; export default function DashboardPage() { - return ; + const [state, setState] = useState('loading'); + const [workspaces, setWorkspaces] = useState([]); + const [repos, setRepos] = useState([]); + const [selectedWorkspace, setSelectedWorkspace] = useState(null); + const [wsUrl, setWsUrl] = useState(undefined); + const [error, setError] = useState(null); + // Track cloud mode for potential future use + const [_isCloudMode, setIsCloudMode] = useState(false); + const [csrfToken, setCsrfToken] = useState(null); + const [providerAuth, setProviderAuth] = useState(null); + + // Check if we're in cloud mode and fetch data + useEffect(() => { + const init = async () => { + try { + // Check session to determine if we're in cloud mode + const sessionRes = await fetch('/api/auth/session', { credentials: 'include' }); + + // If session endpoint doesn't exist (404), we're in local mode + if (sessionRes.status === 404) { + setIsCloudMode(false); + setState('local'); + return; + } + + // Capture CSRF token from response header + const token = sessionRes.headers.get('X-CSRF-Token'); + if (token) { + setCsrfToken(token); + } + + const session = await sessionRes.json(); + + if (!session.authenticated) { + // Cloud mode but not authenticated - redirect to login + window.location.href = '/login'; + return; + } + + // Cloud mode - fetch workspaces and repos + setIsCloudMode(true); + + const [workspacesRes, reposRes] = await Promise.all([ + fetch('/api/workspaces', { credentials: 'include' }), + fetch('/api/github-app/repos', { credentials: 'include' }), + ]); + + if (!workspacesRes.ok) { + if (workspacesRes.status === 401) { + window.location.href = '/login'; + return; + } + throw new Error('Failed to fetch workspaces'); + } + + const workspacesData = await workspacesRes.json(); + const reposData = reposRes.ok ? await reposRes.json() : { repositories: [] }; + + setWorkspaces(workspacesData.workspaces || []); + setRepos(reposData.repositories || []); + + // Determine next state based on workspace availability + const runningWorkspaces = (workspacesData.workspaces || []).filter( + (w: Workspace) => w.status === 'running' && w.publicUrl + ); + + if (runningWorkspaces.length === 1) { + // Auto-connect to the only running workspace + connectToWorkspace(runningWorkspaces[0]); + } else if (runningWorkspaces.length > 1) { + setState('select-workspace'); + } else if ((workspacesData.workspaces || []).length > 0) { + // Has workspaces but none running + setState('select-workspace'); + } else if ((reposData.repositories || []).length > 0) { + // Has repos but no workspaces - show create workspace + setState('no-workspaces'); + } else { + // No repos, no workspaces - redirect to connect repos + window.location.href = '/connect-repos'; + } + } catch (err) { + // If session check fails with 404, assume local mode + if (err instanceof TypeError && err.message.includes('Failed to fetch')) { + setIsCloudMode(false); + setState('local'); + return; + } + console.error('Init error:', err); + setError(err instanceof Error ? err.message : 'Failed to initialize'); + setState('error'); + } + }; + + init(); + }, []); + + const connectToWorkspace = useCallback((workspace: Workspace) => { + if (!workspace.publicUrl) { + setError('Workspace has no public URL'); + setState('error'); + return; + } + + setSelectedWorkspace(workspace); + setState('connecting'); + + // Set the active workspace ID for API proxying + setActiveWorkspaceId(workspace.id); + + // Derive WebSocket URL from public URL + // e.g., https://workspace-abc.agentrelay.dev -> wss://workspace-abc.agentrelay.dev/ws + const url = new URL(workspace.publicUrl); + const wsProtocol = url.protocol === 'https:' ? 'wss:' : 'ws:'; + const derivedWsUrl = `${wsProtocol}//${url.host}/ws`; + + setWsUrl(derivedWsUrl); + setState('connected'); + }, []); + + const handleCreateWorkspace = useCallback(async (repoFullName: string) => { + setState('loading'); + setError(null); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + + const res = await fetch('/api/workspaces/quick', { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ repositoryFullName: repoFullName }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to create workspace'); + } + + // Poll for workspace to be ready + const pollForReady = async (workspaceId: string) => { + const maxAttempts = 60; // 2 minutes with 2s interval + let attempts = 0; + + while (attempts < maxAttempts) { + const statusRes = await fetch(`/api/workspaces/${workspaceId}/status`, { + credentials: 'include', + }); + const statusData = await statusRes.json(); + + if (statusData.status === 'running') { + // Fetch updated workspace info + const wsRes = await fetch(`/api/workspaces/${workspaceId}`, { + credentials: 'include', + }); + const wsData = await wsRes.json(); + if (wsData.publicUrl) { + // Store workspace and show provider connection screen + setSelectedWorkspace(wsData); + setState('connect-provider'); + return; + } + } else if (statusData.status === 'error') { + throw new Error('Workspace provisioning failed'); + } + + await new Promise(resolve => setTimeout(resolve, 2000)); + attempts++; + } + + throw new Error('Workspace provisioning timed out'); + }; + + await pollForReady(data.workspaceId); + } catch (err) { + console.error('Create workspace error:', err); + setError(err instanceof Error ? err.message : 'Failed to create workspace'); + setState('no-workspaces'); + } + }, [connectToWorkspace, csrfToken]); + + // Handle connecting an AI provider via CLI login + const handleConnectProvider = useCallback(async (provider: ProviderInfo) => { + if (!selectedWorkspace) return; + + setProviderAuth({ provider, status: 'starting' }); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + + const res = await fetch(`/api/workspaces/${selectedWorkspace.id}/connect-provider`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ provider: provider.id }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start provider auth'); + } + + if (data.authUrl) { + // Auto-open the auth URL in a popup + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + data.authUrl, + `${provider.displayName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + setProviderAuth({ provider, authUrl: data.authUrl, status: 'waiting' }); + } else { + // No auth URL means already authenticated or error + setProviderAuth({ provider, status: 'success' }); + // Auto-continue after 2 seconds + setTimeout(() => { + setProviderAuth(null); + connectToWorkspace(selectedWorkspace); + }, 2000); + } + } catch (err) { + setProviderAuth({ + provider, + status: 'error', + error: err instanceof Error ? err.message : 'Failed to connect provider', + }); + } + }, [selectedWorkspace, csrfToken, connectToWorkspace]); + + // Skip provider connection and continue to workspace + const handleSkipProvider = useCallback(() => { + if (selectedWorkspace) { + setProviderAuth(null); + connectToWorkspace(selectedWorkspace); + } + }, [selectedWorkspace, connectToWorkspace]); + + const handleStartWorkspace = useCallback(async (workspace: Workspace) => { + setState('loading'); + setError(null); + + try { + const headers: Record = {}; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + + const res = await fetch(`/api/workspaces/${workspace.id}/restart`, { + method: 'POST', + credentials: 'include', + headers, + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to start workspace'); + } + + // Poll for workspace to be ready + const maxAttempts = 60; + let attempts = 0; + + while (attempts < maxAttempts) { + const statusRes = await fetch(`/api/workspaces/${workspace.id}/status`, { + credentials: 'include', + }); + const statusData = await statusRes.json(); + + if (statusData.status === 'running') { + const wsRes = await fetch(`/api/workspaces/${workspace.id}`, { + credentials: 'include', + }); + const wsData = await wsRes.json(); + if (wsData.publicUrl) { + connectToWorkspace({ ...workspace, ...wsData }); + return; + } + } + + await new Promise(resolve => setTimeout(resolve, 2000)); + attempts++; + } + + throw new Error('Workspace start timed out'); + } catch (err) { + console.error('Start workspace error:', err); + setError(err instanceof Error ? err.message : 'Failed to start workspace'); + setState('select-workspace'); + } + }, [connectToWorkspace, csrfToken]); + + // Loading state + if (state === 'loading') { + return ( +
+
+ + + + +

Loading...

+
+
+ ); + } + + // Local mode - just render the App component + if (state === 'local') { + return ; + } + + // Connected to workspace - render App with workspace's WebSocket + if (state === 'connected' && wsUrl) { + return ; + } + + // Connecting state + if (state === 'connecting') { + return ( +
+
+ + + + +

Connecting to {selectedWorkspace?.name}...

+

{selectedWorkspace?.publicUrl}

+
+
+ ); + } + + // Error state + if (state === 'error') { + return ( +
+
+
+ + + +
+

Something went wrong

+

{error}

+ +
+
+ ); + } + + // Connect provider state - show after workspace is ready + if (state === 'connect-provider' && selectedWorkspace) { + return ( +
+ {/* Background grid */} +
+
+
+ +
+ {/* Logo */} +
+ +

Connect AI Provider

+

+ Your workspace {selectedWorkspace.name} is ready! +
Connect an AI provider to start using agents. +

+
+ + {/* Provider auth modal */} + {providerAuth && ( +
+
+
+ {providerAuth.provider.displayName[0]} +
+
+

{providerAuth.provider.displayName}

+

+ {providerAuth.status === 'starting' && 'Starting login...'} + {providerAuth.status === 'waiting' && 'Complete login in the popup'} + {providerAuth.status === 'success' && 'Connected!'} + {providerAuth.status === 'error' && providerAuth.error} +

+
+
+ + {providerAuth.status === 'waiting' && providerAuth.authUrl && ( +
+
+ + + + + Complete login in the popup window +
+

+ A popup window should have opened. If it didn't, click below: +

+ + Open Login Page Manually + + +
+ )} + + {providerAuth.status === 'error' && ( + + )} +
+ )} + + {/* Provider list */} + {!providerAuth && ( +
+

Choose an AI Provider

+
+ {AI_PROVIDERS.map((provider) => ( + + ))} +
+
+ )} + + {/* Skip button */} +
+ +
+
+
+ ); + } + + // Workspace selection / no workspaces UI + return ( +
+ {/* Background grid */} +
+
+
+ +
+ {/* Logo */} +
+ +

Agent Relay

+

+ {state === 'no-workspaces' ? 'Create a workspace to get started' : 'Select a workspace'} +

+
+ + {error && ( +
+

{error}

+
+ )} + + {/* Workspaces list */} + {state === 'select-workspace' && workspaces.length > 0 && ( +
+

Your Workspaces

+
+ {workspaces.map((workspace) => ( +
+
+
+
+

{workspace.name}

+

+ {workspace.status === 'running' ? 'Running' : + workspace.status === 'provisioning' ? 'Starting...' : + workspace.status === 'stopped' ? 'Stopped' : 'Error'} +

+
+
+
+ {workspace.status === 'running' && workspace.publicUrl ? ( + + ) : workspace.status === 'stopped' ? ( + + ) : workspace.status === 'provisioning' ? ( + Starting... + ) : ( + Failed + )} +
+
+ ))} +
+ + {repos.length > 0 && ( +
+

Or create a new workspace:

+
+ {repos.slice(0, 3).map((repo) => ( + + ))} +
+
+ )} +
+ )} + + {/* No workspaces - create first one */} + {state === 'no-workspaces' && ( +
+

Create Your First Workspace

+

+ Select a repository to create a workspace where agents can work on your code. +

+ + {repos.length > 0 ? ( +
+ {repos.map((repo) => ( + + ))} +
+ ) : ( +
+

No repositories connected yet.

+ + + + + Connect GitHub + +
+ )} +
+ )} + + {/* Navigation */} +
+ + Manage Repositories + + ยท + +
+
+
+ ); } diff --git a/src/dashboard/app/connect-repos/page.tsx b/src/dashboard/app/connect-repos/page.tsx index ae1179b6..2944bd9d 100644 --- a/src/dashboard/app/connect-repos/page.tsx +++ b/src/dashboard/app/connect-repos/page.tsx @@ -1,19 +1,17 @@ /** * Connect Repos Page - GitHub App OAuth via Nango * - * Allows authenticated users to connect their GitHub repositories - * via the GitHub App OAuth flow (separate from login). + * Key: Initialize Nango on page load, not on click. + * This avoids popup blockers by ensuring openConnectUI is synchronous. */ 'use client'; -import React, { useState, useEffect, useCallback, useRef } from 'react'; -import Nango, { ConnectUI } from '@nangohq/frontend'; -import type { ConnectUIEvent } from '@nangohq/frontend'; -import { cloudApi } from '../../lib/cloudApi'; +import React, { useState, useEffect, useRef } from 'react'; +import Nango from '@nangohq/frontend'; import { LogoIcon } from '../../react-components/Logo'; -type ConnectState = 'checking' | 'idle' | 'loading' | 'connecting' | 'polling' | 'pending-approval' | 'success' | 'error'; +type ConnectState = 'checking' | 'ready' | 'connecting' | 'polling' | 'pending-approval' | 'success' | 'error'; interface ConnectedRepo { id: string; @@ -27,148 +25,198 @@ export default function ConnectReposPage() { const [error, setError] = useState(null); const [repos, setRepos] = useState([]); const [pendingMessage, setPendingMessage] = useState(null); - const pollIntervalRef = useRef(null); - const connectUIRef = useRef(null); + const [statusMessage, setStatusMessage] = useState(''); - // Check session on mount - useEffect(() => { - const checkSession = async () => { - const session = await cloudApi.checkSession(); - if (!session.authenticated) { - // Redirect to login - window.location.href = '/login'; - return; - } - setState('idle'); - }; - checkSession(); - }, []); + // Store Nango instance - initialized on mount + const nangoRef = useRef | null>(null); - // Cleanup on unmount + // Check session and initialize Nango on mount useEffect(() => { - return () => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - if (connectUIRef.current) { - connectUIRef.current.close(); + let mounted = true; + + const init = async () => { + try { + // Check if authenticated + const sessionRes = await fetch('/api/auth/session', { credentials: 'include' }); + const session = await sessionRes.json(); + if (!session.authenticated) { + window.location.href = '/login'; + return; + } + + if (!mounted) return; + + // Get Nango session token for repo connection + const nangoRes = await fetch('/api/auth/nango/repo-session', { + credentials: 'include', + }); + const nangoData = await nangoRes.json(); + + if (!mounted) return; + + if (!nangoRes.ok || !nangoData.sessionToken) { + if (nangoData?.sessionExpired || nangoData?.code === 'SESSION_EXPIRED') { + window.location.href = '/login'; + return; + } + setError('Failed to initialize. Please refresh the page.'); + setState('error'); + return; + } + + // Create Nango instance NOW, not on click + nangoRef.current = new Nango({ connectSessionToken: nangoData.sessionToken }); + setState('ready'); + } catch { + if (mounted) { + window.location.href = '/login'; + } } }; + + init(); + return () => { mounted = false; }; }, []); - // Poll for repo sync completion - const startPolling = useCallback((connId: string) => { - setState('polling'); + const checkRepoStatus = async (connectionId: string): Promise<{ + ready: boolean; + pendingApproval?: boolean; + message?: string; + repos?: ConnectedRepo[]; + }> => { + const response = await fetch(`/api/auth/nango/repo-status/${connectionId}`, { + credentials: 'include', + }); + if (!response.ok) { + throw new Error('Status not ready'); + } + return response.json(); + }; - pollIntervalRef.current = setInterval(async () => { - try { - const result = await cloudApi.checkNangoRepoStatus(connId); - if (result.success) { - if (result.data.pendingApproval) { - // Org approval pending - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } + const handleAuthSuccess = async (connectionId: string) => { + try { + setState('polling'); + setStatusMessage('Syncing repositories...'); + + const pollStartTime = Date.now(); + const maxPollTime = 5 * 60 * 1000; + const pollInterval = 2000; + + const pollForRepos = async (): Promise => { + const elapsed = Date.now() - pollStartTime; + + if (elapsed > maxPollTime) { + throw new Error('Connection timed out. Please try again.'); + } + + try { + const result = await checkRepoStatus(connectionId); + if (result.pendingApproval) { setState('pending-approval'); - setPendingMessage(result.data.message || 'Waiting for organization admin approval'); - } else if (result.data.ready && result.data.repos) { - // Repos synced successfully - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - setRepos(result.data.repos); + setPendingMessage(result.message || 'Waiting for organization admin approval'); + return; + } else if (result.ready && result.repos) { + setRepos(result.repos); setState('success'); + return; } + + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForRepos(); + } catch { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForRepos(); } - } catch (err) { - console.error('Polling error:', err); - } - }, 2000); + }; - // Timeout after 5 minutes - setTimeout(() => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - pollIntervalRef.current = null; - setState('error'); - setError('Connection timed out. Please try again.'); - } - }, 5 * 60 * 1000); - }, []); + await pollForRepos(); + } catch (err) { + console.error('[AUTH] Error:', err); + setError(err instanceof Error ? err.message : 'Connection failed'); + setState('error'); + setStatusMessage(''); + } + }; - // Handle connect button click - const handleConnect = useCallback(async () => { - setState('loading'); + // Use nango.auth() instead of openConnectUI to avoid popup blocker issues + const handleConnect = async () => { + if (!nangoRef.current) { + setError('Not ready. Please refresh the page.'); + return; + } + + setState('connecting'); setError(null); + setStatusMessage('Connecting to GitHub...'); try { - // Create Nango instance and open Connect UI first (shows loading state) - const nango = new Nango(); - - const handleEvent = (event: ConnectUIEvent) => { - if (event.type === 'connect') { - // Connection successful - start polling for repo sync - const connectionId = event.payload.connectionId; - startPolling(connectionId); - if (connectUIRef.current) { - connectUIRef.current.close(); - } - } else if (event.type === 'close') { - // User closed without connecting - setState('idle'); - } else if (event.type === 'error') { - setState('error'); - setError(event.payload.errorMessage || 'Connection failed'); - if (connectUIRef.current) { - connectUIRef.current.close(); - } - } - }; + // Use github-app-oauth for GitHub App installation + const result = await nangoRef.current.auth('github-app-oauth'); + if (result && 'connectionId' in result) { + await handleAuthSuccess(result.connectionId); + } else { + throw new Error('No connection ID returned'); + } + } catch (err: unknown) { + const error = err as Error & { type?: string }; + console.error('GitHub App auth error:', error); - // Open Connect UI (shows loading until token is set) - connectUIRef.current = nango.openConnectUI({ - onEvent: handleEvent, - }); - connectUIRef.current.open(); - setState('connecting'); - - // Get repo session token from backend and set it - const sessionResult = await cloudApi.getNangoRepoSession(); - if (!sessionResult.success) { - if (connectUIRef.current) { - connectUIRef.current.close(); - } - if (sessionResult.sessionExpired) { - window.location.href = '/login'; - return; - } - throw new Error(sessionResult.error || 'Failed to create session'); + // Don't show error for user-cancelled auth + if (error.type === 'user_cancelled' || error.message?.includes('closed')) { + setStatusMessage(''); + // Re-initialize for next attempt + fetch('/api/auth/nango/repo-session', { credentials: 'include' }) + .then(res => res.json()) + .then(data => { + if (data.sessionToken) { + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setState('ready'); + } + }); + return; } - // Set the session token - this enables the Connect UI - connectUIRef.current.setSessionToken(sessionResult.data.sessionToken); - } catch (err) { - console.error('Connect error:', err); + setError(error.message || 'Connection failed'); setState('error'); - setError(err instanceof Error ? err.message : 'Failed to connect'); + setStatusMessage(''); } - }, [startPolling]); + }; - // Handle retry - const handleRetry = useCallback(() => { - setState('idle'); + const handleRetry = async () => { setError(null); setRepos([]); setPendingMessage(null); - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); + setStatusMessage(''); + setState('checking'); + + // Re-initialize Nango for the retry + try { + const nangoRes = await fetch('/api/auth/nango/repo-session', { + credentials: 'include', + }); + const nangoData = await nangoRes.json(); + + if (!nangoRes.ok || !nangoData.sessionToken) { + if (nangoData?.sessionExpired || nangoData?.code === 'SESSION_EXPIRED') { + window.location.href = '/login'; + return; + } + setError('Failed to initialize. Please refresh the page.'); + setState('error'); + return; + } + + nangoRef.current = new Nango({ connectSessionToken: nangoData.sessionToken }); + setState('ready'); + } catch { + setError('Failed to initialize. Please refresh the page.'); + setState('error'); } - }, []); + }; - // Continue to dashboard - const handleContinue = useCallback(() => { + const handleContinue = () => { window.location.href = '/app'; - }, []); + }; if (state === 'checking') { return ( @@ -178,12 +226,15 @@ export default function ConnectReposPage() { -

Checking session...

+

Loading...

); } + const isConnecting = state === 'connecting' || state === 'polling'; + const isReady = state === 'ready'; + return (
{/* Background grid */} @@ -220,13 +271,9 @@ export default function ConnectReposPage() {

Repositories Connected!

- {/* Repo list */}
{repos.map((repo) => ( -
+
@@ -256,19 +303,12 @@ export default function ConnectReposPage() {

{pendingMessage}

An organization admin needs to approve the GitHub App installation. - You'll be able to connect once approved.

- -
@@ -282,10 +322,7 @@ export default function ConnectReposPage() {

Connection Failed

{error}

-
@@ -298,10 +335,16 @@ export default function ConnectReposPage() {

Syncing Repositories

-

Fetching your repositories...

+

{statusMessage || 'Fetching your repositories...'}

) : (
+ {error && ( +
+

{error}

+
+ )} +

What this enables:

    @@ -328,16 +371,16 @@ export default function ConnectReposPage() { -
)}
- {/* Back link */}
Back to dashboard diff --git a/src/dashboard/app/login/page.tsx b/src/dashboard/app/login/page.tsx index 51236f5c..40c1075b 100644 --- a/src/dashboard/app/login/page.tsx +++ b/src/dashboard/app/login/page.tsx @@ -1,135 +1,154 @@ /** * Login Page - GitHub OAuth via Nango * - * Uses Nango Connect UI for GitHub authentication with polling - * to detect when login completes. + * Key: Initialize Nango on page load, not on click. + * This avoids popup blockers by ensuring openConnectUI is synchronous. + * See: https://arveknudsen.com/posts/avoiding-popup-blocking-when-authing-with-google/ */ 'use client'; -import React, { useState, useEffect, useCallback, useRef } from 'react'; -import Nango, { ConnectUI } from '@nangohq/frontend'; -import type { ConnectUIEvent } from '@nangohq/frontend'; -import { cloudApi } from '../../lib/cloudApi'; +import React, { useState, useEffect, useRef } from 'react'; +import Nango from '@nangohq/frontend'; import { LogoIcon } from '../../react-components/Logo'; -type LoginState = 'idle' | 'loading' | 'connecting' | 'polling' | 'success' | 'error'; - export default function LoginPage() { - const [state, setState] = useState('idle'); - const [error, setError] = useState(null); - const pollIntervalRef = useRef(null); - const connectUIRef = useRef(null); + const [isReady, setIsReady] = useState(false); + const [isAuthenticating, setIsAuthenticating] = useState(false); + const [authStatus, setAuthStatus] = useState(''); + const [error, setError] = useState(''); - // Cleanup on unmount - useEffect(() => { - return () => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - if (connectUIRef.current) { - connectUIRef.current.close(); - } - }; - }, []); + // Store Nango instance and session token - initialized on mount + const nangoRef = useRef | null>(null); - // Poll for login completion - const startPolling = useCallback((connId: string) => { - setState('polling'); + // Initialize Nango with session token on page load + useEffect(() => { + let mounted = true; - // Poll every 1 second - pollIntervalRef.current = setInterval(async () => { + const init = async () => { try { - const result = await cloudApi.checkNangoLoginStatus(connId); - if (result.success && result.data.ready) { - // Login complete - stop polling and redirect - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - } - setState('success'); - // Redirect to dashboard after a brief success message - setTimeout(() => { - window.location.href = '/app'; - }, 1000); + const response = await fetch('/api/auth/nango/login-session', { + credentials: 'include', + }); + const data = await response.json(); + + if (!mounted) return; + + if (!response.ok || !data.sessionToken) { + setError('Failed to initialize. Please refresh the page.'); + return; } + + // Create Nango instance NOW, not on click + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); } catch (err) { - console.error('Polling error:', err); - } - }, 1000); - - // Timeout after 5 minutes - setTimeout(() => { - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); - pollIntervalRef.current = null; - setState('error'); - setError('Login timed out. Please try again.'); + if (mounted) { + console.error('Init error:', err); + setError('Failed to initialize. Please refresh the page.'); + } } - }, 5 * 60 * 1000); + }; + + init(); + return () => { mounted = false; }; }, []); - // Handle login button click - const handleLogin = useCallback(async () => { - setState('loading'); - setError(null); + const checkAuthStatus = async (connectionId: string): Promise<{ ready: boolean; hasRepos?: boolean }> => { + const response = await fetch(`/api/auth/nango/login-status/${connectionId}`, { + credentials: 'include', + }); + if (!response.ok) { + throw new Error('Auth status not ready'); + } + return response.json(); + }; + const handleAuthSuccess = async (connectionId: string) => { try { - // Create Nango instance and open Connect UI first (shows loading state) - const nango = new Nango(); - - const handleEvent = (event: ConnectUIEvent) => { - if (event.type === 'connect') { - // Connection successful - start polling - const connectionId = event.payload.connectionId; - startPolling(connectionId); - if (connectUIRef.current) { - connectUIRef.current.close(); - } - } else if (event.type === 'close') { - // User closed without connecting - setState('idle'); - } else if (event.type === 'error') { - setState('error'); - setError(event.payload.errorMessage || 'Connection failed'); - if (connectUIRef.current) { - connectUIRef.current.close(); - } + setAuthStatus('Completing authentication...'); + + const pollStartTime = Date.now(); + const maxPollTime = 30000; + const pollInterval = 1000; + + const pollForAuth = async (): Promise => { + const elapsed = Date.now() - pollStartTime; + + if (elapsed > maxPollTime) { + throw new Error('Authentication timed out. Please try again.'); } - }; - // Open Connect UI (shows loading until token is set) - connectUIRef.current = nango.openConnectUI({ - onEvent: handleEvent, - }); - connectUIRef.current.open(); - setState('connecting'); - - // Get session token from backend and set it - const sessionResult = await cloudApi.getNangoLoginSession(); - if (!sessionResult.success) { - if (connectUIRef.current) { - connectUIRef.current.close(); + try { + const result = await checkAuthStatus(connectionId); + if (result && result.ready) { + // Redirect to connect-repos if no repos, otherwise to app + window.location.href = result.hasRepos ? '/app' : '/connect-repos'; + return; + } + + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); + } catch { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); } - throw new Error(sessionResult.error || 'Failed to create login session'); - } + }; - // Set the session token - this enables the Connect UI - connectUIRef.current.setSessionToken(sessionResult.data.sessionToken); + await pollForAuth(); } catch (err) { - console.error('Login error:', err); - setState('error'); - setError(err instanceof Error ? err.message : 'Login failed'); + console.error('[AUTH] Authentication error:', err); + setError(err instanceof Error ? err.message : 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); } - }, [startPolling]); - - // Retry login - const handleRetry = useCallback(() => { - setState('idle'); - setError(null); - if (pollIntervalRef.current) { - clearInterval(pollIntervalRef.current); + }; + + // Use nango.auth() instead of openConnectUI to avoid popup blocker issues + const handleGitHubAuth = async () => { + if (!nangoRef.current) { + setError('Not ready. Please refresh the page.'); + return; } - }, []); + + setIsAuthenticating(true); + setError(''); + setAuthStatus('Connecting to GitHub...'); + + try { + const result = await nangoRef.current.auth('github'); + if (result && 'connectionId' in result) { + await handleAuthSuccess(result.connectionId); + } else { + throw new Error('No connection ID returned'); + } + } catch (err: unknown) { + const error = err as Error & { type?: string }; + console.error('GitHub auth error:', error); + + // Don't show error for user-cancelled auth + if (error.type === 'user_cancelled' || error.message?.includes('closed')) { + setIsAuthenticating(false); + setAuthStatus(''); + // Re-initialize for next attempt + fetch('/api/auth/nango/login-session', { credentials: 'include' }) + .then(res => res.json()) + .then(data => { + if (data.sessionToken) { + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); + } + }); + return; + } + + setError(error.message || 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); + } + }; + + const isLoading = !isReady || isAuthenticating; return (
@@ -156,81 +175,67 @@ export default function LoginPage() { {/* Login Card */}
- {state === 'success' ? ( -
-
- - - -
-

Welcome!

-

Redirecting to dashboard...

-
- ) : state === 'error' ? ( -
- {/* Back to home */} + {/* Sign up link */} + + {/* Back to home */} + diff --git a/src/dashboard/app/providers/page.tsx b/src/dashboard/app/providers/page.tsx new file mode 100644 index 00000000..442ce91b --- /dev/null +++ b/src/dashboard/app/providers/page.tsx @@ -0,0 +1,220 @@ +/** + * Providers Page + * + * Connect AI providers (Anthropic, OpenAI, etc.) to enable workspace creation. + */ + +'use client'; + +import React, { useState, useEffect } from 'react'; +import { LogoIcon } from '../../react-components/Logo'; + +interface Provider { + id: string; + name: string; + displayName: string; + description: string; + color: string; + isConnected: boolean; + connectedAs?: string; +} + +export default function ProvidersPage() { + const [providers, setProviders] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [connectingProvider, setConnectingProvider] = useState(null); + const [apiKey, setApiKey] = useState(''); + const [csrfToken, setCsrfToken] = useState(null); + + useEffect(() => { + const fetchProviders = async () => { + try { + const res = await fetch('/api/providers', { credentials: 'include' }); + + // Capture CSRF token + const token = res.headers.get('X-CSRF-Token'); + if (token) setCsrfToken(token); + + if (!res.ok) { + if (res.status === 401) { + window.location.href = '/login'; + return; + } + throw new Error('Failed to fetch providers'); + } + + const data = await res.json(); + setProviders(data.providers || []); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to load providers'); + } finally { + setLoading(false); + } + }; + + fetchProviders(); + }, []); + + const handleConnect = async (providerId: string) => { + if (!apiKey.trim()) { + setError('Please enter an API key'); + return; + } + + setError(null); + setConnectingProvider(providerId); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/providers/${providerId}/api-key`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ apiKey: apiKey.trim() }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to connect provider'); + } + + // Update provider state + setProviders(prev => + prev.map(p => (p.id === providerId ? { ...p, isConnected: true } : p)) + ); + setApiKey(''); + setConnectingProvider(null); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to connect'); + setConnectingProvider(null); + } + }; + + const hasConnectedProvider = providers.some(p => p.isConnected && p.id !== 'github'); + + if (loading) { + return ( +
+
+ + + + +

Loading providers...

+
+
+ ); + } + + return ( +
+ {/* Background grid */} +
+
+
+ +
+ {/* Logo */} +
+ +

Connect AI Providers

+

+ Add your API keys to enable AI-powered coding assistants in your workspace. +

+
+ + {error && ( +
+

{error}

+
+ )} + + {/* Providers list */} +
+ {providers + .filter(p => p.id !== 'github') // Don't show GitHub here + .map(provider => ( +
+
+
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.description}

+
+
+ {provider.isConnected && ( + + Connected + + )} +
+ + {!provider.isConnected && ( +
+ { + setConnectingProvider(provider.id); + setApiKey(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 px-4 py-2 bg-bg-deep border border-border-subtle rounded-lg text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan/50" + /> + +
+ )} +
+ ))} +
+ + {/* Continue button */} +
+ {hasConnectedProvider ? ( + + Continue to Dashboard + + ) : ( +

+ Connect at least one AI provider to continue +

+ )} + + + Skip for now + +
+
+
+ ); +} diff --git a/src/dashboard/app/signup/page.tsx b/src/dashboard/app/signup/page.tsx new file mode 100644 index 00000000..92876ee3 --- /dev/null +++ b/src/dashboard/app/signup/page.tsx @@ -0,0 +1,343 @@ +/** + * Signup Page - GitHub OAuth via Nango + * + * Key: Initialize Nango on page load, not on click. + * This avoids popup blockers by ensuring openConnectUI is synchronous. + */ + +'use client'; + +import React, { useState, useEffect, useRef } from 'react'; +import Nango from '@nangohq/frontend'; +import { LogoIcon } from '../../react-components/Logo'; + +export default function SignupPage() { + const [isReady, setIsReady] = useState(false); + const [isAuthenticating, setIsAuthenticating] = useState(false); + const [authStatus, setAuthStatus] = useState(''); + const [error, setError] = useState(''); + const [redirectTarget, setRedirectTarget] = useState('/app'); + const [showSuccess, setShowSuccess] = useState(false); + + // Store Nango instance - initialized on mount + const nangoRef = useRef | null>(null); + + // Initialize Nango with session token on page load + useEffect(() => { + let mounted = true; + + const init = async () => { + // Check if already logged in + try { + const sessionRes = await fetch('/api/auth/session', { credentials: 'include' }); + const session = await sessionRes.json(); + if (session.authenticated) { + await handlePostAuthRedirect(); + return; + } + } catch { + // Not logged in, continue + } + + // Get Nango session token + try { + const response = await fetch('/api/auth/nango/login-session', { + credentials: 'include', + }); + const data = await response.json(); + + if (!mounted) return; + + if (!response.ok || !data.sessionToken) { + setError('Failed to initialize. Please refresh the page.'); + return; + } + + // Create Nango instance NOW, not on click + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); + } catch (err) { + if (mounted) { + console.error('Init error:', err); + setError('Failed to initialize. Please refresh the page.'); + } + } + }; + + init(); + return () => { mounted = false; }; + }, []); + + const handlePostAuthRedirect = async () => { + setAuthStatus('Setting up your account...'); + + try { + const response = await fetch('/api/github-app/repos', { credentials: 'include' }); + const data = await response.json(); + + if (data.repositories && data.repositories.length > 0) { + setRedirectTarget('/app'); + } else { + setRedirectTarget('/connect-repos'); + } + + setShowSuccess(true); + + setTimeout(() => { + window.location.href = data.repositories && data.repositories.length > 0 + ? '/app' + : '/connect-repos'; + }, 1500); + } catch (err) { + console.error('Error checking repos:', err); + setRedirectTarget('/connect-repos'); + setShowSuccess(true); + setTimeout(() => { + window.location.href = '/connect-repos'; + }, 1500); + } + }; + + const checkAuthStatus = async (connectionId: string): Promise<{ ready: boolean }> => { + const response = await fetch(`/api/auth/nango/login-status/${connectionId}`, { + credentials: 'include', + }); + if (!response.ok) { + throw new Error('Auth status not ready'); + } + return response.json(); + }; + + const handleAuthSuccess = async (connectionId: string) => { + try { + setAuthStatus('Completing authentication...'); + + const pollStartTime = Date.now(); + const maxPollTime = 30000; + const pollInterval = 1000; + + const pollForAuth = async (): Promise => { + const elapsed = Date.now() - pollStartTime; + + if (elapsed > maxPollTime) { + throw new Error('Authentication timed out. Please try again.'); + } + + try { + const result = await checkAuthStatus(connectionId); + if (result && result.ready) { + await handlePostAuthRedirect(); + return; + } + + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); + } catch { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + return pollForAuth(); + } + }; + + await pollForAuth(); + } catch (err) { + console.error('[AUTH] Authentication error:', err); + setError(err instanceof Error ? err.message : 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); + } + }; + + // Use nango.auth() instead of openConnectUI to avoid popup blocker issues + const handleGitHubAuth = async () => { + if (!nangoRef.current) { + setError('Not ready. Please refresh the page.'); + return; + } + + setIsAuthenticating(true); + setError(''); + setAuthStatus('Connecting to GitHub...'); + + try { + const result = await nangoRef.current.auth('github'); + if (result && 'connectionId' in result) { + await handleAuthSuccess(result.connectionId); + } else { + throw new Error('No connection ID returned'); + } + } catch (err: unknown) { + const error = err as Error & { type?: string }; + console.error('GitHub auth error:', error); + + // Don't show error for user-cancelled auth + if (error.type === 'user_cancelled' || error.message?.includes('closed')) { + setIsAuthenticating(false); + setAuthStatus(''); + // Re-initialize for next attempt + fetch('/api/auth/nango/login-session', { credentials: 'include' }) + .then(res => res.json()) + .then(data => { + if (data.sessionToken) { + nangoRef.current = new Nango({ connectSessionToken: data.sessionToken }); + setIsReady(true); + } + }); + return; + } + + setError(error.message || 'Authentication failed'); + setIsAuthenticating(false); + setAuthStatus(''); + } + }; + + const isLoading = !isReady || isAuthenticating; + + return ( +
+ {/* Background grid */} +
+
+
+ + {/* Glow orbs */} +
+
+
+
+ + {/* Content */} +
+ {/* Logo */} +
+ +

Get Started

+

+ Create your account and start orchestrating AI agents +

+
+ + {/* Signup Card */} +
+ {showSuccess ? ( +
+
+ + + +
+

Welcome to Agent Relay!

+

+ {redirectTarget === '/connect-repos' + ? "Let's connect your repositories..." + : 'Redirecting to dashboard...'} +

+
+ ) : isAuthenticating ? ( +
+
+ + + + +
+

Creating Account

+

{authStatus || 'Connecting to GitHub...'}

+
+ ) : ( +
+ {error && ( +
+

{error}

+
+ )} + + {/* Features list */} +
+
+
+ + + +
+ Deploy AI agents in seconds +
+
+
+ + + +
+ Real-time agent collaboration +
+
+
+ + + +
+ Secure credential management +
+
+ + + +

+ By signing up, you agree to our{' '} + Terms of Service + {' '}and{' '} + Privacy Policy +

+
+ )} +
+ + {/* Already have account */} +
+

+ Already have an account?{' '} + + Sign in + +

+
+ + {/* Back to home */} + +
+
+ ); +} diff --git a/src/dashboard/landing/styles.css b/src/dashboard/landing/styles.css index 668603ac..8eed7a85 100644 --- a/src/dashboard/landing/styles.css +++ b/src/dashboard/landing/styles.css @@ -477,6 +477,7 @@ animation: fadeIn 1s ease forwards; animation-delay: 0.6s; opacity: 0; + overflow: hidden; } @keyframes fadeIn { @@ -489,6 +490,7 @@ aspect-ratio: 1; max-width: 500px; margin: 0 auto; + overflow: hidden; } .network-lines { @@ -569,6 +571,10 @@ color: var(--text-secondary); text-transform: uppercase; letter-spacing: 0.5px; + white-space: nowrap; + text-overflow: ellipsis; + max-width: 80px; + overflow: hidden; } .data-packet { @@ -1491,6 +1497,30 @@ section { } } +/* ============================================ + RESPONSIVE - Small Tablets Edge Case (850px) + Fixes agent network display at intermediate widths + ============================================ */ +@media (max-width: 850px) { + .hero-visual { + max-width: 320px; + } + + .agent-network { + max-width: 320px; + } + + .agent-icon { + width: 50px; + height: 50px; + font-size: 20px; + } + + .agent-label { + font-size: 11px; + } +} + /* ============================================ RESPONSIVE - Tablets (768px) ============================================ */ diff --git a/src/dashboard/lib/api.ts b/src/dashboard/lib/api.ts index f23ac4ad..ac880eab 100644 --- a/src/dashboard/lib/api.ts +++ b/src/dashboard/lib/api.ts @@ -21,6 +21,30 @@ import type { // API base URL - relative in browser, can be configured for SSR const API_BASE = ''; +// Workspace ID for cloud mode proxying +let activeWorkspaceId: string | null = null; + +/** + * Set the active workspace ID for API proxying in cloud mode + */ +export function setActiveWorkspaceId(workspaceId: string | null): void { + activeWorkspaceId = workspaceId; +} + +/** + * Get the API URL, accounting for cloud mode proxying + * @param path - API path like '/api/spawn' or '/api/send' + */ +function getApiUrl(path: string): string { + if (activeWorkspaceId) { + // In cloud mode, proxy through the cloud server + // Strip /api/ prefix since the proxy endpoint adds it back + const proxyPath = path.startsWith('/api/') ? path.substring(5) : path.replace(/^\//, ''); + return `/api/workspaces/${activeWorkspaceId}/proxy/${proxyPath}`; + } + return `${API_BASE}${path}`; +} + /** * Dashboard data received from WebSocket */ @@ -174,7 +198,7 @@ export const api = { */ async sendMessage(request: SendMessageRequest): Promise> { try { - const response = await fetch(`${API_BASE}/api/send`, { + const response = await fetch(getApiUrl('/api/send'), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), @@ -220,7 +244,7 @@ export const api = { data = file.data; } - const response = await fetch(`${API_BASE}/api/upload`, { + const response = await fetch(getApiUrl('/api/upload'), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ filename, mimeType, data }), @@ -247,7 +271,7 @@ export const api = { */ async spawnAgent(request: SpawnAgentRequest): Promise { try { - const response = await fetch(`${API_BASE}/api/spawn`, { + const response = await fetch(getApiUrl('/api/spawn'), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), @@ -264,7 +288,7 @@ export const api = { */ async getSpawnedAgents(): Promise }>> { try { - const response = await fetch(`${API_BASE}/api/spawned`); + const response = await fetch(getApiUrl('/api/spawned')); const result = await response.json() as { success?: boolean; agents?: Array<{ name: string; cli: string; startedAt: string }>; error?: string }; if (response.ok && result.success) { @@ -282,7 +306,7 @@ export const api = { */ async releaseAgent(name: string): Promise> { try { - const response = await fetch(`${API_BASE}/api/spawned/${encodeURIComponent(name)}`, { + const response = await fetch(getApiUrl(`/api/spawned/${encodeURIComponent(name)}`), { method: 'DELETE', }); @@ -303,7 +327,7 @@ export const api = { */ async getData(): Promise> { try { - const response = await fetch(`${API_BASE}/api/data`); + const response = await fetch(getApiUrl('/api/data')); const data = await response.json() as DashboardData; if (response.ok) { @@ -321,7 +345,7 @@ export const api = { */ async getBridgeData(): Promise> { try { - const response = await fetch(`${API_BASE}/api/bridge`); + const response = await fetch(getApiUrl('/api/bridge')); const data = await response.json() as FleetData; if (response.ok) { @@ -339,7 +363,7 @@ export const api = { */ async getMetrics(): Promise> { try { - const response = await fetch(`${API_BASE}/api/metrics`); + const response = await fetch(getApiUrl('/api/metrics')); const data = await response.json(); if (response.ok) { @@ -368,7 +392,7 @@ export const api = { if (params?.since) query.set('since', String(params.since)); if (params?.limit) query.set('limit', String(params.limit)); - const response = await fetch(`${API_BASE}/api/history/sessions?${query}`); + const response = await fetch(getApiUrl(`/api/history/sessions?${query}`)); const data = await response.json(); if (response.ok) { @@ -403,7 +427,7 @@ export const api = { if (params?.order) query.set('order', params.order); if (params?.search) query.set('search', params.search); - const response = await fetch(`${API_BASE}/api/history/messages?${query}`); + const response = await fetch(getApiUrl(`/api/history/messages?${query}`)); const data = await response.json(); if (response.ok) { @@ -421,7 +445,7 @@ export const api = { */ async getHistoryConversations(): Promise> { try { - const response = await fetch(`${API_BASE}/api/history/conversations`); + const response = await fetch(getApiUrl('/api/history/conversations')); const data = await response.json(); if (response.ok) { @@ -439,7 +463,7 @@ export const api = { */ async getHistoryMessage(id: string): Promise> { try { - const response = await fetch(`${API_BASE}/api/history/message/${encodeURIComponent(id)}`); + const response = await fetch(getApiUrl(`/api/history/message/${encodeURIComponent(id)}`)); const data = await response.json(); if (response.ok) { @@ -457,7 +481,7 @@ export const api = { */ async getHistoryStats(): Promise> { try { - const response = await fetch(`${API_BASE}/api/history/stats`); + const response = await fetch(getApiUrl('/api/history/stats')); const data = await response.json(); if (response.ok) { @@ -484,7 +508,7 @@ export const api = { if (params?.query) queryParams.set('q', params.query); if (params?.limit) queryParams.set('limit', String(params.limit)); - const response = await fetch(`${API_BASE}/api/files?${queryParams}`); + const response = await fetch(getApiUrl(`/api/files?${queryParams}`)); const data = await response.json(); if (response.ok) { @@ -504,7 +528,7 @@ export const api = { */ async getDecisions(): Promise> { try { - const response = await fetch(`${API_BASE}/api/decisions`); + const response = await fetch(getApiUrl('/api/decisions')); const data = await response.json(); if (response.ok && data.success) { @@ -522,7 +546,7 @@ export const api = { */ async approveDecision(id: string, optionId?: string, response?: string): Promise> { try { - const res = await fetch(`${API_BASE}/api/decisions/${encodeURIComponent(id)}/approve`, { + const res = await fetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/approve`), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ optionId, response }), @@ -545,7 +569,7 @@ export const api = { */ async rejectDecision(id: string, reason?: string): Promise> { try { - const res = await fetch(`${API_BASE}/api/decisions/${encodeURIComponent(id)}/reject`, { + const res = await fetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/reject`), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ reason }), @@ -568,7 +592,7 @@ export const api = { */ async dismissDecision(id: string): Promise> { try { - const res = await fetch(`${API_BASE}/api/decisions/${encodeURIComponent(id)}`, { + const res = await fetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}`), { method: 'DELETE', }); @@ -591,7 +615,7 @@ export const api = { */ async getFleetServers(): Promise> { try { - const response = await fetch(`${API_BASE}/api/fleet/servers`); + const response = await fetch(getApiUrl('/api/fleet/servers')); const data = await response.json(); if (response.ok && data.success) { @@ -609,7 +633,7 @@ export const api = { */ async getFleetStats(): Promise> { try { - const response = await fetch(`${API_BASE}/api/fleet/stats`); + const response = await fetch(getApiUrl('/api/fleet/stats')); const data = await response.json(); if (response.ok && data.success) { @@ -636,7 +660,7 @@ export const api = { if (params?.status) queryParams.set('status', params.status); if (params?.agent) queryParams.set('agent', params.agent); - const response = await fetch(`${API_BASE}/api/tasks?${queryParams}`); + const response = await fetch(getApiUrl(`/api/tasks?${queryParams}`)); const data = await response.json(); if (response.ok && data.success) { @@ -659,7 +683,7 @@ export const api = { priority: 'low' | 'medium' | 'high' | 'critical'; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/tasks`, { + const response = await fetch(getApiUrl('/api/tasks'), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), @@ -685,7 +709,7 @@ export const api = { result?: string; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/tasks/${encodeURIComponent(id)}`, { + const response = await fetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { method: 'PATCH', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(updates), @@ -708,7 +732,7 @@ export const api = { */ async cancelTask(id: string): Promise> { try { - const response = await fetch(`${API_BASE}/api/tasks/${encodeURIComponent(id)}`, { + const response = await fetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { method: 'DELETE', }); @@ -737,7 +761,7 @@ export const api = { description?: string; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/beads`, { + const response = await fetch(getApiUrl('/api/beads'), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), @@ -764,7 +788,7 @@ export const api = { thread?: string; }): Promise> { try { - const response = await fetch(`${API_BASE}/api/relay/send`, { + const response = await fetch(getApiUrl('/api/relay/send'), { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), diff --git a/src/dashboard/lib/cloudApi.ts b/src/dashboard/lib/cloudApi.ts index 3a327968..63f927f6 100644 --- a/src/dashboard/lib/cloudApi.ts +++ b/src/dashboard/lib/cloudApi.ts @@ -386,4 +386,21 @@ export const cloudApi = { { method: 'POST' } ); }, + + // ===== GitHub App API ===== + + /** + * Get user's connected repositories + */ + async getRepos() { + return cloudFetch<{ repositories: Array<{ + id: string; + fullName: string; + isPrivate: boolean; + defaultBranch: string; + syncStatus: string; + hasNangoConnection: boolean; + lastSyncedAt?: string; + }> }>('/api/github-app/repos'); + }, }; diff --git a/src/dashboard/next.config.js b/src/dashboard/next.config.js index 2cc9be8a..5e426c2c 100644 --- a/src/dashboard/next.config.js +++ b/src/dashboard/next.config.js @@ -2,7 +2,7 @@ const nextConfig = { // Static export - generates HTML/JS/CSS that can be served by any server output: 'export', - distDir: 'out', + // Export output goes to 'out/' by default with output: 'export' // Disable strict mode for now during development reactStrictMode: true, diff --git a/src/dashboard/package-lock.json b/src/dashboard/package-lock.json index 141ea4d6..aede51dc 100644 --- a/src/dashboard/package-lock.json +++ b/src/dashboard/package-lock.json @@ -8,7 +8,7 @@ "name": "@agent-relay/dashboard-v2", "version": "1.0.0", "dependencies": { - "@nangohq/frontend": "^0.69.20", + "@nangohq/frontend": "^0.69.14", "@tailwindcss/postcss": "^4.1.18", "@xterm/addon-fit": "^0.11.0", "@xterm/addon-search": "^0.16.0", diff --git a/src/dashboard/react-components/SettingsPanel.tsx b/src/dashboard/react-components/SettingsPanel.tsx index 4a237af9..08084e92 100644 --- a/src/dashboard/react-components/SettingsPanel.tsx +++ b/src/dashboard/react-components/SettingsPanel.tsx @@ -49,12 +49,31 @@ export const defaultSettings: Settings = { }, }; +interface AIProvider { + id: string; + name: string; + displayName: string; + description: string; + color: string; + cliCommand: string; + isConnected?: boolean; +} + +const AI_PROVIDERS: AIProvider[] = [ + { id: 'anthropic', name: 'Anthropic', displayName: 'Claude', description: 'Claude Code - recommended for code tasks', color: '#D97757', cliCommand: 'claude' }, + { id: 'codex', name: 'OpenAI', displayName: 'Codex', description: 'Codex - OpenAI coding assistant', color: '#10A37F', cliCommand: 'codex login' }, + { id: 'opencode', name: 'OpenCode', displayName: 'OpenCode', description: 'OpenCode - AI coding assistant', color: '#00D4AA', cliCommand: 'opencode' }, + { id: 'droid', name: 'Factory', displayName: 'Droid', description: 'Droid - Factory AI coding agent', color: '#6366F1', cliCommand: 'droid' }, +]; + export interface SettingsPanelProps { isOpen: boolean; onClose: () => void; settings: Settings; onSettingsChange: (settings: Settings) => void; onResetSettings?: () => void; + workspaceId?: string; // For cloud mode provider connection + csrfToken?: string; // For cloud mode API calls } export function SettingsPanel({ diff --git a/src/hooks/trajectory-hooks.ts b/src/hooks/trajectory-hooks.ts index 5ca15fa8..9699fa08 100644 --- a/src/hooks/trajectory-hooks.ts +++ b/src/hooks/trajectory-hooks.ts @@ -18,6 +18,8 @@ import { TrajectoryIntegration, getTrajectoryIntegration, detectPhaseFromContent, + detectToolCalls, + detectErrors, getCompactTrailInstructions, type PDEROPhase, } from '../trajectory/integration.js'; @@ -32,6 +34,10 @@ export interface TrajectoryHooksOptions { agentName: string; /** Whether to auto-detect phase transitions */ autoDetectPhase?: boolean; + /** Whether to detect and record tool calls */ + detectTools?: boolean; + /** Whether to detect and record errors */ + detectErrors?: boolean; /** Whether to inject trail instructions on session start */ injectInstructions?: boolean; /** Whether to prompt for retrospective on session end */ @@ -44,6 +50,10 @@ export interface TrajectoryHooksOptions { interface TrajectoryHooksState { trajectory: TrajectoryIntegration; lastDetectedPhase?: PDEROPhase; + /** Set of tool calls already recorded to avoid duplicates */ + seenTools: Set; + /** Set of errors already recorded to avoid duplicates */ + seenErrors: Set; options: TrajectoryHooksOptions; } @@ -63,8 +73,12 @@ interface TrajectoryHooksState { export function createTrajectoryHooks(options: TrajectoryHooksOptions): LifecycleHooks { const state: TrajectoryHooksState = { trajectory: getTrajectoryIntegration(options.projectId, options.agentName), + seenTools: new Set(), + seenErrors: new Set(), options: { autoDetectPhase: true, + detectTools: true, + detectErrors: true, injectInstructions: true, promptRetrospective: true, ...options, @@ -149,21 +163,49 @@ Or if you need to document learnings: } /** - * Output hook - auto-detects PDERO phase transitions + * Output hook - auto-detects PDERO phase transitions, tool calls, and errors */ function createOutputHook(state: TrajectoryHooksState) { return async (ctx: OutputContext): Promise => { const { trajectory, options } = state; - if (!options.autoDetectPhase) { - return; + // Detect and record phase transitions + if (options.autoDetectPhase) { + const detectedPhase = detectPhaseFromContent(ctx.content); + + if (detectedPhase && detectedPhase !== state.lastDetectedPhase) { + state.lastDetectedPhase = detectedPhase; + await trajectory.transition(detectedPhase, 'Auto-detected from output'); + } } - const detectedPhase = detectPhaseFromContent(ctx.content); + // Detect and record tool calls + // Note: We deduplicate by tool+status to record each unique tool type once per session + // (e.g., "Read" started, "Read" completed). This provides a summary of tools used + // without flooding the trajectory with every individual invocation. + if (options.detectTools) { + const tools = detectToolCalls(ctx.content); + for (const tool of tools) { + const key = `${tool.tool}:${tool.status || 'started'}`; + if (!state.seenTools.has(key)) { + state.seenTools.add(key); + const statusLabel = tool.status === 'completed' ? ' (completed)' : ''; + await trajectory.event(`Tool: ${tool.tool}${statusLabel}`, 'tool_call'); + } + } + } - if (detectedPhase && detectedPhase !== state.lastDetectedPhase) { - state.lastDetectedPhase = detectedPhase; - await trajectory.transition(detectedPhase, 'Auto-detected from output'); + // Detect and record errors + if (options.detectErrors) { + const errors = detectErrors(ctx.content); + for (const error of errors) { + // Deduplicate by message content + if (!state.seenErrors.has(error.message)) { + state.seenErrors.add(error.message); + const prefix = error.type === 'warning' ? 'Warning' : 'Error'; + await trajectory.event(`${prefix}: ${error.message}`, 'error'); + } + } } }; } diff --git a/src/trajectory/detection.test.ts b/src/trajectory/detection.test.ts new file mode 100644 index 00000000..e4b06f9b --- /dev/null +++ b/src/trajectory/detection.test.ts @@ -0,0 +1,151 @@ +/** + * Tests for trajectory detection functions + */ + +import { describe, it, expect } from 'vitest'; +import { detectToolCalls, detectErrors } from './integration.js'; + +describe('detectToolCalls', () => { + it('detects tool completion markers', () => { + const output = ` +โœ“ Read file.ts +โœ” Bash completed +`; + const tools = detectToolCalls(output); + expect(tools).toHaveLength(2); + expect(tools[0].tool).toBe('Read'); + expect(tools[0].status).toBe('completed'); + expect(tools[1].tool).toBe('Bash'); + expect(tools[1].status).toBe('completed'); + }); + + it('detects tool invocation patterns', () => { + const output = ` +Using tool Read to read the file +Calling Bash command +`; + const tools = detectToolCalls(output); + expect(tools.length).toBeGreaterThan(0); + expect(tools.some(t => t.tool === 'Read' || t.tool === 'Bash')).toBe(true); + }); + + it('deduplicates tools by position', () => { + const output = ` +โœ“ Read file.ts +โœ“ Read file.ts +`; + const tools = detectToolCalls(output); + // Should detect both as they're at different positions + expect(tools).toHaveLength(2); + }); + + it('handles empty output', () => { + const tools = detectToolCalls(''); + expect(tools).toHaveLength(0); + }); + + it('handles output with no tools', () => { + const tools = detectToolCalls('Just some regular text without any tools.'); + expect(tools).toHaveLength(0); + }); + + it('detects newer tools like Skill and TaskOutput', () => { + const output = ` +โœ“ Skill invoked +TaskOutput({"task_id": "123"}) +`; + const tools = detectToolCalls(output); + expect(tools.some(t => t.tool === 'Skill')).toBe(true); + expect(tools.some(t => t.tool === 'TaskOutput')).toBe(true); + }); +}); + +describe('detectErrors', () => { + it('detects JavaScript/TypeScript errors', () => { + const output = ` +TypeError: Cannot read property 'foo' of undefined + at Object. (test.ts:10:5) +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + expect(errors.some(e => e.message.includes('TypeError'))).toBe(true); + expect(errors[0].type).toBe('error'); + }); + + it('detects test failures', () => { + const output = ` +FAIL src/test.ts +โœ— Test case failed +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + expect(errors.some(e => e.type === 'error')).toBe(true); + }); + + it('detects warnings', () => { + const output = ` +warning: Package is deprecated +WARN: Something might be wrong +`; + const errors = detectErrors(output); + expect(errors.some(e => e.type === 'warning')).toBe(true); + }); + + it('detects command failures', () => { + const output = ` +Command failed with exit code 1 +Exit code: 127 +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + }); + + it('deduplicates errors by message', () => { + const output = ` +Error: Something went wrong +Error: Something went wrong +`; + const errors = detectErrors(output); + // The detection may find two different patterns matching (generic "Error:" prefix) + // but should deduplicate if the exact same message is found multiple times + expect(errors.length).toBeGreaterThan(0); + // Count unique messages about "Something went wrong" + const wrongMessages = errors.filter(e => e.message.includes('Something went wrong')); + // At least one should be found + expect(wrongMessages.length).toBeGreaterThanOrEqual(1); + }); + + it('handles empty output', () => { + const errors = detectErrors(''); + expect(errors).toHaveLength(0); + }); + + it('handles output with no errors', () => { + const errors = detectErrors('Everything is working fine. Success!'); + expect(errors).toHaveLength(0); + }); + + it('truncates long error messages', () => { + const longMessage = 'Error: ' + 'x'.repeat(500); + const errors = detectErrors(longMessage); + expect(errors.length).toBeGreaterThan(0); + expect(errors[0].message.length).toBeLessThanOrEqual(200); + }); + + it('detects TypeScript compilation errors', () => { + const output = ` +error TS2339: Property 'foo' does not exist on type 'Bar'. +error[E0001]: Some rust error +`; + const errors = detectErrors(output); + expect(errors.length).toBeGreaterThan(0); + expect(errors.some(e => e.message.includes('TS2339'))).toBe(true); + }); + + it('does not match "error handling" as an error', () => { + const output = 'Implementing error handling for the API.'; + const errors = detectErrors(output); + // Should not detect "error handling" as an error + expect(errors).toHaveLength(0); + }); +}); diff --git a/src/trajectory/integration.ts b/src/trajectory/integration.ts index fb49a3cd..12349a0a 100644 --- a/src/trajectory/integration.ts +++ b/src/trajectory/integration.ts @@ -588,6 +588,168 @@ export function detectPhaseFromContent(content: string): PDEROPhase | undefined return undefined; } +/** + * Detected tool call information + */ +export interface DetectedToolCall { + tool: string; + args?: string; + status?: 'started' | 'completed' | 'failed'; +} + +/** + * Detected error information + */ +export interface DetectedError { + type: 'error' | 'warning' | 'failure'; + message: string; + stack?: string; +} + +/** + * All known Claude Code tool names + */ +const TOOL_NAMES = [ + 'Read', 'Write', 'Edit', 'Bash', 'Glob', 'Grep', 'Task', 'TaskOutput', + 'WebFetch', 'WebSearch', 'NotebookEdit', 'TodoWrite', 'AskUserQuestion', + 'KillShell', 'EnterPlanMode', 'ExitPlanMode', 'Skill', 'SlashCommand', +]; + +const TOOL_NAME_PATTERN = TOOL_NAMES.join('|'); + +/** + * Tool call patterns for Claude Code and similar AI CLIs + */ +const TOOL_PATTERNS = [ + // Claude Code tool invocations (displayed in output with parenthesis/braces) + new RegExp(`(?:^|\\n)\\s*(?:${TOOL_NAME_PATTERN})\\s*[({]`, 'i'), + // Tool completion markers (checkmarks, spinners) + new RegExp(`(?:^|\\n)\\s*(?:โœ“|โœ”|โ ‹|โ ™|โ น|โ ธ|โ ผ|โ ด|โ ฆ|โ ง|โ ‡|โ )\\s*(${TOOL_NAME_PATTERN})`, 'i'), + // Function call patterns (explicit mentions) + new RegExp(`(?:^|\\n)\\s*(?:Calling|Using|Invoking)\\s+(?:tool\\s+)?['"]?(${TOOL_NAME_PATTERN})['"]?`, 'i'), + // Tool result patterns + new RegExp(`(?:^|\\n)\\s*(?:Tool result|Result from)\\s*:?\\s*(${TOOL_NAME_PATTERN})`, 'i'), +]; + +/** + * Error patterns for detecting failures in output + * Note: Patterns are ordered from most specific to least specific + */ +const ERROR_PATTERNS = [ + // JavaScript/TypeScript runtime errors (most specific) + /(?:^|\n)((?:TypeError|ReferenceError|SyntaxError|RangeError|EvalError|URIError):\s*.+)/i, + // Named Error with message (e.g., "Error: Something went wrong") + /(?:^|\n)(Error:\s+.+)/, + // Failed assertions + /(?:^|\n)\s*(AssertionError:\s*.+)/i, + // Test failures (Vitest, Jest patterns) + /(?:^|\n)\s*(FAIL\s+\S+\.(?:ts|js|tsx|jsx))/i, + /(?:^|\n)\s*(โœ—|โœ˜|ร—)\s+(.+)/, + // Command/process failures + /(?:^|\n)\s*(Command failed[^\n]+)/i, + /(?:^|\n)\s*((?:Exit|exit)\s+code[:\s]+[1-9]\d*)/i, + /(?:^|\n)\s*(exited with (?:code\s+)?[1-9]\d*)/i, + // Node.js/system errors + /(?:^|\n)\s*(EACCES|EPERM|ENOENT|ECONNREFUSED|ETIMEDOUT|ENOTFOUND)(?::\s*.+)?/, + // Build/compile errors (webpack, tsc, etc.) + /(?:^|\n)\s*(error TS\d+:\s*.+)/i, + /(?:^|\n)\s*(error\[\S+\]:\s*.+)/i, +]; + +/** + * Warning patterns for detecting potential issues + */ +const WARNING_PATTERNS = [ + /(?:^|\n)\s*(?:warning|WARN|โš ๏ธ?)\s*[:\[]?\s*(.+)/i, + /(?:^|\n)\s*(?:deprecated|DEPRECATED):\s*(.+)/i, +]; + +/** + * Detect tool calls from agent output + * + * @example + * ```typescript + * const tools = detectToolCalls(output); + * // Returns: [{ tool: 'Read', args: 'file.ts' }, { tool: 'Bash', status: 'completed' }] + * ``` + */ +export function detectToolCalls(content: string): DetectedToolCall[] { + const detected: DetectedToolCall[] = []; + const seenTools = new Set(); + const toolNameExtractor = new RegExp(`\\b(${TOOL_NAME_PATTERN})\\b`, 'i'); + + for (const pattern of TOOL_PATTERNS) { + const matches = content.matchAll(new RegExp(pattern.source, 'gi')); + for (const match of matches) { + // Extract tool name from the match + const fullMatch = match[0]; + const toolNameMatch = fullMatch.match(toolNameExtractor); + if (toolNameMatch) { + const tool = toolNameMatch[1]; + // Avoid duplicates by position (same tool at same position) + const key = `${tool}:${match.index}`; + if (!seenTools.has(key)) { + seenTools.add(key); + detected.push({ + tool, + status: fullMatch.includes('โœ“') || fullMatch.includes('โœ”') ? 'completed' : 'started', + }); + } + } + } + } + + return detected; +} + +/** + * Detect errors from agent output + * + * @example + * ```typescript + * const errors = detectErrors(output); + * // Returns: [{ type: 'error', message: 'TypeError: Cannot read property...' }] + * ``` + */ +export function detectErrors(content: string): DetectedError[] { + const detected: DetectedError[] = []; + const seenMessages = new Set(); + + // Check for error patterns + for (const pattern of ERROR_PATTERNS) { + const matches = content.matchAll(new RegExp(pattern, 'gi')); + for (const match of matches) { + const message = match[1] || match[0]; + const cleanMessage = message.trim().slice(0, 200); // Limit length + if (!seenMessages.has(cleanMessage)) { + seenMessages.add(cleanMessage); + detected.push({ + type: 'error', + message: cleanMessage, + }); + } + } + } + + // Check for warning patterns + for (const pattern of WARNING_PATTERNS) { + const matches = content.matchAll(new RegExp(pattern, 'gi')); + for (const match of matches) { + const message = match[1] || match[0]; + const cleanMessage = message.trim().slice(0, 200); + if (!seenMessages.has(cleanMessage)) { + seenMessages.add(cleanMessage); + detected.push({ + type: 'warning', + message: cleanMessage, + }); + } + } + } + + return detected; +} + /** * TrajectoryIntegration class for managing trajectory state * diff --git a/src/wrapper/shared.ts b/src/wrapper/shared.ts index 8e14f8df..59bbaa12 100644 --- a/src/wrapper/shared.ts +++ b/src/wrapper/shared.ts @@ -41,7 +41,7 @@ export interface InjectionMetrics { /** * CLI types for special handling */ -export type CliType = 'claude' | 'codex' | 'gemini' | 'droid' | 'spawned' | 'other'; +export type CliType = 'claude' | 'codex' | 'gemini' | 'droid' | 'opencode' | 'spawned' | 'other'; /** * Injection timing constants @@ -167,6 +167,7 @@ export function detectCliType(command: string): CliType { if (cmdLower.includes('codex')) return 'codex'; if (cmdLower.includes('claude')) return 'claude'; if (cmdLower.includes('droid')) return 'droid'; + if (cmdLower.includes('opencode')) return 'opencode'; return 'other'; } @@ -186,7 +187,7 @@ export const CLI_QUIRKS = { * Others may interpret the escape sequences literally. */ supportsBracketedPaste: (cli: CliType): boolean => { - return cli === 'claude' || cli === 'codex' || cli === 'gemini'; + return cli === 'claude' || cli === 'codex' || cli === 'gemini' || cli === 'opencode'; }, /** @@ -207,6 +208,7 @@ export const CLI_QUIRKS = { gemini: /^[>โ€บยป]\s*$/, codex: /^[>โ€บยป]\s*$/, droid: /^[>โ€บยป]\s*$/, + opencode: /^[>โ€บยป]\s*$/, spawned: /^[>โ€บยป]\s*$/, other: /^[>$%#โžœโ€บยป]\s*$/, }; diff --git a/src/wrapper/tmux-wrapper.ts b/src/wrapper/tmux-wrapper.ts index 352246b3..dc7bcada 100644 --- a/src/wrapper/tmux-wrapper.ts +++ b/src/wrapper/tmux-wrapper.ts @@ -27,6 +27,8 @@ import { TrajectoryIntegration, getTrajectoryIntegration, detectPhaseFromContent, + detectToolCalls, + detectErrors, getCompactTrailInstructions, getTrailEnvVars, type PDEROPhase, @@ -102,7 +104,7 @@ export interface TmuxWrapperConfig { /** Polling interval when waiting for clear input (ms) */ inputWaitPollMs?: number; /** CLI type for special handling (auto-detected from command if not set) */ - cliType?: 'claude' | 'codex' | 'gemini' | 'droid' | 'other'; + cliType?: 'claude' | 'codex' | 'gemini' | 'droid' | 'opencode' | 'other'; /** Enable tmux mouse mode for scroll passthrough (default: true) */ mouseMode?: boolean; /** Relay prefix pattern (default: '->relay:') */ @@ -173,6 +175,8 @@ export class TmuxWrapper { private tmuxPath: string; // Resolved path to tmux binary (system or bundled) private trajectory?: TrajectoryIntegration; // Trajectory tracking via trail private lastDetectedPhase?: PDEROPhase; // Track last auto-detected PDERO phase + private seenToolCalls: Set = new Set(); // Dedup tool call trajectory events + private seenErrors: Set = new Set(); // Dedup error trajectory events private continuity?: ContinuityManager; // Session continuity management private processedContinuityCommands: Set = new Set(); // Dedup continuity commands private agentId?: string; // Unique agent ID for resume functionality @@ -207,6 +211,8 @@ export class TmuxWrapper { this.cliType = 'claude'; } else if (cmdLower.includes('droid')) { this.cliType = 'droid'; + } else if (cmdLower.includes('opencode')) { + this.cliType = 'opencode'; } else { this.cliType = 'other'; } @@ -306,11 +312,13 @@ export class TmuxWrapper { } /** - * Detect PDERO phase from output content and auto-transition if needed + * Detect PDERO phase from output content and auto-transition if needed. + * Also detects tool calls and errors, recording them to the trajectory. */ private detectAndTransitionPhase(content: string): void { if (!this.trajectory) return; + // Detect phase transitions const detectedPhase = detectPhaseFromContent(content); if (detectedPhase && detectedPhase !== this.lastDetectedPhase) { const currentPhase = this.trajectory.getPhase(); @@ -320,6 +328,30 @@ export class TmuxWrapper { this.logStderr(`Phase transition: ${currentPhase || 'none'} โ†’ ${detectedPhase}`); } } + + // Detect and record tool calls + // Note: We deduplicate by tool+status to record each unique tool type once per session + // (e.g., "Read" started, "Read" completed). This provides a summary of tools used + // without flooding the trajectory with every individual invocation. + const tools = detectToolCalls(content); + for (const tool of tools) { + const key = `${tool.tool}:${tool.status || 'started'}`; + if (!this.seenToolCalls.has(key)) { + this.seenToolCalls.add(key); + const statusLabel = tool.status === 'completed' ? ' (completed)' : ''; + this.trajectory.event(`Tool: ${tool.tool}${statusLabel}`, 'tool_call'); + } + } + + // Detect and record errors + const errors = detectErrors(content); + for (const error of errors) { + if (!this.seenErrors.has(error.message)) { + this.seenErrors.add(error.message); + const prefix = error.type === 'warning' ? 'Warning' : 'Error'; + this.trajectory.event(`${prefix}: ${error.message}`, 'error'); + } + } } /** @@ -1586,7 +1618,7 @@ export class TmuxWrapper { // Set tmux buffer then paste // Skip bracketed paste (-p) for CLIs that don't handle it properly (droid, other) await execAsync(`"${this.tmuxPath}" set-buffer -- "${escaped}"`); - const useBracketedPaste = this.cliType === 'claude' || this.cliType === 'codex' || this.cliType === 'gemini'; + const useBracketedPaste = this.cliType === 'claude' || this.cliType === 'codex' || this.cliType === 'gemini' || this.cliType === 'opencode'; if (useBracketedPaste) { await execAsync(`"${this.tmuxPath}" paste-buffer -t ${this.sessionName} -p`); } else { From 3943315e89dcdd4e40d3e32216305e41dabc72ed Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 00:46:07 +0000 Subject: [PATCH 002/103] Add multi-CLI support and Providers settings tab - Add Gemini, OpenCode, and Droid to spawn modal agent templates - Add Providers tab to SettingsPanel for API key management - Add Gemini CLI installation to workspace Dockerfile - Update landing page to show all 5 supported CLIs (Claude, Codex, Gemini, OpenCode, Droid) with proper branding colors --- deploy/workspace/Dockerfile | 2 + src/dashboard/landing/LandingPage.tsx | 13 +- .../react-components/SettingsPanel.tsx | 128 +++++++++++++++++- src/dashboard/react-components/SpawnModal.tsx | 23 +++- 4 files changed, 156 insertions(+), 10 deletions(-) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index 0e09d8a6..6ab4e4ab 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -67,6 +67,8 @@ RUN mkdir -p /home/workspace/.claude && \ RUN curl -fsSL https://opencode.ai/install | bash # Droid RUN curl -fsSL https://app.factory.ai/cli | sh +# Gemini +RUN npm install -g @anthropic-ai/gemini-cli || curl -fsSL https://gemini.google.com/cli/install.sh | bash # Environment ENV NODE_ENV=production diff --git a/src/dashboard/landing/LandingPage.tsx b/src/dashboard/landing/LandingPage.tsx index d6e35c6c..bee8ffc3 100644 --- a/src/dashboard/landing/LandingPage.tsx +++ b/src/dashboard/landing/LandingPage.tsx @@ -11,9 +11,11 @@ import { Logo, LogoIcon, LogoHero } from '../react-components/Logo'; // Agent providers with their signature colors const PROVIDERS = { - claude: { name: 'Claude', color: '#00D9FF', icon: 'โ—ˆ' }, - codex: { name: 'Codex', color: '#FF6B35', icon: 'โฌก' }, - gemini: { name: 'Gemini', color: '#00FFC8', icon: 'โ—‡' }, + claude: { name: 'Claude', color: '#D97757', icon: 'โ—ˆ' }, + codex: { name: 'Codex', color: '#10A37F', icon: 'โฌก' }, + gemini: { name: 'Gemini', color: '#4285F4', icon: 'โ—‡' }, + opencode: { name: 'OpenCode', color: '#00D4AA', icon: 'โ—†' }, + droid: { name: 'Droid', color: '#6366F1', icon: 'โฌข' }, }; // Simulated agent messages for the live demo @@ -466,11 +468,6 @@ function ProvidersSection() {
Supported
))} -
-
โ—Ž
-
More Coming
-
2025
-
); diff --git a/src/dashboard/react-components/SettingsPanel.tsx b/src/dashboard/react-components/SettingsPanel.tsx index 08084e92..d1d257a2 100644 --- a/src/dashboard/react-components/SettingsPanel.tsx +++ b/src/dashboard/react-components/SettingsPanel.tsx @@ -62,6 +62,7 @@ interface AIProvider { const AI_PROVIDERS: AIProvider[] = [ { id: 'anthropic', name: 'Anthropic', displayName: 'Claude', description: 'Claude Code - recommended for code tasks', color: '#D97757', cliCommand: 'claude' }, { id: 'codex', name: 'OpenAI', displayName: 'Codex', description: 'Codex - OpenAI coding assistant', color: '#10A37F', cliCommand: 'codex login' }, + { id: 'gemini', name: 'Google', displayName: 'Gemini', description: 'Gemini - Google AI coding assistant', color: '#4285F4', cliCommand: 'gemini' }, { id: 'opencode', name: 'OpenCode', displayName: 'OpenCode', description: 'OpenCode - AI coding assistant', color: '#00D4AA', cliCommand: 'opencode' }, { id: 'droid', name: 'Factory', displayName: 'Droid', description: 'Droid - Factory AI coding agent', color: '#6366F1', cliCommand: 'droid' }, ]; @@ -82,8 +83,14 @@ export function SettingsPanel({ settings, onSettingsChange, onResetSettings, + workspaceId, + csrfToken, }: SettingsPanelProps) { - const [activeTab, setActiveTab] = useState<'appearance' | 'notifications' | 'connection'>('appearance'); + const [activeTab, setActiveTab] = useState<'appearance' | 'notifications' | 'connection' | 'providers'>('appearance'); + const [providerStatus, setProviderStatus] = useState>({}); + const [connectingProvider, setConnectingProvider] = useState(null); + const [apiKeyInput, setApiKeyInput] = useState(''); + const [providerError, setProviderError] = useState(null); const updateSetting = useCallback( ( @@ -165,6 +172,17 @@ export function SettingsPanel({ Connection +
@@ -301,6 +319,104 @@ export function SettingsPanel({
)} + + {activeTab === 'providers' && ( +
+
+ +

+ Connect AI providers to spawn agents. API keys are stored securely. +

+ + {providerError && ( +
+ {providerError} +
+ )} + +
+ {AI_PROVIDERS.map((provider) => ( +
+
+
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.description}

+
+
+ {providerStatus[provider.id] && ( + + Connected + + )} +
+ + {!providerStatus[provider.id] && ( +
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 py-2 px-3 border border-border rounded-md text-sm bg-bg-tertiary text-text-primary placeholder-text-muted focus:outline-none focus:border-accent" + /> + +
+ )} + +
+ CLI: {provider.cliCommand} +
+
+ ))} +
+
+
+ )}
@@ -433,3 +549,13 @@ function MonitorIcon() { ); } + +function ProviderIcon() { + return ( + + + + + + ); +} diff --git a/src/dashboard/react-components/SpawnModal.tsx b/src/dashboard/react-components/SpawnModal.tsx index 666a16de..d421bb70 100644 --- a/src/dashboard/react-components/SpawnModal.tsx +++ b/src/dashboard/react-components/SpawnModal.tsx @@ -24,7 +24,7 @@ export interface SpawnConfig { function deriveShadowMode(command: string): 'subagent' | 'process' { const base = command.trim().split(' ')[0].toLowerCase(); - if (base.startsWith('claude') || base === 'codex' || base === 'opencode') return 'subagent'; + if (base.startsWith('claude') || base === 'codex' || base === 'opencode' || base === 'gemini' || base === 'droid') return 'subagent'; return 'process'; } @@ -52,6 +52,27 @@ const AGENT_TEMPLATES = [ description: 'OpenAI Codex agent', icon: 'โšก', }, + { + id: 'gemini', + name: 'Gemini', + command: 'gemini', + description: 'Google Gemini CLI agent', + icon: '๐Ÿ’Ž', + }, + { + id: 'opencode', + name: 'OpenCode', + command: 'opencode', + description: 'OpenCode AI agent', + icon: '๐Ÿ”ท', + }, + { + id: 'droid', + name: 'Droid', + command: 'droid', + description: 'Factory Droid agent', + icon: '๐Ÿค–', + }, { id: 'custom', name: 'Custom', From 63d8be6570ed11d497a41f7cbd858c9073457993 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:05:37 +0000 Subject: [PATCH 003/103] Add git token gateway for auto-refreshing GitHub credentials - Add /api/git/token endpoint that fetches fresh tokens via Nango - Create git-credential-relay helper script for workspace containers - Install gh CLI in workspace Dockerfile - Update entrypoint.sh to configure gateway-based git auth - Update Fly, Railway, and Docker provisioners to pass gateway credentials - Tokens auto-refresh via Nango, solving the 1-hour expiry problem The gateway pattern ensures agents can always push to GitHub: 1. Agent runs `git push` 2. Git calls credential helper 3. Helper calls /api/git/token with workspace token 4. Cloud API fetches fresh token from Nango 5. Nango returns cached or refreshed GitHub App installation token --- deploy/workspace/Dockerfile | 12 +- deploy/workspace/entrypoint.sh | 44 +++++++- deploy/workspace/git-credential-relay | 74 +++++++++++++ src/cloud/api/git.ts | 153 ++++++++++++++++++++++++++ src/cloud/provisioner/index.ts | 52 +++++++++ src/cloud/server.ts | 2 + 6 files changed, 334 insertions(+), 3 deletions(-) create mode 100644 deploy/workspace/git-credential-relay create mode 100644 src/cloud/api/git.ts diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index 6ab4e4ab..9d5b82fd 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -37,6 +37,15 @@ RUN apt-get update && apt-get install -y \ curl \ git \ python3 \ + jq \ + && rm -rf /var/lib/apt/lists/* + +# Install GitHub CLI (gh) +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt-get update \ + && apt-get install -y gh \ && rm -rf /var/lib/apt/lists/* # Copy from builder @@ -44,7 +53,8 @@ COPY --from=builder /app/dist ./dist COPY --from=builder /app/node_modules ./node_modules COPY --from=builder /app/package*.json ./ COPY deploy/workspace/entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh +COPY deploy/workspace/git-credential-relay /usr/local/bin/git-credential-relay +RUN chmod +x /entrypoint.sh /usr/local/bin/git-credential-relay # Install Codex globally as root (requires write to /usr/local) RUN npm install -g @openai/codex diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index b60f02a4..3975189a 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -16,8 +16,47 @@ REPO_LIST="${REPOSITORIES:-}" mkdir -p "${WORKSPACE_DIR}" cd "${WORKSPACE_DIR}" -# Configure Git credentials for GitHub clones (avoid storing tokens in remotes) -if [[ -n "${GITHUB_TOKEN:-}" ]]; then +# Configure Git credentials via the gateway (tokens auto-refresh via Nango) +# The credential helper fetches fresh tokens from the cloud API on each git operation +if [[ -n "${CLOUD_API_URL:-}" && -n "${WORKSPACE_ID:-}" && -n "${WORKSPACE_TOKEN:-}" ]]; then + log "Configuring git credential helper (gateway mode)" + git config --global credential.helper "/usr/local/bin/git-credential-relay" + git config --global credential.useHttpPath true + export GIT_TERMINAL_PROMPT=0 + + # Configure gh CLI to use the same token mechanism + # gh auth login expects a token via stdin or GH_TOKEN env var + # We'll set up a wrapper that fetches fresh tokens + mkdir -p "${HOME}/.config/gh" + cat > "${HOME}/.config/gh/hosts.yml" < "/tmp/gh-token-helper.sh" <<'GHEOF' +#!/usr/bin/env bash +# Fetch fresh token for gh CLI +response=$(curl -sf \ + -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" 2>/dev/null) +echo "$response" | grep -o '"token":"[^"]*"' | cut -d'"' -f4 +GHEOF + chmod +x "/tmp/gh-token-helper.sh" + + # gh CLI will use GH_TOKEN if set; we export a function to refresh it + # For now, set it once at startup (will be refreshed by the credential helper for git operations) + export GH_TOKEN=$(/tmp/gh-token-helper.sh 2>/dev/null || echo "") + if [[ -n "${GH_TOKEN}" ]]; then + log "GitHub CLI configured with fresh token" + else + log "WARN: Could not fetch GitHub token for gh CLI" + fi + +# Fallback: Use static GITHUB_TOKEN if provided (legacy mode) +elif [[ -n "${GITHUB_TOKEN:-}" ]]; then + log "Configuring git credentials (legacy static token mode)" GIT_ASKPASS_SCRIPT="/tmp/git-askpass.sh" cat > "${GIT_ASKPASS_SCRIPT}" <<'EOF' #!/usr/bin/env bash @@ -31,6 +70,7 @@ EOF chmod +x "${GIT_ASKPASS_SCRIPT}" export GIT_ASKPASS="${GIT_ASKPASS_SCRIPT}" export GIT_TERMINAL_PROMPT=0 + export GH_TOKEN="${GITHUB_TOKEN}" fi clone_or_update_repo() { diff --git a/deploy/workspace/git-credential-relay b/deploy/workspace/git-credential-relay new file mode 100644 index 00000000..c2984846 --- /dev/null +++ b/deploy/workspace/git-credential-relay @@ -0,0 +1,74 @@ +#!/usr/bin/env bash +# +# Git Credential Helper for Agent Relay Workspaces +# +# This script fetches fresh GitHub tokens from the cloud API gateway. +# Nango handles token refresh, so tokens are always valid. +# +# Usage: git config --global credential.helper /usr/local/bin/git-credential-relay +# +# Environment variables: +# WORKSPACE_ID - Required: The workspace ID for token lookup +# CLOUD_API_URL - Required: The cloud API base URL +# WORKSPACE_TOKEN - Required: Bearer token for API auth +# + +set -euo pipefail + +# Only handle 'get' operation +if [[ "${1:-}" != "get" ]]; then + exit 0 +fi + +# Read input from git (protocol=https, host=github.com, etc.) +declare -A input +while IFS='=' read -r key value; do + [[ -z "$key" ]] && break + input["$key"]="$value" +done + +# Only provide credentials for github.com +host="${input[host]:-}" +if [[ "$host" != "github.com" ]]; then + exit 0 +fi + +# Check required environment variables +if [[ -z "${WORKSPACE_ID:-}" ]]; then + echo "git-credential-relay: WORKSPACE_ID not set" >&2 + exit 1 +fi + +if [[ -z "${CLOUD_API_URL:-}" ]]; then + echo "git-credential-relay: CLOUD_API_URL not set" >&2 + exit 1 +fi + +if [[ -z "${WORKSPACE_TOKEN:-}" ]]; then + echo "git-credential-relay: WORKSPACE_TOKEN not set" >&2 + exit 1 +fi + +# Fetch fresh token from gateway +response=$(curl -sf \ + -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ + 2>/dev/null) || { + echo "git-credential-relay: Failed to fetch token from gateway" >&2 + exit 1 +} + +# Parse JSON response +token=$(echo "$response" | grep -o '"token":"[^"]*"' | cut -d'"' -f4) +username=$(echo "$response" | grep -o '"username":"[^"]*"' | cut -d'"' -f4) + +if [[ -z "$token" ]]; then + echo "git-credential-relay: No token in response" >&2 + exit 1 +fi + +# Output credentials in git credential format +echo "protocol=https" +echo "host=github.com" +echo "username=${username:-x-access-token}" +echo "password=${token}" diff --git a/src/cloud/api/git.ts b/src/cloud/api/git.ts new file mode 100644 index 00000000..e517ec21 --- /dev/null +++ b/src/cloud/api/git.ts @@ -0,0 +1,153 @@ +/** + * Git Gateway API Routes + * + * Provides fresh GitHub tokens to workspace containers for git operations. + * This gateway pattern ensures tokens are always valid (Nango handles refresh). + */ + +import crypto from 'crypto'; +import { Router, Request, Response } from 'express'; +import { db } from '../db/index.js'; +import { nangoService } from '../services/nango.js'; +import { getConfig } from '../config.js'; + +export const gitRouter = Router(); + +/** + * Generate expected workspace token using HMAC + */ +function generateExpectedToken(workspaceId: string): string { + const config = getConfig(); + return crypto + .createHmac('sha256', config.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); +} + +/** + * Verify workspace access token + * Workspaces authenticate with a secret passed at provisioning time + */ +function verifyWorkspaceToken(req: Request, workspaceId: string): boolean { + const authHeader = req.get('authorization'); + if (!authHeader?.startsWith('Bearer ')) { + return false; + } + const providedToken = authHeader.slice(7); + const expectedToken = generateExpectedToken(workspaceId); + + // Use timing-safe comparison to prevent timing attacks + try { + return crypto.timingSafeEqual( + Buffer.from(providedToken), + Buffer.from(expectedToken) + ); + } catch { + return false; + } +} + +/** + * GET /api/git/token + * Get a fresh GitHub token for git operations + * + * Query params: + * - workspaceId: The workspace requesting the token + * + * Returns: { token: string, expiresAt?: string } + * + * This endpoint is called by the git credential helper in workspace containers. + * It fetches a fresh GitHub App installation token via Nango. + */ +gitRouter.get('/token', async (req: Request, res: Response) => { + const { workspaceId } = req.query; + + if (!workspaceId || typeof workspaceId !== 'string') { + return res.status(400).json({ error: 'workspaceId is required' }); + } + + // Verify the request is from a valid workspace + if (!verifyWorkspaceToken(req, workspaceId)) { + return res.status(401).json({ error: 'Invalid workspace token' }); + } + + try { + // Get workspace to find the user + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + const userId = workspace.userId; + + // Find a repository with a Nango connection for this user + const repos = await db.repositories.findByUserId(userId); + const repoWithConnection = repos.find(r => r.nangoConnectionId); + + if (!repoWithConnection?.nangoConnectionId) { + return res.status(404).json({ + error: 'No GitHub App connection found', + hint: 'Connect a repository via the GitHub App to enable git operations', + }); + } + + // Get fresh token from Nango (auto-refreshes if needed) + const token = await nangoService.getGithubAppToken(repoWithConnection.nangoConnectionId); + + // GitHub App installation tokens expire after 1 hour + const expiresAt = new Date(Date.now() + 55 * 60 * 1000).toISOString(); // 55 min buffer + + res.json({ + token, + expiresAt, + username: 'x-access-token', // GitHub App tokens use this as username + }); + } catch (error) { + console.error('[git] Error getting token:', error); + res.status(500).json({ error: 'Failed to get GitHub token' }); + } +}); + +/** + * POST /api/git/token + * Same as GET but accepts body params (for compatibility with some git credential helpers) + */ +gitRouter.post('/token', async (req: Request, res: Response) => { + const workspaceId = req.body.workspaceId || req.query.workspaceId; + + if (!workspaceId || typeof workspaceId !== 'string') { + return res.status(400).json({ error: 'workspaceId is required' }); + } + + if (!verifyWorkspaceToken(req, workspaceId)) { + return res.status(401).json({ error: 'Invalid workspace token' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + const repos = await db.repositories.findByUserId(workspace.userId); + const repoWithConnection = repos.find(r => r.nangoConnectionId); + + if (!repoWithConnection?.nangoConnectionId) { + return res.status(404).json({ + error: 'No GitHub App connection found', + }); + } + + const token = await nangoService.getGithubAppToken(repoWithConnection.nangoConnectionId); + const expiresAt = new Date(Date.now() + 55 * 60 * 1000).toISOString(); + + res.json({ + token, + expiresAt, + username: 'x-access-token', + }); + } catch (error) { + console.error('[git] Error getting token:', error); + res.status(500).json({ error: 'Failed to get GitHub token' }); + } +}); diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index 2d1b76e9..c131a4d3 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -167,6 +167,8 @@ class FlyProvisioner implements ComputeProvisioner { private org: string; private region: string; private workspaceDomain?: string; + private cloudApiUrl: string; + private sessionSecret: string; constructor() { const config = getConfig(); @@ -177,6 +179,20 @@ class FlyProvisioner implements ComputeProvisioner { this.org = config.compute.fly.org; this.region = config.compute.fly.region || 'sjc'; this.workspaceDomain = config.compute.fly.workspaceDomain; + this.cloudApiUrl = config.publicUrl; + this.sessionSecret = config.sessionSecret; + } + + /** + * Generate a workspace token for API authentication + * This is a simple HMAC - in production, consider using JWTs + */ + private generateWorkspaceToken(workspaceId: string): string { + const crypto = require('crypto'); + return crypto + .createHmac('sha256', this.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); } async provision( @@ -243,6 +259,9 @@ class FlyProvisioner implements ComputeProvisioner { PROVIDERS: (workspace.config.providers ?? []).join(','), PORT: String(WORKSPACE_PORT), AGENT_RELAY_DASHBOARD_PORT: String(WORKSPACE_PORT), + // Git gateway configuration + CLOUD_API_URL: this.cloudApiUrl, + WORKSPACE_TOKEN: this.generateWorkspaceToken(workspace.id), }, services: [ { @@ -482,6 +501,8 @@ class FlyProvisioner implements ComputeProvisioner { */ class RailwayProvisioner implements ComputeProvisioner { private apiToken: string; + private cloudApiUrl: string; + private sessionSecret: string; constructor() { const config = getConfig(); @@ -489,6 +510,16 @@ class RailwayProvisioner implements ComputeProvisioner { throw new Error('Railway configuration missing'); } this.apiToken = config.compute.railway.apiToken; + this.cloudApiUrl = config.publicUrl; + this.sessionSecret = config.sessionSecret; + } + + private generateWorkspaceToken(workspaceId: string): string { + const crypto = require('crypto'); + return crypto + .createHmac('sha256', this.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); } async provision( @@ -561,6 +592,8 @@ class RailwayProvisioner implements ComputeProvisioner { PROVIDERS: (workspace.config.providers ?? []).join(','), PORT: String(WORKSPACE_PORT), AGENT_RELAY_DASHBOARD_PORT: String(WORKSPACE_PORT), + CLOUD_API_URL: this.cloudApiUrl, + WORKSPACE_TOKEN: this.generateWorkspaceToken(workspace.id), }; for (const [provider, token] of credentials) { @@ -727,6 +760,23 @@ class RailwayProvisioner implements ComputeProvisioner { * Local Docker provisioner (for development/self-hosted) */ class DockerProvisioner implements ComputeProvisioner { + private cloudApiUrl: string; + private sessionSecret: string; + + constructor() { + const config = getConfig(); + this.cloudApiUrl = config.publicUrl; + this.sessionSecret = config.sessionSecret; + } + + private generateWorkspaceToken(workspaceId: string): string { + const crypto = require('crypto'); + return crypto + .createHmac('sha256', this.sessionSecret) + .update(`workspace:${workspaceId}`) + .digest('hex'); + } + async provision( workspace: Workspace, credentials: Map @@ -742,6 +792,8 @@ class DockerProvisioner implements ComputeProvisioner { `-e PROVIDERS=${(workspace.config.providers ?? []).join(',')}`, `-e PORT=${WORKSPACE_PORT}`, `-e AGENT_RELAY_DASHBOARD_PORT=${WORKSPACE_PORT}`, + `-e CLOUD_API_URL=${this.cloudApiUrl}`, + `-e WORKSPACE_TOKEN=${this.generateWorkspaceToken(workspace.id)}`, ]; for (const [provider, token] of credentials) { diff --git a/src/cloud/server.ts b/src/cloud/server.ts index a9db80f1..904eef77 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -41,6 +41,7 @@ import { testHelpersRouter } from './api/test-helpers.js'; import { webhooksRouter } from './api/webhooks.js'; import { githubAppRouter } from './api/github-app.js'; import { nangoAuthRouter } from './api/nango-auth.js'; +import { gitRouter } from './api/git.js'; export interface CloudServer { app: Express; @@ -217,6 +218,7 @@ export async function createServer(): Promise { app.use('/api/webhooks', webhooksRouter); app.use('/api/github-app', githubAppRouter); app.use('/api/auth/nango', nangoAuthRouter); + app.use('/api/git', gitRouter); // Test helper routes (only available in non-production) if (process.env.NODE_ENV !== 'production') { From f1d2d6b0aa716fc8579e8d61c19425fc6971b435 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:09:55 +0000 Subject: [PATCH 004/103] Use jq for robust JSON parsing in git credential helper - Replace fragile grep/cut JSON parsing with jq - Add better error handling for API errors - Show API error messages in credential helper output --- deploy/workspace/entrypoint.sh | 4 +++- deploy/workspace/git-credential-relay | 20 ++++++++++++++------ 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index 3975189a..733cd8cb 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -41,7 +41,9 @@ EOF response=$(curl -sf \ -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" 2>/dev/null) -echo "$response" | grep -o '"token":"[^"]*"' | cut -d'"' -f4 +if [[ -n "$response" ]]; then + echo "$response" | jq -r '.token // empty' +fi GHEOF chmod +x "/tmp/gh-token-helper.sh" diff --git a/deploy/workspace/git-credential-relay b/deploy/workspace/git-credential-relay index c2984846..874e9dee 100644 --- a/deploy/workspace/git-credential-relay +++ b/deploy/workspace/git-credential-relay @@ -53,17 +53,25 @@ fi response=$(curl -sf \ -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ - 2>/dev/null) || { + 2>/dev/null) + +if [[ -z "$response" ]]; then echo "git-credential-relay: Failed to fetch token from gateway" >&2 exit 1 -} +fi -# Parse JSON response -token=$(echo "$response" | grep -o '"token":"[^"]*"' | cut -d'"' -f4) -username=$(echo "$response" | grep -o '"username":"[^"]*"' | cut -d'"' -f4) +# Parse JSON response using jq (more robust than grep) +token=$(echo "$response" | jq -r '.token // empty') +username=$(echo "$response" | jq -r '.username // "x-access-token"') if [[ -z "$token" ]]; then - echo "git-credential-relay: No token in response" >&2 + # Check if there's an error message + error=$(echo "$response" | jq -r '.error // empty') + if [[ -n "$error" ]]; then + echo "git-credential-relay: $error" >&2 + else + echo "git-credential-relay: No token in response" >&2 + fi exit 1 fi From 34eabd1ef9bbc022817345d47752881eee4801c4 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:25:57 +0000 Subject: [PATCH 005/103] Add agent policy enforcement system - Create AgentPolicyService with multi-level fallback: 1. Repo config (.claude/agents/*.md) 2. User-level PRPM policies (~/.config/agent-relay/policies/) 3. Cloud workspace policy (from dashboard) 4. Built-in defaults - Add spawn authorization with policy checks - Inject policy instructions into agent task prompts - Add cloud API endpoint for workspace policy configuration - Add audit logging for policy decisions Policy is enforced at runtime via AGENT_POLICY_ENFORCEMENT=1 env var. Agents are informed of their restrictions via injected prompts. --- docs/agent-policy-snippet.md | 40 ++ prpm.json | 23 + src/bridge/spawner.ts | 79 +++- src/bridge/types.ts | 11 + src/cloud/api/policy.ts | 258 +++++++++++ src/cloud/db/index.ts | 2 + src/cloud/db/schema.ts | 36 ++ src/policy/agent-policy.ts | 866 +++++++++++++++++++++++++++++++++++ 8 files changed, 1308 insertions(+), 7 deletions(-) create mode 100644 docs/agent-policy-snippet.md create mode 100644 src/cloud/api/policy.ts create mode 100644 src/policy/agent-policy.ts diff --git a/docs/agent-policy-snippet.md b/docs/agent-policy-snippet.md new file mode 100644 index 00000000..8d85700d --- /dev/null +++ b/docs/agent-policy-snippet.md @@ -0,0 +1,40 @@ +# Agent Policy + +You are operating under organizational agent policies. These policies govern your interactions with other agents and tools. + +## Your Permissions + +Check the policy service for your specific permissions. If no explicit restrictions are defined, you have full permissions. + +## General Rules + +1. **Spawn Authorization**: Only spawn agents you are authorized to spawn. Check with Lead before spawning if unsure. + +2. **Message Routing**: Only message agents you are authorized to communicate with. Use proper channels. + +3. **Tool Usage**: Only use tools you are authorized to use. Read-only operations are generally safer. + +4. **Rate Limits**: Respect rate limits on messages. Don't spam other agents. + +## Restricted Agents + +Workers and non-lead agents typically have these restrictions: +- Cannot spawn other agents without Lead approval +- Can only message Lead, Coordinator, and their assigned peers +- Limited to read-only tools unless explicitly granted write access + +## Lead Agents + +Lead agents typically have elevated permissions: +- Can spawn Worker agents +- Can message all agents +- Can use all tools +- Responsible for enforcing policy on spawned agents + +## Enforcement + +Policy violations are blocked at runtime. If your action is blocked, you'll receive a denial message explaining why. Do not attempt to circumvent policy restrictions. + +## Checking Your Policy + +To see your current policy, ask Lead or check the dashboard at `/api/policy/:workspaceId`. diff --git a/prpm.json b/prpm.json index e624d1c9..16989848 100644 --- a/prpm.json +++ b/prpm.json @@ -94,6 +94,29 @@ ".claude/agents/shadow-auditor.md", ".claude/agents/shadow-active.md" ] + }, + { + "name": "agent-policy-snippet", + "version": "1.0.0", + "description": "Agent policy rules snippet - informs agents of spawn, messaging, and tool restrictions", + "format": "generic", + "subtype": "snippet", + "snippet": { + "target": "AGENTS.md", + "position": "append", + "header": "Agent Policy" + }, + "tags": [ + "policy", + "rules", + "permissions", + "authorization", + "security", + "multi-agent" + ], + "files": [ + "docs/agent-policy-snippet.md" + ] } ] } diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index 2b784d6c..6bbd4703 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -11,6 +11,7 @@ import { getProjectPaths } from '../utils/project-namespace.js'; import { resolveCommand } from '../utils/command-resolver.js'; import { PtyWrapper, type PtyWrapperConfig, type SummaryEvent, type SessionEndEvent } from '../wrapper/pty-wrapper.js'; import { selectShadowCli } from './shadow-cli.js'; +import { AgentPolicyService, type CloudPolicyFetcher, type PolicyDecision } from '../policy/agent-policy.js'; import type { SpawnRequest, SpawnResult, @@ -74,6 +75,8 @@ export class AgentSpawner { private dashboardPort?: number; private onAgentDeath?: OnAgentDeathCallback; private cloudPersistence?: CloudPersistenceHandler; + private policyService?: AgentPolicyService; + private policyEnforcementEnabled = false; constructor(projectRoot: string, _tmuxSession?: string, dashboardPort?: number) { const paths = getProjectPaths(projectRoot); @@ -86,6 +89,39 @@ export class AgentSpawner { // Ensure logs directory exists fs.mkdirSync(this.logsDir, { recursive: true }); + + // Initialize policy service if enforcement is enabled + if (process.env.AGENT_POLICY_ENFORCEMENT === '1') { + this.policyEnforcementEnabled = true; + this.policyService = new AgentPolicyService({ + projectRoot: this.projectRoot, + workspaceId: process.env.WORKSPACE_ID, + strictMode: process.env.AGENT_POLICY_STRICT === '1', + }); + console.log('[spawner] Policy enforcement enabled'); + } + } + + /** + * Set cloud policy fetcher for workspace-level policies + */ + setCloudPolicyFetcher(fetcher: CloudPolicyFetcher): void { + if (this.policyService) { + // Recreate policy service with cloud fetcher + this.policyService = new AgentPolicyService({ + projectRoot: this.projectRoot, + workspaceId: process.env.WORKSPACE_ID, + cloudFetcher: fetcher, + strictMode: process.env.AGENT_POLICY_STRICT === '1', + }); + } + } + + /** + * Get the policy service (for external access to policy checks) + */ + getPolicyService(): AgentPolicyService | undefined { + return this.policyService; } /** @@ -166,7 +202,7 @@ export class AgentSpawner { * Spawn a new worker agent using node-pty */ async spawn(request: SpawnRequest): Promise { - const { name, cli, task, team } = request; + const { name, cli, task, team, spawnerName } = request; const debug = process.env.DEBUG_SPAWN === '1'; // Check if worker already exists @@ -178,6 +214,23 @@ export class AgentSpawner { }; } + // Policy enforcement: check if the spawner is authorized to spawn this agent + if (this.policyEnforcementEnabled && this.policyService && spawnerName) { + const decision = await this.policyService.canSpawn(spawnerName, name, cli); + if (!decision.allowed) { + console.warn(`[spawner] Policy blocked spawn: ${spawnerName} -> ${name}: ${decision.reason}`); + return { + success: false, + name, + error: `Policy denied: ${decision.reason}`, + policyDecision: decision, + }; + } + if (debug) { + console.log(`[spawner:debug] Policy allowed spawn: ${spawnerName} -> ${name} (source: ${decision.policySource})`); + } + } + try { // Parse CLI command const cliParts = cli.split(' '); @@ -308,10 +361,22 @@ export class AgentSpawner { }; } + // Build the full message: policy instructions (if any) + task + let fullMessage = task || ''; + + // Prepend policy instructions if enforcement is enabled + if (this.policyEnforcementEnabled && this.policyService) { + const policyInstruction = await this.policyService.getPolicyInstruction(name); + if (policyInstruction) { + fullMessage = `${policyInstruction}\n\n${fullMessage}`; + if (debug) console.log(`[spawner:debug] Prepended policy instructions to task for ${name}`); + } + } + // Send task via relay message if provided (not via direct PTY injection) // This ensures the agent is ready to receive before processing the task - if (task && task.trim()) { - if (debug) console.log(`[spawner:debug] Will send task via relay: ${task.substring(0, 50)}...`); + if (fullMessage && fullMessage.trim()) { + if (debug) console.log(`[spawner:debug] Will send task via relay: ${fullMessage.substring(0, 50)}...`); // If we have dashboard API, send task as relay message if (this.dashboardPort) { @@ -323,7 +388,7 @@ export class AgentSpawner { headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ to: name, - message: task, + message: fullMessage, from: '__spawner__', }), }); @@ -333,16 +398,16 @@ export class AgentSpawner { } else { console.warn(`[spawner] Failed to send task via relay: ${result.error}`); // Fall back to direct injection - pty.write(task + '\r'); + pty.write(fullMessage + '\r'); } } catch (err: any) { console.warn(`[spawner] Relay send failed, falling back to direct injection: ${err.message}`); - pty.write(task + '\r'); + pty.write(fullMessage + '\r'); } } else { // No dashboard API available - use direct injection as fallback if (debug) console.log(`[spawner:debug] No dashboard API, using direct injection`); - pty.write(task + '\r'); + pty.write(fullMessage + '\r'); } } diff --git a/src/bridge/types.ts b/src/bridge/types.ts index 5d05e556..e604c883 100644 --- a/src/bridge/types.ts +++ b/src/bridge/types.ts @@ -41,6 +41,8 @@ export interface SpawnRequest { task: string; /** Optional team name to organize agents under */ team?: string; + /** Name of the agent requesting the spawn (for policy enforcement) */ + spawnerName?: string; /** Shadow execution mode (subagent = no extra process) */ shadowMode?: 'subagent' | 'process'; /** Primary agent to shadow (if this agent is a shadow) */ @@ -53,12 +55,21 @@ export interface SpawnRequest { shadowSpeakOn?: Array<'SESSION_END' | 'CODE_WRITTEN' | 'REVIEW_REQUEST' | 'EXPLICIT_ASK' | 'ALL_MESSAGES'>; } +/** Policy decision details */ +export interface PolicyDecision { + allowed: boolean; + reason: string; + policySource: 'repo' | 'local' | 'workspace' | 'default'; +} + export interface SpawnResult { success: boolean; name: string; /** PID of the spawned process (for pty-based workers) */ pid?: number; error?: string; + /** Policy decision details if spawn was blocked by policy */ + policyDecision?: PolicyDecision; } export interface WorkerInfo { diff --git a/src/cloud/api/policy.ts b/src/cloud/api/policy.ts new file mode 100644 index 00000000..060f6fa2 --- /dev/null +++ b/src/cloud/api/policy.ts @@ -0,0 +1,258 @@ +/** + * Agent Policy API Routes + * + * Provides endpoints for managing workspace-level agent policies. + * These policies serve as fallbacks when repos don't have .claude/policies/ files. + */ + +import { Router, Request, Response } from 'express'; +import { db } from '../db/index.js'; +import type { WorkspaceAgentPolicy, AgentPolicyRule } from '../db/schema.js'; + +export const policyRouter = Router(); + +/** + * GET /api/policy/:workspaceId + * Get the agent policy for a workspace + */ +policyRouter.get('/:workspaceId', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + const userId = (req as any).userId; + + if (!userId) { + return res.status(401).json({ error: 'Unauthorized' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Check user has access to this workspace + if (workspace.userId !== userId) { + const member = await db.workspaceMembers.findByWorkspaceAndUser(workspaceId, userId); + if (!member) { + return res.status(403).json({ error: 'Access denied' }); + } + } + + // Return the policy (or default if not set) + const policy = workspace.config?.agentPolicy ?? getDefaultPolicy(); + + res.json({ + workspaceId, + policy, + source: workspace.config?.agentPolicy ? 'workspace' : 'default', + }); + } catch (error) { + console.error('[policy] Error getting policy:', error); + res.status(500).json({ error: 'Failed to get policy' }); + } +}); + +/** + * PUT /api/policy/:workspaceId + * Update the agent policy for a workspace + */ +policyRouter.put('/:workspaceId', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + const userId = (req as any).userId; + const policy = req.body.policy as WorkspaceAgentPolicy; + + if (!userId) { + return res.status(401).json({ error: 'Unauthorized' }); + } + + if (!policy || typeof policy !== 'object') { + return res.status(400).json({ error: 'Policy object is required' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Only owner can update policy + if (workspace.userId !== userId) { + const member = await db.workspaceMembers.findByWorkspaceAndUser(workspaceId, userId); + if (!member || !['owner', 'admin'].includes(member.role)) { + return res.status(403).json({ error: 'Only owners and admins can update policy' }); + } + } + + // Validate policy structure + const validationError = validatePolicy(policy); + if (validationError) { + return res.status(400).json({ error: validationError }); + } + + // Update workspace config with new policy + const newConfig = { + ...workspace.config, + agentPolicy: policy, + }; + + await db.workspaces.update(workspaceId, { config: newConfig }); + + res.json({ + success: true, + workspaceId, + policy, + }); + } catch (error) { + console.error('[policy] Error updating policy:', error); + res.status(500).json({ error: 'Failed to update policy' }); + } +}); + +/** + * DELETE /api/policy/:workspaceId + * Reset workspace policy to defaults + */ +policyRouter.delete('/:workspaceId', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + const userId = (req as any).userId; + + if (!userId) { + return res.status(401).json({ error: 'Unauthorized' }); + } + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Only owner can reset policy + if (workspace.userId !== userId) { + const member = await db.workspaceMembers.findByWorkspaceAndUser(workspaceId, userId); + if (!member || member.role !== 'owner') { + return res.status(403).json({ error: 'Only owners can reset policy' }); + } + } + + // Remove policy from config + const { agentPolicy, ...restConfig } = workspace.config ?? {}; + await db.workspaces.update(workspaceId, { config: restConfig }); + + res.json({ + success: true, + workspaceId, + policy: getDefaultPolicy(), + source: 'default', + }); + } catch (error) { + console.error('[policy] Error resetting policy:', error); + res.status(500).json({ error: 'Failed to reset policy' }); + } +}); + +/** + * GET /api/policy/:workspaceId/internal + * Internal endpoint for workspace containers to fetch policy + * Uses workspace token authentication (not user auth) + */ +policyRouter.get('/:workspaceId/internal', async (req: Request, res: Response) => { + const { workspaceId } = req.params; + + // This endpoint should be called with the workspace token + // The git.ts file has the token verification logic we can reuse + // For now, we'll trust the workspace ID from container requests + + try { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + const policy = workspace.config?.agentPolicy ?? getDefaultPolicy(); + + res.json({ + defaultPolicy: policy.defaultPolicy, + agents: policy.agents ?? [], + settings: policy.settings ?? { + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }, + }); + } catch (error) { + console.error('[policy] Error getting internal policy:', error); + res.status(500).json({ error: 'Failed to get policy' }); + } +}); + +/** + * Get default policy + */ +function getDefaultPolicy(): WorkspaceAgentPolicy { + return { + defaultPolicy: { + name: '*', + allowedTools: undefined, // All tools allowed + canSpawn: undefined, // Can spawn any + canMessage: undefined, // Can message any + maxSpawns: 10, + rateLimit: 60, + canBeSpawned: true, + }, + agents: [], + settings: { + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }, + }; +} + +/** + * Validate policy structure + */ +function validatePolicy(policy: WorkspaceAgentPolicy): string | null { + // Validate defaultPolicy + if (policy.defaultPolicy && typeof policy.defaultPolicy !== 'object') { + return 'defaultPolicy must be an object'; + } + + // Validate agents array + if (policy.agents) { + if (!Array.isArray(policy.agents)) { + return 'agents must be an array'; + } + + for (let i = 0; i < policy.agents.length; i++) { + const agent = policy.agents[i]; + if (!agent.name || typeof agent.name !== 'string') { + return `agents[${i}].name is required and must be a string`; + } + + // Validate arrays + if (agent.allowedTools && !Array.isArray(agent.allowedTools)) { + return `agents[${i}].allowedTools must be an array`; + } + if (agent.canSpawn && !Array.isArray(agent.canSpawn)) { + return `agents[${i}].canSpawn must be an array`; + } + if (agent.canMessage && !Array.isArray(agent.canMessage)) { + return `agents[${i}].canMessage must be an array`; + } + + // Validate numbers + if (agent.maxSpawns !== undefined && typeof agent.maxSpawns !== 'number') { + return `agents[${i}].maxSpawns must be a number`; + } + if (agent.rateLimit !== undefined && typeof agent.rateLimit !== 'number') { + return `agents[${i}].rateLimit must be a number`; + } + } + } + + // Validate settings + if (policy.settings && typeof policy.settings !== 'object') { + return 'settings must be an object'; + } + + return null; +} diff --git a/src/cloud/db/index.ts b/src/cloud/db/index.ts index ed846149..7cf52067 100644 --- a/src/cloud/db/index.ts +++ b/src/cloud/db/index.ts @@ -19,6 +19,8 @@ export type { Workspace, NewWorkspace, WorkspaceConfig, + WorkspaceAgentPolicy, + AgentPolicyRule, WorkspaceMember, NewWorkspaceMember, ProjectGroup, diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index 8335277e..a5a5c7e5 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -118,6 +118,40 @@ export const credentialsRelations = relations(credentials, ({ one }) => ({ // Workspaces // ============================================================================ +// Agent policy types for workspace-level enforcement +export interface AgentPolicyRule { + /** Agent name pattern (supports wildcards: "Lead", "Worker*", "*") */ + name: string; + /** Allowed tools (empty = all allowed, ["none"] = no tools) */ + allowedTools?: string[]; + /** Agents this agent can spawn (empty = can spawn any) */ + canSpawn?: string[]; + /** Agents this agent can message (empty = can message any) */ + canMessage?: string[]; + /** Maximum concurrent spawns allowed */ + maxSpawns?: number; + /** Rate limit: messages per minute */ + rateLimit?: number; + /** Whether this agent can be spawned by others */ + canBeSpawned?: boolean; +} + +export interface WorkspaceAgentPolicy { + /** Default policy for agents without explicit config */ + defaultPolicy?: AgentPolicyRule; + /** Named agent policies */ + agents?: AgentPolicyRule[]; + /** Global settings */ + settings?: { + /** Require explicit agent definitions (reject unknown agents) */ + requireExplicitAgents?: boolean; + /** Enable audit logging */ + auditEnabled?: boolean; + /** Maximum total agents */ + maxTotalAgents?: number; + }; +} + // Workspace configuration type export interface WorkspaceConfig { providers?: string[]; @@ -125,6 +159,8 @@ export interface WorkspaceConfig { supervisorEnabled?: boolean; maxAgents?: number; resourceTier?: 'small' | 'medium' | 'large' | 'xlarge'; + /** Agent policy for this workspace (enforced when repos don't have agents.md) */ + agentPolicy?: WorkspaceAgentPolicy; } export const workspaces = pgTable('workspaces', { diff --git a/src/policy/agent-policy.ts b/src/policy/agent-policy.ts new file mode 100644 index 00000000..f54514b5 --- /dev/null +++ b/src/policy/agent-policy.ts @@ -0,0 +1,866 @@ +/** + * Agent Policy Service + * + * Manages agent permissions and rules with multi-level fallback: + * 1. Repo-level policy (.claude/agents/*.md) + * 2. Workspace-level policy (from cloud API) + * 3. Built-in safe defaults + * + * Provides spawn authorization, tool permission checks, and audit logging. + */ + +import fs from 'node:fs'; +import path from 'node:path'; +import { findAgentConfig, type AgentConfig } from '../utils/agent-config.js'; + +import os from 'node:os'; + +/** + * PRPM-style policy file format (YAML or JSON) + * + * Policy files are loaded from (in order of precedence): + * 1. User-level: ~/.config/agent-relay/policies/*.yaml (NOT in source control) + * 2. Cloud: Workspace config from dashboard (stored in database) + * + * PRPM packages install to the user-level location to avoid polluting repos. + * Install via: prpm install @org/strict-agent-rules --global + * + * Example policy file (~/.config/agent-relay/policies/strict-rules.yaml): + * ```yaml + * name: strict-spawn-rules + * version: 1.0.0 + * description: Restrict agent spawning to leads only + * + * agents: + * - name: Lead + * canSpawn: ["*"] + * canMessage: ["*"] + * - name: Worker* + * canSpawn: [] + * canMessage: ["Lead", "Coordinator"] + * + * settings: + * requireExplicitAgents: false + * auditEnabled: true + * ``` + */ + +/** + * Agent policy definition + */ +export interface AgentPolicy { + /** Agent name pattern (supports wildcards: "Lead", "Worker*", "*") */ + name: string; + /** Allowed tools (empty = all allowed, ["none"] = no tools) */ + allowedTools?: string[]; + /** Agents this agent can spawn (empty = can spawn any) */ + canSpawn?: string[]; + /** Agents this agent can message (empty = can message any) */ + canMessage?: string[]; + /** Maximum concurrent spawns allowed */ + maxSpawns?: number; + /** Rate limit: messages per minute */ + rateLimit?: number; + /** Whether this agent can be spawned by others */ + canBeSpawned?: boolean; + /** Custom metadata */ + metadata?: Record; +} + +/** + * Workspace-level policy configuration + */ +export interface WorkspacePolicy { + /** Default policy for agents without explicit config */ + defaultPolicy: AgentPolicy; + /** Named agent policies */ + agents: AgentPolicy[]; + /** Global settings */ + settings: { + /** Require explicit agent definitions (reject unknown agents) */ + requireExplicitAgents: boolean; + /** Enable audit logging */ + auditEnabled: boolean; + /** Maximum total agents */ + maxTotalAgents: number; + }; +} + +/** + * Policy decision with reasoning + */ +export interface PolicyDecision { + allowed: boolean; + reason: string; + policySource: 'repo' | 'local' | 'workspace' | 'default'; + matchedPolicy?: AgentPolicy; +} + +/** + * Audit log entry + */ +export interface AuditEntry { + timestamp: number; + action: 'spawn' | 'message' | 'tool' | 'release'; + actor: string; + target?: string; + decision: PolicyDecision; + context?: Record; +} + +/** Built-in safe defaults when no policy exists */ +const DEFAULT_POLICY: AgentPolicy = { + name: '*', + allowedTools: undefined, // All tools allowed by default + canSpawn: undefined, // Can spawn any agent + canMessage: undefined, // Can message any agent + maxSpawns: 10, + rateLimit: 60, // 60 messages per minute + canBeSpawned: true, +}; + +/** Restrictive defaults for unknown agents in strict mode */ +const STRICT_DEFAULT_POLICY: AgentPolicy = { + name: '*', + allowedTools: ['Read', 'Grep', 'Glob'], // Read-only by default + canSpawn: [], // Cannot spawn + canMessage: ['Lead', 'Coordinator'], // Can only message leads + maxSpawns: 0, + rateLimit: 10, + canBeSpawned: false, +}; + +/** + * Cloud policy fetcher interface + * Implement this to fetch workspace policies from cloud API + */ +export interface CloudPolicyFetcher { + getWorkspacePolicy(workspaceId: string): Promise; +} + +export class AgentPolicyService { + private projectRoot: string; + private workspaceId?: string; + private cloudFetcher?: CloudPolicyFetcher; + private cachedWorkspacePolicy?: WorkspacePolicy; + private cachedLocalPolicy?: WorkspacePolicy; + private policyCacheExpiry = 0; + private localPolicyCacheExpiry = 0; + private auditLog: AuditEntry[] = []; + private strictMode: boolean; + + /** Cache TTL in milliseconds (5 minutes) */ + private static readonly CACHE_TTL_MS = 5 * 60 * 1000; + /** Local policy cache TTL (1 minute - files can change) */ + private static readonly LOCAL_CACHE_TTL_MS = 60 * 1000; + /** Maximum audit log entries to keep in memory */ + private static readonly MAX_AUDIT_ENTRIES = 1000; + + constructor(options: { + projectRoot: string; + workspaceId?: string; + cloudFetcher?: CloudPolicyFetcher; + strictMode?: boolean; + }) { + this.projectRoot = options.projectRoot; + this.workspaceId = options.workspaceId; + this.cloudFetcher = options.cloudFetcher; + this.strictMode = options.strictMode ?? false; + } + + /** + * Get the user-level policies directory + * Uses ~/.config/agent-relay/policies/ (not in source control) + */ + private getUserPoliciesDir(): string { + const configDir = process.env.AGENT_RELAY_CONFIG_DIR ?? + path.join(os.homedir(), '.config', 'agent-relay'); + return path.join(configDir, 'policies'); + } + + /** + * Load policies from user-level directory (PRPM-installable) + * Files are YAML/JSON with agent policy definitions + * Location: ~/.config/agent-relay/policies/*.yaml + */ + private loadLocalPolicies(): WorkspacePolicy | null { + // Check cache + if (this.cachedLocalPolicy && Date.now() < this.localPolicyCacheExpiry) { + return this.cachedLocalPolicy; + } + + const policiesDir = this.getUserPoliciesDir(); + if (!fs.existsSync(policiesDir)) { + return null; + } + + try { + const files = fs.readdirSync(policiesDir).filter(f => + f.endsWith('.yaml') || f.endsWith('.yml') || f.endsWith('.json') + ); + + if (files.length === 0) { + return null; + } + + // Merge all policy files + const mergedAgents: AgentPolicy[] = []; + let mergedSettings: WorkspacePolicy['settings'] = { + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }; + let mergedDefault: AgentPolicy = { ...DEFAULT_POLICY }; + + for (const file of files) { + const filePath = path.join(policiesDir, file); + const content = fs.readFileSync(filePath, 'utf-8'); + + let parsed: Record; + if (file.endsWith('.json')) { + parsed = JSON.parse(content); + } else { + // Simple YAML parsing for policy files + parsed = this.parseSimpleYaml(content); + } + + // Merge agents + if (Array.isArray(parsed.agents)) { + for (const agent of parsed.agents) { + if (agent && typeof agent === 'object' && 'name' in agent) { + mergedAgents.push(agent as AgentPolicy); + } + } + } + + // Merge settings (later files override) + if (parsed.settings && typeof parsed.settings === 'object') { + mergedSettings = { ...mergedSettings, ...parsed.settings as Record }; + } + + // Merge default policy + if (parsed.defaultPolicy && typeof parsed.defaultPolicy === 'object') { + mergedDefault = { ...mergedDefault, ...parsed.defaultPolicy as AgentPolicy }; + } + } + + const policy: WorkspacePolicy = { + defaultPolicy: mergedDefault, + agents: mergedAgents, + settings: mergedSettings, + }; + + this.cachedLocalPolicy = policy; + this.localPolicyCacheExpiry = Date.now() + AgentPolicyService.LOCAL_CACHE_TTL_MS; + + return policy; + } catch (err) { + console.error('[policy] Failed to load local policies:', err); + return null; + } + } + + /** + * Simple YAML parser for policy files + * Handles basic key: value and arrays + */ + private parseSimpleYaml(content: string): Record { + const result: Record = {}; + const lines = content.split('\n'); + let currentKey = ''; + let currentArray: unknown[] | null = null; + let currentObject: Record | null = null; + let indent = 0; + + for (const line of lines) { + const trimmed = line.trim(); + + // Skip comments and empty lines + if (!trimmed || trimmed.startsWith('#')) continue; + + // Calculate indentation + const lineIndent = line.length - line.trimStart().length; + + // Array item + if (trimmed.startsWith('- ')) { + const value = trimmed.slice(2).trim(); + + // Object in array (e.g., "- name: Worker") + if (value.includes(':')) { + const [key, val] = value.split(':').map(s => s.trim()); + currentObject = { [key]: this.parseValue(val) }; + if (currentArray) { + currentArray.push(currentObject); + } + } else { + // Simple array value + if (currentArray) { + currentArray.push(this.parseValue(value)); + } + } + continue; + } + + // Key: value pair + const colonIdx = trimmed.indexOf(':'); + if (colonIdx > 0) { + const key = trimmed.slice(0, colonIdx).trim(); + const value = trimmed.slice(colonIdx + 1).trim(); + + // If we're inside an object in an array + if (currentObject && lineIndent > indent) { + currentObject[key] = this.parseValue(value); + continue; + } + + // Top-level or section key + if (value === '' || value === '|' || value === '>') { + // Start of array or nested object + currentKey = key; + currentArray = []; + currentObject = null; + indent = lineIndent; + result[key] = currentArray; + } else { + // Simple key: value + if (lineIndent === 0) { + result[key] = this.parseValue(value); + currentKey = ''; + currentArray = null; + currentObject = null; + } else if (currentObject) { + currentObject[key] = this.parseValue(value); + } + } + } + } + + return result; + } + + /** + * Parse a YAML value string + */ + private parseValue(value: string): unknown { + if (!value || value === '~' || value === 'null') return null; + if (value === 'true') return true; + if (value === 'false') return false; + + // Array notation [a, b, c] + if (value.startsWith('[') && value.endsWith(']')) { + const inner = value.slice(1, -1); + if (!inner.trim()) return []; + return inner.split(',').map(s => { + const trimmed = s.trim().replace(/^["']|["']$/g, ''); + return trimmed; + }); + } + + // Number + if (/^-?\d+(\.\d+)?$/.test(value)) { + return parseFloat(value); + } + + // String (remove quotes if present) + return value.replace(/^["']|["']$/g, ''); + } + + /** + * Check if an agent can spawn another agent + */ + async canSpawn( + spawnerName: string, + targetName: string, + targetCli: string + ): Promise { + const spawnerPolicy = await this.getAgentPolicy(spawnerName); + const targetPolicy = await this.getAgentPolicy(targetName); + + // Check if target can be spawned + if (targetPolicy.matchedPolicy?.canBeSpawned === false) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${targetName}" is not allowed to be spawned`, + policySource: targetPolicy.policySource, + matchedPolicy: targetPolicy.matchedPolicy, + }; + this.audit('spawn', spawnerName, targetName, decision, { cli: targetCli }); + return decision; + } + + // Check if spawner can spawn + const canSpawnList = spawnerPolicy.matchedPolicy?.canSpawn; + if (canSpawnList !== undefined && canSpawnList.length > 0) { + const canSpawn = this.matchesPattern(targetName, canSpawnList); + if (!canSpawn) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${spawnerName}" is not allowed to spawn "${targetName}"`, + policySource: spawnerPolicy.policySource, + matchedPolicy: spawnerPolicy.matchedPolicy, + }; + this.audit('spawn', spawnerName, targetName, decision, { cli: targetCli }); + return decision; + } + } + + // Check max spawns (would need spawn count tracking - placeholder) + const decision: PolicyDecision = { + allowed: true, + reason: 'Spawn permitted by policy', + policySource: spawnerPolicy.policySource, + matchedPolicy: spawnerPolicy.matchedPolicy, + }; + this.audit('spawn', spawnerName, targetName, decision, { cli: targetCli }); + return decision; + } + + /** + * Check if an agent can send a message to another agent + */ + async canMessage( + senderName: string, + recipientName: string + ): Promise { + const senderPolicy = await this.getAgentPolicy(senderName); + + const canMessageList = senderPolicy.matchedPolicy?.canMessage; + if (canMessageList !== undefined && canMessageList.length > 0) { + const canMessage = this.matchesPattern(recipientName, canMessageList); + if (!canMessage) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${senderName}" is not allowed to message "${recipientName}"`, + policySource: senderPolicy.policySource, + matchedPolicy: senderPolicy.matchedPolicy, + }; + this.audit('message', senderName, recipientName, decision); + return decision; + } + } + + const decision: PolicyDecision = { + allowed: true, + reason: 'Message permitted by policy', + policySource: senderPolicy.policySource, + matchedPolicy: senderPolicy.matchedPolicy, + }; + this.audit('message', senderName, recipientName, decision); + return decision; + } + + /** + * Check if an agent can use a specific tool + */ + async canUseTool(agentName: string, toolName: string): Promise { + const policy = await this.getAgentPolicy(agentName); + + const allowedTools = policy.matchedPolicy?.allowedTools; + if (allowedTools !== undefined) { + // ["none"] means no tools allowed + if (allowedTools.length === 1 && allowedTools[0] === 'none') { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${agentName}" is not allowed to use any tools`, + policySource: policy.policySource, + matchedPolicy: policy.matchedPolicy, + }; + this.audit('tool', agentName, toolName, decision); + return decision; + } + + // Check if tool is in allowed list + const allowed = this.matchesPattern(toolName, allowedTools); + if (!allowed) { + const decision: PolicyDecision = { + allowed: false, + reason: `Agent "${agentName}" is not allowed to use tool "${toolName}"`, + policySource: policy.policySource, + matchedPolicy: policy.matchedPolicy, + }; + this.audit('tool', agentName, toolName, decision); + return decision; + } + } + + const decision: PolicyDecision = { + allowed: true, + reason: 'Tool usage permitted by policy', + policySource: policy.policySource, + matchedPolicy: policy.matchedPolicy, + }; + this.audit('tool', agentName, toolName, decision); + return decision; + } + + /** + * Get the effective policy for an agent + * Fallback chain: repo config โ†’ user PRPM policies โ†’ cloud workspace โ†’ defaults + */ + async getAgentPolicy(agentName: string): Promise<{ + matchedPolicy: AgentPolicy; + policySource: 'repo' | 'local' | 'workspace' | 'default'; + }> { + // 1. Try repo-level config (.claude/agents/*.md) + const repoConfig = findAgentConfig(agentName, this.projectRoot); + if (repoConfig) { + return { + matchedPolicy: this.configToPolicy(repoConfig), + policySource: 'repo', + }; + } + + // 2. Try user-level PRPM policies (~/.config/agent-relay/policies/*.yaml) + const localPolicy = this.loadLocalPolicies(); + if (localPolicy) { + // Check for strict mode in local policy + if (localPolicy.settings?.requireExplicitAgents) { + const matchedPolicy = this.findMatchingPolicy(agentName, localPolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'local' }; + } + // Unknown agent in strict mode + return { + matchedPolicy: { ...STRICT_DEFAULT_POLICY, name: agentName }, + policySource: 'local', + }; + } + + // Find matching policy + const matchedPolicy = this.findMatchingPolicy(agentName, localPolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'local' }; + } + + // Use local default + if (localPolicy.defaultPolicy) { + return { + matchedPolicy: { ...localPolicy.defaultPolicy, name: agentName }, + policySource: 'local', + }; + } + } + + // 3. Try workspace-level policy from cloud + const workspacePolicy = await this.getWorkspacePolicy(); + if (workspacePolicy) { + // Check for strict mode + if (workspacePolicy.settings?.requireExplicitAgents) { + // In strict mode, unknown agents get restrictive defaults + const matchedPolicy = this.findMatchingPolicy(agentName, workspacePolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'workspace' }; + } + // Unknown agent in strict mode + return { + matchedPolicy: { ...STRICT_DEFAULT_POLICY, name: agentName }, + policySource: 'workspace', + }; + } + + // Find matching policy + const matchedPolicy = this.findMatchingPolicy(agentName, workspacePolicy.agents); + if (matchedPolicy) { + return { matchedPolicy, policySource: 'workspace' }; + } + + // Use workspace default + if (workspacePolicy.defaultPolicy) { + return { + matchedPolicy: { ...workspacePolicy.defaultPolicy, name: agentName }, + policySource: 'workspace', + }; + } + } + + // 4. Fall back to built-in defaults + const defaultPolicy = this.strictMode ? STRICT_DEFAULT_POLICY : DEFAULT_POLICY; + return { + matchedPolicy: { ...defaultPolicy, name: agentName }, + policySource: 'default', + }; + } + + /** + * Get workspace policy from cloud (with caching) + */ + private async getWorkspacePolicy(): Promise { + if (!this.workspaceId || !this.cloudFetcher) { + return null; + } + + // Check cache + if (this.cachedWorkspacePolicy && Date.now() < this.policyCacheExpiry) { + return this.cachedWorkspacePolicy; + } + + try { + const policy = await this.cloudFetcher.getWorkspacePolicy(this.workspaceId); + if (policy) { + this.cachedWorkspacePolicy = policy; + this.policyCacheExpiry = Date.now() + AgentPolicyService.CACHE_TTL_MS; + } + return policy; + } catch (err) { + console.error('[policy] Failed to fetch workspace policy:', err); + // Return cached policy if available, even if expired + return this.cachedWorkspacePolicy ?? null; + } + } + + /** + * Find matching policy from a list (supports wildcards) + */ + private findMatchingPolicy(agentName: string, policies: AgentPolicy[]): AgentPolicy | null { + // First try exact match + const exactMatch = policies.find(p => p.name.toLowerCase() === agentName.toLowerCase()); + if (exactMatch) return exactMatch; + + // Then try pattern match + for (const policy of policies) { + if (this.matchesPattern(agentName, [policy.name])) { + return policy; + } + } + + return null; + } + + /** + * Check if a name matches any pattern in the list + * Supports: exact match, prefix* match, *suffix match, * (all) + */ + private matchesPattern(name: string, patterns: string[]): boolean { + const lowerName = name.toLowerCase(); + for (const pattern of patterns) { + const lowerPattern = pattern.toLowerCase(); + + // Wildcard all + if (lowerPattern === '*') return true; + + // Exact match + if (lowerPattern === lowerName) return true; + + // Prefix match (e.g., "Worker*" matches "WorkerA") + if (lowerPattern.endsWith('*')) { + const prefix = lowerPattern.slice(0, -1); + if (lowerName.startsWith(prefix)) return true; + } + + // Suffix match (e.g., "*Lead" matches "TeamLead") + if (lowerPattern.startsWith('*')) { + const suffix = lowerPattern.slice(1); + if (lowerName.endsWith(suffix)) return true; + } + } + return false; + } + + /** + * Convert AgentConfig to AgentPolicy + */ + private configToPolicy(config: AgentConfig): AgentPolicy { + return { + name: config.name, + allowedTools: config.allowedTools, + // Other fields come from defaults since repo config doesn't specify them + canSpawn: undefined, + canMessage: undefined, + maxSpawns: 10, + rateLimit: 60, + canBeSpawned: true, + }; + } + + /** + * Record an audit entry + */ + private audit( + action: AuditEntry['action'], + actor: string, + target: string | undefined, + decision: PolicyDecision, + context?: Record + ): void { + const entry: AuditEntry = { + timestamp: Date.now(), + action, + actor, + target, + decision, + context, + }; + + this.auditLog.push(entry); + + // Trim log if too large + if (this.auditLog.length > AgentPolicyService.MAX_AUDIT_ENTRIES) { + this.auditLog = this.auditLog.slice(-AgentPolicyService.MAX_AUDIT_ENTRIES / 2); + } + + // Log denied actions + if (!decision.allowed) { + console.warn(`[policy] DENIED: ${action} by ${actor}${target ? ` -> ${target}` : ''}: ${decision.reason}`); + } + } + + /** + * Get audit log entries + */ + getAuditLog(options?: { + limit?: number; + action?: AuditEntry['action']; + actor?: string; + deniedOnly?: boolean; + }): AuditEntry[] { + let entries = [...this.auditLog]; + + if (options?.action) { + entries = entries.filter(e => e.action === options.action); + } + if (options?.actor) { + entries = entries.filter(e => e.actor === options.actor); + } + if (options?.deniedOnly) { + entries = entries.filter(e => !e.decision.allowed); + } + if (options?.limit) { + entries = entries.slice(-options.limit); + } + + return entries; + } + + /** + * Clear audit log + */ + clearAuditLog(): void { + this.auditLog = []; + } + + /** + * Invalidate cached workspace policy + */ + invalidateCache(): void { + this.cachedWorkspacePolicy = undefined; + this.policyCacheExpiry = 0; + } + + /** + * Get a human-readable policy summary for an agent + * This can be injected into agent prompts to inform them of their permissions + */ + async getPolicySummary(agentName: string): Promise { + const { matchedPolicy, policySource } = await this.getAgentPolicy(agentName); + + const lines: string[] = [ + `# Agent Policy for ${agentName}`, + `Source: ${policySource}`, + '', + ]; + + // Tools + if (matchedPolicy.allowedTools) { + if (matchedPolicy.allowedTools.length === 1 && matchedPolicy.allowedTools[0] === 'none') { + lines.push('**Tools**: No tools allowed'); + } else { + lines.push(`**Allowed Tools**: ${matchedPolicy.allowedTools.join(', ')}`); + } + } else { + lines.push('**Tools**: All tools allowed'); + } + + // Spawning + if (matchedPolicy.canSpawn) { + if (matchedPolicy.canSpawn.length === 0) { + lines.push('**Spawning**: Cannot spawn other agents'); + } else { + lines.push(`**Can Spawn**: ${matchedPolicy.canSpawn.join(', ')}`); + } + } else { + lines.push('**Spawning**: Can spawn any agent'); + } + + // Messaging + if (matchedPolicy.canMessage) { + if (matchedPolicy.canMessage.length === 0) { + lines.push('**Messaging**: Cannot message other agents'); + } else { + lines.push(`**Can Message**: ${matchedPolicy.canMessage.join(', ')}`); + } + } else { + lines.push('**Messaging**: Can message any agent'); + } + + // Limits + if (matchedPolicy.maxSpawns !== undefined) { + lines.push(`**Max Spawns**: ${matchedPolicy.maxSpawns}`); + } + if (matchedPolicy.rateLimit !== undefined) { + lines.push(`**Rate Limit**: ${matchedPolicy.rateLimit} messages/min`); + } + + return lines.join('\n'); + } + + /** + * Get a concise policy instruction for injection into agent prompts + */ + async getPolicyInstruction(agentName: string): Promise { + const { matchedPolicy, policySource } = await this.getAgentPolicy(agentName); + + // Only generate instructions if there are restrictions + const hasRestrictions = + matchedPolicy.allowedTools !== undefined || + matchedPolicy.canSpawn !== undefined || + matchedPolicy.canMessage !== undefined; + + if (!hasRestrictions) { + return null; // No restrictions, no need to inform agent + } + + const restrictions: string[] = []; + + if (matchedPolicy.allowedTools) { + if (matchedPolicy.allowedTools.length === 1 && matchedPolicy.allowedTools[0] === 'none') { + restrictions.push('You are not allowed to use any tools.'); + } else { + restrictions.push(`You may only use these tools: ${matchedPolicy.allowedTools.join(', ')}.`); + } + } + + if (matchedPolicy.canSpawn) { + if (matchedPolicy.canSpawn.length === 0) { + restrictions.push('You are not allowed to spawn other agents.'); + } else { + restrictions.push(`You may only spawn these agents: ${matchedPolicy.canSpawn.join(', ')}.`); + } + } + + if (matchedPolicy.canMessage) { + if (matchedPolicy.canMessage.length === 0) { + restrictions.push('You are not allowed to message other agents.'); + } else { + restrictions.push(`You may only message these agents: ${matchedPolicy.canMessage.join(', ')}.`); + } + } + + if (restrictions.length === 0) { + return null; + } + + return `[Policy Restrictions]\n${restrictions.join('\n')}`; + } +} + +/** + * Create a policy service for a project + */ +export function createPolicyService(options: { + projectRoot: string; + workspaceId?: string; + cloudFetcher?: CloudPolicyFetcher; + strictMode?: boolean; +}): AgentPolicyService { + return new AgentPolicyService(options); +} From 6bcbddc99b7f297942bda637e83fa310b2507694 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:33:18 +0000 Subject: [PATCH 006/103] Enable automatic policy enforcement for cloud workspaces - Add AGENT_POLICY_ENFORCEMENT=1 to entrypoint.sh for cloud mode - Create cloud policy fetcher to call /api/policy/:workspaceId/internal - Policy is automatically fetched and enforced without user action --- deploy/workspace/entrypoint.sh | 11 +++++ src/policy/cloud-policy-fetcher.ts | 78 ++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 src/policy/cloud-policy-fetcher.ts diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index 733cd8cb..a5ae0ad8 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -111,6 +111,17 @@ if [[ -n "${REPO_LIST}" ]]; then done fi +# ============================================================================ +# Configure agent policy enforcement for cloud workspaces +# Policy is fetched from cloud API and enforced at runtime +# ============================================================================ + +if [[ -n "${CLOUD_API_URL:-}" && -n "${WORKSPACE_ID:-}" ]]; then + log "Enabling agent policy enforcement" + export AGENT_POLICY_ENFORCEMENT=1 + # Policy is fetched from ${CLOUD_API_URL}/api/policy/${WORKSPACE_ID}/internal +fi + # ============================================================================ # Configure AI provider credentials # Create credential files that CLIs expect from ENV vars passed by provisioner diff --git a/src/policy/cloud-policy-fetcher.ts b/src/policy/cloud-policy-fetcher.ts new file mode 100644 index 00000000..9b8be6e7 --- /dev/null +++ b/src/policy/cloud-policy-fetcher.ts @@ -0,0 +1,78 @@ +/** + * Cloud Policy Fetcher + * + * Fetches workspace agent policies from the cloud API. + * Used by workspace containers to get their policy configuration. + */ + +import type { CloudPolicyFetcher, WorkspacePolicy, AgentPolicy } from './agent-policy.js'; + +/** + * Create a cloud policy fetcher for workspace containers + */ +export function createCloudPolicyFetcher(): CloudPolicyFetcher | null { + const cloudApiUrl = process.env.CLOUD_API_URL; + const workspaceId = process.env.WORKSPACE_ID; + const workspaceToken = process.env.WORKSPACE_TOKEN; + + if (!cloudApiUrl || !workspaceId) { + return null; + } + + return { + async getWorkspacePolicy(requestedWorkspaceId: string): Promise { + // Only allow fetching policy for this workspace + if (requestedWorkspaceId !== workspaceId) { + console.warn(`[policy-fetcher] Attempted to fetch policy for different workspace: ${requestedWorkspaceId}`); + return null; + } + + try { + const url = `${cloudApiUrl}/api/policy/${workspaceId}/internal`; + const headers: Record = { + 'Content-Type': 'application/json', + }; + + // Add auth header if we have a workspace token + if (workspaceToken) { + headers['Authorization'] = `Bearer ${workspaceToken}`; + } + + const response = await fetch(url, { headers }); + + if (!response.ok) { + console.error(`[policy-fetcher] Failed to fetch policy: ${response.status} ${response.statusText}`); + return null; + } + + const data = await response.json() as { + defaultPolicy?: AgentPolicy; + agents?: AgentPolicy[]; + settings?: WorkspacePolicy['settings']; + }; + + // Convert API response to WorkspacePolicy + const policy: WorkspacePolicy = { + defaultPolicy: data.defaultPolicy ?? { + name: '*', + maxSpawns: 10, + rateLimit: 60, + canBeSpawned: true, + }, + agents: data.agents ?? [], + settings: data.settings ?? { + requireExplicitAgents: false, + auditEnabled: true, + maxTotalAgents: 50, + }, + }; + + console.log(`[policy-fetcher] Fetched policy for workspace ${workspaceId}: ${policy.agents.length} agent rules`); + return policy; + } catch (error) { + console.error('[policy-fetcher] Error fetching policy:', error); + return null; + } + }, + }; +} From 2f56f4e0ca7c38a4eba7a2df180863ebb23f668b Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:46:30 +0000 Subject: [PATCH 007/103] Inject relay communication rules into spawned agents Spawned agents in arbitrary repos won't have the agent-relay-snippet installed in their AGENTS.md or CLAUDE.md files. This change: - Loads the relay snippet (docs/agent-relay-snippet.md) at spawn time - Prepends communication rules to all spawned agent task prompts - Includes fallback minimal snippet if file not found Also fixes build errors in policy.ts by using correct db query methods. --- src/bridge/spawner.ts | 63 ++++++++++++++++++++++++++++++++++++++++- src/cloud/api/policy.ts | 13 +++++---- 2 files changed, 70 insertions(+), 6 deletions(-) diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index 6bbd4703..4f386d43 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -6,6 +6,7 @@ import fs from 'node:fs'; import path from 'node:path'; +import { fileURLToPath } from 'node:url'; import { sleep } from './utils.js'; import { getProjectPaths } from '../utils/project-namespace.js'; import { resolveCommand } from '../utils/command-resolver.js'; @@ -65,6 +66,58 @@ export type OnAgentDeathCallback = (info: { resumeInstructions?: string; }) => void; +/** + * Load the relay communication snippet. + * This defines how agents communicate via relay patterns. + * Cached after first load. + */ +let relaySnippetCache: string | null = null; + +function loadRelaySnippet(): string { + if (relaySnippetCache !== null) { + return relaySnippetCache; + } + + try { + // Resolve path relative to this file's location + const __filename = fileURLToPath(import.meta.url); + const __dirname = path.dirname(__filename); + // From src/bridge/ go up to package root, then into docs/ + const snippetPath = path.resolve(__dirname, '../../docs/agent-relay-snippet.md'); + + if (fs.existsSync(snippetPath)) { + relaySnippetCache = fs.readFileSync(snippetPath, 'utf-8'); + console.log('[spawner] Loaded relay communication snippet'); + } else { + // Fallback: minimal relay instructions + relaySnippetCache = `# Agent Relay Communication + +You are connected to an agent relay system. Use these patterns to communicate: + +## Sending Messages +\`\`\` +->relay:AgentName <<< +Your message here.>>> +\`\`\` + +## Communication Protocol +- **ACK immediately** when receiving a task +- **Report completion** with DONE: summary + +## Common Patterns +- \`->relay:Lead <<>>\` +- \`->relay:Lead <<>>\` +`; + console.log('[spawner] Using fallback relay snippet (docs/agent-relay-snippet.md not found)'); + } + } catch (err: any) { + console.error('[spawner] Failed to load relay snippet:', err.message); + relaySnippetCache = ''; + } + + return relaySnippetCache; +} + export class AgentSpawner { private activeWorkers: Map = new Map(); private agentsPath: string; @@ -361,9 +414,17 @@ export class AgentSpawner { }; } - // Build the full message: policy instructions (if any) + task + // Build the full message: relay snippet + policy instructions (if any) + task let fullMessage = task || ''; + // Always prepend relay communication rules so agents know how to communicate + // This is essential because target repos may not have the snippet installed + const relaySnippet = loadRelaySnippet(); + if (relaySnippet) { + fullMessage = `${relaySnippet}\n\n---\n\n${fullMessage}`; + if (debug) console.log(`[spawner:debug] Prepended relay communication rules for ${name}`); + } + // Prepend policy instructions if enforcement is enabled if (this.policyEnforcementEnabled && this.policyService) { const policyInstruction = await this.policyService.getPolicyInstruction(name); diff --git a/src/cloud/api/policy.ts b/src/cloud/api/policy.ts index 060f6fa2..934d786c 100644 --- a/src/cloud/api/policy.ts +++ b/src/cloud/api/policy.ts @@ -31,7 +31,8 @@ policyRouter.get('/:workspaceId', async (req: Request, res: Response) => { // Check user has access to this workspace if (workspace.userId !== userId) { - const member = await db.workspaceMembers.findByWorkspaceAndUser(workspaceId, userId); + const members = await db.workspaceMembers.findByWorkspaceId(workspaceId); + const member = members.find(m => m.userId === userId); if (!member) { return res.status(403).json({ error: 'Access denied' }); } @@ -76,7 +77,8 @@ policyRouter.put('/:workspaceId', async (req: Request, res: Response) => { // Only owner can update policy if (workspace.userId !== userId) { - const member = await db.workspaceMembers.findByWorkspaceAndUser(workspaceId, userId); + const members = await db.workspaceMembers.findByWorkspaceId(workspaceId); + const member = members.find(m => m.userId === userId); if (!member || !['owner', 'admin'].includes(member.role)) { return res.status(403).json({ error: 'Only owners and admins can update policy' }); } @@ -94,7 +96,7 @@ policyRouter.put('/:workspaceId', async (req: Request, res: Response) => { agentPolicy: policy, }; - await db.workspaces.update(workspaceId, { config: newConfig }); + await db.workspaces.updateConfig(workspaceId, newConfig); res.json({ success: true, @@ -127,7 +129,8 @@ policyRouter.delete('/:workspaceId', async (req: Request, res: Response) => { // Only owner can reset policy if (workspace.userId !== userId) { - const member = await db.workspaceMembers.findByWorkspaceAndUser(workspaceId, userId); + const members = await db.workspaceMembers.findByWorkspaceId(workspaceId); + const member = members.find(m => m.userId === userId); if (!member || member.role !== 'owner') { return res.status(403).json({ error: 'Only owners can reset policy' }); } @@ -135,7 +138,7 @@ policyRouter.delete('/:workspaceId', async (req: Request, res: Response) => { // Remove policy from config const { agentPolicy, ...restConfig } = workspace.config ?? {}; - await db.workspaces.update(workspaceId, { config: restConfig }); + await db.workspaces.updateConfig(workspaceId, restConfig as any); res.json({ success: true, From 56f94df21cc6fd657758bbd4af7698630344455c Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:53:07 +0000 Subject: [PATCH 008/103] Include protocol rules (trajectories, session persistence) in spawned agents Spawned agents now receive both: - agent-relay-snippet: Basic communication patterns (->relay: syntax) - agent-relay-protocol: Advanced features including: - Session persistence (SUMMARY/SESSION_END blocks) - Session continuity (->continuity: patterns) - Work trajectories (trail commands - MANDATORY) - Cross-project messaging - Dashboard integration This ensures all spawned agents can properly track their work via trajectories and report session status for dashboard monitoring. --- src/bridge/spawner.ts | 51 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 42 insertions(+), 9 deletions(-) diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index 4f386d43..c997589b 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -72,10 +72,11 @@ export type OnAgentDeathCallback = (info: { * Cached after first load. */ let relaySnippetCache: string | null = null; +let relayProtocolCache: string | null = null; -function loadRelaySnippet(): string { - if (relaySnippetCache !== null) { - return relaySnippetCache; +function loadRelaySnippets(): string { + if (relaySnippetCache !== null && relayProtocolCache !== null) { + return `${relaySnippetCache}\n\n${relayProtocolCache}`; } try { @@ -84,7 +85,9 @@ function loadRelaySnippet(): string { const __dirname = path.dirname(__filename); // From src/bridge/ go up to package root, then into docs/ const snippetPath = path.resolve(__dirname, '../../docs/agent-relay-snippet.md'); + const protocolPath = path.resolve(__dirname, '../../docs/agent-relay-protocol.md'); + // Load base communication snippet if (fs.existsSync(snippetPath)) { relaySnippetCache = fs.readFileSync(snippetPath, 'utf-8'); console.log('[spawner] Loaded relay communication snippet'); @@ -110,12 +113,41 @@ Your message here.>>> `; console.log('[spawner] Using fallback relay snippet (docs/agent-relay-snippet.md not found)'); } + + // Load protocol snippet (session persistence, trajectories, etc.) + if (fs.existsSync(protocolPath)) { + relayProtocolCache = fs.readFileSync(protocolPath, 'utf-8'); + console.log('[spawner] Loaded relay protocol snippet'); + } else { + // Fallback: minimal protocol instructions + relayProtocolCache = `# Agent Relay Protocol + +## Work Trajectories (Required) + +Record your work using trail commands: + +\`\`\`bash +trail start "Task description" +trail decision "Choice made" --reasoning "Why" +trail complete --summary "What was done" --confidence 0.85 +\`\`\` + +## Session End + +When done, output: +\`\`\` +[[SESSION_END]]Work complete.[[/SESSION_END]] +\`\`\` +`; + console.log('[spawner] Using fallback protocol snippet (docs/agent-relay-protocol.md not found)'); + } } catch (err: any) { - console.error('[spawner] Failed to load relay snippet:', err.message); - relaySnippetCache = ''; + console.error('[spawner] Failed to load relay snippets:', err.message); + relaySnippetCache = relaySnippetCache || ''; + relayProtocolCache = relayProtocolCache || ''; } - return relaySnippetCache; + return `${relaySnippetCache}\n\n${relayProtocolCache}`; } export class AgentSpawner { @@ -419,9 +451,10 @@ export class AgentSpawner { // Always prepend relay communication rules so agents know how to communicate // This is essential because target repos may not have the snippet installed - const relaySnippet = loadRelaySnippet(); - if (relaySnippet) { - fullMessage = `${relaySnippet}\n\n---\n\n${fullMessage}`; + // Includes both base communication patterns AND protocol rules (trajectories, session persistence) + const relayRules = loadRelaySnippets(); + if (relayRules) { + fullMessage = `${relayRules}\n\n---\n\n${fullMessage}`; if (debug) console.log(`[spawner:debug] Prepended relay communication rules for ${name}`); } From c0a5702fe1301f3de3b89d7f3f40c8ccd2dc8189 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:59:06 +0000 Subject: [PATCH 009/103] Make trajectories opt-in with user-level fallback storage DECISIONS RECORDED: 1. Default trajectories to opt-out (user-level storage) - Most repos won't want trajectory files in source control - Users must explicitly opt-in via .relay/config.json 2. Store user-level trajectories in ~/.config/agent-relay/trajectories// - XDG-compliant path, project-isolated, survives repo deletion 3. Settings configurable after GitHub app setup - Happens during workspace onboarding flow Changes: - Add src/trajectory/config.ts for trajectory storage configuration - Update integration.ts to use config-based paths - Merge trajectory indexes from both repo and user-level locations - Pass correct env vars to trail CLI for storage location --- src/trajectory/config.ts | 195 ++++++++++++++++++++++++++++++++++ src/trajectory/integration.ts | 76 +++++++++++-- 2 files changed, 262 insertions(+), 9 deletions(-) create mode 100644 src/trajectory/config.ts diff --git a/src/trajectory/config.ts b/src/trajectory/config.ts new file mode 100644 index 00000000..d5317cb0 --- /dev/null +++ b/src/trajectory/config.ts @@ -0,0 +1,195 @@ +/** + * Trajectory Configuration + * + * Handles repo-level opt-in/opt-out for trajectory storage. + * When trajectories are opt-out (not in source control), they're stored + * in the user's home directory instead of the repo. + * + * DECISIONS: + * 1. Default behavior: trajectories are OPT-OUT (stored outside repo) + * - Reasoning: Most repos won't want trajectory files in source control + * - Users must explicitly opt-in to store in repo + * + * 2. Setting location: .relay/config.json in repo root + * - Reasoning: Keeps relay config separate from .claude/ which may have other uses + * - Alternative considered: .claude/settings.json - rejected to avoid conflicts + * + * 3. User-level storage: ~/.config/agent-relay/trajectories// + * - Reasoning: XDG-compliant, project-isolated, survives repo deletion + */ + +import { existsSync, readFileSync, mkdirSync } from 'node:fs'; +import { join } from 'node:path'; +import { homedir } from 'node:os'; +import { createHash } from 'node:crypto'; +import { getProjectPaths } from '../utils/project-namespace.js'; + +/** + * Relay config structure + */ +export interface RelayConfig { + /** Trajectory settings */ + trajectories?: { + /** + * Whether to store trajectories in the repo (.trajectories/) + * Default: false (stored in ~/.config/agent-relay/trajectories/) + */ + storeInRepo?: boolean; + }; +} + +/** + * Cache for config to avoid repeated file reads + */ +let configCache: { path: string; config: RelayConfig; mtime: number } | null = null; + +/** + * Get the relay config file path + */ +export function getRelayConfigPath(projectRoot?: string): string { + const root = projectRoot ?? getProjectPaths().projectRoot; + return join(root, '.relay', 'config.json'); +} + +/** + * Read the relay config from the repo + */ +export function readRelayConfig(projectRoot?: string): RelayConfig { + const configPath = getRelayConfigPath(projectRoot); + + // Check cache + if (configCache && configCache.path === configPath) { + try { + const stat = require('fs').statSync(configPath); + if (stat.mtimeMs === configCache.mtime) { + return configCache.config; + } + } catch { + // File may not exist or be readable + } + } + + try { + if (!existsSync(configPath)) { + return {}; + } + + const content = readFileSync(configPath, 'utf-8'); + const config = JSON.parse(content) as RelayConfig; + + // Update cache + try { + const stat = require('fs').statSync(configPath); + configCache = { path: configPath, config, mtime: stat.mtimeMs }; + } catch { + // Ignore cache update failures + } + + return config; + } catch (err) { + console.warn('[trajectory-config] Failed to read config:', err); + return {}; + } +} + +/** + * Check if trajectories should be stored in the repo + */ +export function shouldStoreInRepo(projectRoot?: string): boolean { + const config = readRelayConfig(projectRoot); + // Default to false - trajectories are stored outside repo by default + return config.trajectories?.storeInRepo === true; +} + +/** + * Get a hash of the project path for user-level storage isolation + */ +export function getProjectHash(projectRoot?: string): string { + const root = projectRoot ?? getProjectPaths().projectRoot; + return createHash('sha256').update(root).digest('hex').slice(0, 12); +} + +/** + * Get the user-level trajectories directory + */ +export function getUserTrajectoriesDir(projectRoot?: string): string { + const projectHash = getProjectHash(projectRoot); + const configDir = process.env.XDG_CONFIG_HOME || join(homedir(), '.config'); + return join(configDir, 'agent-relay', 'trajectories', projectHash); +} + +/** + * Get the repo-level trajectories directory + */ +export function getRepoTrajectoriesDir(projectRoot?: string): string { + const root = projectRoot ?? getProjectPaths().projectRoot; + return join(root, '.trajectories'); +} + +/** + * Get the primary trajectories directory based on config + * This is where new trajectories will be written + */ +export function getPrimaryTrajectoriesDir(projectRoot?: string): string { + if (shouldStoreInRepo(projectRoot)) { + return getRepoTrajectoriesDir(projectRoot); + } + return getUserTrajectoriesDir(projectRoot); +} + +/** + * Get all trajectories directories (for reading) + * Returns both repo and user-level if they exist + */ +export function getAllTrajectoriesDirs(projectRoot?: string): string[] { + const dirs: string[] = []; + + const repoDir = getRepoTrajectoriesDir(projectRoot); + if (existsSync(repoDir)) { + dirs.push(repoDir); + } + + const userDir = getUserTrajectoriesDir(projectRoot); + if (existsSync(userDir)) { + dirs.push(userDir); + } + + return dirs; +} + +/** + * Ensure the primary trajectories directory exists + */ +export function ensureTrajectoriesDir(projectRoot?: string): string { + const dir = getPrimaryTrajectoriesDir(projectRoot); + mkdirSync(dir, { recursive: true }); + return dir; +} + +/** + * Get trajectory environment variables for trail CLI + * Sets TRAJECTORIES_DATA_DIR to the appropriate location + */ +export function getTrajectoryEnvVars(projectRoot?: string): Record { + const dataDir = getPrimaryTrajectoriesDir(projectRoot); + return { + TRAJECTORIES_DATA_DIR: dataDir, + }; +} + +/** + * Check if project has opted into repo-level trajectory storage + */ +export function isTrajectoryOptedIn(projectRoot?: string): boolean { + return shouldStoreInRepo(projectRoot); +} + +/** + * Get a human-readable description of where trajectories are stored + */ +export function getTrajectoriesStorageDescription(projectRoot?: string): string { + if (shouldStoreInRepo(projectRoot)) { + return `repo (.trajectories/)`; + } + return `user (~/.config/agent-relay/trajectories/)`; +} diff --git a/src/trajectory/integration.ts b/src/trajectory/integration.ts index 12349a0a..810d6cef 100644 --- a/src/trajectory/integration.ts +++ b/src/trajectory/integration.ts @@ -18,6 +18,11 @@ import { spawn, execSync } from 'node:child_process'; import { readFileSync, existsSync } from 'node:fs'; import { join } from 'node:path'; import { getProjectPaths } from '../utils/project-namespace.js'; +import { + getPrimaryTrajectoriesDir, + getAllTrajectoriesDirs, + getTrajectoryEnvVars, +} from './config.js'; /** * Trajectory index file structure @@ -72,19 +77,19 @@ interface TrajectoryFile { } /** - * Get the trajectories directory path + * Get the primary trajectories directory path (for writing) + * Uses config to determine if repo or user-level storage */ function getTrajectoriesDir(): string { - const { projectRoot } = getProjectPaths(); - return join(projectRoot, '.trajectories'); + return getPrimaryTrajectoriesDir(); } /** - * Read the trajectory index file directly from filesystem + * Read a single trajectory index file from a directory */ -function readTrajectoryIndex(): TrajectoryIndex | null { +function readSingleTrajectoryIndex(trajectoriesDir: string): TrajectoryIndex | null { try { - const indexPath = join(getTrajectoriesDir(), 'index.json'); + const indexPath = join(trajectoriesDir, 'index.json'); if (!existsSync(indexPath)) { return null; } @@ -95,6 +100,51 @@ function readTrajectoryIndex(): TrajectoryIndex | null { } } +/** + * Read and merge trajectory indexes from all locations (repo + user-level) + * This allows reading trajectories from both places + */ +function readTrajectoryIndex(): TrajectoryIndex | null { + const dirs = getAllTrajectoriesDirs(); + + if (dirs.length === 0) { + return null; + } + + // Read and merge all indexes + let mergedIndex: TrajectoryIndex | null = null; + + for (const dir of dirs) { + const index = readSingleTrajectoryIndex(dir); + if (!index) continue; + + if (!mergedIndex) { + mergedIndex = index; + } else { + // Merge trajectories, preferring more recent entries + for (const [id, entry] of Object.entries(index.trajectories)) { + const existing = mergedIndex.trajectories[id]; + if (!existing) { + mergedIndex.trajectories[id] = entry; + } else { + // Keep the more recently updated one + const existingTime = new Date(existing.completedAt || existing.startedAt).getTime(); + const newTime = new Date(entry.completedAt || entry.startedAt).getTime(); + if (newTime > existingTime) { + mergedIndex.trajectories[id] = entry; + } + } + } + // Update lastUpdated to most recent + if (new Date(index.lastUpdated) > new Date(mergedIndex.lastUpdated)) { + mergedIndex.lastUpdated = index.lastUpdated; + } + } + } + + return mergedIndex; +} + /** * Read a specific trajectory file directly from filesystem */ @@ -149,12 +199,16 @@ export interface DecisionOptions { /** * Run a trail CLI command + * Uses config-based environment to control trajectory storage location */ async function runTrail(args: string[]): Promise<{ success: boolean; output: string; error?: string }> { return new Promise((resolve) => { + // Get trajectory env vars to set correct storage location + const trajectoryEnv = getTrajectoryEnvVars(); + const proc = spawn('trail', args, { cwd: getProjectPaths().projectRoot, - env: process.env, + env: { ...process.env, ...trajectoryEnv }, stdio: ['pipe', 'pipe', 'pipe'], }); @@ -1025,11 +1079,15 @@ export function getCompactTrailInstructions(): string { /** * Get environment variables for trail CLI + * If dataDir is not provided, uses config-based storage location */ -export function getTrailEnvVars(projectId: string, agentName: string, dataDir: string): Record { +export function getTrailEnvVars(projectId: string, agentName: string, dataDir?: string): Record { + // Use config-based path if dataDir not explicitly provided + const effectiveDataDir = dataDir ?? getPrimaryTrajectoriesDir(); + return { TRAJECTORIES_PROJECT: projectId, - TRAJECTORIES_DATA_DIR: dataDir, + TRAJECTORIES_DATA_DIR: effectiveDataDir, TRAJECTORIES_AGENT: agentName, TRAIL_AUTO_PHASE: '1', // Enable auto phase detection }; From d88c74e9b654e30f5f112dfc700dc08b9fae5664 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 06:59:46 +0000 Subject: [PATCH 010/103] Update trajectory with decisions for opt-in storage feature --- .trajectories/active/traj_7ludwvz45veh.json | 36 +++++++++++++++++++++ .trajectories/index.json | 4 +-- 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/.trajectories/active/traj_7ludwvz45veh.json b/.trajectories/active/traj_7ludwvz45veh.json index bd93a8c0..79bd07ee 100644 --- a/.trajectories/active/traj_7ludwvz45veh.json +++ b/.trajectories/active/traj_7ludwvz45veh.json @@ -47,6 +47,42 @@ "reasoning": "Users should be able to connect additional AI providers after initial setup. Settings page in workspace dashboard will allow connecting Claude, Codex, OpenCode, Droid at any time, not just during initial workspace setup." }, "significance": "high" + }, + { + "ts": 1767509811642, + "type": "decision", + "content": "Default trajectories to opt-out (user-level storage): Default trajectories to opt-out (user-level storage)", + "raw": { + "question": "Default trajectories to opt-out (user-level storage)", + "chosen": "Default trajectories to opt-out (user-level storage)", + "alternatives": [], + "reasoning": "Most repos won't want trajectory files in source control. Users must explicitly opt-in to store in repo via .relay/config.json" + }, + "significance": "high" + }, + { + "ts": 1767509822845, + "type": "decision", + "content": "Store user-level trajectories in ~/.config/agent-relay/trajectories//: Store user-level trajectories in ~/.config/agent-relay/trajectories//", + "raw": { + "question": "Store user-level trajectories in ~/.config/agent-relay/trajectories//", + "chosen": "Store user-level trajectories in ~/.config/agent-relay/trajectories//", + "alternatives": [], + "reasoning": "XDG-compliant path, project-isolated via hash to prevent collisions, survives repo deletion" + }, + "significance": "high" + }, + { + "ts": 1767509882710, + "type": "decision", + "content": "Trajectory settings configurable after GitHub app setup: Trajectory settings configurable after GitHub app setup", + "raw": { + "question": "Trajectory settings configurable after GitHub app setup", + "chosen": "Trajectory settings configurable after GitHub app setup", + "alternatives": [], + "reasoning": "Users should configure .relay/config.json after connecting repo to cloud workspace. This happens in the workspace onboarding flow." + }, + "significance": "high" } ] } diff --git a/.trajectories/index.json b/.trajectories/index.json index ff2d8b06..f66908c4 100644 --- a/.trajectories/index.json +++ b/.trajectories/index.json @@ -1,6 +1,6 @@ { "version": 1, - "lastUpdated": "2026-01-04T00:06:36.637Z", + "lastUpdated": "2026-01-04T06:58:02.712Z", "trajectories": { "traj_ozd98si6a7ns": { "title": "Fix thinking indicator showing on all messages", @@ -279,7 +279,7 @@ "title": "Provider CLI auth flow for cloud workspaces", "status": "active", "startedAt": "2026-01-04T00:05:43.304Z", - "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/active/traj_7ludwvz45veh.json" + "path": "/home/user/relay/.trajectories/active/traj_7ludwvz45veh.json" } } } \ No newline at end of file From daa35d79515e802f8dc72bb34bf22615713316a2 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:02:47 +0000 Subject: [PATCH 011/103] Add dashboard settings API with trajectory documentation New endpoints: - GET /api/settings - Returns all settings with documentation - GET /api/settings/trajectory - Get trajectory storage settings - PUT /api/settings/trajectory - Update trajectory storage settings Includes comprehensive documentation for UI: - What trajectories are (PDERO paradigm) - Benefits of trajectory tracking - Why users might opt-in to store in repo - Link to pdero.com for more info Users can configure trajectory preferences after GitHub app setup during workspace onboarding. --- src/dashboard-server/server.ts | 134 +++++++++++++++++++++++++++++++++ 1 file changed, 134 insertions(+) diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index 1c53a6db..36921deb 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -2986,6 +2986,140 @@ Start by greeting the project leads and asking for status updates.`; } }); + // ===== Settings API ===== + + /** + * GET /api/settings - Get all workspace settings with documentation + */ + app.get('/api/settings', async (_req, res) => { + try { + const { readRelayConfig, shouldStoreInRepo, getTrajectoriesStorageDescription } = await import('../trajectory/config.js'); + const config = readRelayConfig(); + + res.json({ + success: true, + settings: { + trajectories: { + storeInRepo: shouldStoreInRepo(), + storageLocation: getTrajectoriesStorageDescription(), + description: 'Trajectories record the journey of agent work using the PDERO paradigm (Plan, Design, Execute, Review, Observe). They capture decisions, phase transitions, and retrospectives.', + benefits: [ + 'Track why decisions were made, not just what was built', + 'Enable session recovery when agents crash or context is lost', + 'Provide learning data for future agents working on similar tasks', + 'Create an audit trail of agent work for review', + ], + learnMore: 'https://pdero.com', + optInReason: 'Enable "Store in repo" to version-control your trajectories alongside your code. This is useful for teams who want to review agent decision-making processes.', + }, + }, + config, + }); + } catch (err: any) { + console.error('[api] Settings error:', err); + res.status(500).json({ + success: false, + error: err.message, + }); + } + }); + + /** + * GET /api/settings/trajectory - Get trajectory storage settings + */ + app.get('/api/settings/trajectory', async (_req, res) => { + try { + const { readRelayConfig, shouldStoreInRepo, getTrajectoriesStorageDescription } = await import('../trajectory/config.js'); + const config = readRelayConfig(); + + res.json({ + success: true, + settings: { + storeInRepo: shouldStoreInRepo(), + storageLocation: getTrajectoriesStorageDescription(), + }, + config: config.trajectories || {}, + // Documentation for the UI + documentation: { + title: 'Trajectory Storage', + description: 'Trajectories record the journey of agent work using the PDERO paradigm (Plan, Design, Execute, Review, Observe).', + whatIsIt: 'A trajectory captures not just what an agent built, but WHY it made specific decisions. This includes phase transitions, key decisions with reasoning, and retrospective summaries.', + benefits: [ + 'Understand agent decision-making for code review', + 'Enable session recovery if agents crash', + 'Train future agents on your codebase patterns', + 'Create audit trails of AI work', + ], + storeInRepoExplanation: 'When enabled, trajectories are stored in .trajectories/ in your repo and can be committed to source control. When disabled (default), they are stored in your user directory (~/.config/agent-relay/trajectories/).', + learnMore: 'https://pdero.com', + }, + }); + } catch (err: any) { + console.error('[api] Settings trajectory error:', err); + res.status(500).json({ + success: false, + error: err.message, + }); + } + }); + + /** + * PUT /api/settings/trajectory - Update trajectory storage settings + * + * Body: { storeInRepo: boolean } + * + * This writes to .relay/config.json in the project root + */ + app.put('/api/settings/trajectory', async (req, res) => { + try { + const { storeInRepo } = req.body; + + if (typeof storeInRepo !== 'boolean') { + return res.status(400).json({ + success: false, + error: 'storeInRepo must be a boolean', + }); + } + + const { getRelayConfigPath, readRelayConfig } = await import('../trajectory/config.js'); + const { getProjectPaths } = await import('../utils/project-namespace.js'); + const { projectRoot } = getProjectPaths(); + + // Read existing config + const config = readRelayConfig(); + + // Update trajectory settings + config.trajectories = { + ...config.trajectories, + storeInRepo, + }; + + // Ensure .relay directory exists + const configPath = getRelayConfigPath(); + const configDir = path.dirname(configPath); + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }); + } + + // Write updated config + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + res.json({ + success: true, + settings: { + storeInRepo, + storageLocation: storeInRepo ? 'repo (.trajectories/)' : 'user (~/.config/agent-relay/trajectories/)', + }, + }); + } catch (err: any) { + console.error('[api] Settings trajectory update error:', err); + res.status(500).json({ + success: false, + error: err.message, + }); + } + }); + // ===== Decision Queue API ===== interface Decision { From 0fbb377ecf0e77a64b1b4038487fee6e0dc3bc38 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:03:05 +0000 Subject: [PATCH 012/103] Update trajectory with settings API decisions --- .trajectories/active/traj_7ludwvz45veh.json | 24 +++++++++++++++++++++ .trajectories/index.json | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/.trajectories/active/traj_7ludwvz45veh.json b/.trajectories/active/traj_7ludwvz45veh.json index 79bd07ee..033f563b 100644 --- a/.trajectories/active/traj_7ludwvz45veh.json +++ b/.trajectories/active/traj_7ludwvz45veh.json @@ -83,6 +83,30 @@ "reasoning": "Users should configure .relay/config.json after connecting repo to cloud workspace. This happens in the workspace onboarding flow." }, "significance": "high" + }, + { + "ts": 1767510018871, + "type": "decision", + "content": "Add dashboard API for trajectory preferences: Add dashboard API for trajectory preferences", + "raw": { + "question": "Add dashboard API for trajectory preferences", + "chosen": "Add dashboard API for trajectory preferences", + "alternatives": [], + "reasoning": "Users configure via dashboard after GitHub app setup, during workspace onboarding" + }, + "significance": "high" + }, + { + "ts": 1767510106990, + "type": "decision", + "content": "Add comprehensive settings with trajectory explanations: Add comprehensive settings with trajectory explanations", + "raw": { + "question": "Add comprehensive settings with trajectory explanations", + "chosen": "Add comprehensive settings with trajectory explanations", + "alternatives": [], + "reasoning": "Users need to understand what trajectories are (PDERO paradigm), why they'd opt-in, and link to pdero.com for more info" + }, + "significance": "high" } ] } diff --git a/.trajectories/index.json b/.trajectories/index.json index f66908c4..3dd36e44 100644 --- a/.trajectories/index.json +++ b/.trajectories/index.json @@ -1,6 +1,6 @@ { "version": 1, - "lastUpdated": "2026-01-04T06:58:02.712Z", + "lastUpdated": "2026-01-04T07:01:46.992Z", "trajectories": { "traj_ozd98si6a7ns": { "title": "Fix thinking indicator showing on all messages", From 99dcc7ea732da05573437f39253d27dbcf78e6e9 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:06:48 +0000 Subject: [PATCH 013/103] Add trajectories tab to settings panel with PDERO documentation New settings tab includes: - Explanation of what trajectories are (PDERO paradigm) - List of benefits for trajectory tracking - Link to pdero.com for more info - Toggle for repo vs user-level storage - Display of current storage location - Explanation of why teams might opt-in --- .../react-components/SettingsPanel.tsx | 210 +++++++++++++++++- 1 file changed, 209 insertions(+), 1 deletion(-) diff --git a/src/dashboard/react-components/SettingsPanel.tsx b/src/dashboard/react-components/SettingsPanel.tsx index d1d257a2..b77f94d2 100644 --- a/src/dashboard/react-components/SettingsPanel.tsx +++ b/src/dashboard/react-components/SettingsPanel.tsx @@ -77,6 +77,22 @@ export interface SettingsPanelProps { csrfToken?: string; // For cloud mode API calls } +// Trajectory settings state +interface TrajectorySettings { + storeInRepo: boolean; + storageLocation: string; + loading: boolean; + error: string | null; + documentation?: { + title: string; + description: string; + whatIsIt: string; + benefits: string[]; + storeInRepoExplanation: string; + learnMore: string; + }; +} + export function SettingsPanel({ isOpen, onClose, @@ -86,11 +102,81 @@ export function SettingsPanel({ workspaceId, csrfToken, }: SettingsPanelProps) { - const [activeTab, setActiveTab] = useState<'appearance' | 'notifications' | 'connection' | 'providers'>('appearance'); + const [activeTab, setActiveTab] = useState<'appearance' | 'notifications' | 'connection' | 'providers' | 'trajectories'>('appearance'); const [providerStatus, setProviderStatus] = useState>({}); const [connectingProvider, setConnectingProvider] = useState(null); const [apiKeyInput, setApiKeyInput] = useState(''); const [providerError, setProviderError] = useState(null); + const [trajectorySettings, setTrajectorySettings] = useState({ + storeInRepo: false, + storageLocation: '', + loading: true, + error: null, + }); + + // Load trajectory settings on mount + React.useEffect(() => { + if (isOpen && activeTab === 'trajectories') { + fetchTrajectorySettings(); + } + }, [isOpen, activeTab]); + + const fetchTrajectorySettings = async () => { + try { + setTrajectorySettings(prev => ({ ...prev, loading: true, error: null })); + const res = await fetch('/api/settings/trajectory'); + if (!res.ok) throw new Error('Failed to load settings'); + const data = await res.json(); + setTrajectorySettings({ + storeInRepo: data.settings.storeInRepo, + storageLocation: data.settings.storageLocation, + loading: false, + error: null, + documentation: data.documentation, + }); + } catch (err) { + setTrajectorySettings(prev => ({ + ...prev, + loading: false, + error: err instanceof Error ? err.message : 'Failed to load settings', + })); + } + }; + + const updateTrajectorySettings = async (storeInRepo: boolean) => { + try { + setTrajectorySettings(prev => ({ ...prev, loading: true, error: null })); + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch('/api/settings/trajectory', { + method: 'PUT', + credentials: 'include', + headers, + body: JSON.stringify({ storeInRepo }), + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to update settings'); + } + + const data = await res.json(); + setTrajectorySettings(prev => ({ + ...prev, + storeInRepo: data.settings.storeInRepo, + storageLocation: data.settings.storageLocation, + loading: false, + error: null, + })); + } catch (err) { + setTrajectorySettings(prev => ({ + ...prev, + loading: false, + error: err instanceof Error ? err.message : 'Failed to update settings', + })); + } + }; const updateSetting = useCallback( ( @@ -183,6 +269,17 @@ export function SettingsPanel({ Providers +
@@ -417,6 +514,99 @@ export function SettingsPanel({
)} + + {activeTab === 'trajectories' && ( +
+ {trajectorySettings.loading ? ( +
+
Loading settings...
+
+ ) : trajectorySettings.error ? ( +
+

{trajectorySettings.error}

+ +
+ ) : ( + <> + {/* Documentation section */} +
+

+ + What are Trajectories? +

+

+ {trajectorySettings.documentation?.description || + 'Trajectories record the journey of agent work using the PDERO paradigm (Plan, Design, Execute, Review, Observe). They capture decisions, phase transitions, and retrospectives.'} +

+
+

Benefits

+
    + {(trajectorySettings.documentation?.benefits || [ + 'Track why decisions were made, not just what was built', + 'Enable session recovery when agents crash', + 'Provide learning data for future agents', + 'Create audit trails of AI work', + ]).map((benefit, i) => ( +
  • + โœ“ + {benefit} +
  • + ))} +
+
+ + Learn more about PDERO + + +
+ + {/* Settings */} +
+ + + updateTrajectorySettings(v)} + /> + +
+
+ Current storage location +
+ + {trajectorySettings.storageLocation || 'user (~/.config/agent-relay/trajectories/)'} + +
+
+ + {/* Why opt-in info */} +
+

Why opt-in to repo storage?

+

+ Teams who want to review agent decision-making processes can store trajectories + in the repo to version control them alongside code. This makes it easy to understand + why agents made specific choices during code review. +

+
+ + )} +
+ )}
@@ -559,3 +749,21 @@ function ProviderIcon() { ); } + +function TrajectoryIcon() { + return ( + + + + ); +} + +function ExternalLinkIcon() { + return ( + + + + + + ); +} From 929c6b877b54a72c59bb8ba7ff1926b8237d7cc0 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:11:57 +0000 Subject: [PATCH 014/103] Record provider auth investigation decisions --- .trajectories/active/traj_7ludwvz45veh.json | 24 +++++++++++++++++++++ .trajectories/index.json | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/.trajectories/active/traj_7ludwvz45veh.json b/.trajectories/active/traj_7ludwvz45veh.json index 033f563b..5ee3c6e0 100644 --- a/.trajectories/active/traj_7ludwvz45veh.json +++ b/.trajectories/active/traj_7ludwvz45veh.json @@ -107,6 +107,30 @@ "reasoning": "Users need to understand what trajectories are (PDERO paradigm), why they'd opt-in, and link to pdero.com for more info" }, "significance": "high" + }, + { + "ts": 1767510429532, + "type": "decision", + "content": "Investigate Claude OAuth login flow: Investigate Claude OAuth login flow", + "raw": { + "question": "Investigate Claude OAuth login flow", + "chosen": "Investigate Claude OAuth login flow", + "alternatives": [], + "reasoning": "Current provider setup uses API keys but Claude uses OAuth. Need to bypass interactive prompts and get login URL for popup-based auth." + }, + "significance": "high" + }, + { + "ts": 1767510693303, + "type": "decision", + "content": "Cloud provider auth strategy for Claude: Cloud provider auth strategy for Claude", + "raw": { + "question": "Cloud provider auth strategy for Claude", + "chosen": "Cloud provider auth strategy for Claude", + "alternatives": [], + "reasoning": "Claude uses OAuth in cloud environments. For users connecting accounts: 1) API key works (already supported), 2) CLI setup-token is interactive, 3) Need proper OAuth device flow from Anthropic. Recommend API key for now with improved UX." + }, + "significance": "high" } ] } diff --git a/.trajectories/index.json b/.trajectories/index.json index 3dd36e44..2d509b57 100644 --- a/.trajectories/index.json +++ b/.trajectories/index.json @@ -1,6 +1,6 @@ { "version": 1, - "lastUpdated": "2026-01-04T07:01:46.992Z", + "lastUpdated": "2026-01-04T07:11:33.305Z", "trajectories": { "traj_ozd98si6a7ns": { "title": "Fix thinking indicator showing on all messages", From 624b65fd45a77ca9e3168517d150a3a0594b53a7 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:28:52 +0000 Subject: [PATCH 015/103] Fix provider OAuth login with PTY-based URL capture - Switch from child_process.spawn to node-pty for proper TTY emulation - Handle Claude's interactive setup flow (dark mode, auth method prompts) - Auto-respond to prompts to reach the OAuth login URL - Add capture group to URL regex patterns - Handle "already authenticated" case in SettingsPanel - Add OAuth session management with polling for completion - Provide fallback API key input option for providers that support it - Clean up PTY processes properly on cancel/complete/timeout --- src/cloud/api/onboarding.ts | 170 +++++-- .../react-components/SettingsPanel.tsx | 413 +++++++++++++++--- 2 files changed, 488 insertions(+), 95 deletions(-) diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 670e045a..de318c8d 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -2,11 +2,17 @@ * Onboarding API Routes * * Handles CLI proxy authentication for Claude Code and other providers. - * Spawns CLI tools to get auth URLs, captures tokens. + * Spawns CLI tools via PTY to get auth URLs, captures tokens. + * + * We use node-pty instead of child_process.spawn because: + * 1. Many CLIs detect if they're in a TTY and behave differently + * 2. Interactive OAuth flows often require TTY for proper output + * 3. PTY ensures the CLI outputs auth URLs correctly */ import { Router, Request, Response } from 'express'; -import { spawn, ChildProcess } from 'child_process'; +import * as pty from 'node-pty'; +import type { IPty } from 'node-pty'; import crypto from 'crypto'; import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; @@ -24,13 +30,14 @@ onboardingRouter.use(requireAuth); interface CLIAuthSession { userId: string; provider: string; - process?: ChildProcess; + process?: IPty; authUrl?: string; callbackUrl?: string; status: 'starting' | 'waiting_auth' | 'success' | 'error' | 'timeout'; token?: string; error?: string; createdAt: Date; + output: string; // Accumulated output for debugging } const activeSessions = new Map(); @@ -42,7 +49,11 @@ setInterval(() => { // Remove sessions older than 10 minutes if (now - session.createdAt.getTime() > 10 * 60 * 1000) { if (session.process) { - session.process.kill(); + try { + session.process.kill(); + } catch { + // Process may already be dead + } } activeSessions.delete(id); } @@ -51,29 +62,58 @@ setInterval(() => { /** * CLI commands and URL patterns for each provider + * + * Each CLI tool outputs an OAuth URL when run without credentials. + * We capture stdout/stderr and extract the URL using a simple https:// pattern. + * + * IMPORTANT: These CLIs are interactive - they output the auth URL then wait + * for the user to complete OAuth in their browser. We capture the URL and + * display it in a popup for the user. */ const CLI_AUTH_CONFIG: Record = { anthropic: { - // Claude Code CLI login + // Claude Code CLI - running without args triggers OAuth if not authenticated command: 'claude', - args: ['login', '--no-open'], - // Claude outputs: "Please open: https://..." - urlPattern: /(?:open|visit|go to)[:\s]+(\S+anthropic\S+)/i, - // Token might be in output or in credentials file + args: [], // No args needed - CLI auto-prompts for auth + // Generic URL pattern with capture group - Claude outputs auth URL to stdout + urlPattern: /(https:\/\/[^\s]+)/, credentialPath: '~/.claude/credentials.json', + displayName: 'Claude', }, openai: { - // Codex CLI auth + // Codex CLI - uses 'login' subcommand command: 'codex', - args: ['auth', '--no-browser'], - urlPattern: /(?:open|visit|go to)[:\s]+(\S+openai\S+)/i, + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, credentialPath: '~/.codex/credentials.json', + displayName: 'Codex', + }, + google: { + // Gemini CLI + command: 'gemini', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Gemini', + }, + opencode: { + // OpenCode CLI + command: 'opencode', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'OpenCode', + }, + droid: { + // Droid CLI + command: 'droid', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Droid', }, }; @@ -100,63 +140,99 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response provider, status: 'starting', createdAt: new Date(), + output: '', }; activeSessions.set(sessionId, session); try { - // Spawn CLI process - const proc = spawn(config.command, config.args, { - env: { ...process.env, NO_COLOR: '1' }, - stdio: ['pipe', 'pipe', 'pipe'], + // Spawn CLI process via PTY for proper TTY emulation + // This ensures the CLI outputs auth URLs correctly + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: process.cwd(), + env: { ...process.env, NO_COLOR: '1', TERM: 'xterm-256color' } as Record, }); session.process = proc; - let _output = ''; - // Capture stdout/stderr for auth URL - const handleOutput = (data: Buffer) => { - const text = data.toString(); - _output += text; + // Track which prompts we've already responded to + let respondedDarkMode = false; + let respondedAuthChoice = false; + + // Capture PTY output for auth URL and handle interactive prompts + proc.onData((data: string) => { + session.output += data; + + // Strip ANSI escape codes for pattern matching + const cleanText = data.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); + const lowerText = cleanText.toLowerCase(); + + // Handle Claude's interactive setup prompts + if (provider === 'anthropic') { + // Dark mode prompt - just press enter to accept default (or 'n' for no) + if (!respondedDarkMode && (lowerText.includes('dark mode') || lowerText.includes('dark theme'))) { + respondedDarkMode = true; + // Press enter to accept default + setTimeout(() => proc.write('\r'), 100); + } + + // Auth method prompt - choose subscription (press enter or '1') + // Claude asks: "Would you like to use your Claude subscription or an API key?" + if (!respondedAuthChoice && ( + lowerText.includes('subscription') || + lowerText.includes('api key') || + lowerText.includes('how would you like to authenticate') + )) { + respondedAuthChoice = true; + // Press enter to select first option (Claude subscription) + setTimeout(() => proc.write('\r'), 100); + } + } // Look for auth URL - const match = text.match(config.urlPattern); + const match = cleanText.match(config.urlPattern); if (match && match[1]) { session.authUrl = match[1]; session.status = 'waiting_auth'; } // Look for success indicators - if (text.toLowerCase().includes('success') || - text.toLowerCase().includes('authenticated') || - text.toLowerCase().includes('logged in')) { + if (lowerText.includes('success') || + lowerText.includes('authenticated') || + lowerText.includes('logged in')) { session.status = 'success'; } - }; - - proc.stdout.on('data', handleOutput); - proc.stderr.on('data', handleOutput); - - proc.on('error', (err) => { - session.status = 'error'; - session.error = `Failed to start CLI: ${err.message}`; }); - proc.on('exit', async (code) => { - if (code === 0 && session.status !== 'error') { + proc.onExit(async ({ exitCode }) => { + if (exitCode === 0 && session.status !== 'error') { session.status = 'success'; // Try to read credentials from file await extractCredentials(session, config); } else if (session.status === 'starting') { session.status = 'error'; - session.error = `CLI exited with code ${code}`; + session.error = `CLI exited with code ${exitCode}`; } }); - // Wait a moment for URL to appear - await new Promise(resolve => setTimeout(resolve, 2000)); + // Wait for URL to appear - longer timeout for Claude's multi-step setup + // Claude asks: dark mode? -> subscription vs API key? -> shows login URL + const waitTime = provider === 'anthropic' ? 5000 : 2000; + await new Promise(resolve => setTimeout(resolve, waitTime)); - // Return session info - if (session.authUrl) { + // Return session info based on current state + if (session.status === 'success') { + // Already authenticated - CLI exited successfully without auth URL + activeSessions.delete(sessionId); + res.json({ + sessionId, + status: 'success', + alreadyAuthenticated: true, + message: `Already authenticated with ${config.displayName}`, + }); + } else if (session.authUrl) { res.json({ sessionId, status: 'waiting_auth', @@ -252,7 +328,11 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, // Clean up session if (session.process) { - session.process.kill(); + try { + session.process.kill(); + } catch { + // Process may already be dead + } } activeSessions.delete(sessionId); @@ -277,7 +357,11 @@ onboardingRouter.post('/cli/:provider/cancel/:sessionId', (req: Request, res: Re const session = activeSessions.get(sessionId); if (session?.userId === userId) { if (session.process) { - session.process.kill(); + try { + session.process.kill(); + } catch { + // Process may already be dead + } } activeSessions.delete(sessionId); } diff --git a/src/dashboard/react-components/SettingsPanel.tsx b/src/dashboard/react-components/SettingsPanel.tsx index b77f94d2..799ee566 100644 --- a/src/dashboard/react-components/SettingsPanel.tsx +++ b/src/dashboard/react-components/SettingsPanel.tsx @@ -56,17 +56,75 @@ interface AIProvider { description: string; color: string; cliCommand: string; + apiKeyUrl?: string; // URL to get API key (fallback) + apiKeyName?: string; // How the API key is labeled on their site + supportsOAuth?: boolean; // Whether CLI-based OAuth is supported isConnected?: boolean; } const AI_PROVIDERS: AIProvider[] = [ - { id: 'anthropic', name: 'Anthropic', displayName: 'Claude', description: 'Claude Code - recommended for code tasks', color: '#D97757', cliCommand: 'claude' }, - { id: 'codex', name: 'OpenAI', displayName: 'Codex', description: 'Codex - OpenAI coding assistant', color: '#10A37F', cliCommand: 'codex login' }, - { id: 'gemini', name: 'Google', displayName: 'Gemini', description: 'Gemini - Google AI coding assistant', color: '#4285F4', cliCommand: 'gemini' }, - { id: 'opencode', name: 'OpenCode', displayName: 'OpenCode', description: 'OpenCode - AI coding assistant', color: '#00D4AA', cliCommand: 'opencode' }, - { id: 'droid', name: 'Factory', displayName: 'Droid', description: 'Droid - Factory AI coding agent', color: '#6366F1', cliCommand: 'droid' }, + { + id: 'anthropic', + name: 'Anthropic', + displayName: 'Claude', + description: 'Claude Code - recommended for code tasks', + color: '#D97757', + cliCommand: 'claude', + apiKeyUrl: 'https://console.anthropic.com/settings/keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'openai', + name: 'OpenAI', + displayName: 'Codex', + description: 'Codex - OpenAI coding assistant', + color: '#10A37F', + cliCommand: 'codex login', + apiKeyUrl: 'https://platform.openai.com/api-keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'google', + name: 'Google', + displayName: 'Gemini', + description: 'Gemini - Google AI coding assistant', + color: '#4285F4', + cliCommand: 'gemini', + apiKeyUrl: 'https://aistudio.google.com/app/apikey', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'opencode', + name: 'OpenCode', + displayName: 'OpenCode', + description: 'OpenCode - AI coding assistant', + color: '#00D4AA', + cliCommand: 'opencode', + supportsOAuth: true, + }, + { + id: 'droid', + name: 'Factory', + displayName: 'Droid', + description: 'Droid - Factory AI coding agent', + color: '#6366F1', + cliCommand: 'droid', + supportsOAuth: true, + }, ]; +// Auth session state for CLI-based OAuth +interface OAuthSession { + providerId: string; + sessionId: string; + authUrl?: string; + status: 'starting' | 'waiting_auth' | 'success' | 'error'; + error?: string; +} + export interface SettingsPanelProps { isOpen: boolean; onClose: () => void; @@ -107,6 +165,8 @@ export function SettingsPanel({ const [connectingProvider, setConnectingProvider] = useState(null); const [apiKeyInput, setApiKeyInput] = useState(''); const [providerError, setProviderError] = useState(null); + const [oauthSession, setOauthSession] = useState(null); + const [showApiKeyFallback, setShowApiKeyFallback] = useState>({}); const [trajectorySettings, setTrajectorySettings] = useState({ storeInRepo: false, storageLocation: '', @@ -178,6 +238,202 @@ export function SettingsPanel({ } }; + // Start CLI-based OAuth flow for a provider + const startOAuthFlow = async (provider: AIProvider) => { + setProviderError(null); + setConnectingProvider(provider.id); + setOauthSession({ providerId: provider.id, sessionId: '', status: 'starting' }); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${provider.id}/start`, { + method: 'POST', + credentials: 'include', + headers, + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start authentication'); + } + + // Handle immediate success (already authenticated) + if (data.status === 'success' || data.alreadyAuthenticated) { + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + const session: OAuthSession = { + providerId: provider.id, + sessionId: data.sessionId, + authUrl: data.authUrl, + status: data.status || 'starting', + }; + setOauthSession(session); + + // If we have an auth URL, open it in a popup + if (data.authUrl) { + openAuthPopup(data.authUrl, provider.displayName); + // Start polling for completion + pollAuthStatus(provider.id, data.sessionId); + } else if (data.status === 'starting') { + // Still starting, poll for URL + pollAuthStatus(provider.id, data.sessionId); + } + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to start OAuth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + // Open auth URL in a popup window + const openAuthPopup = (url: string, providerName: string) => { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + url, + `${providerName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + }; + + // Poll for OAuth session status + const pollAuthStatus = async (providerId: string, sessionId: string) => { + const maxAttempts = 60; // 5 minutes with 5-second intervals + let attempts = 0; + + const poll = async () => { + if (attempts >= maxAttempts) { + setProviderError('Authentication timed out. Please try again.'); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + try { + const res = await fetch(`/api/onboarding/cli/${providerId}/status/${sessionId}`, { + credentials: 'include', + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to check status'); + } + + if (data.status === 'success') { + // Complete the auth flow + await completeAuthFlow(providerId, sessionId); + return; + } else if (data.status === 'error') { + throw new Error(data.error || 'Authentication failed'); + } else if (data.status === 'waiting_auth' && data.authUrl && !oauthSession?.authUrl) { + // Got the auth URL, open popup + setOauthSession(prev => prev ? { ...prev, authUrl: data.authUrl, status: 'waiting_auth' } : null); + openAuthPopup(data.authUrl, AI_PROVIDERS.find(p => p.id === providerId)?.displayName || 'Provider'); + } + + // Continue polling + attempts++; + setTimeout(poll, 5000); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Auth check failed'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + poll(); + }; + + // Complete OAuth flow + const completeAuthFlow = async (providerId: string, sessionId: string) => { + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${providerId}/complete/${sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to complete authentication'); + } + + // Success! + setProviderStatus(prev => ({ ...prev, [providerId]: true })); + setOauthSession(null); + setConnectingProvider(null); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to complete auth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + // Cancel OAuth flow + const cancelOAuthFlow = async () => { + if (oauthSession?.sessionId) { + try { + await fetch(`/api/onboarding/cli/${oauthSession.providerId}/cancel/${oauthSession.sessionId}`, { + method: 'POST', + credentials: 'include', + }); + } catch { + // Ignore cancel errors + } + } + setOauthSession(null); + setConnectingProvider(null); + }; + + // Submit API key (fallback flow) + const submitApiKey = async (provider: AIProvider) => { + if (!apiKeyInput.trim()) { + setProviderError('Please enter an API key'); + return; + } + + setProviderError(null); + setConnectingProvider(provider.id); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/token/${provider.id}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ token: apiKeyInput.trim() }), + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to connect'); + } + + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setApiKeyInput(''); + setConnectingProvider(null); + setShowApiKeyFallback(prev => ({ ...prev, [provider.id]: false })); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to connect'); + setConnectingProvider(null); + } + }; + const updateSetting = useCallback( ( category: 'notifications' | 'display' | 'connection', @@ -455,53 +711,106 @@ export function SettingsPanel({
{!providerStatus[provider.id] && ( -
- { - setConnectingProvider(provider.id); - setApiKeyInput(e.target.value); - }} - onFocus={() => setConnectingProvider(provider.id)} - className="flex-1 py-2 px-3 border border-border rounded-md text-sm bg-bg-tertiary text-text-primary placeholder-text-muted focus:outline-none focus:border-accent" - /> - +
+ {/* OAuth flow (primary) */} + {oauthSession?.providerId === provider.id ? ( +
+ {oauthSession.status === 'starting' && ( +
+ โณ + Starting authentication... +
+ )} + {oauthSession.status === 'waiting_auth' && ( + <> +
+ ๐Ÿ” + Complete login in the popup window +
+ {oauthSession.authUrl && ( +
+ Popup didn't open?{' '} + +
+ )} + + )} + +
+ ) : showApiKeyFallback[provider.id] ? ( + /* API key fallback */ +
+
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 py-2 px-3 border border-border rounded-md text-sm bg-bg-tertiary text-text-primary placeholder-text-muted focus:outline-none focus:border-accent" + /> + +
+ {provider.apiKeyUrl && ( +
+ Get your API key from{' '} + + {new URL(provider.apiKeyUrl).hostname} + +
+ )} + +
+ ) : ( + /* Primary connect button */ +
+ + {provider.apiKeyUrl && ( + + )} +
+ )}
)} From 64154ec56e8ddeef954a951fd72843107dee272a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:29:58 +0000 Subject: [PATCH 016/103] Update trajectory with OAuth flow decisions --- .trajectories/active/traj_7ludwvz45veh.json | 24 +++++++++++++++++++++ .trajectories/index.json | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/.trajectories/active/traj_7ludwvz45veh.json b/.trajectories/active/traj_7ludwvz45veh.json index 5ee3c6e0..4e12f19c 100644 --- a/.trajectories/active/traj_7ludwvz45veh.json +++ b/.trajectories/active/traj_7ludwvz45veh.json @@ -131,6 +131,30 @@ "reasoning": "Claude uses OAuth in cloud environments. For users connecting accounts: 1) API key works (already supported), 2) CLI setup-token is interactive, 3) Need proper OAuth device flow from Anthropic. Recommend API key for now with improved UX." }, "significance": "high" + }, + { + "ts": 1767511753148, + "type": "decision", + "content": "Use node-pty for CLI OAuth flow: Use node-pty for CLI OAuth flow", + "raw": { + "question": "Use node-pty for CLI OAuth flow", + "chosen": "Use node-pty for CLI OAuth flow", + "alternatives": [], + "reasoning": "Regular spawn with pipes doesn't properly emulate TTY, causing CLIs to behave differently. PTY ensures auth URLs are output correctly and allows sending responses to interactive prompts." + }, + "significance": "high" + }, + { + "ts": 1767511764090, + "type": "decision", + "content": "Auto-respond to Claude interactive setup prompts: Auto-respond to Claude interactive setup prompts", + "raw": { + "question": "Auto-respond to Claude interactive setup prompts", + "chosen": "Auto-respond to Claude interactive setup prompts", + "alternatives": [], + "reasoning": "Claude has multi-step setup: dark mode -> auth method -> login URL. We detect prompts and send enter key to progress through them automatically." + }, + "significance": "high" } ] } diff --git a/.trajectories/index.json b/.trajectories/index.json index 2d509b57..986a88dd 100644 --- a/.trajectories/index.json +++ b/.trajectories/index.json @@ -1,6 +1,6 @@ { "version": 1, - "lastUpdated": "2026-01-04T07:11:33.305Z", + "lastUpdated": "2026-01-04T07:29:24.092Z", "trajectories": { "traj_ozd98si6a7ns": { "title": "Fix thinking indicator showing on all messages", From 8ed22f9c972547581a3eeed7c7659805b537eb14 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:38:22 +0000 Subject: [PATCH 017/103] Add robust CLI OAuth testing infrastructure - Refactor CLI auth config with structured PromptHandler interface - Add provider-specific prompt patterns and responses - Export validation functions for config integrity - Create mock CLI script for integration testing - Add comprehensive unit tests (32 tests passing) - Document process for adding new providers Key features: - Each provider has configurable prompts, responses, and timeouts - validateProviderConfig() ensures capture groups, required fields - Mock CLI simulates real interactive flows for testing - Integration test runner for all providers Files: - src/cloud/api/onboarding.ts - Refactored with testable helpers - src/cloud/api/onboarding.test.ts - 32 unit tests - scripts/test-cli-auth/ - Integration testing tools --- scripts/test-cli-auth/README.md | 215 +++++++++++++++ scripts/test-cli-auth/mock-cli.sh | 123 +++++++++ scripts/test-cli-auth/test-oauth-flow.ts | 220 +++++++++++++++ src/cloud/api/onboarding.test.ts | 329 +++++++++++++++++++++++ src/cloud/api/onboarding.ts | 293 ++++++++++++++++---- 5 files changed, 1132 insertions(+), 48 deletions(-) create mode 100644 scripts/test-cli-auth/README.md create mode 100755 scripts/test-cli-auth/mock-cli.sh create mode 100644 scripts/test-cli-auth/test-oauth-flow.ts create mode 100644 src/cloud/api/onboarding.test.ts diff --git a/scripts/test-cli-auth/README.md b/scripts/test-cli-auth/README.md new file mode 100644 index 00000000..4ef961b6 --- /dev/null +++ b/scripts/test-cli-auth/README.md @@ -0,0 +1,215 @@ +# CLI OAuth Flow Testing + +This directory contains tools for testing and validating the CLI-based OAuth authentication flow for AI providers. + +## Quick Start + +```bash +# Make mock CLI executable +chmod +x scripts/test-cli-auth/mock-cli.sh + +# Run all integration tests +npx tsx scripts/test-cli-auth/test-oauth-flow.ts + +# Test a specific provider +npx tsx scripts/test-cli-auth/test-oauth-flow.ts anthropic +``` + +## Architecture + +The CLI OAuth flow works as follows: + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Dashboard UI โ”‚โ”€โ”€โ”€โ”€โ”€โ–ถโ”‚ Onboarding โ”‚โ”€โ”€โ”€โ”€โ”€โ–ถโ”‚ CLI via PTY โ”‚ +โ”‚ (React) โ”‚ โ”‚ API โ”‚ โ”‚ (node-pty) โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ–ฒ โ”‚ โ”‚ + โ”‚ โ”‚ โ–ผ + โ”‚ โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ โ”‚ โ”‚ Interactive โ”‚ + โ”‚ โ”‚โ—€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”‚ Prompts โ”‚ + โ”‚ โ”‚ auto-respond โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ โ”‚ โ”‚ + โ”‚ โ”‚ โ–ผ + โ”‚ โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ โ”‚ โ”‚ Auth URL โ”‚ + โ”‚โ—€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”‚ Output โ”‚ + โ”‚ (opens popup) โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ โ”‚ โ”‚ + โ–ผ โ”‚ โ–ผ +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ OAuth Popup โ”‚ โ”‚ โ”‚ Success โ”‚ +โ”‚ (Browser) โ”‚โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ–ถโ”‚ Detection โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +## Adding a New Provider + +### 1. Define the CLI Configuration + +Add a new entry to `CLI_AUTH_CONFIG` in `src/cloud/api/onboarding.ts`: + +```typescript +export const CLI_AUTH_CONFIG: Record = { + // ... existing providers + + newprovider: { + // Required: CLI command to run + command: 'newcli', + + // Required: Command arguments + args: ['auth', 'login'], + + // Required: Pattern to extract auth URL (must have capture group) + urlPattern: /(https:\/\/[^\s]+)/, + + // Optional: Path to credentials file after auth + credentialPath: '~/.newcli/credentials.json', + + // Required: Display name for UI + displayName: 'NewProvider', + + // Required: How long to wait for URL (ms) + waitTimeout: 3000, + + // Required: Interactive prompts to auto-respond to + prompts: [ + { + pattern: /do you trust this/i, + response: 'y\r', + delay: 100, + description: 'Trust prompt', + }, + ], + + // Required: Success indicators + successPatterns: [ + /success/i, + /authenticated/i, + ], + }, +}; +``` + +### 2. Add Mock CLI Behavior + +Update `scripts/test-cli-auth/mock-cli.sh` with the new provider's interactive flow: + +```bash +newprovider) + echo -e "${BLUE}NewProvider CLI${NC}" + sleep "$DELAY" + + echo -e "Do you trust this directory? [y/N] " + read -r -n 1 response 2>/dev/null || true + echo "" + + echo -e "Auth URL:" + echo -e "${GREEN}https://newprovider.com/auth?session=test${NC}" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authenticated!${NC}" + ;; +``` + +### 3. Add Unit Tests + +Add tests to `src/cloud/api/onboarding.test.ts`: + +```typescript +describe('newprovider', () => { + const config = CLI_AUTH_CONFIG.newprovider; + + it('has correct command and args', () => { + expect(config.command).toBe('newcli'); + expect(config.args).toEqual(['auth', 'login']); + }); + + it('extracts auth URL from output', () => { + const output = 'Visit https://newprovider.com/auth?id=xxx'; + const match = output.match(config.urlPattern); + expect(match![1]).toContain('https://newprovider.com/auth'); + }); + + // Test each prompt handler + describe('prompt handlers', () => { + it('detects trust prompt', () => { + const prompt = findMatchingPrompt( + 'Do you trust this directory?', + config.prompts, + new Set() + ); + expect(prompt!.description).toBe('Trust prompt'); + }); + }); +}); +``` + +### 4. Run Tests + +```bash +# Unit tests +npx vitest run src/cloud/api/onboarding.test.ts + +# Integration tests +npx tsx scripts/test-cli-auth/test-oauth-flow.ts newprovider +``` + +## Provider Checklist + +When adding or modifying a provider, ensure: + +- [ ] `command` is the correct CLI binary name +- [ ] `args` includes any required subcommands (e.g., `['login']`) +- [ ] `urlPattern` has a capture group `(...)` around the URL +- [ ] `waitTimeout` is long enough for multi-step prompts +- [ ] All interactive prompts are handled in `prompts` array +- [ ] Each prompt has a unique `description` for deduplication +- [ ] `successPatterns` cover all success messages the CLI outputs +- [ ] Mock CLI simulates the real CLI's behavior accurately +- [ ] Unit tests cover URL extraction and prompt detection +- [ ] Integration test passes + +## Testing with Real CLIs + +For testing with actual CLIs (not mocks), you can: + +1. **Docker Container Test**: Spin up a container without credentials: + ```bash + docker run -it --rm node:20 bash + npm install -g @anthropic-ai/claude-code + # Run the onboarding flow + ``` + +2. **Fresh VM**: Use a cloud VM with no cached credentials + +3. **Delete Credentials**: Remove local credential files: + ```bash + rm -rf ~/.claude + rm -rf ~/.codex + # etc. + ``` + +## Troubleshooting + +### URL Not Extracted +- Check if the CLI outputs the URL in expected format +- Verify the `urlPattern` regex matches the output +- Increase `waitTimeout` if prompts take longer + +### Prompts Not Detected +- Run mock CLI manually to see exact prompt text +- Check regex patterns are case-insensitive (`/i` flag) +- Ensure ANSI codes are being stripped before matching + +### Success Not Detected +- Verify CLI outputs one of the success patterns +- Check for typos in pattern (e.g., `logged in` vs `loggedin`) +- Add new patterns if CLI uses different success messages + +## Files + +- `mock-cli.sh` - Simulates CLI interactive flows for testing +- `test-oauth-flow.ts` - Integration test runner +- `README.md` - This documentation diff --git a/scripts/test-cli-auth/mock-cli.sh b/scripts/test-cli-auth/mock-cli.sh new file mode 100755 index 00000000..ae66ed34 --- /dev/null +++ b/scripts/test-cli-auth/mock-cli.sh @@ -0,0 +1,123 @@ +#!/bin/bash +# Mock CLI for testing OAuth flow prompt handling +# Usage: ./mock-cli.sh +# +# This script simulates the interactive prompts of various AI CLI tools +# for testing the onboarding OAuth flow without actual CLI binaries. + +PROVIDER="${1:-claude}" +DELAY="${2:-0.5}" + +# Colors for output +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +case "$PROVIDER" in + claude|anthropic) + echo -e "${BLUE}Claude Code CLI${NC}" + echo "" + sleep "$DELAY" + + # Dark mode prompt + echo -e "Would you like to use ${YELLOW}dark mode${NC}? (y/n) " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Auth method prompt + echo -e "How would you like to authenticate?" + echo " 1. Use Claude ${YELLOW}subscription${NC} (recommended)" + echo " 2. Use ${YELLOW}API key${NC}" + echo -n "Choice (1-2): " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Trust directory prompt + echo -e "Do you ${YELLOW}trust this directory${NC}? [y/N] " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Auth URL + echo "" + echo -e "Please visit the following URL to authenticate:" + echo -e "${GREEN}https://console.anthropic.com/oauth/authorize?client_id=mock-test-123&state=abc${NC}" + echo "" + echo "Waiting for authentication..." + + # Wait for completion signal (or timeout) + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authentication successful!${NC}" + ;; + + codex|openai) + echo -e "${BLUE}Codex CLI${NC}" + echo "" + sleep "$DELAY" + + # Trust directory prompt + echo -e "Do you ${YELLOW}trust this workspace${NC}? [y/N] " + read -r -n 1 response 2>/dev/null || true + echo "" + sleep "$DELAY" + + # Auth URL + echo "" + echo -e "Open this URL to log in:" + echo -e "${GREEN}https://auth.openai.com/authorize?client_id=mock-test-456&state=def${NC}" + echo "" + echo "Waiting..." + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Logged in successfully${NC}" + ;; + + gemini|google) + echo -e "${BLUE}Gemini CLI${NC}" + echo "" + sleep "$DELAY" + + # Auth URL + echo -e "Authenticate at:" + echo -e "${GREEN}https://accounts.google.com/o/oauth2/v2/auth?client_id=mock-test-789${NC}" + echo "" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authenticated!${NC}" + ;; + + opencode) + echo -e "${BLUE}OpenCode CLI${NC}" + echo "" + sleep "$DELAY" + + echo -e "Login URL:" + echo -e "${GREEN}https://opencode.ai/auth?session=mock-session${NC}" + echo "" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Success${NC}" + ;; + + droid) + echo -e "${BLUE}Droid CLI${NC}" + echo "" + sleep "$DELAY" + + echo -e "Visit to authenticate:" + echo -e "${GREEN}https://factory.ai/droid/auth?id=mock-droid${NC}" + echo "" + + read -r -t 30 2>/dev/null || true + echo -e "${GREEN}Authenticated${NC}" + ;; + + *) + echo "Unknown provider: $PROVIDER" + echo "Supported: claude, codex, gemini, opencode, droid" + exit 1 + ;; +esac diff --git a/scripts/test-cli-auth/test-oauth-flow.ts b/scripts/test-cli-auth/test-oauth-flow.ts new file mode 100644 index 00000000..e98536f5 --- /dev/null +++ b/scripts/test-cli-auth/test-oauth-flow.ts @@ -0,0 +1,220 @@ +#!/usr/bin/env npx tsx +/** + * CLI OAuth Flow Integration Test + * + * Tests the prompt handling and URL extraction for each provider + * using mock CLIs that simulate the real interactive flows. + * + * Usage: + * npx tsx scripts/test-cli-auth/test-oauth-flow.ts [provider] + * + * Examples: + * npx tsx scripts/test-cli-auth/test-oauth-flow.ts # Test all providers + * npx tsx scripts/test-cli-auth/test-oauth-flow.ts claude # Test Claude only + */ + +import * as pty from 'node-pty'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, +} from '../../src/cloud/api/onboarding.js'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +interface TestResult { + provider: string; + passed: boolean; + urlExtracted: string | null; + promptsResponded: string[]; + successDetected: boolean; + output: string; + error?: string; +} + +/** + * Test a single provider's OAuth flow using the mock CLI + */ +async function testProvider(providerId: string): Promise { + const config = CLI_AUTH_CONFIG[providerId]; + if (!config) { + return { + provider: providerId, + passed: false, + urlExtracted: null, + promptsResponded: [], + successDetected: false, + output: '', + error: `Unknown provider: ${providerId}`, + }; + } + + const result: TestResult = { + provider: providerId, + passed: false, + urlExtracted: null, + promptsResponded: [], + successDetected: false, + output: '', + }; + + return new Promise((resolve) => { + const mockCliPath = path.join(__dirname, 'mock-cli.sh'); + const respondedPrompts = new Set(); + + // Map provider IDs to mock CLI provider names + const mockProviderName = providerId === 'anthropic' ? 'claude' : + providerId === 'openai' ? 'codex' : + providerId === 'google' ? 'gemini' : providerId; + + const proc = pty.spawn('bash', [mockCliPath, mockProviderName, '0.2'], { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: __dirname, + env: { ...process.env, TERM: 'xterm-256color' }, + }); + + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for completion'; + resolve(result); + }, 10000); + + proc.onData((data: string) => { + result.output += data; + + // Check for matching prompts and auto-respond + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + result.promptsResponded.push(matchingPrompt.description); + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, matchingPrompt.delay ?? 50); + } + + // Look for auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.urlExtracted) { + result.urlExtracted = match[1]; + } + + // Check for success indicators + if (matchesSuccessPattern(data, config.successPatterns)) { + result.successDetected = true; + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + + // Determine if test passed + result.passed = !!( + result.urlExtracted && + result.successDetected && + exitCode === 0 + ); + + // Send completion signal to mock CLI + setTimeout(() => resolve(result), 100); + }); + + // For mock CLI, send signal to continue after prompts + setTimeout(() => { + try { + proc.write('\n'); // Signal to continue + } catch { + // Ignore + } + }, 3000); + }); +} + +/** + * Run tests for specified providers or all providers + */ +async function runTests(providers?: string[]) { + const providerIds = providers ?? Object.keys(CLI_AUTH_CONFIG); + + console.log('โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—'); + console.log('โ•‘ CLI OAuth Flow Integration Tests โ•‘'); + console.log('โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + console.log(''); + + const results: TestResult[] = []; + + for (const providerId of providerIds) { + const config = CLI_AUTH_CONFIG[providerId]; + if (!config) { + console.log(`โš ๏ธ Unknown provider: ${providerId}`); + continue; + } + + console.log(`Testing ${config.displayName} (${providerId})...`); + + const result = await testProvider(providerId); + results.push(result); + + if (result.passed) { + console.log(` โœ… PASSED`); + } else { + console.log(` โŒ FAILED${result.error ? `: ${result.error}` : ''}`); + } + + console.log(` URL extracted: ${result.urlExtracted ? 'โœ“' : 'โœ—'}`); + console.log(` Success detected: ${result.successDetected ? 'โœ“' : 'โœ—'}`); + if (result.promptsResponded.length > 0) { + console.log(` Prompts responded: ${result.promptsResponded.join(', ')}`); + } + console.log(''); + } + + // Summary + const passed = results.filter(r => r.passed).length; + const failed = results.filter(r => !r.passed).length; + + console.log('โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + console.log(`Summary: ${passed} passed, ${failed} failed`); + console.log('โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + + // Exit with error if any tests failed + if (failed > 0) { + console.log('\nFailed tests:'); + for (const result of results.filter(r => !r.passed)) { + console.log(` - ${result.provider}: ${result.error || 'See details above'}`); + } + process.exit(1); + } +} + +// Parse CLI args +const args = process.argv.slice(2); +if (args.includes('--help') || args.includes('-h')) { + console.log(` +CLI OAuth Flow Integration Test + +Usage: + npx tsx scripts/test-cli-auth/test-oauth-flow.ts [provider...] + +Examples: + npx tsx scripts/test-cli-auth/test-oauth-flow.ts # Test all providers + npx tsx scripts/test-cli-auth/test-oauth-flow.ts anthropic # Test Claude only + npx tsx scripts/test-cli-auth/test-oauth-flow.ts anthropic openai # Test multiple + +Providers: + ${Object.keys(CLI_AUTH_CONFIG).join(', ')} +`); + process.exit(0); +} + +runTests(args.length > 0 ? args : undefined).catch(console.error); diff --git a/src/cloud/api/onboarding.test.ts b/src/cloud/api/onboarding.test.ts new file mode 100644 index 00000000..5df780b6 --- /dev/null +++ b/src/cloud/api/onboarding.test.ts @@ -0,0 +1,329 @@ +/** + * Onboarding OAuth Flow Tests + * + * Tests the CLI-based OAuth authentication flow for AI providers. + * These tests verify prompt detection, URL extraction, and success patterns + * without requiring actual CLI execution. + */ + +import { describe, it, expect } from 'vitest'; +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs, + getSupportedProviders, + type CLIAuthConfig, +} from './onboarding.js'; + +describe('CLI Auth Config', () => { + describe('anthropic (Claude)', () => { + const config = CLI_AUTH_CONFIG.anthropic; + + it('has correct command and args', () => { + expect(config.command).toBe('claude'); + expect(config.args).toEqual([]); + }); + + it('extracts auth URL from output', () => { + const output = 'Please visit https://console.anthropic.com/oauth/authorize?client_id=xxx to authenticate'; + const match = output.match(config.urlPattern); + expect(match).toBeTruthy(); + expect(match![1]).toBe('https://console.anthropic.com/oauth/authorize?client_id=xxx'); + }); + + it('handles URL with query params and fragments', () => { + const output = 'Open: https://auth.example.com/login?state=abc123&redirect=xyz#section'; + const match = output.match(config.urlPattern); + expect(match).toBeTruthy(); + expect(match![1]).toContain('https://auth.example.com/login'); + }); + + describe('prompt handlers', () => { + it('detects dark mode prompt', () => { + const respondedPrompts = new Set(); + + const prompt1 = findMatchingPrompt('Would you like dark mode?', config.prompts, respondedPrompts); + expect(prompt1).toBeTruthy(); + expect(prompt1!.description).toBe('Dark mode prompt'); + expect(prompt1!.response).toBe('\r'); + + const prompt2 = findMatchingPrompt('Enable dark theme?', config.prompts, respondedPrompts); + expect(prompt2).toBeTruthy(); + expect(prompt2!.description).toBe('Dark mode prompt'); + }); + + it('detects auth method prompt', () => { + const respondedPrompts = new Set(); + + const prompt1 = findMatchingPrompt( + 'Would you like to use your Claude subscription or an API key?', + config.prompts, + respondedPrompts + ); + expect(prompt1).toBeTruthy(); + expect(prompt1!.description).toBe('Auth method prompt'); + + const prompt2 = findMatchingPrompt( + 'How would you like to authenticate?', + config.prompts, + respondedPrompts + ); + expect(prompt2).toBeTruthy(); + expect(prompt2!.description).toBe('Auth method prompt'); + }); + + it('detects trust directory prompt', () => { + const respondedPrompts = new Set(); + + const prompt = findMatchingPrompt( + 'Do you trust this directory?', + config.prompts, + respondedPrompts + ); + expect(prompt).toBeTruthy(); + expect(prompt!.description).toBe('Trust directory prompt'); + expect(prompt!.response).toBe('y\r'); + }); + + it('does not respond to same prompt twice', () => { + const respondedPrompts = new Set(); + + // First match + const prompt1 = findMatchingPrompt('dark mode?', config.prompts, respondedPrompts); + expect(prompt1).toBeTruthy(); + respondedPrompts.add(prompt1!.description); + + // Second attempt should return null + const prompt2 = findMatchingPrompt('dark mode?', config.prompts, respondedPrompts); + expect(prompt2).toBeNull(); + }); + }); + + describe('success patterns', () => { + it('detects success indicators', () => { + expect(matchesSuccessPattern('Authentication successful!', config.successPatterns)).toBe(true); + expect(matchesSuccessPattern('You are now authenticated', config.successPatterns)).toBe(true); + expect(matchesSuccessPattern('Logged in as user@example.com', config.successPatterns)).toBe(true); + }); + + it('handles case insensitivity', () => { + expect(matchesSuccessPattern('SUCCESS', config.successPatterns)).toBe(true); + expect(matchesSuccessPattern('Authenticated', config.successPatterns)).toBe(true); + }); + + it('does not false positive', () => { + expect(matchesSuccessPattern('Please enter your password', config.successPatterns)).toBe(false); + expect(matchesSuccessPattern('Waiting for authentication...', config.successPatterns)).toBe(false); + }); + }); + }); + + describe('openai (Codex)', () => { + const config = CLI_AUTH_CONFIG.openai; + + it('has correct command and args', () => { + expect(config.command).toBe('codex'); + expect(config.args).toEqual(['login']); + }); + + it('extracts auth URL from output', () => { + const output = 'Visit https://auth.openai.com/authorize?client_id=xxx to login'; + const match = output.match(config.urlPattern); + expect(match).toBeTruthy(); + expect(match![1]).toBe('https://auth.openai.com/authorize?client_id=xxx'); + }); + }); + + describe('all providers', () => { + it('have required fields', () => { + for (const [name, config] of Object.entries(CLI_AUTH_CONFIG)) { + expect(config.command, `${name} missing command`).toBeTruthy(); + expect(config.urlPattern, `${name} missing urlPattern`).toBeInstanceOf(RegExp); + expect(config.displayName, `${name} missing displayName`).toBeTruthy(); + expect(config.waitTimeout, `${name} missing waitTimeout`).toBeGreaterThan(0); + expect(Array.isArray(config.prompts), `${name} prompts should be array`).toBe(true); + expect(Array.isArray(config.successPatterns), `${name} successPatterns should be array`).toBe(true); + } + }); + + it('URL patterns have capture groups', () => { + for (const [name, config] of Object.entries(CLI_AUTH_CONFIG)) { + const testUrl = 'https://example.com/auth'; + const match = testUrl.match(config.urlPattern); + expect(match, `${name} urlPattern should match`).toBeTruthy(); + expect(match![1], `${name} urlPattern should have capture group`).toBe(testUrl); + } + }); + }); +}); + +describe('stripAnsiCodes', () => { + it('removes ANSI escape codes', () => { + const input = '\x1b[32mGreen text\x1b[0m and \x1b[1mbold\x1b[0m'; + expect(stripAnsiCodes(input)).toBe('Green text and bold'); + }); + + it('preserves text without ANSI codes', () => { + const input = 'Plain text without codes'; + expect(stripAnsiCodes(input)).toBe(input); + }); + + it('handles complex ANSI sequences', () => { + const input = '\x1b[38;5;196mRed\x1b[0m \x1b[48;2;0;255;0mGreen BG\x1b[0m'; + expect(stripAnsiCodes(input)).toBe('Red Green BG'); + }); +}); + +describe('matchesSuccessPattern', () => { + const patterns = [/success/i, /authenticated/i, /logged\s*in/i]; + + it('matches patterns case-insensitively', () => { + expect(matchesSuccessPattern('SUCCESS', patterns)).toBe(true); + expect(matchesSuccessPattern('Authenticated!', patterns)).toBe(true); + expect(matchesSuccessPattern('You are logged in', patterns)).toBe(true); + }); + + it('strips ANSI codes before matching', () => { + expect(matchesSuccessPattern('\x1b[32mSuccess!\x1b[0m', patterns)).toBe(true); + }); + + it('returns false when no match', () => { + expect(matchesSuccessPattern('Please wait...', patterns)).toBe(false); + expect(matchesSuccessPattern('Error occurred', patterns)).toBe(false); + }); +}); + +describe('findMatchingPrompt', () => { + const prompts = [ + { pattern: /dark mode/i, response: '\r', description: 'Dark mode' }, + { pattern: /api key/i, response: '2\r', description: 'API key option' }, + ]; + + it('finds matching prompt', () => { + const responded = new Set(); + const match = findMatchingPrompt('Enable dark mode?', prompts, responded); + expect(match).toBeTruthy(); + expect(match!.description).toBe('Dark mode'); + }); + + it('skips already responded prompts', () => { + const responded = new Set(['Dark mode']); + const match = findMatchingPrompt('Enable dark mode?', prompts, responded); + expect(match).toBeNull(); + }); + + it('returns null when no match', () => { + const responded = new Set(); + const match = findMatchingPrompt('Something else', prompts, responded); + expect(match).toBeNull(); + }); + + it('strips ANSI codes before matching', () => { + const responded = new Set(); + const match = findMatchingPrompt('\x1b[1mDark mode?\x1b[0m', prompts, responded); + expect(match).toBeTruthy(); + expect(match!.description).toBe('Dark mode'); + }); +}); + +describe('validateProviderConfig', () => { + it('validates a correct config', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 3000, + prompts: [ + { pattern: /test/i, response: '\r', description: 'Test prompt' }, + ], + successPatterns: [/success/i], + }; + expect(validateProviderConfig('test', config)).toBeNull(); + }); + + it('rejects missing command', () => { + const config = { + command: '', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 3000, + prompts: [], + successPatterns: [], + } as CLIAuthConfig; + expect(validateProviderConfig('test', config)).toContain('command'); + }); + + it('rejects urlPattern without capture group', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: [], + urlPattern: /https:\/\/[^\s]+/, // No capture group! + displayName: 'Test', + waitTimeout: 3000, + prompts: [], + successPatterns: [], + }; + expect(validateProviderConfig('test', config)).toContain('capture group'); + }); + + it('rejects invalid waitTimeout', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 0, + prompts: [], + successPatterns: [], + }; + expect(validateProviderConfig('test', config)).toContain('waitTimeout'); + }); + + it('rejects prompt without description', () => { + const config: CLIAuthConfig = { + command: 'test-cli', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Test', + waitTimeout: 3000, + prompts: [ + { pattern: /test/i, response: '\r', description: '' }, + ], + successPatterns: [], + }; + expect(validateProviderConfig('test', config)).toContain('description'); + }); +}); + +describe('validateAllProviderConfigs', () => { + it('validates all built-in providers', () => { + // Should not throw + expect(() => validateAllProviderConfigs()).not.toThrow(); + }); +}); + +describe('getSupportedProviders', () => { + it('returns list of providers', () => { + const providers = getSupportedProviders(); + expect(providers.length).toBeGreaterThan(0); + + // Check structure + for (const provider of providers) { + expect(provider.id).toBeTruthy(); + expect(provider.displayName).toBeTruthy(); + expect(provider.command).toBeTruthy(); + } + }); + + it('includes anthropic', () => { + const providers = getSupportedProviders(); + const anthropic = providers.find(p => p.id === 'anthropic'); + expect(anthropic).toBeTruthy(); + expect(anthropic!.displayName).toBe('Claude'); + }); +}); diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index de318c8d..2253aec3 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -60,6 +60,43 @@ setInterval(() => { } }, 60000); +/** + * Interactive prompt handler configuration + * Defines patterns to detect prompts and responses to send + */ +interface PromptHandler { + /** Pattern to detect in CLI output (case-insensitive) */ + pattern: RegExp; + /** Response to send (e.g., '\r' for enter, 'y\r' for yes+enter) */ + response: string; + /** Delay before sending response (ms) */ + delay?: number; + /** Description for logging/debugging */ + description: string; +} + +/** + * CLI auth configuration for each provider + */ +export interface CLIAuthConfig { + /** CLI command to run */ + command: string; + /** Arguments to pass */ + args: string[]; + /** Pattern to extract auth URL from output */ + urlPattern: RegExp; + /** Path to credentials file (for reading after auth) */ + credentialPath?: string; + /** Display name for UI */ + displayName: string; + /** Interactive prompts to auto-respond to */ + prompts: PromptHandler[]; + /** Success indicators in output */ + successPatterns: RegExp[]; + /** How long to wait for URL to appear (ms) */ + waitTimeout: number; +} + /** * CLI commands and URL patterns for each provider * @@ -70,53 +107,228 @@ setInterval(() => { * for the user to complete OAuth in their browser. We capture the URL and * display it in a popup for the user. */ -const CLI_AUTH_CONFIG: Record = { +export const CLI_AUTH_CONFIG: Record = { anthropic: { - // Claude Code CLI - running without args triggers OAuth if not authenticated command: 'claude', - args: [], // No args needed - CLI auto-prompts for auth - // Generic URL pattern with capture group - Claude outputs auth URL to stdout + args: [], urlPattern: /(https:\/\/[^\s]+)/, credentialPath: '~/.claude/credentials.json', displayName: 'Claude', + waitTimeout: 5000, + prompts: [ + { + pattern: /dark\s*(mode|theme)/i, + response: '\r', // Press enter to accept default + delay: 100, + description: 'Dark mode prompt', + }, + { + pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, + response: '\r', // Press enter for first option (subscription) + delay: 100, + description: 'Auth method prompt', + }, + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', // Yes to trust + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [ + /success/i, + /authenticated/i, + /logged\s*in/i, + ], }, openai: { - // Codex CLI - uses 'login' subcommand command: 'codex', args: ['login'], urlPattern: /(https:\/\/[^\s]+)/, credentialPath: '~/.codex/credentials.json', displayName: 'Codex', + waitTimeout: 3000, + prompts: [ + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [ + /success/i, + /authenticated/i, + /logged\s*in/i, + ], }, google: { - // Gemini CLI command: 'gemini', args: [], urlPattern: /(https:\/\/[^\s]+)/, displayName: 'Gemini', + waitTimeout: 3000, + prompts: [], + successPatterns: [ + /success/i, + /authenticated/i, + ], }, opencode: { - // OpenCode CLI command: 'opencode', args: [], urlPattern: /(https:\/\/[^\s]+)/, displayName: 'OpenCode', + waitTimeout: 3000, + prompts: [], + successPatterns: [ + /success/i, + /authenticated/i, + ], }, droid: { - // Droid CLI command: 'droid', args: [], urlPattern: /(https:\/\/[^\s]+)/, displayName: 'Droid', + waitTimeout: 3000, + prompts: [], + successPatterns: [ + /success/i, + /authenticated/i, + ], }, }; +/** + * Strip ANSI escape codes from text + */ +export function stripAnsiCodes(text: string): string { + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +/** + * Check if text matches any success pattern + */ +export function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { + const cleanText = stripAnsiCodes(text).toLowerCase(); + return patterns.some(p => p.test(cleanText)); +} + +/** + * Find matching prompt handler for given text + */ +export function findMatchingPrompt( + text: string, + prompts: PromptHandler[], + respondedPrompts: Set +): PromptHandler | null { + const cleanText = stripAnsiCodes(text); + + for (const prompt of prompts) { + // Skip if already responded to this prompt type + if (respondedPrompts.has(prompt.description)) continue; + + if (prompt.pattern.test(cleanText)) { + return prompt; + } + } + + return null; +} + +/** + * Validate a provider's CLI auth configuration + * Returns null if valid, or an error message if invalid + */ +export function validateProviderConfig(providerId: string, config: CLIAuthConfig): string | null { + if (!config.command || typeof config.command !== 'string') { + return `${providerId}: missing or invalid 'command'`; + } + + if (!Array.isArray(config.args)) { + return `${providerId}: 'args' must be an array`; + } + + if (!(config.urlPattern instanceof RegExp)) { + return `${providerId}: 'urlPattern' must be a RegExp`; + } + + // Check urlPattern has a capture group + const testUrl = 'https://example.com/test'; + const match = testUrl.match(config.urlPattern); + if (!match || !match[1]) { + return `${providerId}: 'urlPattern' must have a capture group - got ${config.urlPattern}`; + } + + if (!config.displayName || typeof config.displayName !== 'string') { + return `${providerId}: missing or invalid 'displayName'`; + } + + if (typeof config.waitTimeout !== 'number' || config.waitTimeout <= 0) { + return `${providerId}: 'waitTimeout' must be a positive number`; + } + + if (!Array.isArray(config.prompts)) { + return `${providerId}: 'prompts' must be an array`; + } + + for (let i = 0; i < config.prompts.length; i++) { + const prompt = config.prompts[i]; + if (!(prompt.pattern instanceof RegExp)) { + return `${providerId}: prompt[${i}].pattern must be a RegExp`; + } + if (typeof prompt.response !== 'string') { + return `${providerId}: prompt[${i}].response must be a string`; + } + if (!prompt.description || typeof prompt.description !== 'string') { + return `${providerId}: prompt[${i}].description must be a non-empty string`; + } + } + + if (!Array.isArray(config.successPatterns)) { + return `${providerId}: 'successPatterns' must be an array`; + } + + for (let i = 0; i < config.successPatterns.length; i++) { + if (!(config.successPatterns[i] instanceof RegExp)) { + return `${providerId}: successPatterns[${i}] must be a RegExp`; + } + } + + return null; +} + +/** + * Validate all provider configurations + * Throws an error if any provider is invalid + */ +export function validateAllProviderConfigs(): void { + const errors: string[] = []; + + for (const [providerId, config] of Object.entries(CLI_AUTH_CONFIG)) { + const error = validateProviderConfig(providerId, config); + if (error) { + errors.push(error); + } + } + + if (errors.length > 0) { + throw new Error(`Invalid provider configurations:\n${errors.join('\n')}`); + } +} + +/** + * Get list of supported providers for CLI auth + */ +export function getSupportedProviders(): { id: string; displayName: string; command: string }[] { + return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ + id, + displayName: config.displayName, + command: config.command, + })); +} + /** * POST /api/onboarding/cli/:provider/start * Start CLI-based auth - spawns the CLI and captures auth URL @@ -158,50 +370,37 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response session.process = proc; // Track which prompts we've already responded to - let respondedDarkMode = false; - let respondedAuthChoice = false; + const respondedPrompts = new Set(); // Capture PTY output for auth URL and handle interactive prompts proc.onData((data: string) => { session.output += data; - // Strip ANSI escape codes for pattern matching - const cleanText = data.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); - const lowerText = cleanText.toLowerCase(); - - // Handle Claude's interactive setup prompts - if (provider === 'anthropic') { - // Dark mode prompt - just press enter to accept default (or 'n' for no) - if (!respondedDarkMode && (lowerText.includes('dark mode') || lowerText.includes('dark theme'))) { - respondedDarkMode = true; - // Press enter to accept default - setTimeout(() => proc.write('\r'), 100); - } - - // Auth method prompt - choose subscription (press enter or '1') - // Claude asks: "Would you like to use your Claude subscription or an API key?" - if (!respondedAuthChoice && ( - lowerText.includes('subscription') || - lowerText.includes('api key') || - lowerText.includes('how would you like to authenticate') - )) { - respondedAuthChoice = true; - // Press enter to select first option (Claude subscription) - setTimeout(() => proc.write('\r'), 100); - } + // Check for matching prompts and auto-respond + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + const delay = matchingPrompt.delay ?? 100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + console.log(`[onboarding] Auto-responded to: ${matchingPrompt.description}`); + } catch { + // Process may have exited + } + }, delay); } // Look for auth URL + const cleanText = stripAnsiCodes(data); const match = cleanText.match(config.urlPattern); if (match && match[1]) { session.authUrl = match[1]; session.status = 'waiting_auth'; } - // Look for success indicators - if (lowerText.includes('success') || - lowerText.includes('authenticated') || - lowerText.includes('logged in')) { + // Check for success indicators + if (matchesSuccessPattern(data, config.successPatterns)) { session.status = 'success'; } }); @@ -217,10 +416,8 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response } }); - // Wait for URL to appear - longer timeout for Claude's multi-step setup - // Claude asks: dark mode? -> subscription vs API key? -> shows login URL - const waitTime = provider === 'anthropic' ? 5000 : 2000; - await new Promise(resolve => setTimeout(resolve, waitTime)); + // Wait for URL to appear using provider-specific timeout + await new Promise(resolve => setTimeout(resolve, config.waitTimeout)); // Return session info based on current state if (session.status === 'success') { From 8b06c9c10a63a3e60550ba7ba37784757a7c6594 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 07:57:08 +0000 Subject: [PATCH 018/103] Add Docker-based CI tests for CLI OAuth flow - Create Dockerfile with mock CLI symlinks for all providers - Add ci-test-runner.ts with structured JSON output - Add GitHub Actions workflow with: - Unit tests on every push to onboarding.ts - Docker integration tests - Weekly scheduled runs to catch provider changes - Auto-issue creation on scheduled test failures - Update mock-cli.sh to auto-detect provider from $0 - Update README with CI documentation The CI tests ensure: - URL extraction works for each provider - Prompt detection and auto-response works - Tests are repeatable and provider-agnostic --- .github/workflows/cli-oauth-test.yml | 115 +++++++++++ scripts/test-cli-auth/Dockerfile | 44 ++++ scripts/test-cli-auth/README.md | 58 +++++- scripts/test-cli-auth/ci-test-runner.ts | 263 ++++++++++++++++++++++++ scripts/test-cli-auth/mock-cli.sh | 29 ++- scripts/test-cli-auth/package.json | 14 ++ 6 files changed, 519 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/cli-oauth-test.yml create mode 100644 scripts/test-cli-auth/Dockerfile create mode 100644 scripts/test-cli-auth/ci-test-runner.ts create mode 100644 scripts/test-cli-auth/package.json diff --git a/.github/workflows/cli-oauth-test.yml b/.github/workflows/cli-oauth-test.yml new file mode 100644 index 00000000..f974e560 --- /dev/null +++ b/.github/workflows/cli-oauth-test.yml @@ -0,0 +1,115 @@ +name: CLI OAuth Flow Tests + +on: + push: + paths: + - 'src/cloud/api/onboarding.ts' + - 'scripts/test-cli-auth/**' + - '.github/workflows/cli-oauth-test.yml' + pull_request: + paths: + - 'src/cloud/api/onboarding.ts' + - 'scripts/test-cli-auth/**' + # Allow manual trigger + workflow_dispatch: + # Run weekly to catch provider CLI changes + schedule: + - cron: '0 0 * * 0' # Every Sunday at midnight + +jobs: + unit-tests: + name: Unit Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + + - name: Install dependencies + run: npm ci + + - name: Run onboarding unit tests + run: npx vitest run src/cloud/api/onboarding.test.ts + + docker-integration: + name: Docker Integration Tests + runs-on: ubuntu-latest + needs: unit-tests + steps: + - uses: actions/checkout@v4 + + - name: Build test container + run: | + docker build -t cli-oauth-test scripts/test-cli-auth/ + + - name: Run CLI OAuth tests + id: test + run: | + docker run --rm \ + -v ${{ github.workspace }}/test-results:/tmp \ + cli-oauth-test + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: cli-oauth-test-results + path: test-results/cli-oauth-test-results.json + if-no-files-found: ignore + + - name: Parse and display results + if: always() + run: | + if [ -f test-results/cli-oauth-test-results.json ]; then + echo "### CLI OAuth Test Results" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "| Provider | Status | URL Found | Duration |" >> $GITHUB_STEP_SUMMARY + echo "|----------|--------|-----------|----------|" >> $GITHUB_STEP_SUMMARY + + cat test-results/cli-oauth-test-results.json | \ + jq -r '.results[] | "| \(.provider) | \(if .passed then "โœ…" else "โŒ" end) | \(if .urlExtracted then "Yes" else "No" end) | \(.duration)ms |"' \ + >> $GITHUB_STEP_SUMMARY + + echo "" >> $GITHUB_STEP_SUMMARY + echo "**Summary:** $(cat test-results/cli-oauth-test-results.json | jq -r '.summary | "\(.passed)/\(.total) passed"')" >> $GITHUB_STEP_SUMMARY + fi + + notify-on-failure: + name: Notify on Failure + runs-on: ubuntu-latest + needs: [unit-tests, docker-integration] + if: failure() && github.event_name == 'schedule' + steps: + - name: Create issue for CI failure + uses: actions/github-script@v7 + with: + script: | + const title = `CLI OAuth Tests Failed - ${new Date().toISOString().split('T')[0]}`; + const body = ` + ## CLI OAuth Integration Tests Failed + + The scheduled CLI OAuth tests have failed. This may indicate: + - A provider has updated their CLI and changed the OAuth flow + - Prompt patterns need to be updated + - URL extraction patterns need adjustment + + ### Action Required + 1. Check the [workflow run](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) + 2. Update \`CLI_AUTH_CONFIG\` in \`src/cloud/api/onboarding.ts\` if needed + 3. Update mock CLI behavior in \`scripts/test-cli-auth/mock-cli.sh\` + 4. Re-run tests to verify fixes + + /cc @${context.actor} + `; + + await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: title, + body: body, + labels: ['bug', 'cli-oauth', 'automated'] + }); diff --git a/scripts/test-cli-auth/Dockerfile b/scripts/test-cli-auth/Dockerfile new file mode 100644 index 00000000..021a1186 --- /dev/null +++ b/scripts/test-cli-auth/Dockerfile @@ -0,0 +1,44 @@ +# CLI OAuth Flow Test Container +# +# This container simulates the AI provider CLIs for testing +# the OAuth URL capture flow without actual provider accounts. +# +# Usage: +# docker build -t cli-oauth-test scripts/test-cli-auth/ +# docker run --rm cli-oauth-test +# +# For interactive testing: +# docker run --rm -it cli-oauth-test bash +# claude # Run mock Claude CLI +# codex login # Run mock Codex CLI + +FROM node:20-slim + +# Install dependencies +RUN apt-get update && apt-get install -y \ + bash \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Copy mock CLI script +COPY mock-cli.sh /usr/local/bin/mock-cli +RUN chmod +x /usr/local/bin/mock-cli + +# Create symlinks for each provider CLI +# The mock-cli.sh auto-detects the provider from $0 +RUN ln -s /usr/local/bin/mock-cli /usr/local/bin/claude && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/codex && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/gemini && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/opencode && \ + ln -s /usr/local/bin/mock-cli /usr/local/bin/droid + +# Copy test files +COPY ci-test-runner.ts /app/ +COPY package.json /app/ + +# Install test dependencies +RUN npm install + +# Default command runs the CI tests +CMD ["npx", "tsx", "/app/ci-test-runner.ts"] diff --git a/scripts/test-cli-auth/README.md b/scripts/test-cli-auth/README.md index 4ef961b6..8f001ebe 100644 --- a/scripts/test-cli-auth/README.md +++ b/scripts/test-cli-auth/README.md @@ -208,8 +208,64 @@ For testing with actual CLIs (not mocks), you can: - Check for typos in pattern (e.g., `logged in` vs `loggedin`) - Add new patterns if CLI uses different success messages +## CI Integration + +### GitHub Actions + +The workflow `.github/workflows/cli-oauth-test.yml` runs: + +1. **On every push/PR** that modifies: + - `src/cloud/api/onboarding.ts` + - `scripts/test-cli-auth/**` + +2. **Weekly schedule** (Sundays at midnight): + - Catches provider CLI changes early + - Auto-creates GitHub issues on failure + +### Running CI Tests Locally + +```bash +# Build the test container +docker build -t cli-oauth-test scripts/test-cli-auth/ + +# Run all tests +docker run --rm cli-oauth-test + +# Run with results output +docker run --rm -v $(pwd)/test-results:/tmp cli-oauth-test +cat test-results/cli-oauth-test-results.json +``` + +### Test Output Format + +```json +{ + "timestamp": "2024-01-15T10:30:00.000Z", + "results": [ + { + "provider": "anthropic", + "command": "claude", + "passed": true, + "urlExtracted": "https://console.anthropic.com/oauth/...", + "urlValid": true, + "promptsHandled": 3, + "exitCode": 0, + "duration": 1234 + } + ], + "summary": { + "total": 5, + "passed": 5, + "failed": 0 + } +} +``` + ## Files - `mock-cli.sh` - Simulates CLI interactive flows for testing -- `test-oauth-flow.ts` - Integration test runner +- `ci-test-runner.ts` - Docker-based CI test runner +- `test-oauth-flow.ts` - Local integration test runner +- `Dockerfile` - Test container definition +- `package.json` - Test dependencies - `README.md` - This documentation diff --git a/scripts/test-cli-auth/ci-test-runner.ts b/scripts/test-cli-auth/ci-test-runner.ts new file mode 100644 index 00000000..9b9b3563 --- /dev/null +++ b/scripts/test-cli-auth/ci-test-runner.ts @@ -0,0 +1,263 @@ +#!/usr/bin/env npx tsx +/** + * CI Test Runner for CLI OAuth Flow + * + * This script runs in a Docker container and tests each provider's + * CLI OAuth flow to ensure URL extraction works correctly. + * + * Exit codes: + * 0 - All tests passed + * 1 - One or more tests failed + * + * Output format (JSON): + * { "results": [...], "summary": { "passed": N, "failed": N } } + */ + +import * as pty from 'node-pty'; +import { writeFileSync } from 'fs'; + +// Provider configurations - must match CLI_AUTH_CONFIG in onboarding.ts +const PROVIDERS = { + anthropic: { + command: 'claude', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://console.anthropic.com', + prompts: [ + { pattern: /dark\s*(mode|theme)/i, response: '\r' }, + { pattern: /(subscription|api\s*key)/i, response: '\r' }, + { pattern: /trust/i, response: 'y\r' }, + ], + }, + openai: { + command: 'codex', + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://auth.openai.com', + prompts: [ + { pattern: /trust/i, response: 'y\r' }, + ], + }, + google: { + command: 'gemini', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://accounts.google.com', + prompts: [], + }, + opencode: { + command: 'opencode', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://opencode.ai', + prompts: [], + }, + droid: { + command: 'droid', + args: [] as string[], + urlPattern: /(https:\/\/[^\s]+)/, + expectedUrlPrefix: 'https://factory.ai', + prompts: [], + }, +}; + +interface TestResult { + provider: string; + command: string; + passed: boolean; + urlExtracted: string | null; + urlValid: boolean; + promptsHandled: number; + exitCode: number | null; + duration: number; + output: string; + error?: string; +} + +function stripAnsi(text: string): string { + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +async function testProvider(providerId: string): Promise { + const config = PROVIDERS[providerId as keyof typeof PROVIDERS]; + if (!config) { + return { + provider: providerId, + command: 'unknown', + passed: false, + urlExtracted: null, + urlValid: false, + promptsHandled: 0, + exitCode: null, + duration: 0, + output: '', + error: `Unknown provider: ${providerId}`, + }; + } + + const startTime = Date.now(); + const result: TestResult = { + provider: providerId, + command: `${config.command} ${config.args.join(' ')}`.trim(), + passed: false, + urlExtracted: null, + urlValid: false, + promptsHandled: 0, + exitCode: null, + duration: 0, + output: '', + }; + + return new Promise((resolve) => { + const respondedPrompts = new Set(); + + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + env: { ...process.env, TERM: 'xterm-256color', NO_COLOR: '1' }, + }); + + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for CLI'; + result.duration = Date.now() - startTime; + resolve(result); + }, 15000); + + proc.onData((data: string) => { + result.output += data; + const cleanText = stripAnsi(data); + + // Check for prompts and respond + for (let i = 0; i < config.prompts.length; i++) { + if (respondedPrompts.has(i)) continue; + if (config.prompts[i].pattern.test(cleanText)) { + respondedPrompts.add(i); + result.promptsHandled++; + setTimeout(() => { + try { + proc.write(config.prompts[i].response); + } catch { + // Process may have exited + } + }, 100); + } + } + + // Check for URL + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.urlExtracted) { + result.urlExtracted = match[1]; + result.urlValid = result.urlExtracted.startsWith(config.expectedUrlPrefix); + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + result.exitCode = exitCode; + result.duration = Date.now() - startTime; + + // Determine pass/fail + result.passed = !!( + result.urlExtracted && + result.urlValid && + exitCode === 0 + ); + + resolve(result); + }); + + // Send signal to continue after prompts are done + setTimeout(() => { + try { + proc.write('\n'); + } catch { + // Ignore + } + }, 5000); + } catch (err) { + result.error = err instanceof Error ? err.message : 'Unknown error'; + result.duration = Date.now() - startTime; + resolve(result); + } + }); +} + +async function runAllTests() { + console.log('โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—'); + console.log('โ•‘ CLI OAuth Flow CI Tests โ•‘'); + console.log('โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + console.log(''); + + const results: TestResult[] = []; + + for (const providerId of Object.keys(PROVIDERS)) { + process.stdout.write(`Testing ${providerId}... `); + const result = await testProvider(providerId); + results.push(result); + + if (result.passed) { + console.log('โœ… PASSED'); + } else { + console.log(`โŒ FAILED${result.error ? `: ${result.error}` : ''}`); + } + + // Detailed output + console.log(` Command: ${result.command}`); + console.log(` URL: ${result.urlExtracted || 'NOT FOUND'}`); + console.log(` Valid: ${result.urlValid ? 'Yes' : 'No'}`); + console.log(` Prompts: ${result.promptsHandled}/${PROVIDERS[providerId as keyof typeof PROVIDERS].prompts.length}`); + console.log(` Exit: ${result.exitCode}`); + console.log(` Duration: ${result.duration}ms`); + console.log(''); + } + + // Summary + const passed = results.filter(r => r.passed).length; + const failed = results.filter(r => !r.passed).length; + + console.log('โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + console.log(`Summary: ${passed} passed, ${failed} failed out of ${results.length} tests`); + console.log('โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + + // Write JSON results for CI parsing + const jsonResults = { + timestamp: new Date().toISOString(), + results: results.map(r => ({ + provider: r.provider, + command: r.command, + passed: r.passed, + urlExtracted: r.urlExtracted, + urlValid: r.urlValid, + promptsHandled: r.promptsHandled, + exitCode: r.exitCode, + duration: r.duration, + error: r.error, + })), + summary: { + total: results.length, + passed, + failed, + }, + }; + + // Write to file for CI artifact + try { + writeFileSync('/tmp/cli-oauth-test-results.json', JSON.stringify(jsonResults, null, 2)); + console.log('\nResults written to /tmp/cli-oauth-test-results.json'); + } catch { + // Might not have write access, output to stdout instead + console.log('\n--- JSON Results ---'); + console.log(JSON.stringify(jsonResults, null, 2)); + } + + // Exit with appropriate code + process.exit(failed > 0 ? 1 : 0); +} + +runAllTests().catch((err) => { + console.error('Test runner failed:', err); + process.exit(1); +}); diff --git a/scripts/test-cli-auth/mock-cli.sh b/scripts/test-cli-auth/mock-cli.sh index ae66ed34..c5c899f6 100755 --- a/scripts/test-cli-auth/mock-cli.sh +++ b/scripts/test-cli-auth/mock-cli.sh @@ -1,12 +1,35 @@ #!/bin/bash # Mock CLI for testing OAuth flow prompt handling -# Usage: ./mock-cli.sh +# Usage: ./mock-cli.sh [delay] # # This script simulates the interactive prompts of various AI CLI tools # for testing the onboarding OAuth flow without actual CLI binaries. +# +# When installed as symlinks (e.g., /usr/local/bin/claude -> mock-cli.sh), +# it auto-detects the provider from the command name. + +# Detect provider from how the script was called +SCRIPT_NAME=$(basename "$0") + +case "$SCRIPT_NAME" in + claude) PROVIDER="claude" ;; + codex) PROVIDER="codex" ;; + gemini) PROVIDER="gemini" ;; + opencode) PROVIDER="opencode" ;; + droid) PROVIDER="droid" ;; + mock-cli.sh|mock-cli|mock-cli-impl.sh) + PROVIDER="${1:-claude}" + shift 2>/dev/null || true + ;; + *) PROVIDER="${1:-claude}" ;; +esac + +# Handle 'login' subcommand for codex +if [ "$PROVIDER" = "codex" ] && [ "$1" = "login" ]; then + shift +fi -PROVIDER="${1:-claude}" -DELAY="${2:-0.5}" +DELAY="${1:-0.3}" # Colors for output GREEN='\033[0;32m' diff --git a/scripts/test-cli-auth/package.json b/scripts/test-cli-auth/package.json new file mode 100644 index 00000000..7d5296d2 --- /dev/null +++ b/scripts/test-cli-auth/package.json @@ -0,0 +1,14 @@ +{ + "name": "cli-oauth-test", + "version": "1.0.0", + "description": "CLI OAuth flow testing for agent-relay", + "type": "module", + "scripts": { + "test": "tsx ci-test-runner.ts", + "test:local": "tsx test-oauth-flow.ts" + }, + "dependencies": { + "node-pty": "^1.0.0", + "tsx": "^4.19.0" + } +} From 70b058451b1e515073edf59af15aa8b16d22211e Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:00:13 +0000 Subject: [PATCH 019/103] Use real CLIs instead of mocks for OAuth flow testing - Add Dockerfile.real that installs actual provider CLIs - Add ci-test-real-clis.ts that tests against real CLI binaries - Update GitHub Actions workflow to use real CLIs - Real CLIs catch actual behavior changes immediately - Skips CLIs that aren't installed (doesn't fail) - Imports patterns from onboarding.ts to ensure consistency Benefits over mocks: - No maintenance of mock scripts - Tests actual user experience - Catches new prompts or output changes - Detects CLI updates automatically --- .github/workflows/cli-oauth-test.yml | 16 +- scripts/test-cli-auth/Dockerfile.real | 56 ++++ scripts/test-cli-auth/README.md | 25 +- scripts/test-cli-auth/ci-test-real-clis.ts | 309 +++++++++++++++++++++ 4 files changed, 394 insertions(+), 12 deletions(-) create mode 100644 scripts/test-cli-auth/Dockerfile.real create mode 100644 scripts/test-cli-auth/ci-test-real-clis.ts diff --git a/.github/workflows/cli-oauth-test.yml b/.github/workflows/cli-oauth-test.yml index f974e560..7ea62f76 100644 --- a/.github/workflows/cli-oauth-test.yml +++ b/.github/workflows/cli-oauth-test.yml @@ -35,23 +35,25 @@ jobs: - name: Run onboarding unit tests run: npx vitest run src/cloud/api/onboarding.test.ts - docker-integration: - name: Docker Integration Tests + real-cli-tests: + name: Real CLI Integration Tests runs-on: ubuntu-latest needs: unit-tests steps: - uses: actions/checkout@v4 - - name: Build test container + - name: Build test container with real CLIs run: | - docker build -t cli-oauth-test scripts/test-cli-auth/ + docker build -f scripts/test-cli-auth/Dockerfile.real \ + -t cli-oauth-test-real scripts/test-cli-auth/ - - name: Run CLI OAuth tests + - name: Run CLI OAuth tests against real CLIs id: test run: | + mkdir -p test-results docker run --rm \ -v ${{ github.workspace }}/test-results:/tmp \ - cli-oauth-test + cli-oauth-test-real - name: Upload test results if: always() @@ -81,7 +83,7 @@ jobs: notify-on-failure: name: Notify on Failure runs-on: ubuntu-latest - needs: [unit-tests, docker-integration] + needs: [unit-tests, real-cli-tests] if: failure() && github.event_name == 'schedule' steps: - name: Create issue for CI failure diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real new file mode 100644 index 00000000..f2b7e6ee --- /dev/null +++ b/scripts/test-cli-auth/Dockerfile.real @@ -0,0 +1,56 @@ +# CLI OAuth Flow Test Container - Real CLIs +# +# This container installs the actual AI provider CLIs and tests +# URL extraction from their OAuth flows. +# +# Usage: +# docker build -f Dockerfile.real -t cli-oauth-test-real scripts/test-cli-auth/ +# docker run --rm cli-oauth-test-real +# +# For interactive testing: +# docker run --rm -it cli-oauth-test-real bash + +FROM node:20-slim + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + bash \ + curl \ + git \ + python3 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Install Claude Code CLI +RUN npm install -g @anthropic-ai/claude-code || echo "Claude Code install skipped" + +# Install Codex CLI (OpenAI) +RUN npm install -g @openai/codex || echo "Codex install skipped" + +# Install Gemini CLI (if available) +RUN npm install -g @google/gemini-cli 2>/dev/null || echo "Gemini CLI not available yet" + +# Install OpenCode CLI (if available) +RUN npm install -g opencode 2>/dev/null || echo "OpenCode CLI not available" + +# Install Droid CLI (if available) +RUN npm install -g @factory/droid 2>/dev/null || echo "Droid CLI not available" + +# Copy test files +COPY ci-test-real-clis.ts /app/ +COPY package.json /app/ + +# Install test dependencies +RUN npm install + +# Verify which CLIs are installed +RUN echo "=== Installed CLIs ===" && \ + which claude && claude --version || echo "claude: not found" && \ + which codex && codex --version || echo "codex: not found" && \ + which gemini || echo "gemini: not found" && \ + which opencode || echo "opencode: not found" && \ + which droid || echo "droid: not found" + +# Default command runs the CI tests +CMD ["npx", "tsx", "/app/ci-test-real-clis.ts"] diff --git a/scripts/test-cli-auth/README.md b/scripts/test-cli-auth/README.md index 8f001ebe..4323d370 100644 --- a/scripts/test-cli-auth/README.md +++ b/scripts/test-cli-auth/README.md @@ -225,17 +225,32 @@ The workflow `.github/workflows/cli-oauth-test.yml` runs: ### Running CI Tests Locally ```bash -# Build the test container -docker build -t cli-oauth-test scripts/test-cli-auth/ +# Build the test container with REAL CLIs (recommended) +docker build -f scripts/test-cli-auth/Dockerfile.real \ + -t cli-oauth-test-real scripts/test-cli-auth/ -# Run all tests -docker run --rm cli-oauth-test +# Run tests against real CLIs +docker run --rm cli-oauth-test-real # Run with results output -docker run --rm -v $(pwd)/test-results:/tmp cli-oauth-test +docker run --rm -v $(pwd)/test-results:/tmp cli-oauth-test-real cat test-results/cli-oauth-test-results.json + +# Interactive debugging +docker run --rm -it cli-oauth-test-real bash +claude # Test Claude CLI manually ``` +### Why Real CLIs? + +Using the actual CLIs instead of mocks: +- **Catches real changes** in CLI behavior immediately +- **No maintenance burden** of keeping mocks in sync +- **Tests the actual code path** users will experience +- **Detects new prompts** or changed output formats + +CLIs that aren't installed are skipped (not failed), so tests work even if some providers haven't published CLIs yet. + ### Test Output Format ```json diff --git a/scripts/test-cli-auth/ci-test-real-clis.ts b/scripts/test-cli-auth/ci-test-real-clis.ts new file mode 100644 index 00000000..8fd00720 --- /dev/null +++ b/scripts/test-cli-auth/ci-test-real-clis.ts @@ -0,0 +1,309 @@ +#!/usr/bin/env npx tsx +/** + * CI Test Runner for Real CLI OAuth Flows + * + * Tests the actual CLI tools to verify: + * 1. URL extraction patterns work with real CLI output + * 2. Prompt detection works with real prompts + * 3. Auto-responses navigate through the flow correctly + * + * Exit codes: + * 0 - All available CLIs passed + * 1 - One or more tests failed + * + * Note: CLIs that aren't installed are skipped, not failed. + */ + +import * as pty from 'node-pty'; +import { execSync } from 'child_process'; +import { writeFileSync } from 'fs'; + +// Import the actual config from onboarding.ts +// This ensures tests use the same patterns as production +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + findMatchingPrompt, + matchesSuccessPattern, +} from '../../src/cloud/api/onboarding.js'; + +interface TestResult { + provider: string; + command: string; + installed: boolean; + passed: boolean; + skipped: boolean; + urlExtracted: string | null; + promptsDetected: string[]; + promptsResponded: string[]; + successDetected: boolean; + exitCode: number | null; + duration: number; + rawOutput: string; + error?: string; +} + +/** + * Check if a CLI is installed + */ +function isCliInstalled(command: string): boolean { + try { + execSync(`which ${command}`, { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * Test a real CLI's OAuth flow + */ +async function testRealCli(providerId: string): Promise { + const config = CLI_AUTH_CONFIG[providerId]; + if (!config) { + return { + provider: providerId, + command: 'unknown', + installed: false, + passed: false, + skipped: true, + urlExtracted: null, + promptsDetected: [], + promptsResponded: [], + successDetected: false, + exitCode: null, + duration: 0, + rawOutput: '', + error: `Unknown provider: ${providerId}`, + }; + } + + const result: TestResult = { + provider: providerId, + command: `${config.command} ${config.args.join(' ')}`.trim(), + installed: isCliInstalled(config.command), + passed: false, + skipped: false, + urlExtracted: null, + promptsDetected: [], + promptsResponded: [], + successDetected: false, + exitCode: null, + duration: 0, + rawOutput: '', + }; + + // Skip if CLI not installed + if (!result.installed) { + result.skipped = true; + result.error = `CLI '${config.command}' not installed`; + return result; + } + + const startTime = Date.now(); + const respondedPrompts = new Set(); + + return new Promise((resolve) => { + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + env: { + ...process.env, + TERM: 'xterm-256color', + NO_COLOR: '1', + // Ensure CLIs don't try to open browsers + BROWSER: 'echo', + DISPLAY: '', + }, + }); + + // Timeout after configured wait time + buffer + const timeout = setTimeout(() => { + proc.kill(); + result.duration = Date.now() - startTime; + + // Even if we timeout, check if we got a URL + if (result.urlExtracted) { + result.passed = true; + } else { + result.error = 'Timeout waiting for auth URL'; + } + + resolve(result); + }, config.waitTimeout + 10000); // Extra buffer for real CLIs + + proc.onData((data: string) => { + result.rawOutput += data; + const cleanText = stripAnsiCodes(data); + + // Check for prompts + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + result.promptsDetected.push(matchingPrompt.description); + + // Respond to prompt + respondedPrompts.add(matchingPrompt.description); + result.promptsResponded.push(matchingPrompt.description); + + const delay = matchingPrompt.delay ?? 100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + console.log(` [${providerId}] Responded to: ${matchingPrompt.description}`); + } catch { + // Process may have exited + } + }, delay); + } + + // Check for URL + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.urlExtracted) { + result.urlExtracted = match[1]; + console.log(` [${providerId}] URL found: ${result.urlExtracted.substring(0, 60)}...`); + } + + // Check for success + if (matchesSuccessPattern(data, config.successPatterns)) { + result.successDetected = true; + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + result.exitCode = exitCode; + result.duration = Date.now() - startTime; + + // Pass if we got a URL (main goal of OAuth flow) + // Success detection is secondary since we won't complete auth + result.passed = !!result.urlExtracted; + + if (!result.passed && !result.error) { + result.error = 'Failed to extract auth URL from CLI output'; + } + + resolve(result); + }); + + } catch (err) { + result.error = err instanceof Error ? err.message : 'Unknown error'; + result.duration = Date.now() - startTime; + resolve(result); + } + }); +} + +/** + * Run tests for all configured providers + */ +async function runAllTests() { + console.log('โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—'); + console.log('โ•‘ CLI OAuth Flow Tests - Real CLIs โ•‘'); + console.log('โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + console.log(''); + + const results: TestResult[] = []; + const providerIds = Object.keys(CLI_AUTH_CONFIG); + + for (const providerId of providerIds) { + const config = CLI_AUTH_CONFIG[providerId]; + console.log(`Testing ${config.displayName} (${providerId})...`); + + const result = await testRealCli(providerId); + results.push(result); + + if (result.skipped) { + console.log(` โญ๏ธ SKIPPED: ${result.error}`); + } else if (result.passed) { + console.log(` โœ… PASSED`); + } else { + console.log(` โŒ FAILED: ${result.error}`); + } + + console.log(` Installed: ${result.installed ? 'Yes' : 'No'}`); + if (!result.skipped) { + console.log(` URL: ${result.urlExtracted ? 'Extracted' : 'NOT FOUND'}`); + console.log(` Prompts: ${result.promptsResponded.length}/${config.prompts.length} handled`); + console.log(` Duration: ${result.duration}ms`); + } + console.log(''); + } + + // Summary + const installed = results.filter(r => r.installed); + const skipped = results.filter(r => r.skipped); + const passed = results.filter(r => r.passed); + const failed = results.filter(r => !r.passed && !r.skipped); + + console.log('โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + console.log(`Summary:`); + console.log(` Installed: ${installed.length}/${results.length}`); + console.log(` Passed: ${passed.length}`); + console.log(` Failed: ${failed.length}`); + console.log(` Skipped: ${skipped.length}`); + console.log('โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•'); + + // Write JSON results + const jsonResults = { + timestamp: new Date().toISOString(), + results: results.map(r => ({ + provider: r.provider, + command: r.command, + installed: r.installed, + passed: r.passed, + skipped: r.skipped, + urlExtracted: r.urlExtracted ? true : false, + urlSample: r.urlExtracted?.substring(0, 80), + promptsDetected: r.promptsDetected, + promptsResponded: r.promptsResponded, + exitCode: r.exitCode, + duration: r.duration, + error: r.error, + })), + summary: { + total: results.length, + installed: installed.length, + passed: passed.length, + failed: failed.length, + skipped: skipped.length, + }, + }; + + try { + writeFileSync('/tmp/cli-oauth-test-results.json', JSON.stringify(jsonResults, null, 2)); + console.log('\nResults written to /tmp/cli-oauth-test-results.json'); + } catch { + console.log('\n--- JSON Results ---'); + console.log(JSON.stringify(jsonResults, null, 2)); + } + + // Exit with failure only if an installed CLI failed + // Skipped CLIs don't count as failures + if (failed.length > 0) { + console.log('\nFailed CLIs:'); + for (const result of failed) { + console.log(` - ${result.provider}: ${result.error}`); + if (result.rawOutput) { + console.log(` Last 500 chars of output:`); + console.log(` ${result.rawOutput.slice(-500).replace(/\n/g, '\n ')}`); + } + } + process.exit(1); + } + + // Warn if no CLIs were tested + if (installed.length === 0) { + console.log('\nโš ๏ธ WARNING: No CLIs were installed - no actual testing performed!'); + process.exit(0); // Don't fail, but warn + } + + console.log('\nโœ… All installed CLIs passed!'); + process.exit(0); +} + +runAllTests().catch((err) => { + console.error('Test runner failed:', err); + process.exit(1); +}); From e5b2463149e783eda065f0b88a9d31682b8de543 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:01:35 +0000 Subject: [PATCH 020/103] Use same CLI installation methods as workspace Dockerfile - Match deploy/workspace/Dockerfile installation approach - Claude: curl install script + pre-seed config - Codex: npm install -g as root - OpenCode/Droid: curl install scripts - Gemini: npm with fallback to curl - Create testuser for CLI installs (like workspace user) - Add ~/.local/bin to PATH for user-installed CLIs --- scripts/test-cli-auth/Dockerfile.real | 55 ++++++++++++++++++--------- 1 file changed, 37 insertions(+), 18 deletions(-) diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index f2b7e6ee..456bdcbc 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -3,6 +3,8 @@ # This container installs the actual AI provider CLIs and tests # URL extraction from their OAuth flows. # +# Installation methods match deploy/workspace/Dockerfile to ensure consistency. +# # Usage: # docker build -f Dockerfile.real -t cli-oauth-test-real scripts/test-cli-auth/ # docker run --rm cli-oauth-test-real @@ -12,45 +14,62 @@ FROM node:20-slim -# Install system dependencies +# Install system dependencies (matches deploy/workspace/Dockerfile) RUN apt-get update && apt-get install -y \ bash \ + ca-certificates \ curl \ git \ python3 \ + jq \ && rm -rf /var/lib/apt/lists/* WORKDIR /app -# Install Claude Code CLI -RUN npm install -g @anthropic-ai/claude-code || echo "Claude Code install skipped" +# Install Codex globally as root (matches workspace Dockerfile) +RUN npm install -g @openai/codex || echo "Codex install failed" + +# Create test user (CLIs install to ~/.local/bin) +RUN useradd -m -u 1001 testuser +RUN chown -R testuser:testuser /app +USER testuser + +# Install AI CLIs as testuser (matches workspace Dockerfile installation methods) + +# Claude - uses official install script +RUN curl -fsSL https://claude.ai/install.sh | bash || echo "Claude install failed" +# Pre-seed Claude config to skip interactive onboarding +RUN mkdir -p /home/testuser/.claude && \ + echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/testuser/.claude/settings.local.json -# Install Codex CLI (OpenAI) -RUN npm install -g @openai/codex || echo "Codex install skipped" +# OpenCode - uses official install script +RUN curl -fsSL https://opencode.ai/install | bash || echo "OpenCode install failed" -# Install Gemini CLI (if available) -RUN npm install -g @google/gemini-cli 2>/dev/null || echo "Gemini CLI not available yet" +# Droid - uses official install script +RUN curl -fsSL https://app.factory.ai/cli | sh || echo "Droid install failed" -# Install OpenCode CLI (if available) -RUN npm install -g opencode 2>/dev/null || echo "OpenCode CLI not available" +# Gemini - try npm first, fall back to install script +RUN npm install -g @anthropic-ai/gemini-cli 2>/dev/null || \ + curl -fsSL https://gemini.google.com/cli/install.sh | bash || \ + echo "Gemini install failed" -# Install Droid CLI (if available) -RUN npm install -g @factory/droid 2>/dev/null || echo "Droid CLI not available" +# Add user's local bin to PATH +ENV PATH="/home/testuser/.local/bin:$PATH" # Copy test files -COPY ci-test-real-clis.ts /app/ -COPY package.json /app/ +COPY --chown=testuser:testuser ci-test-real-clis.ts /app/ +COPY --chown=testuser:testuser package.json /app/ # Install test dependencies RUN npm install # Verify which CLIs are installed RUN echo "=== Installed CLIs ===" && \ - which claude && claude --version || echo "claude: not found" && \ - which codex && codex --version || echo "codex: not found" && \ - which gemini || echo "gemini: not found" && \ - which opencode || echo "opencode: not found" && \ - which droid || echo "droid: not found" + (which claude && claude --version 2>&1 | head -1) || echo "claude: not found" && \ + (which codex && codex --version 2>&1 | head -1) || echo "codex: not found" && \ + (which gemini && gemini --version 2>&1 | head -1) || echo "gemini: not found" && \ + (which opencode && opencode --version 2>&1 | head -1) || echo "opencode: not found" && \ + (which droid && droid --version 2>&1 | head -1) || echo "droid: not found" # Default command runs the CI tests CMD ["npx", "tsx", "/app/ci-test-real-clis.ts"] From e0b2c3013cc41c2dc762edf174e4aa8a2796074f Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:06:06 +0000 Subject: [PATCH 021/103] Refactor CLI OAuth to share PTY runner between production and tests Extract runCLIAuthViaPTY as a reusable function that handles: - PTY spawning with proper TTY emulation - Auto-responding to interactive prompts - Auth URL extraction from CLI output - Success pattern detection - Timeout handling Both the production API endpoint and CI tests now use the same PTY runner function, ensuring consistent behavior and eliminating code duplication. --- scripts/test-cli-auth/ci-test-real-clis.ts | 129 ++++-------- src/cloud/api/onboarding.ts | 221 +++++++++++++++------ 2 files changed, 197 insertions(+), 153 deletions(-) diff --git a/scripts/test-cli-auth/ci-test-real-clis.ts b/scripts/test-cli-auth/ci-test-real-clis.ts index 8fd00720..f2cae9b1 100644 --- a/scripts/test-cli-auth/ci-test-real-clis.ts +++ b/scripts/test-cli-auth/ci-test-real-clis.ts @@ -12,19 +12,20 @@ * 1 - One or more tests failed * * Note: CLIs that aren't installed are skipped, not failed. + * + * IMPORTANT: This test uses the same runCLIAuthViaPTY function as production + * to ensure the PTY handling logic is consistent. */ -import * as pty from 'node-pty'; import { execSync } from 'child_process'; import { writeFileSync } from 'fs'; -// Import the actual config from onboarding.ts -// This ensures tests use the same patterns as production +// Import the actual config and PTY runner from onboarding.ts +// This ensures tests use the EXACT SAME logic as production import { CLI_AUTH_CONFIG, - stripAnsiCodes, - findMatchingPrompt, - matchesSuccessPattern, + runCLIAuthViaPTY, + type PTYAuthResult, } from '../../src/cloud/api/onboarding.js'; interface TestResult { @@ -56,7 +57,10 @@ function isCliInstalled(command: string): boolean { } /** - * Test a real CLI's OAuth flow + * Test a real CLI's OAuth flow using the shared PTY runner + * + * Uses the EXACT SAME runCLIAuthViaPTY function as production to ensure + * the PTY handling logic is consistent between tests and production. */ async function testRealCli(providerId: string): Promise { const config = CLI_AUTH_CONFIG[providerId]; @@ -101,98 +105,35 @@ async function testRealCli(providerId: string): Promise { } const startTime = Date.now(); - const respondedPrompts = new Set(); - - return new Promise((resolve) => { - try { - const proc = pty.spawn(config.command, config.args, { - name: 'xterm-256color', - cols: 120, - rows: 30, - env: { - ...process.env, - TERM: 'xterm-256color', - NO_COLOR: '1', - // Ensure CLIs don't try to open browsers - BROWSER: 'echo', - DISPLAY: '', - }, - }); - - // Timeout after configured wait time + buffer - const timeout = setTimeout(() => { - proc.kill(); - result.duration = Date.now() - startTime; - - // Even if we timeout, check if we got a URL - if (result.urlExtracted) { - result.passed = true; - } else { - result.error = 'Timeout waiting for auth URL'; - } - - resolve(result); - }, config.waitTimeout + 10000); // Extra buffer for real CLIs - - proc.onData((data: string) => { - result.rawOutput += data; - const cleanText = stripAnsiCodes(data); - - // Check for prompts - const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); - if (matchingPrompt) { - result.promptsDetected.push(matchingPrompt.description); - - // Respond to prompt - respondedPrompts.add(matchingPrompt.description); - result.promptsResponded.push(matchingPrompt.description); - - const delay = matchingPrompt.delay ?? 100; - setTimeout(() => { - try { - proc.write(matchingPrompt.response); - console.log(` [${providerId}] Responded to: ${matchingPrompt.description}`); - } catch { - // Process may have exited - } - }, delay); - } - // Check for URL - const match = cleanText.match(config.urlPattern); - if (match && match[1] && !result.urlExtracted) { - result.urlExtracted = match[1]; - console.log(` [${providerId}] URL found: ${result.urlExtracted.substring(0, 60)}...`); - } - - // Check for success - if (matchesSuccessPattern(data, config.successPatterns)) { - result.successDetected = true; - } - }); - - proc.onExit(({ exitCode }) => { - clearTimeout(timeout); - result.exitCode = exitCode; - result.duration = Date.now() - startTime; + // Use the shared PTY runner - SAME code as production + const ptyResult: PTYAuthResult = await runCLIAuthViaPTY(config, { + onAuthUrl: (url) => { + result.urlExtracted = url; + console.log(` [${providerId}] URL found: ${url.substring(0, 60)}...`); + }, + onPromptHandled: (description) => { + result.promptsDetected.push(description); + result.promptsResponded.push(description); + console.log(` [${providerId}] Responded to: ${description}`); + }, + onOutput: (data) => { + result.rawOutput += data; + }, + }); - // Pass if we got a URL (main goal of OAuth flow) - // Success detection is secondary since we won't complete auth - result.passed = !!result.urlExtracted; + result.duration = Date.now() - startTime; + result.exitCode = ptyResult.exitCode; + result.successDetected = ptyResult.success; - if (!result.passed && !result.error) { - result.error = 'Failed to extract auth URL from CLI output'; - } + // Pass if we got a URL (main goal of OAuth flow) + result.passed = !!result.urlExtracted; - resolve(result); - }); + if (!result.passed) { + result.error = ptyResult.error || 'Failed to extract auth URL from CLI output'; + } - } catch (err) { - result.error = err instanceof Error ? err.message : 'Unknown error'; - result.duration = Date.now() - startTime; - resolve(result); - } - }); + return result; } /** diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 2253aec3..13af6ed3 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -318,6 +318,145 @@ export function validateAllProviderConfigs(): void { } } +/** + * Result of running a CLI auth flow via PTY + */ +export interface PTYAuthResult { + authUrl: string | null; + success: boolean; + promptsHandled: string[]; + output: string; + exitCode: number | null; + error?: string; +} + +/** + * Options for running CLI auth via PTY + */ +export interface PTYAuthOptions { + /** Callback when auth URL is found */ + onAuthUrl?: (url: string) => void; + /** Callback when a prompt is handled */ + onPromptHandled?: (description: string) => void; + /** Callback for raw PTY output */ + onOutput?: (data: string) => void; + /** Environment variables override */ + env?: Record; + /** Working directory */ + cwd?: string; +} + +/** + * Run CLI auth flow via PTY + * + * This is the core PTY runner used by both production and tests. + * It handles: + * - Spawning the CLI with proper TTY emulation + * - Auto-responding to interactive prompts + * - Extracting auth URLs from output + * - Detecting success patterns + * + * @param config - CLI auth configuration for the provider + * @param options - Optional callbacks and overrides + * @returns Promise resolving to auth result + */ +export async function runCLIAuthViaPTY( + config: CLIAuthConfig, + options: PTYAuthOptions = {} +): Promise { + const result: PTYAuthResult = { + authUrl: null, + success: false, + promptsHandled: [], + output: '', + exitCode: null, + }; + + const respondedPrompts = new Set(); + + return new Promise((resolve) => { + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: options.cwd || process.cwd(), + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + // Prevent CLIs from trying to open browsers + BROWSER: 'echo', + DISPLAY: '', + ...options.env, + } as Record, + }); + + // Timeout handler + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for auth URL'; + resolve(result); + }, config.waitTimeout + 5000); + + proc.onData((data: string) => { + result.output += data; + options.onOutput?.(data); + + // Check for matching prompts and auto-respond + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + result.promptsHandled.push(matchingPrompt.description); + options.onPromptHandled?.(matchingPrompt.description); + + const delay = matchingPrompt.delay ?? 100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, delay); + } + + // Look for auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.authUrl) { + result.authUrl = match[1]; + options.onAuthUrl?.(result.authUrl); + } + + // Check for success indicators + if (matchesSuccessPattern(data, config.successPatterns)) { + result.success = true; + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + result.exitCode = exitCode; + + // Consider it a success if we got a URL (main goal) + // or if exit code was 0 with success pattern + if (result.authUrl || (exitCode === 0 && result.success)) { + result.success = true; + } + + if (!result.authUrl && !result.success && !result.error) { + result.error = 'Failed to extract auth URL from CLI output'; + } + + resolve(result); + }); + } catch (err) { + result.error = err instanceof Error ? err.message : 'Unknown error'; + resolve(result); + } + }); +} + /** * Get list of supported providers for CLI auth */ @@ -357,70 +496,34 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response activeSessions.set(sessionId, session); try { - // Spawn CLI process via PTY for proper TTY emulation - // This ensures the CLI outputs auth URLs correctly - const proc = pty.spawn(config.command, config.args, { - name: 'xterm-256color', - cols: 120, - rows: 30, - cwd: process.cwd(), - env: { ...process.env, NO_COLOR: '1', TERM: 'xterm-256color' } as Record, - }); - - session.process = proc; - - // Track which prompts we've already responded to - const respondedPrompts = new Set(); - - // Capture PTY output for auth URL and handle interactive prompts - proc.onData((data: string) => { - session.output += data; - - // Check for matching prompts and auto-respond - const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); - if (matchingPrompt) { - respondedPrompts.add(matchingPrompt.description); - const delay = matchingPrompt.delay ?? 100; - setTimeout(() => { - try { - proc.write(matchingPrompt.response); - console.log(`[onboarding] Auto-responded to: ${matchingPrompt.description}`); - } catch { - // Process may have exited - } - }, delay); - } - - // Look for auth URL - const cleanText = stripAnsiCodes(data); - const match = cleanText.match(config.urlPattern); - if (match && match[1]) { - session.authUrl = match[1]; + // Use shared PTY runner for CLI auth + const ptyResult = await runCLIAuthViaPTY(config, { + onAuthUrl: (url) => { + session.authUrl = url; session.status = 'waiting_auth'; - } - - // Check for success indicators - if (matchesSuccessPattern(data, config.successPatterns)) { - session.status = 'success'; - } - }); - - proc.onExit(async ({ exitCode }) => { - if (exitCode === 0 && session.status !== 'error') { - session.status = 'success'; - // Try to read credentials from file - await extractCredentials(session, config); - } else if (session.status === 'starting') { - session.status = 'error'; - session.error = `CLI exited with code ${exitCode}`; - } + }, + onPromptHandled: (description) => { + console.log(`[onboarding] Auto-responded to: ${description}`); + }, + onOutput: (data) => { + session.output += data; + if (matchesSuccessPattern(data, config.successPatterns)) { + session.status = 'success'; + } + }, }); - // Wait for URL to appear using provider-specific timeout - await new Promise(resolve => setTimeout(resolve, config.waitTimeout)); + // Update session with result + if (ptyResult.success && !session.authUrl) { + session.status = 'success'; + await extractCredentials(session, config); + } else if (ptyResult.error && session.status === 'starting') { + session.status = 'error'; + session.error = ptyResult.error; + } // Return session info based on current state - if (session.status === 'success') { + if (session.status === 'success' && !session.authUrl) { // Already authenticated - CLI exited successfully without auth URL activeSessions.delete(sessionId); res.json({ From 6356eceb91985b1c99fca6cb0824d6f17a22dc1a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:08:14 +0000 Subject: [PATCH 022/103] Fix TypeScript errors in crypto imports and Map iteration - Use namespace import for crypto module (import * as crypto) - Replace for...of Map iteration with forEach to avoid downlevelIteration requirement --- src/cloud/api/onboarding.ts | 6 +++--- src/cloud/vault/index.ts | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 13af6ed3..d9bc6b09 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -13,7 +13,7 @@ import { Router, Request, Response } from 'express'; import * as pty from 'node-pty'; import type { IPty } from 'node-pty'; -import crypto from 'crypto'; +import * as crypto from 'crypto'; import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; import { vault } from '../vault/index.js'; @@ -45,7 +45,7 @@ const activeSessions = new Map(); // Clean up old sessions periodically setInterval(() => { const now = Date.now(); - for (const [id, session] of activeSessions) { + activeSessions.forEach((session, id) => { // Remove sessions older than 10 minutes if (now - session.createdAt.getTime() > 10 * 60 * 1000) { if (session.process) { @@ -57,7 +57,7 @@ setInterval(() => { } activeSessions.delete(id); } - } + }); }, 60000); /** diff --git a/src/cloud/vault/index.ts b/src/cloud/vault/index.ts index bcccfc71..c6448b9b 100644 --- a/src/cloud/vault/index.ts +++ b/src/cloud/vault/index.ts @@ -4,7 +4,7 @@ * Secure storage for OAuth tokens with AES-256-GCM encryption. */ -import crypto from 'crypto'; +import * as crypto from 'crypto'; import { getConfig } from '../config.js'; import { db } from '../db/index.js'; From 7744abc3d3bf5f9472b97db35b7bce8b39ffe882 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:15:49 +0000 Subject: [PATCH 023/103] Fix spawner test for relay snippet injection Update file system mocks to properly handle relay snippet files: - Mock existsSync to return false for snippet files - Mock readFileSync to return appropriate content based on path - Update assertion to check task is contained in write call --- src/bridge/spawner.test.ts | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/src/bridge/spawner.test.ts b/src/bridge/spawner.test.ts index b38ccc86..8dafdec5 100644 --- a/src/bridge/spawner.test.ts +++ b/src/bridge/spawner.test.ts @@ -67,8 +67,22 @@ describe('AgentSpawner', () => { beforeEach(() => { vi.clearAllMocks(); - existsSyncMock.mockReturnValue(true); - readFileSyncMock.mockReturnValue(JSON.stringify({ agents: [] })); + // Mock file system calls with path-aware responses + existsSyncMock.mockImplementation((filePath: string) => { + // Snippet files don't exist in test environment + if (filePath.includes('agent-relay-snippet') || filePath.includes('agent-relay-protocol')) { + return false; + } + return true; + }); + readFileSyncMock.mockImplementation((filePath: string) => { + // Return agents.json content for registry files + if (typeof filePath === 'string' && filePath.includes('agents.json')) { + return JSON.stringify({ agents: [] }); + } + // Return empty for other files + return ''; + }); writeFileSyncMock.mockImplementation(() => {}); mkdirSyncMock.mockImplementation(() => undefined); mockPtyWrapper.start.mockResolvedValue(undefined); @@ -95,7 +109,10 @@ describe('AgentSpawner', () => { }); expect(spawner.hasWorker('Dev1')).toBe(true); expect(mockPtyWrapper.start).toHaveBeenCalled(); - expect(mockPtyWrapper.write).toHaveBeenCalledWith('Finish the report\r'); + // Task is written to PTY (may include injected snippets, so check task is included) + expect(mockPtyWrapper.write).toHaveBeenCalled(); + const writeCall = mockPtyWrapper.write.mock.calls[0][0]; + expect(writeCall).toContain('Finish the report'); }); it('adds --dangerously-skip-permissions for Claude variants', async () => { From 135e337a231d42b1e87db01b0559e1f09f20997e Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:24:47 +0000 Subject: [PATCH 024/103] Add agent profile panel to dashboard Implements Slack-like agent profile panels showing: - Agent spawn prompt (the task that created the agent) - Persona prompt (agent profile/role instructions) - Persona name, title, and description - Agent metadata (model, capabilities, tags) - First seen/last seen timestamps This helps users understand agent behavior by seeing the prompts that define the agent's role and the task it was spawned with. Components: - AgentProfilePanel: Slide-out panel with profile details - AgentCard: Added profile button to trigger panel - AgentRegistry: Extended to store profile metadata - Dashboard types: Added AgentProfile interface --- src/daemon/agent-registry.ts | 31 ++ src/dashboard/react-components/AgentCard.tsx | 47 ++ .../react-components/AgentProfilePanel.tsx | 414 ++++++++++++++++++ src/dashboard/types/index.ts | 28 ++ 4 files changed, 520 insertions(+) create mode 100644 src/dashboard/react-components/AgentProfilePanel.tsx diff --git a/src/daemon/agent-registry.ts b/src/daemon/agent-registry.ts index cae29d11..f2b4ad0f 100644 --- a/src/daemon/agent-registry.ts +++ b/src/daemon/agent-registry.ts @@ -10,6 +10,26 @@ import { createLogger } from '../utils/logger.js'; const log = createLogger('registry'); +/** + * Agent profile information for display and understanding agent behavior + */ +export interface AgentProfileRecord { + /** Display title/role (e.g., "Lead Developer", "Code Reviewer") */ + title?: string; + /** Short description of what this agent does */ + description?: string; + /** The prompt/task the agent was spawned with */ + spawnPrompt?: string; + /** Agent profile/persona prompt (e.g., lead agent instructions) */ + personaPrompt?: string; + /** Name of the persona preset used (e.g., "lead", "reviewer", "shadow-auditor") */ + personaName?: string; + /** Capabilities or tools available to the agent */ + capabilities?: string[]; + /** Tags for categorization */ + tags?: string[]; +} + export interface AgentRecord { id: string; name: string; @@ -23,6 +43,8 @@ export interface AgentRecord { lastSeen: string; messagesSent: number; messagesReceived: number; + /** Profile information for understanding agent behavior */ + profile?: AgentProfileRecord; } type AgentInput = { @@ -33,6 +55,7 @@ type AgentInput = { task?: string; workingDirectory?: string; team?: string; + profile?: AgentProfileRecord; }; export class AgentRegistry { @@ -60,6 +83,11 @@ export class AgentRegistry { const existing = this.agents.get(agent.name); if (existing) { + // Merge profile data if provided + const mergedProfile = agent.profile + ? { ...existing.profile, ...agent.profile } + : existing.profile; + const updated: AgentRecord = { ...existing, cli: agent.cli ?? existing.cli, @@ -68,6 +96,7 @@ export class AgentRegistry { task: agent.task ?? existing.task, workingDirectory: agent.workingDirectory ?? existing.workingDirectory, team: agent.team ?? existing.team, + profile: mergedProfile, lastSeen: now, }; this.agents.set(agent.name, updated); @@ -84,6 +113,7 @@ export class AgentRegistry { task: agent.task, workingDirectory: agent.workingDirectory, team: agent.team, + profile: agent.profile, firstSeen: now, lastSeen: now, messagesSent: 0, @@ -182,6 +212,7 @@ export class AgentRegistry { task: raw.task, workingDirectory: raw.workingDirectory, team: raw.team, + profile: raw.profile, firstSeen: raw.firstSeen ?? new Date().toISOString(), lastSeen: raw.lastSeen ?? new Date().toISOString(), messagesSent: typeof raw.messagesSent === 'number' ? raw.messagesSent : 0, diff --git a/src/dashboard/react-components/AgentCard.tsx b/src/dashboard/react-components/AgentCard.tsx index 54135b7d..f06fe5f9 100644 --- a/src/dashboard/react-components/AgentCard.tsx +++ b/src/dashboard/react-components/AgentCard.tsx @@ -27,6 +27,7 @@ export interface AgentCardProps { onMessageClick?: (agent: Agent) => void; onReleaseClick?: (agent: Agent) => void; onLogsClick?: (agent: Agent) => void; + onProfileClick?: (agent: Agent) => void; } /** @@ -69,6 +70,7 @@ export function AgentCard({ onMessageClick, onReleaseClick, onLogsClick, + onProfileClick, }: AgentCardProps) { const colors = getAgentColor(agent.name); const initials = getAgentInitials(agent.name); @@ -98,6 +100,11 @@ export function AgentCard({ onLogsClick?.(agent); }; + const handleProfileClick = (e: React.MouseEvent) => { + e.stopPropagation(); + onProfileClick?.(agent); + }; + if (compact) { return (
+ {onProfileClick && ( + + )} {agent.isSpawned && onLogsClick && ( + )} {agent.isSpawned && onLogsClick && ( +
+ + {/* Agent Info */} +
+ {/* Large Avatar */} +
+
+ {initials} +
+ {/* Status indicator */} +
+
+ + {/* Name */} +

+ {displayName} +

+ + {/* Breadcrumb */} + {breadcrumb && ( + + {breadcrumb} + + )} + + {/* Title/Role */} + {profile?.title && ( + + {profile.title} + + )} + + {/* Status */} + +
+ {agent.status.charAt(0).toUpperCase() + agent.status.slice(1)} + {agent.isProcessing && ' - Thinking...'} + + + {/* Tags */} +
+ {agent.cli && ( + + {agent.cli} + + )} + {agent.isSpawned && ( + + Spawned + + )} + {agent.team && ( + + {agent.team} + + )} + {profile?.personaName && ( + + {profile.personaName} + + )} +
+
+ + {/* Details - Scrollable */} +
+
+ {/* Description */} + {profile?.description && ( +
+ +

+ {profile.description} +

+
+ )} + + {/* Current Task */} + {agent.currentTask && ( +
+ +

+ {agent.currentTask} +

+
+ )} + + {/* Spawn Prompt */} + {profile?.spawnPrompt && ( +
+ +
 200 ? 'line-clamp-4' : ''}`}>
+                  {profile.spawnPrompt}
+                
+
+ )} + + {/* Persona Prompt */} + {profile?.personaPrompt && ( +
+ +
 200 ? 'line-clamp-4' : ''}`}>
+                  {profile.personaPrompt}
+                
+
+ )} + + {/* Model */} + {profile?.model && ( +
+ +

+ {profile.model} +

+
+ )} + + {/* Working Directory */} + {profile?.workingDirectory && ( +
+ +

+ {profile.workingDirectory} +

+
+ )} + + {/* Agent ID */} + {agent.agentId && ( +
+ +

+ {agent.agentId} +

+
+ )} + + {/* Capabilities */} + {profile?.capabilities && profile.capabilities.length > 0 && ( +
+ +
+ {profile.capabilities.map((cap, i) => ( + + {cap} + + ))} +
+
+ )} + + {/* Last Seen */} + {agent.lastSeen && ( +
+ +

+ {formatDateTime(agent.lastSeen)} +

+
+ )} + + {/* First Seen */} + {profile?.firstSeen && ( +
+ +

+ {formatDateTime(profile.firstSeen)} +

+
+ )} + + {/* Message Count */} + {agent.messageCount !== undefined && agent.messageCount > 0 && ( +
+ +

+ {agent.messageCount} messages sent +

+
+ )} +
+
+ + {/* Actions */} +
+ {/* Message Button */} + {onMessage && ( + + )} + + {/* Logs Button */} + {agent.isSpawned && onLogs && ( + + )} + + {/* Release Button */} + {agent.isSpawned && onRelease && ( + + )} +
+
+ + ); +} + +/** + * Format a timestamp to a readable date/time + */ +function formatDateTime(timestamp: string): string { + const date = new Date(timestamp); + return date.toLocaleString([], { + month: 'short', + day: 'numeric', + year: 'numeric', + hour: '2-digit', + minute: '2-digit', + }); +} diff --git a/src/dashboard/types/index.ts b/src/dashboard/types/index.ts index f7494a1c..91235df4 100644 --- a/src/dashboard/types/index.ts +++ b/src/dashboard/types/index.ts @@ -24,6 +24,34 @@ export interface Agent { lastMessageReceivedAt?: number; // Timestamp when agent last received a message lastOutputAt?: number; // Timestamp when agent last produced output isStuck?: boolean; // True when agent received message but hasn't responded within threshold + // Profile fields for understanding agent behavior + profile?: AgentProfile; +} + +/** + * Agent profile information - helps users understand agent behavior + */ +export interface AgentProfile { + /** Display title/role (e.g., "Lead Developer", "Code Reviewer") */ + title?: string; + /** Short description of what this agent does */ + description?: string; + /** The prompt/task the agent was spawned with */ + spawnPrompt?: string; + /** Agent profile/persona prompt (e.g., lead agent instructions) */ + personaPrompt?: string; + /** Name of the persona preset used (e.g., "lead", "reviewer", "shadow-auditor") */ + personaName?: string; + /** Model being used (e.g., "claude-3-opus", "gpt-4") */ + model?: string; + /** Working directory */ + workingDirectory?: string; + /** When the agent was first seen */ + firstSeen?: string; + /** Capabilities or tools available to the agent */ + capabilities?: string[]; + /** Tags for categorization */ + tags?: string[]; } export interface AgentSummary { From 2bf0c6e7f257cc4bdd3fc4cbdd1a9f12848d1e21 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:32:31 +0000 Subject: [PATCH 025/103] Fix lint errors and remove dead code - Fix control regex in onboarding.ts for ANSI stripping - Convert require() imports to ES module imports (provisioner, trajectory) - Prefix unused variables with underscore where needed - Remove truly dead code: - Unused spawn/os imports in cli profiler - Unused snapshotCount variable - Unused isCliProvider type guard - Unused getTrajectoriesDir wrapper function - Unused type imports in test files --- src/bridge/spawner.ts | 2 +- src/cli/index.ts | 6 +----- src/cloud/api/monitoring.ts | 2 +- src/cloud/api/onboarding.ts | 1 + src/cloud/api/policy.ts | 4 ++-- src/cloud/api/providers.ts | 5 ----- src/cloud/api/test-helpers.ts | 2 +- src/cloud/provisioner/index.ts | 4 +--- src/dashboard-server/server.ts | 2 +- src/policy/agent-policy.ts | 8 ++++---- src/resiliency/crash-insights.test.ts | 4 ---- src/resiliency/crash-insights.ts | 1 - src/resiliency/memory-monitor.test.ts | 3 +-- src/resiliency/memory-monitor.ts | 2 +- src/trajectory/config.ts | 6 +++--- src/trajectory/integration.ts | 8 -------- 16 files changed, 18 insertions(+), 42 deletions(-) diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index c997589b..26b66bc5 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -12,7 +12,7 @@ import { getProjectPaths } from '../utils/project-namespace.js'; import { resolveCommand } from '../utils/command-resolver.js'; import { PtyWrapper, type PtyWrapperConfig, type SummaryEvent, type SessionEndEvent } from '../wrapper/pty-wrapper.js'; import { selectShadowCli } from './shadow-cli.js'; -import { AgentPolicyService, type CloudPolicyFetcher, type PolicyDecision } from '../policy/agent-policy.js'; +import { AgentPolicyService, type CloudPolicyFetcher } from '../policy/agent-policy.js'; import type { SpawnRequest, SpawnResult, diff --git a/src/cli/index.ts b/src/cli/index.ts index dad3972c..fb7f97b2 100644 --- a/src/cli/index.ts +++ b/src/cli/index.ts @@ -2459,7 +2459,7 @@ program console.log(' UNACKNOWLEDGED ALERTS:'); console.log(' โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€'); for (const alert of data.alerts.slice(0, 10)) { - const time = new Date(alert.createdAt).toLocaleString(); + const _time = new Date(alert.createdAt).toLocaleString(); const icon = alert.alertType === 'oom_imminent' ? '๐Ÿ”ด' : alert.alertType === 'critical' ? '๐ŸŸ ' : '๐ŸŸก'; console.log(` ${icon} ${alert.agentName} - ${alert.alertType}`); @@ -2505,8 +2505,6 @@ program outputDir?: string; exposeGc?: boolean; }) => { - const { spawn } = await import('child_process'); - const os = await import('node:os'); const { getProjectPaths } = await import('../utils/project-namespace.js'); if (!commandParts || commandParts.length === 0) { @@ -2570,8 +2568,6 @@ program inboxDir: paths.dataDir, }); - const snapshotCount = 0; - // Start memory sampling const sampleInterval = setInterval(() => { const memUsage = process.memoryUsage(); diff --git a/src/cloud/api/monitoring.ts b/src/cloud/api/monitoring.ts index 9dbdce78..d2d7bced 100644 --- a/src/cloud/api/monitoring.ts +++ b/src/cloud/api/monitoring.ts @@ -15,7 +15,7 @@ import { requireAuth } from './auth.js'; import { db as dbModule } from '../db/index.js'; import { getDb } from '../db/drizzle.js'; import { - linkedDaemons, + linkedDaemons as _linkedDaemons, agentMetrics, agentCrashes, memoryAlerts, diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index d9bc6b09..fdbad759 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -204,6 +204,7 @@ export const CLI_AUTH_CONFIG: Record = { * Strip ANSI escape codes from text */ export function stripAnsiCodes(text: string): string { + // eslint-disable-next-line no-control-regex return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); } diff --git a/src/cloud/api/policy.ts b/src/cloud/api/policy.ts index 934d786c..4b6796ae 100644 --- a/src/cloud/api/policy.ts +++ b/src/cloud/api/policy.ts @@ -7,7 +7,7 @@ import { Router, Request, Response } from 'express'; import { db } from '../db/index.js'; -import type { WorkspaceAgentPolicy, AgentPolicyRule } from '../db/schema.js'; +import type { WorkspaceAgentPolicy } from '../db/schema.js'; export const policyRouter = Router(); @@ -137,7 +137,7 @@ policyRouter.delete('/:workspaceId', async (req: Request, res: Response) => { } // Remove policy from config - const { agentPolicy, ...restConfig } = workspace.config ?? {}; + const { agentPolicy: _agentPolicy, ...restConfig } = workspace.config ?? {}; await db.workspaces.updateConfig(workspaceId, restConfig as any); res.json({ diff --git a/src/cloud/api/providers.ts b/src/cloud/api/providers.ts index aec90154..7ad7448e 100644 --- a/src/cloud/api/providers.ts +++ b/src/cloud/api/providers.ts @@ -108,11 +108,6 @@ function isDeviceFlowProvider(provider: Provider): provider is DeviceFlowProvide return provider.authStrategy === 'device_flow'; } -// Type guard for CLI providers -function isCliProvider(provider: Provider): provider is CliProvider { - return provider.authStrategy === 'cli'; -} - type ProviderType = keyof typeof PROVIDERS; // In-memory store for active device flows (use Redis in production) diff --git a/src/cloud/api/test-helpers.ts b/src/cloud/api/test-helpers.ts index 9b205969..47d10d05 100644 --- a/src/cloud/api/test-helpers.ts +++ b/src/cloud/api/test-helpers.ts @@ -137,7 +137,7 @@ testHelpersRouter.delete('/cleanup', async (req: Request, res: Response) => { } try { - const db = getDb(); + const _db = getDb(); // Delete test data (users with test- prefix in githubId) // Note: This cascades to linked daemons due to FK constraints diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index c131a4d3..ba0e3296 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -4,6 +4,7 @@ * One-click provisioning for compute resources (Fly.io, Railway, Docker). */ +import * as crypto from 'crypto'; import { getConfig } from '../config.js'; import { db, Workspace } from '../db/index.js'; import { vault } from '../vault/index.js'; @@ -188,7 +189,6 @@ class FlyProvisioner implements ComputeProvisioner { * This is a simple HMAC - in production, consider using JWTs */ private generateWorkspaceToken(workspaceId: string): string { - const crypto = require('crypto'); return crypto .createHmac('sha256', this.sessionSecret) .update(`workspace:${workspaceId}`) @@ -515,7 +515,6 @@ class RailwayProvisioner implements ComputeProvisioner { } private generateWorkspaceToken(workspaceId: string): string { - const crypto = require('crypto'); return crypto .createHmac('sha256', this.sessionSecret) .update(`workspace:${workspaceId}`) @@ -770,7 +769,6 @@ class DockerProvisioner implements ComputeProvisioner { } private generateWorkspaceToken(workspaceId: string): string { - const crypto = require('crypto'); return crypto .createHmac('sha256', this.sessionSecret) .update(`workspace:${workspaceId}`) diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index 36921deb..ac607a24 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -3083,7 +3083,7 @@ Start by greeting the project leads and asking for status updates.`; const { getRelayConfigPath, readRelayConfig } = await import('../trajectory/config.js'); const { getProjectPaths } = await import('../utils/project-namespace.js'); - const { projectRoot } = getProjectPaths(); + const { projectRoot: _projectRoot } = getProjectPaths(); // Read existing config const config = readRelayConfig(); diff --git a/src/policy/agent-policy.ts b/src/policy/agent-policy.ts index f54514b5..538aa564 100644 --- a/src/policy/agent-policy.ts +++ b/src/policy/agent-policy.ts @@ -267,7 +267,7 @@ export class AgentPolicyService { private parseSimpleYaml(content: string): Record { const result: Record = {}; const lines = content.split('\n'); - let currentKey = ''; + let _currentKey = ''; let currentArray: unknown[] | null = null; let currentObject: Record | null = null; let indent = 0; @@ -316,7 +316,7 @@ export class AgentPolicyService { // Top-level or section key if (value === '' || value === '|' || value === '>') { // Start of array or nested object - currentKey = key; + _currentKey = key; currentArray = []; currentObject = null; indent = lineIndent; @@ -325,7 +325,7 @@ export class AgentPolicyService { // Simple key: value if (lineIndent === 0) { result[key] = this.parseValue(value); - currentKey = ''; + _currentKey = ''; currentArray = null; currentObject = null; } else if (currentObject) { @@ -807,7 +807,7 @@ export class AgentPolicyService { * Get a concise policy instruction for injection into agent prompts */ async getPolicyInstruction(agentName: string): Promise { - const { matchedPolicy, policySource } = await this.getAgentPolicy(agentName); + const { matchedPolicy, policySource: _policySource } = await this.getAgentPolicy(agentName); // Only generate instructions if there are restrictions const hasRestrictions = diff --git a/src/resiliency/crash-insights.test.ts b/src/resiliency/crash-insights.test.ts index 36487432..ae0e5650 100644 --- a/src/resiliency/crash-insights.test.ts +++ b/src/resiliency/crash-insights.test.ts @@ -4,13 +4,9 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import * as fs from 'fs'; -import * as path from 'path'; -import * as os from 'os'; import { CrashInsightsService, getCrashInsights, - type CrashRecord, - type CrashAnalysis, } from './crash-insights.js'; import type { AgentMemoryMonitor, CrashMemoryContext } from './memory-monitor.js'; diff --git a/src/resiliency/crash-insights.ts b/src/resiliency/crash-insights.ts index 6fe45e6b..b068cb1c 100644 --- a/src/resiliency/crash-insights.ts +++ b/src/resiliency/crash-insights.ts @@ -15,7 +15,6 @@ import * as os from 'os'; import { AgentMemoryMonitor, CrashMemoryContext, - MemorySnapshot, formatBytes, } from './memory-monitor.js'; diff --git a/src/resiliency/memory-monitor.test.ts b/src/resiliency/memory-monitor.test.ts index 33cd5fa2..043f6af0 100644 --- a/src/resiliency/memory-monitor.test.ts +++ b/src/resiliency/memory-monitor.test.ts @@ -8,7 +8,6 @@ import { getMemoryMonitor, formatBytes, type MemorySnapshot, - type AgentMemoryMetrics, type MemoryAlert, } from './memory-monitor.js'; @@ -115,7 +114,7 @@ describe('AgentMemoryMonitor', () => { it('should reset metrics on PID update', () => { monitor.register('test-agent', 12345); - const metrics = monitor.get('test-agent'); + const _metrics = monitor.get('test-agent'); monitor.updatePid('test-agent', 54321); diff --git a/src/resiliency/memory-monitor.ts b/src/resiliency/memory-monitor.ts index 5aba218e..7db8b76c 100644 --- a/src/resiliency/memory-monitor.ts +++ b/src/resiliency/memory-monitor.ts @@ -391,7 +391,7 @@ export class AgentMemoryMonitor extends EventEmitter { if (!metrics) return; const { thresholds } = this.config; - const previousRss = metrics.current.rssBytes; + const _previousRss = metrics.current.rssBytes; const previousAlertLevel = metrics.alertLevel; // Update current snapshot diff --git a/src/trajectory/config.ts b/src/trajectory/config.ts index d5317cb0..dd304c73 100644 --- a/src/trajectory/config.ts +++ b/src/trajectory/config.ts @@ -18,7 +18,7 @@ * - Reasoning: XDG-compliant, project-isolated, survives repo deletion */ -import { existsSync, readFileSync, mkdirSync } from 'node:fs'; +import { existsSync, readFileSync, mkdirSync, statSync } from 'node:fs'; import { join } from 'node:path'; import { homedir } from 'node:os'; import { createHash } from 'node:crypto'; @@ -60,7 +60,7 @@ export function readRelayConfig(projectRoot?: string): RelayConfig { // Check cache if (configCache && configCache.path === configPath) { try { - const stat = require('fs').statSync(configPath); + const stat = statSync(configPath); if (stat.mtimeMs === configCache.mtime) { return configCache.config; } @@ -79,7 +79,7 @@ export function readRelayConfig(projectRoot?: string): RelayConfig { // Update cache try { - const stat = require('fs').statSync(configPath); + const stat = statSync(configPath); configCache = { path: configPath, config, mtime: stat.mtimeMs }; } catch { // Ignore cache update failures diff --git a/src/trajectory/integration.ts b/src/trajectory/integration.ts index 810d6cef..7d4c1c0f 100644 --- a/src/trajectory/integration.ts +++ b/src/trajectory/integration.ts @@ -76,14 +76,6 @@ interface TrajectoryFile { }; } -/** - * Get the primary trajectories directory path (for writing) - * Uses config to determine if repo or user-level storage - */ -function getTrajectoriesDir(): string { - return getPrimaryTrajectoriesDir(); -} - /** * Read a single trajectory index file from a directory */ From b8db7d44bbd358e88ba160fd960493f9eeb10eb5 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:39:02 +0000 Subject: [PATCH 026/103] Fix CLI OAuth test container build - Add make and g++ for node-pty native compilation - Fix Gemini CLI installation (remove non-existent npm package) --- scripts/test-cli-auth/Dockerfile.real | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index 456bdcbc..e13ef698 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -21,6 +21,8 @@ RUN apt-get update && apt-get install -y \ curl \ git \ python3 \ + make \ + g++ \ jq \ && rm -rf /var/lib/apt/lists/* @@ -48,9 +50,8 @@ RUN curl -fsSL https://opencode.ai/install | bash || echo "OpenCode install fail # Droid - uses official install script RUN curl -fsSL https://app.factory.ai/cli | sh || echo "Droid install failed" -# Gemini - try npm first, fall back to install script -RUN npm install -g @anthropic-ai/gemini-cli 2>/dev/null || \ - curl -fsSL https://gemini.google.com/cli/install.sh | bash || \ +# Gemini CLI - uses Google's install script +RUN curl -fsSL https://gemini.google.com/cli/install.sh | bash || \ echo "Gemini install failed" # Add user's local bin to PATH From 9aa6b5cd6b218f1634bccd89bd36edd78866b1d6 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:41:01 +0000 Subject: [PATCH 027/103] Fix Gemini CLI installation to use @google/gemini-cli The package is @google/gemini-cli, not @anthropic-ai/gemini-cli. Fixed in both production workspace Dockerfile and test container. --- deploy/workspace/Dockerfile | 2 +- scripts/test-cli-auth/Dockerfile.real | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index 9d5b82fd..e738d2e9 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -78,7 +78,7 @@ RUN curl -fsSL https://opencode.ai/install | bash # Droid RUN curl -fsSL https://app.factory.ai/cli | sh # Gemini -RUN npm install -g @anthropic-ai/gemini-cli || curl -fsSL https://gemini.google.com/cli/install.sh | bash +RUN npm install -g @google/gemini-cli # Environment ENV NODE_ENV=production diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index e13ef698..416439a3 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -50,9 +50,8 @@ RUN curl -fsSL https://opencode.ai/install | bash || echo "OpenCode install fail # Droid - uses official install script RUN curl -fsSL https://app.factory.ai/cli | sh || echo "Droid install failed" -# Gemini CLI - uses Google's install script -RUN curl -fsSL https://gemini.google.com/cli/install.sh | bash || \ - echo "Gemini install failed" +# Gemini CLI - install via npm +RUN npm install -g @google/gemini-cli || echo "Gemini install failed" # Add user's local bin to PATH ENV PATH="/home/testuser/.local/bin:$PATH" From 338f941c0669f5b7a4091cc83ed14515ba783def Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:54:51 +0000 Subject: [PATCH 028/103] Fix CLI OAuth test container build - Extract cli-pty-runner.ts as shared module with minimal deps (only node-pty) - Update test container to use repo root as build context - Fix import paths in ci-test-real-clis.ts - Re-export types from onboarding.ts for backward compatibility This allows the test container to import the PTY runner without pulling in the full server stack (express, db, vault, etc). --- .github/workflows/cli-oauth-test.yml | 2 +- scripts/test-cli-auth/Dockerfile.real | 11 +- scripts/test-cli-auth/ci-test-real-clis.ts | 4 +- src/cloud/api/cli-pty-runner.ts | 420 ++++++++++++++++++++ src/cloud/api/onboarding.ts | 432 ++------------------- 5 files changed, 453 insertions(+), 416 deletions(-) create mode 100644 src/cloud/api/cli-pty-runner.ts diff --git a/.github/workflows/cli-oauth-test.yml b/.github/workflows/cli-oauth-test.yml index 7ea62f76..a748cf2f 100644 --- a/.github/workflows/cli-oauth-test.yml +++ b/.github/workflows/cli-oauth-test.yml @@ -45,7 +45,7 @@ jobs: - name: Build test container with real CLIs run: | docker build -f scripts/test-cli-auth/Dockerfile.real \ - -t cli-oauth-test-real scripts/test-cli-auth/ + -t cli-oauth-test-real . - name: Run CLI OAuth tests against real CLIs id: test diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index 416439a3..73aed276 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -56,9 +56,14 @@ RUN npm install -g @google/gemini-cli || echo "Gemini install failed" # Add user's local bin to PATH ENV PATH="/home/testuser/.local/bin:$PATH" -# Copy test files -COPY --chown=testuser:testuser ci-test-real-clis.ts /app/ -COPY --chown=testuser:testuser package.json /app/ +# Copy test files and source dependencies +# Context is repo root, so paths are relative to that +COPY --chown=testuser:testuser scripts/test-cli-auth/ci-test-real-clis.ts /app/ +COPY --chown=testuser:testuser scripts/test-cli-auth/package.json /app/ + +# Copy the cli-pty-runner module that the test imports +# Import path ../../src/cloud/api/cli-pty-runner.js from /app/ resolves to /src/cloud/api/ +COPY --chown=testuser:testuser src/cloud/api/cli-pty-runner.ts /src/cloud/api/ # Install test dependencies RUN npm install diff --git a/scripts/test-cli-auth/ci-test-real-clis.ts b/scripts/test-cli-auth/ci-test-real-clis.ts index f2cae9b1..f3d4dbb9 100644 --- a/scripts/test-cli-auth/ci-test-real-clis.ts +++ b/scripts/test-cli-auth/ci-test-real-clis.ts @@ -20,13 +20,13 @@ import { execSync } from 'child_process'; import { writeFileSync } from 'fs'; -// Import the actual config and PTY runner from onboarding.ts +// Import the actual config and PTY runner from cli-pty-runner.ts // This ensures tests use the EXACT SAME logic as production import { CLI_AUTH_CONFIG, runCLIAuthViaPTY, type PTYAuthResult, -} from '../../src/cloud/api/onboarding.js'; +} from '../../src/cloud/api/cli-pty-runner.js'; interface TestResult { provider: string; diff --git a/src/cloud/api/cli-pty-runner.ts b/src/cloud/api/cli-pty-runner.ts new file mode 100644 index 00000000..93f4e72b --- /dev/null +++ b/src/cloud/api/cli-pty-runner.ts @@ -0,0 +1,420 @@ +/** + * CLI PTY Runner + * + * Shared module for running CLI auth flows via PTY. + * Used by both production (onboarding.ts) and tests (ci-test-real-clis.ts). + * + * This module has minimal dependencies (only node-pty) so it can be + * used in isolated test containers without the full server stack. + */ + +import * as pty from 'node-pty'; + +/** + * Interactive prompt handler configuration + * Defines patterns to detect prompts and responses to send + */ +export interface PromptHandler { + /** Pattern to detect in CLI output (case-insensitive) */ + pattern: RegExp; + /** Response to send (e.g., '\r' for enter, 'y\r' for yes+enter) */ + response: string; + /** Delay before sending response (ms) */ + delay?: number; + /** Description for logging/debugging */ + description: string; +} + +/** + * CLI auth configuration for each provider + */ +export interface CLIAuthConfig { + /** CLI command to run */ + command: string; + /** Arguments to pass */ + args: string[]; + /** Pattern to extract auth URL from output */ + urlPattern: RegExp; + /** Path to credentials file (for reading after auth) */ + credentialPath?: string; + /** Display name for UI */ + displayName: string; + /** Interactive prompts to auto-respond to */ + prompts: PromptHandler[]; + /** Success indicators in output */ + successPatterns: RegExp[]; + /** How long to wait for URL to appear (ms) */ + waitTimeout: number; +} + +/** + * CLI commands and URL patterns for each provider + * + * Each CLI tool outputs an OAuth URL when run without credentials. + * We capture stdout/stderr and extract the URL using a simple https:// pattern. + * + * IMPORTANT: These CLIs are interactive - they output the auth URL then wait + * for the user to complete OAuth in their browser. We capture the URL and + * display it in a popup for the user. + */ +export const CLI_AUTH_CONFIG: Record = { + anthropic: { + command: 'claude', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.claude/credentials.json', + displayName: 'Claude', + waitTimeout: 5000, + prompts: [ + { + pattern: /dark\s*(mode|theme)/i, + response: '\r', // Press enter to accept default + delay: 100, + description: 'Dark mode prompt', + }, + { + pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, + response: '\r', // Press enter for first option (subscription) + delay: 100, + description: 'Auth method prompt', + }, + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', // Yes to trust + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [ + /success/i, + /authenticated/i, + /logged\s*in/i, + ], + }, + openai: { + command: 'codex', + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.codex/credentials.json', + displayName: 'Codex', + waitTimeout: 3000, + prompts: [ + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [ + /success/i, + /authenticated/i, + /logged\s*in/i, + ], + }, + google: { + command: 'gemini', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Gemini', + waitTimeout: 3000, + prompts: [], + successPatterns: [ + /success/i, + /authenticated/i, + ], + }, + opencode: { + command: 'opencode', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'OpenCode', + waitTimeout: 3000, + prompts: [], + successPatterns: [ + /success/i, + /authenticated/i, + ], + }, + droid: { + command: 'droid', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Droid', + waitTimeout: 3000, + prompts: [], + successPatterns: [ + /success/i, + /authenticated/i, + ], + }, +}; + +/** + * Strip ANSI escape codes from text + */ +export function stripAnsiCodes(text: string): string { + // eslint-disable-next-line no-control-regex + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +/** + * Check if text matches any success pattern + */ +export function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { + const cleanText = stripAnsiCodes(text).toLowerCase(); + return patterns.some(p => p.test(cleanText)); +} + +/** + * Find matching prompt handler for given text + */ +export function findMatchingPrompt( + text: string, + prompts: PromptHandler[], + respondedPrompts: Set +): PromptHandler | null { + const cleanText = stripAnsiCodes(text); + + for (const prompt of prompts) { + // Skip if already responded to this prompt type + if (respondedPrompts.has(prompt.description)) continue; + + if (prompt.pattern.test(cleanText)) { + return prompt; + } + } + + return null; +} + +/** + * Validate a provider's CLI auth configuration + * Returns null if valid, or an error message if invalid + */ +export function validateProviderConfig(providerId: string, config: CLIAuthConfig): string | null { + if (!config.command || typeof config.command !== 'string') { + return `${providerId}: missing or invalid 'command'`; + } + + if (!Array.isArray(config.args)) { + return `${providerId}: 'args' must be an array`; + } + + if (!(config.urlPattern instanceof RegExp)) { + return `${providerId}: 'urlPattern' must be a RegExp`; + } + + // Check urlPattern has a capture group + const testUrl = 'https://example.com/test'; + const match = testUrl.match(config.urlPattern); + if (!match || !match[1]) { + return `${providerId}: 'urlPattern' must have a capture group - got ${config.urlPattern}`; + } + + if (!config.displayName || typeof config.displayName !== 'string') { + return `${providerId}: missing or invalid 'displayName'`; + } + + if (typeof config.waitTimeout !== 'number' || config.waitTimeout <= 0) { + return `${providerId}: 'waitTimeout' must be a positive number`; + } + + if (!Array.isArray(config.prompts)) { + return `${providerId}: 'prompts' must be an array`; + } + + for (let i = 0; i < config.prompts.length; i++) { + const prompt = config.prompts[i]; + if (!(prompt.pattern instanceof RegExp)) { + return `${providerId}: prompt[${i}].pattern must be a RegExp`; + } + if (typeof prompt.response !== 'string') { + return `${providerId}: prompt[${i}].response must be a string`; + } + if (!prompt.description || typeof prompt.description !== 'string') { + return `${providerId}: prompt[${i}].description must be a non-empty string`; + } + } + + if (!Array.isArray(config.successPatterns)) { + return `${providerId}: 'successPatterns' must be an array`; + } + + for (let i = 0; i < config.successPatterns.length; i++) { + if (!(config.successPatterns[i] instanceof RegExp)) { + return `${providerId}: successPatterns[${i}] must be a RegExp`; + } + } + + return null; +} + +/** + * Validate all provider configurations + * Throws an error if any provider is invalid + */ +export function validateAllProviderConfigs(): void { + const errors: string[] = []; + + for (const [providerId, config] of Object.entries(CLI_AUTH_CONFIG)) { + const error = validateProviderConfig(providerId, config); + if (error) { + errors.push(error); + } + } + + if (errors.length > 0) { + throw new Error(`Invalid provider configurations:\n${errors.join('\n')}`); + } +} + +/** + * Result of running a CLI auth flow via PTY + */ +export interface PTYAuthResult { + authUrl: string | null; + success: boolean; + promptsHandled: string[]; + output: string; + exitCode: number | null; + error?: string; +} + +/** + * Options for running CLI auth via PTY + */ +export interface PTYAuthOptions { + /** Callback when auth URL is found */ + onAuthUrl?: (url: string) => void; + /** Callback when a prompt is handled */ + onPromptHandled?: (description: string) => void; + /** Callback for raw PTY output */ + onOutput?: (data: string) => void; + /** Environment variables override */ + env?: Record; + /** Working directory */ + cwd?: string; +} + +/** + * Run CLI auth flow via PTY + * + * This is the core PTY runner used by both production and tests. + * It handles: + * - Spawning the CLI with proper TTY emulation + * - Auto-responding to interactive prompts + * - Extracting auth URLs from output + * - Detecting success patterns + * + * @param config - CLI auth configuration for the provider + * @param options - Optional callbacks and overrides + * @returns Promise resolving to auth result + */ +export async function runCLIAuthViaPTY( + config: CLIAuthConfig, + options: PTYAuthOptions = {} +): Promise { + const result: PTYAuthResult = { + authUrl: null, + success: false, + promptsHandled: [], + output: '', + exitCode: null, + }; + + const respondedPrompts = new Set(); + + return new Promise((resolve) => { + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: options.cwd || process.cwd(), + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + // Prevent CLIs from trying to open browsers + BROWSER: 'echo', + DISPLAY: '', + ...options.env, + } as Record, + }); + + // Timeout handler + const timeout = setTimeout(() => { + proc.kill(); + result.error = 'Timeout waiting for auth URL'; + resolve(result); + }, config.waitTimeout + 5000); + + proc.onData((data: string) => { + result.output += data; + options.onOutput?.(data); + + // Check for matching prompts and auto-respond + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + result.promptsHandled.push(matchingPrompt.description); + options.onPromptHandled?.(matchingPrompt.description); + + const delay = matchingPrompt.delay ?? 100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, delay); + } + + // Look for auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !result.authUrl) { + result.authUrl = match[1]; + options.onAuthUrl?.(result.authUrl); + } + + // Check for success indicators + if (matchesSuccessPattern(data, config.successPatterns)) { + result.success = true; + } + }); + + proc.onExit(({ exitCode }) => { + clearTimeout(timeout); + result.exitCode = exitCode; + + // Consider it a success if we got a URL (main goal) + // or if exit code was 0 with success pattern + if (result.authUrl || (exitCode === 0 && result.success)) { + result.success = true; + } + + if (!result.authUrl && !result.success && !result.error) { + result.error = 'Failed to extract auth URL from CLI output'; + } + + resolve(result); + }); + } catch (err) { + result.error = err instanceof Error ? err.message : 'Unknown error'; + resolve(result); + } + }); +} + +/** + * Get list of supported providers for CLI auth + */ +export function getSupportedProviders(): { id: string; displayName: string; command: string }[] { + return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ + id, + displayName: config.displayName, + command: config.command, + })); +} diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index fdbad759..37f4e841 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -11,13 +11,34 @@ */ import { Router, Request, Response } from 'express'; -import * as pty from 'node-pty'; import type { IPty } from 'node-pty'; import * as crypto from 'crypto'; import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; import { vault } from '../vault/index.js'; +// Re-export from shared module for backward compatibility +export { + CLI_AUTH_CONFIG, + runCLIAuthViaPTY, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs, + getSupportedProviders, + type CLIAuthConfig, + type PTYAuthResult, + type PTYAuthOptions, + type PromptHandler, +} from './cli-pty-runner.js'; + +import { + CLI_AUTH_CONFIG, + runCLIAuthViaPTY, + matchesSuccessPattern, +} from './cli-pty-runner.js'; + export const onboardingRouter = Router(); // All routes require authentication @@ -60,415 +81,6 @@ setInterval(() => { }); }, 60000); -/** - * Interactive prompt handler configuration - * Defines patterns to detect prompts and responses to send - */ -interface PromptHandler { - /** Pattern to detect in CLI output (case-insensitive) */ - pattern: RegExp; - /** Response to send (e.g., '\r' for enter, 'y\r' for yes+enter) */ - response: string; - /** Delay before sending response (ms) */ - delay?: number; - /** Description for logging/debugging */ - description: string; -} - -/** - * CLI auth configuration for each provider - */ -export interface CLIAuthConfig { - /** CLI command to run */ - command: string; - /** Arguments to pass */ - args: string[]; - /** Pattern to extract auth URL from output */ - urlPattern: RegExp; - /** Path to credentials file (for reading after auth) */ - credentialPath?: string; - /** Display name for UI */ - displayName: string; - /** Interactive prompts to auto-respond to */ - prompts: PromptHandler[]; - /** Success indicators in output */ - successPatterns: RegExp[]; - /** How long to wait for URL to appear (ms) */ - waitTimeout: number; -} - -/** - * CLI commands and URL patterns for each provider - * - * Each CLI tool outputs an OAuth URL when run without credentials. - * We capture stdout/stderr and extract the URL using a simple https:// pattern. - * - * IMPORTANT: These CLIs are interactive - they output the auth URL then wait - * for the user to complete OAuth in their browser. We capture the URL and - * display it in a popup for the user. - */ -export const CLI_AUTH_CONFIG: Record = { - anthropic: { - command: 'claude', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - credentialPath: '~/.claude/credentials.json', - displayName: 'Claude', - waitTimeout: 5000, - prompts: [ - { - pattern: /dark\s*(mode|theme)/i, - response: '\r', // Press enter to accept default - delay: 100, - description: 'Dark mode prompt', - }, - { - pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, - response: '\r', // Press enter for first option (subscription) - delay: 100, - description: 'Auth method prompt', - }, - { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', // Yes to trust - delay: 100, - description: 'Trust directory prompt', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, - openai: { - command: 'codex', - args: ['login'], - urlPattern: /(https:\/\/[^\s]+)/, - credentialPath: '~/.codex/credentials.json', - displayName: 'Codex', - waitTimeout: 3000, - prompts: [ - { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', - delay: 100, - description: 'Trust directory prompt', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, - google: { - command: 'gemini', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'Gemini', - waitTimeout: 3000, - prompts: [], - successPatterns: [ - /success/i, - /authenticated/i, - ], - }, - opencode: { - command: 'opencode', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'OpenCode', - waitTimeout: 3000, - prompts: [], - successPatterns: [ - /success/i, - /authenticated/i, - ], - }, - droid: { - command: 'droid', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'Droid', - waitTimeout: 3000, - prompts: [], - successPatterns: [ - /success/i, - /authenticated/i, - ], - }, -}; - -/** - * Strip ANSI escape codes from text - */ -export function stripAnsiCodes(text: string): string { - // eslint-disable-next-line no-control-regex - return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); -} - -/** - * Check if text matches any success pattern - */ -export function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { - const cleanText = stripAnsiCodes(text).toLowerCase(); - return patterns.some(p => p.test(cleanText)); -} - -/** - * Find matching prompt handler for given text - */ -export function findMatchingPrompt( - text: string, - prompts: PromptHandler[], - respondedPrompts: Set -): PromptHandler | null { - const cleanText = stripAnsiCodes(text); - - for (const prompt of prompts) { - // Skip if already responded to this prompt type - if (respondedPrompts.has(prompt.description)) continue; - - if (prompt.pattern.test(cleanText)) { - return prompt; - } - } - - return null; -} - -/** - * Validate a provider's CLI auth configuration - * Returns null if valid, or an error message if invalid - */ -export function validateProviderConfig(providerId: string, config: CLIAuthConfig): string | null { - if (!config.command || typeof config.command !== 'string') { - return `${providerId}: missing or invalid 'command'`; - } - - if (!Array.isArray(config.args)) { - return `${providerId}: 'args' must be an array`; - } - - if (!(config.urlPattern instanceof RegExp)) { - return `${providerId}: 'urlPattern' must be a RegExp`; - } - - // Check urlPattern has a capture group - const testUrl = 'https://example.com/test'; - const match = testUrl.match(config.urlPattern); - if (!match || !match[1]) { - return `${providerId}: 'urlPattern' must have a capture group - got ${config.urlPattern}`; - } - - if (!config.displayName || typeof config.displayName !== 'string') { - return `${providerId}: missing or invalid 'displayName'`; - } - - if (typeof config.waitTimeout !== 'number' || config.waitTimeout <= 0) { - return `${providerId}: 'waitTimeout' must be a positive number`; - } - - if (!Array.isArray(config.prompts)) { - return `${providerId}: 'prompts' must be an array`; - } - - for (let i = 0; i < config.prompts.length; i++) { - const prompt = config.prompts[i]; - if (!(prompt.pattern instanceof RegExp)) { - return `${providerId}: prompt[${i}].pattern must be a RegExp`; - } - if (typeof prompt.response !== 'string') { - return `${providerId}: prompt[${i}].response must be a string`; - } - if (!prompt.description || typeof prompt.description !== 'string') { - return `${providerId}: prompt[${i}].description must be a non-empty string`; - } - } - - if (!Array.isArray(config.successPatterns)) { - return `${providerId}: 'successPatterns' must be an array`; - } - - for (let i = 0; i < config.successPatterns.length; i++) { - if (!(config.successPatterns[i] instanceof RegExp)) { - return `${providerId}: successPatterns[${i}] must be a RegExp`; - } - } - - return null; -} - -/** - * Validate all provider configurations - * Throws an error if any provider is invalid - */ -export function validateAllProviderConfigs(): void { - const errors: string[] = []; - - for (const [providerId, config] of Object.entries(CLI_AUTH_CONFIG)) { - const error = validateProviderConfig(providerId, config); - if (error) { - errors.push(error); - } - } - - if (errors.length > 0) { - throw new Error(`Invalid provider configurations:\n${errors.join('\n')}`); - } -} - -/** - * Result of running a CLI auth flow via PTY - */ -export interface PTYAuthResult { - authUrl: string | null; - success: boolean; - promptsHandled: string[]; - output: string; - exitCode: number | null; - error?: string; -} - -/** - * Options for running CLI auth via PTY - */ -export interface PTYAuthOptions { - /** Callback when auth URL is found */ - onAuthUrl?: (url: string) => void; - /** Callback when a prompt is handled */ - onPromptHandled?: (description: string) => void; - /** Callback for raw PTY output */ - onOutput?: (data: string) => void; - /** Environment variables override */ - env?: Record; - /** Working directory */ - cwd?: string; -} - -/** - * Run CLI auth flow via PTY - * - * This is the core PTY runner used by both production and tests. - * It handles: - * - Spawning the CLI with proper TTY emulation - * - Auto-responding to interactive prompts - * - Extracting auth URLs from output - * - Detecting success patterns - * - * @param config - CLI auth configuration for the provider - * @param options - Optional callbacks and overrides - * @returns Promise resolving to auth result - */ -export async function runCLIAuthViaPTY( - config: CLIAuthConfig, - options: PTYAuthOptions = {} -): Promise { - const result: PTYAuthResult = { - authUrl: null, - success: false, - promptsHandled: [], - output: '', - exitCode: null, - }; - - const respondedPrompts = new Set(); - - return new Promise((resolve) => { - try { - const proc = pty.spawn(config.command, config.args, { - name: 'xterm-256color', - cols: 120, - rows: 30, - cwd: options.cwd || process.cwd(), - env: { - ...process.env, - NO_COLOR: '1', - TERM: 'xterm-256color', - // Prevent CLIs from trying to open browsers - BROWSER: 'echo', - DISPLAY: '', - ...options.env, - } as Record, - }); - - // Timeout handler - const timeout = setTimeout(() => { - proc.kill(); - result.error = 'Timeout waiting for auth URL'; - resolve(result); - }, config.waitTimeout + 5000); - - proc.onData((data: string) => { - result.output += data; - options.onOutput?.(data); - - // Check for matching prompts and auto-respond - const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); - if (matchingPrompt) { - respondedPrompts.add(matchingPrompt.description); - result.promptsHandled.push(matchingPrompt.description); - options.onPromptHandled?.(matchingPrompt.description); - - const delay = matchingPrompt.delay ?? 100; - setTimeout(() => { - try { - proc.write(matchingPrompt.response); - } catch { - // Process may have exited - } - }, delay); - } - - // Look for auth URL - const cleanText = stripAnsiCodes(data); - const match = cleanText.match(config.urlPattern); - if (match && match[1] && !result.authUrl) { - result.authUrl = match[1]; - options.onAuthUrl?.(result.authUrl); - } - - // Check for success indicators - if (matchesSuccessPattern(data, config.successPatterns)) { - result.success = true; - } - }); - - proc.onExit(({ exitCode }) => { - clearTimeout(timeout); - result.exitCode = exitCode; - - // Consider it a success if we got a URL (main goal) - // or if exit code was 0 with success pattern - if (result.authUrl || (exitCode === 0 && result.success)) { - result.success = true; - } - - if (!result.authUrl && !result.success && !result.error) { - result.error = 'Failed to extract auth URL from CLI output'; - } - - resolve(result); - }); - } catch (err) { - result.error = err instanceof Error ? err.message : 'Unknown error'; - resolve(result); - } - }); -} - -/** - * Get list of supported providers for CLI auth - */ -export function getSupportedProviders(): { id: string; displayName: string; command: string }[] { - return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ - id, - displayName: config.displayName, - command: config.command, - })); -} - /** * POST /api/onboarding/cli/:provider/start * Start CLI-based auth - spawns the CLI and captures auth URL From ab83dce6125b99db66e814d8c7f287e31a64f9cb Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 08:59:01 +0000 Subject: [PATCH 029/103] Fix node-pty module resolution in test container Put cli-pty-runner.ts in /app/ alongside the test file so it can access node_modules. Previously it was in /src/cloud/api/ which couldn't resolve modules from /app/node_modules/. --- scripts/test-cli-auth/Dockerfile.real | 5 ++--- scripts/test-cli-auth/ci-test-real-clis.ts | 4 +++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index 73aed276..da6b9c2a 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -61,9 +61,8 @@ ENV PATH="/home/testuser/.local/bin:$PATH" COPY --chown=testuser:testuser scripts/test-cli-auth/ci-test-real-clis.ts /app/ COPY --chown=testuser:testuser scripts/test-cli-auth/package.json /app/ -# Copy the cli-pty-runner module that the test imports -# Import path ../../src/cloud/api/cli-pty-runner.js from /app/ resolves to /src/cloud/api/ -COPY --chown=testuser:testuser src/cloud/api/cli-pty-runner.ts /src/cloud/api/ +# Copy the cli-pty-runner module to /app/ so it can access node_modules +COPY --chown=testuser:testuser src/cloud/api/cli-pty-runner.ts /app/ # Install test dependencies RUN npm install diff --git a/scripts/test-cli-auth/ci-test-real-clis.ts b/scripts/test-cli-auth/ci-test-real-clis.ts index f3d4dbb9..8528c804 100644 --- a/scripts/test-cli-auth/ci-test-real-clis.ts +++ b/scripts/test-cli-auth/ci-test-real-clis.ts @@ -22,11 +22,13 @@ import { writeFileSync } from 'fs'; // Import the actual config and PTY runner from cli-pty-runner.ts // This ensures tests use the EXACT SAME logic as production +// Note: In Docker container, both files are in /app/, so use relative import +// For local dev, this path also works from scripts/test-cli-auth/ import { CLI_AUTH_CONFIG, runCLIAuthViaPTY, type PTYAuthResult, -} from '../../src/cloud/api/cli-pty-runner.js'; +} from './cli-pty-runner.js'; interface TestResult { provider: string; From 6711ea36d2a52393b9868b857c48cde43dbc5afe Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 09:06:35 +0000 Subject: [PATCH 030/103] Fix CLI auth commands for Gemini, OpenCode, and Droid - Gemini: Add prompt handler for auth method selection - OpenCode: Use 'opencode auth login' command (not just 'opencode') - Droid: Use '--login' flag and add login prompt handler - Increase waitTimeout to 5000ms for all three providers Based on official CLI documentation: - https://github.com/google-gemini/gemini-cli - https://opencode.ai/docs/cli/ - https://docs.factory.ai/cli/ --- src/cloud/api/cli-pty-runner.ts | 46 +++++++++++++++++++++++++++------ 1 file changed, 38 insertions(+), 8 deletions(-) diff --git a/src/cloud/api/cli-pty-runner.ts b/src/cloud/api/cli-pty-runner.ts index 93f4e72b..0c386b01 100644 --- a/src/cloud/api/cli-pty-runner.ts +++ b/src/cloud/api/cli-pty-runner.ts @@ -117,35 +117,65 @@ export const CLI_AUTH_CONFIG: Record = { args: [], urlPattern: /(https:\/\/[^\s]+)/, displayName: 'Gemini', - waitTimeout: 3000, - prompts: [], + waitTimeout: 5000, + prompts: [ + { + pattern: /login\s*with\s*google|google\s*account|choose.*auth/i, + response: '\r', // Select first option (Login with Google) + delay: 200, + description: 'Auth method selection', + }, + ], successPatterns: [ /success/i, /authenticated/i, + /logged\s*in/i, ], }, opencode: { command: 'opencode', - args: [], + args: ['auth', 'login'], urlPattern: /(https:\/\/[^\s]+)/, displayName: 'OpenCode', - waitTimeout: 3000, - prompts: [], + waitTimeout: 5000, + prompts: [ + { + pattern: /select.*provider|choose.*provider|which.*provider/i, + response: '\r', // Select first provider + delay: 200, + description: 'Provider selection', + }, + { + pattern: /claude\s*pro|anthropic|select.*auth/i, + response: '\r', // Select first auth option + delay: 200, + description: 'Auth type selection', + }, + ], successPatterns: [ /success/i, /authenticated/i, + /logged\s*in/i, ], }, droid: { command: 'droid', - args: [], + args: ['--login'], urlPattern: /(https:\/\/[^\s]+)/, displayName: 'Droid', - waitTimeout: 3000, - prompts: [], + waitTimeout: 5000, + prompts: [ + { + pattern: /sign\s*in|log\s*in|authenticate/i, + response: '\r', + delay: 200, + description: 'Login prompt', + }, + ], successPatterns: [ /success/i, /authenticated/i, + /logged\s*in/i, ], }, }; From 9fccd708598cee0a084c9dcda6d060e216ce7634 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 09:14:07 +0000 Subject: [PATCH 031/103] Fix Gemini CLI install - npm -g requires root Move Gemini npm install -g before USER switch since global npm installs require root permissions. Same fix applied to both: - scripts/test-cli-auth/Dockerfile.real (test container) - deploy/workspace/Dockerfile (production) OpenCode uses curl install script which writes to ~/.local/bin and should work as non-root user. --- deploy/workspace/Dockerfile | 6 ++++-- scripts/test-cli-auth/Dockerfile.real | 10 +++++----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index e738d2e9..f25350ed 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -59,6 +59,9 @@ RUN chmod +x /entrypoint.sh /usr/local/bin/git-credential-relay # Install Codex globally as root (requires write to /usr/local) RUN npm install -g @openai/codex +# Install Gemini CLI globally as root (npm -g requires root) +RUN npm install -g @google/gemini-cli + # Create workspace directory RUN mkdir -p /workspace/repos /data @@ -77,8 +80,7 @@ RUN mkdir -p /home/workspace/.claude && \ RUN curl -fsSL https://opencode.ai/install | bash # Droid RUN curl -fsSL https://app.factory.ai/cli | sh -# Gemini -RUN npm install -g @google/gemini-cli +# Note: Gemini is installed as root above (npm -g requires root) # Environment ENV NODE_ENV=production diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index da6b9c2a..710afd43 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -31,12 +31,15 @@ WORKDIR /app # Install Codex globally as root (matches workspace Dockerfile) RUN npm install -g @openai/codex || echo "Codex install failed" +# Install Gemini CLI globally as root (npm -g requires root) +RUN npm install -g @google/gemini-cli || echo "Gemini install failed" + # Create test user (CLIs install to ~/.local/bin) RUN useradd -m -u 1001 testuser RUN chown -R testuser:testuser /app USER testuser -# Install AI CLIs as testuser (matches workspace Dockerfile installation methods) +# Install AI CLIs as testuser (these install scripts write to ~/.local/bin) # Claude - uses official install script RUN curl -fsSL https://claude.ai/install.sh | bash || echo "Claude install failed" @@ -44,15 +47,12 @@ RUN curl -fsSL https://claude.ai/install.sh | bash || echo "Claude install faile RUN mkdir -p /home/testuser/.claude && \ echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/testuser/.claude/settings.local.json -# OpenCode - uses official install script +# OpenCode - uses official install script (installs to ~/.local/bin) RUN curl -fsSL https://opencode.ai/install | bash || echo "OpenCode install failed" # Droid - uses official install script RUN curl -fsSL https://app.factory.ai/cli | sh || echo "Droid install failed" -# Gemini CLI - install via npm -RUN npm install -g @google/gemini-cli || echo "Gemini install failed" - # Add user's local bin to PATH ENV PATH="/home/testuser/.local/bin:$PATH" From 14bc0c293ddbbbdc2b48b7aa36babd3aaa02a54d Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 09:15:39 +0000 Subject: [PATCH 032/103] Fix OpenCode CLI install - use npm instead of curl The curl install script (https://opencode.ai/install) returns 503. Use npm install instead: npm install -g opencode-ai@latest All npm-based CLIs (Codex, Gemini, OpenCode) are now installed as root before the USER switch since npm -g requires root. Only Claude and Droid use curl install scripts (they write to ~/.local/bin and work as non-root). --- deploy/workspace/Dockerfile | 9 +++------ scripts/test-cli-auth/Dockerfile.real | 8 +++----- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index f25350ed..f2ec7a2d 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -56,11 +56,10 @@ COPY deploy/workspace/entrypoint.sh /entrypoint.sh COPY deploy/workspace/git-credential-relay /usr/local/bin/git-credential-relay RUN chmod +x /entrypoint.sh /usr/local/bin/git-credential-relay -# Install Codex globally as root (requires write to /usr/local) +# Install npm-based CLIs globally as root (npm -g requires root) RUN npm install -g @openai/codex - -# Install Gemini CLI globally as root (npm -g requires root) RUN npm install -g @google/gemini-cli +RUN npm install -g opencode-ai@latest # Create workspace directory RUN mkdir -p /workspace/repos /data @@ -76,11 +75,9 @@ RUN curl -fsSL https://claude.ai/install.sh | bash # Pre-seed Claude config to skip interactive onboarding RUN mkdir -p /home/workspace/.claude && \ echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/workspace/.claude/settings.local.json -# OpenCode -RUN curl -fsSL https://opencode.ai/install | bash +# Note: Codex, Gemini, and OpenCode are installed as root above via npm # Droid RUN curl -fsSL https://app.factory.ai/cli | sh -# Note: Gemini is installed as root above (npm -g requires root) # Environment ENV NODE_ENV=production diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index 710afd43..4fcdd423 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -28,11 +28,10 @@ RUN apt-get update && apt-get install -y \ WORKDIR /app -# Install Codex globally as root (matches workspace Dockerfile) +# Install npm-based CLIs globally as root (npm -g requires root) RUN npm install -g @openai/codex || echo "Codex install failed" - -# Install Gemini CLI globally as root (npm -g requires root) RUN npm install -g @google/gemini-cli || echo "Gemini install failed" +RUN npm install -g opencode-ai@latest || echo "OpenCode install failed" # Create test user (CLIs install to ~/.local/bin) RUN useradd -m -u 1001 testuser @@ -47,8 +46,7 @@ RUN curl -fsSL https://claude.ai/install.sh | bash || echo "Claude install faile RUN mkdir -p /home/testuser/.claude && \ echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/testuser/.claude/settings.local.json -# OpenCode - uses official install script (installs to ~/.local/bin) -RUN curl -fsSL https://opencode.ai/install | bash || echo "OpenCode install failed" +# Note: OpenCode is installed as root above via npm # Droid - uses official install script RUN curl -fsSL https://app.factory.ai/cli | sh || echo "Droid install failed" From 02b58d5042bc5a9afbe4163a572dbbee2bb3b695 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 09:55:06 +0000 Subject: [PATCH 033/103] Increase project hash from 12 to 16 chars for better collision resistance --- src/trajectory/config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/trajectory/config.ts b/src/trajectory/config.ts index dd304c73..607b016a 100644 --- a/src/trajectory/config.ts +++ b/src/trajectory/config.ts @@ -106,7 +106,7 @@ export function shouldStoreInRepo(projectRoot?: string): boolean { */ export function getProjectHash(projectRoot?: string): string { const root = projectRoot ?? getProjectPaths().projectRoot; - return createHash('sha256').update(root).digest('hex').slice(0, 12); + return createHash('sha256').update(root).digest('hex').slice(0, 16); } /** From b8c575931fb12b2918695b735437e10fd3d38782 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 10:48:02 +0000 Subject: [PATCH 034/103] Add design doc for CI failure webhook agent notifications Documents architecture for automatically notifying agents when GitHub CI checks fail on pull requests, enabling autonomous investigation and fixing of CI failures. Covers: - Webhook event handling (check_run, workflow_run) - Agent spawning vs messaging existing agents - Failure context extraction and prompt generation - Configuration options per repository - Agent profiles for different fix types (lint, test, typecheck) - Database schema for tracking failures and fix attempts - Security considerations (signature verification, rate limiting) - Monitoring and observability --- docs/design/ci-failure-webhooks.md | 702 +++++++++++++++++++++++++++++ 1 file changed, 702 insertions(+) create mode 100644 docs/design/ci-failure-webhooks.md diff --git a/docs/design/ci-failure-webhooks.md b/docs/design/ci-failure-webhooks.md new file mode 100644 index 00000000..0187076b --- /dev/null +++ b/docs/design/ci-failure-webhooks.md @@ -0,0 +1,702 @@ +# CI Failure Webhooks - Agent Notification System + +## Overview + +This document describes the architecture for automatically notifying agents when GitHub CI checks fail on pull requests. This enables agents to autonomously investigate and fix CI failures without human intervention. + +## Motivation + +Currently, when CI fails on a PR: +1. Developer notices the failure (manual) +2. Developer investigates logs (manual) +3. Developer fixes the issue (manual) +4. Developer pushes and waits for CI again (manual) + +With webhook-based agent notification: +1. CI fails โ†’ webhook fires +2. Agent receives failure context automatically +3. Agent investigates and pushes fix +4. CI re-runs automatically + +This closes the loop for autonomous PR maintenance. + +## Architecture + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” webhook โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ GitHub โ”‚ โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€> โ”‚ Cloud API โ”‚ +โ”‚ (CI fails) โ”‚ check_run โ”‚ /webhooks โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ completed โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ + โ”‚ spawn or message + โ–ผ + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ Agent Relay โ”‚ + โ”‚ Daemon โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ–ผ โ–ผ โ–ผ + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ Agent โ”‚ โ”‚ Agent โ”‚ โ”‚ Agent โ”‚ + โ”‚ (PR) โ”‚ โ”‚ (Lint) โ”‚ โ”‚ (Test) โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +## GitHub Webhook Events + +### Relevant Events + +| Event | Trigger | Use Case | +|-------|---------|----------| +| `check_run` | Individual check completes | Fine-grained failure handling | +| `check_suite` | All checks complete | Wait for full CI before acting | +| `workflow_run` | GitHub Action completes | Action-specific handling | +| `pull_request` | PR state changes | Track PR lifecycle | + +### Recommended: `check_run` Event + +The `check_run` event provides the most actionable data: + +```json +{ + "action": "completed", + "check_run": { + "id": 123456789, + "name": "lint", + "status": "completed", + "conclusion": "failure", + "output": { + "title": "ESLint found 3 errors", + "summary": "Fix the following issues...", + "text": "src/foo.ts:10:5 - error: ...", + "annotations": [ + { + "path": "src/foo.ts", + "start_line": 10, + "end_line": 10, + "annotation_level": "failure", + "message": "Unexpected console statement" + } + ] + }, + "pull_requests": [ + { + "number": 55, + "head": { + "ref": "feature-branch", + "sha": "abc123" + } + } + ] + }, + "repository": { + "full_name": "org/repo" + } +} +``` + +## Implementation + +### 1. Webhook Endpoint + +```typescript +// src/cloud/api/webhooks.ts + +import { Router } from 'express'; +import crypto from 'crypto'; + +export const webhookRouter = Router(); + +/** + * Verify GitHub webhook signature + */ +function verifyGitHubSignature( + payload: string, + signature: string, + secret: string +): boolean { + const expected = `sha256=${crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex')}`; + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expected) + ); +} + +/** + * GitHub webhook handler for CI failures + */ +webhookRouter.post('/github/ci', async (req, res) => { + const event = req.headers['x-github-event'] as string; + const signature = req.headers['x-hub-signature-256'] as string; + const payload = JSON.stringify(req.body); + + // Verify webhook authenticity + const secret = process.env.GITHUB_WEBHOOK_SECRET; + if (secret && !verifyGitHubSignature(payload, signature, secret)) { + return res.status(401).json({ error: 'Invalid signature' }); + } + + // Handle check_run events + if (event === 'check_run') { + await handleCheckRunEvent(req.body); + } + + // Handle workflow_run events + if (event === 'workflow_run') { + await handleWorkflowRunEvent(req.body); + } + + res.status(200).json({ received: true }); +}); +``` + +### 2. Check Run Handler + +```typescript +// src/cloud/api/ci-handlers.ts + +import { db } from '../db'; +import { spawnAgent, messageAgent } from '../services/agent-spawner'; + +interface CheckRunPayload { + action: string; + check_run: { + id: number; + name: string; + status: string; + conclusion: string | null; + output: { + title: string; + summary: string; + text?: string; + annotations?: Array<{ + path: string; + start_line: number; + end_line: number; + annotation_level: string; + message: string; + }>; + }; + pull_requests: Array<{ + number: number; + head: { ref: string; sha: string }; + }>; + }; + repository: { + full_name: string; + clone_url: string; + }; +} + +/** + * Handle check_run webhook events + */ +export async function handleCheckRunEvent(payload: CheckRunPayload) { + const { action, check_run, repository } = payload; + + // Only handle completed, failed checks + if (action !== 'completed') return; + if (check_run.conclusion !== 'failure') return; + + // Only handle checks on PRs + if (check_run.pull_requests.length === 0) return; + + const pr = check_run.pull_requests[0]; + const failureContext = buildFailureContext(payload); + + // Check if there's already an agent working on this PR + const existingAgent = await findAgentForPR(repository.full_name, pr.number); + + if (existingAgent) { + // Message the existing agent about the failure + await messageAgent(existingAgent.id, { + type: 'ci_failure', + ...failureContext, + }); + } else { + // Spawn a new agent to handle the failure + await spawnCIFixAgent(failureContext); + } +} + +/** + * Build structured context from check run failure + */ +function buildFailureContext(payload: CheckRunPayload) { + const { check_run, repository } = payload; + const pr = check_run.pull_requests[0]; + + return { + repository: repository.full_name, + cloneUrl: repository.clone_url, + prNumber: pr.number, + branch: pr.head.ref, + commitSha: pr.head.sha, + checkName: check_run.name, + checkId: check_run.id, + failureTitle: check_run.output.title, + failureSummary: check_run.output.summary, + failureDetails: check_run.output.text, + annotations: check_run.output.annotations || [], + }; +} +``` + +### 3. Agent Spawner + +```typescript +// src/cloud/services/agent-spawner.ts + +import { WorkspaceProvisioner } from '../provisioner'; + +interface CIFailureContext { + repository: string; + cloneUrl: string; + prNumber: number; + branch: string; + commitSha: string; + checkName: string; + checkId: number; + failureTitle: string; + failureSummary: string; + failureDetails?: string; + annotations: Array<{ + path: string; + start_line: number; + end_line: number; + message: string; + }>; +} + +/** + * Spawn an agent to fix CI failures + */ +export async function spawnCIFixAgent(context: CIFailureContext) { + const prompt = buildAgentPrompt(context); + + // Find or create workspace for this repository + const workspace = await findOrCreateWorkspace(context.repository); + + // Spawn agent in the workspace + await workspace.spawnAgent({ + name: `ci-fix-${context.checkName}-${context.prNumber}`, + prompt, + branch: context.branch, + workingDirectory: `/workspace/repos/${context.repository}`, + }); +} + +/** + * Build the prompt for the CI fix agent + */ +function buildAgentPrompt(context: CIFailureContext): string { + const annotationsList = context.annotations + .map(a => `- ${a.path}:${a.start_line} - ${a.message}`) + .join('\n'); + + return ` +# CI Failure Fix Task + +A CI check has failed on PR #${context.prNumber} in ${context.repository}. + +## Failure Details + +**Check Name:** ${context.checkName} +**Title:** ${context.failureTitle} +**Summary:** ${context.failureSummary} + +${context.failureDetails ? `**Details:**\n${context.failureDetails}` : ''} + +${annotationsList ? `## Annotations\n\n${annotationsList}` : ''} + +## Your Task + +1. Checkout the branch: \`${context.branch}\` +2. Analyze the failure based on the annotations and error messages +3. Fix the issues in the affected files +4. Run the relevant checks locally to verify the fix +5. Commit and push your changes with a clear commit message +6. Report back with a summary of what was fixed + +## Important + +- Only fix the specific issues causing the CI failure +- Do not refactor or improve unrelated code +- If you cannot fix the issue, explain why and what manual intervention is needed +`.trim(); +} +``` + +### 4. Agent Notification via Relay + +For agents already working on a PR, send failure notifications through the relay system: + +```typescript +// src/cloud/services/agent-notifier.ts + +import { RelayClient } from '../../relay/client'; + +interface CIFailureMessage { + type: 'ci_failure'; + checkName: string; + failureTitle: string; + failureSummary: string; + annotations: Array<{ + path: string; + start_line: number; + message: string; + }>; +} + +/** + * Notify an agent about CI failure via relay message + */ +export async function notifyAgentOfCIFailure( + agentId: string, + failure: CIFailureMessage +) { + const relay = new RelayClient(); + + const message = formatCIFailureMessage(failure); + + await relay.sendMessage({ + to: agentId, + content: message, + priority: 'high', + thread: `ci-failure-${failure.checkName}`, + }); +} + +function formatCIFailureMessage(failure: CIFailureMessage): string { + const annotations = failure.annotations + .slice(0, 10) // Limit to first 10 + .map(a => ` - ${a.path}:${a.start_line}: ${a.message}`) + .join('\n'); + + return ` +CI FAILURE: ${failure.checkName} + +${failure.failureTitle} + +${failure.failureSummary} + +${annotations ? `Issues:\n${annotations}` : ''} + +Please investigate and fix these issues, then push your changes. +`.trim(); +} +``` + +## Configuration + +### Workspace Settings + +Repositories can configure CI webhook behavior in `.relay/config.json`: + +```json +{ + "ciWebhooks": { + "enabled": true, + "autoFix": { + "lint": true, + "typecheck": true, + "test": false + }, + "notifyExistingAgent": true, + "spawnNewAgent": true, + "maxConcurrentAgents": 3, + "cooldownMinutes": 5 + } +} +``` + +### Check Name Mapping + +Map CI check names to fix strategies: + +```json +{ + "ciWebhooks": { + "checkStrategies": { + "lint": { + "autoFix": true, + "command": "npm run lint:fix", + "agentProfile": "linter" + }, + "typecheck": { + "autoFix": true, + "command": "npm run typecheck", + "agentProfile": "typescript-expert" + }, + "test": { + "autoFix": false, + "notifyOnly": true, + "agentProfile": "tester" + } + } + } +} +``` + +## Agent Profiles for CI Fixes + +### Lint Fix Agent + +```yaml +# .claude/agents/lint-fixer.md +--- +name: LintFixer +description: Fixes linting errors automatically +tools: + - Read + - Edit + - Bash +model: haiku +--- + +You are a code quality specialist. Your job is to fix linting errors. + +## Approach + +1. Read the files with errors +2. Understand the linting rule being violated +3. Fix the code to comply with the rule +4. Run the linter to verify the fix +5. Commit with message: "fix: resolve lint errors" + +## Rules + +- Fix only the specific errors reported +- Do not change code style beyond what's needed +- Do not add or remove features +- If a rule seems wrong, fix it anyway (discuss rule changes separately) +``` + +### Test Fix Agent + +```yaml +# .claude/agents/test-fixer.md +--- +name: TestFixer +description: Investigates and fixes failing tests +tools: + - Read + - Edit + - Bash + - Grep +model: sonnet +--- + +You are a testing specialist. Your job is to fix failing tests. + +## Approach + +1. Run the failing test to see the actual error +2. Determine if the issue is: + - Test is wrong (update the test) + - Code is wrong (fix the code) + - Environment issue (fix setup) +3. Apply the minimal fix +4. Run the test again to verify +5. Run the full test suite to check for regressions +6. Commit with descriptive message + +## Rules + +- Prefer fixing code over changing tests +- If changing tests, explain why in the commit message +- Never delete tests to make CI pass +- If stuck, report the issue instead of guessing +``` + +## Database Schema + +Track CI failure events and agent responses: + +```sql +-- CI failure events +CREATE TABLE ci_failure_events ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + workspace_id UUID REFERENCES workspaces(id), + repository TEXT NOT NULL, + pr_number INTEGER NOT NULL, + check_name TEXT NOT NULL, + check_id BIGINT NOT NULL, + conclusion TEXT NOT NULL, + failure_title TEXT, + failure_summary TEXT, + annotations JSONB, + created_at TIMESTAMP DEFAULT NOW() +); + +-- Agent responses to CI failures +CREATE TABLE ci_fix_attempts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + failure_event_id UUID REFERENCES ci_failure_events(id), + agent_id TEXT NOT NULL, + agent_name TEXT NOT NULL, + status TEXT NOT NULL, -- 'pending', 'in_progress', 'success', 'failed' + commit_sha TEXT, + error_message TEXT, + started_at TIMESTAMP DEFAULT NOW(), + completed_at TIMESTAMP +); + +-- Indexes +CREATE INDEX idx_ci_failures_repo_pr ON ci_failure_events(repository, pr_number); +CREATE INDEX idx_ci_failures_created ON ci_failure_events(created_at); +CREATE INDEX idx_ci_fix_attempts_status ON ci_fix_attempts(status); +``` + +## API Endpoints + +### Webhook Registration + +``` +POST /api/webhooks/github/register +{ + "repository": "org/repo", + "events": ["check_run", "workflow_run"], + "secret": "webhook-secret" +} +``` + +### CI Failure History + +``` +GET /api/ci-failures?repository=org/repo&pr=55 + +Response: +{ + "failures": [ + { + "id": "...", + "checkName": "lint", + "failureTitle": "ESLint found 3 errors", + "createdAt": "2025-01-04T...", + "fixAttempt": { + "agentName": "ci-fix-lint-55", + "status": "success", + "commitSha": "def456" + } + } + ] +} +``` + +### Manual Trigger + +``` +POST /api/ci-failures/retry +{ + "failureEventId": "...", + "agentProfile": "lint-fixer" +} +``` + +## Security Considerations + +### Webhook Verification + +Always verify webhook signatures: + +```typescript +const signature = req.headers['x-hub-signature-256']; +const payload = JSON.stringify(req.body); +const expected = `sha256=${crypto + .createHmac('sha256', WEBHOOK_SECRET) + .update(payload) + .digest('hex')}`; + +if (!crypto.timingSafeEqual(Buffer.from(signature), Buffer.from(expected))) { + throw new Error('Invalid webhook signature'); +} +``` + +### Rate Limiting + +Prevent abuse with rate limits: + +```typescript +const rateLimiter = new RateLimiter({ + // Max 10 agent spawns per repo per hour + key: (req) => `ci-spawn:${req.body.repository.full_name}`, + maxRequests: 10, + windowMs: 60 * 60 * 1000, +}); +``` + +### Agent Permissions + +CI fix agents should have limited permissions: + +```yaml +permissions: + tools: + - Read + - Edit + - Bash + bash: + allowedCommands: + - npm + - git + - eslint + blockedCommands: + - rm -rf + - curl + - wget + files: + writable: + - "src/**" + - "test/**" + readonly: + - "package.json" + - ".github/**" +``` + +## Monitoring & Observability + +### Metrics to Track + +- `ci_webhook_received_total` - Total webhooks received by event type +- `ci_failure_events_total` - Total CI failures by check name +- `ci_fix_attempts_total` - Fix attempts by status (success/failed) +- `ci_fix_duration_seconds` - Time from failure to fix commit +- `ci_agent_spawn_total` - Agents spawned for CI fixes + +### Alerts + +```yaml +alerts: + - name: HighCIFailureRate + condition: rate(ci_failure_events_total[1h]) > 10 + severity: warning + message: "High CI failure rate detected" + + - name: AgentFixFailures + condition: rate(ci_fix_attempts_total{status="failed"}[1h]) > 5 + severity: warning + message: "Agents failing to fix CI issues" +``` + +## Future Enhancements + +1. **Learning from Fixes**: Track successful fixes to build patterns for common errors + +2. **Pre-emptive Checks**: Run checks locally before push to catch issues early + +3. **Fix Suggestions**: Instead of auto-fixing, suggest fixes for human review + +4. **Cross-repo Learning**: Apply fix patterns learned in one repo to others + +5. **Escalation Paths**: Auto-escalate to humans if agent can't fix after N attempts + +## References + +- [GitHub Webhooks Documentation](https://docs.github.com/en/webhooks) +- [GitHub Checks API](https://docs.github.com/en/rest/checks) +- [Agent Relay Protocol](./agent-relay-protocol.md) From fae7a326f2b6c1cebfc279fc9a1af26a6363d556 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 10:57:08 +0000 Subject: [PATCH 035/103] Implement CI failure webhook handling with agent spawning - Add database schema for tracking CI failure events and fix attempts - ciFailureEvents: stores check_run failures with annotations - ciFixAttempts: tracks agent responses to failures - Add check_run webhook handler to process CI failures - Records failures in database - Spawns agents to fix issues - Prevents duplicate agents per repo - Create ci-agent-spawner service - Builds agent prompts with failure context - Creates fix attempt records - Provides notification for existing agents - Add GitHub App permissions documentation - Required permissions for CI webhook integration - Webhook event configuration - Security considerations --- docs/design/github-app-permissions.md | 264 +++++++++++++++++++++++++ src/cloud/api/webhooks.ts | 195 ++++++++++++++++++ src/cloud/db/drizzle.ts | 167 ++++++++++++++++ src/cloud/db/index.ts | 17 ++ src/cloud/db/schema.ts | 108 ++++++++++ src/cloud/services/ci-agent-spawner.ts | 210 ++++++++++++++++++++ src/cloud/services/index.ts | 9 + 7 files changed, 970 insertions(+) create mode 100644 docs/design/github-app-permissions.md create mode 100644 src/cloud/services/ci-agent-spawner.ts diff --git a/docs/design/github-app-permissions.md b/docs/design/github-app-permissions.md new file mode 100644 index 00000000..2b086e8a --- /dev/null +++ b/docs/design/github-app-permissions.md @@ -0,0 +1,264 @@ +# GitHub App Permissions + +This document describes the GitHub App permissions required for Agent Relay's features, particularly the CI failure webhook integration. + +## Overview + +Agent Relay uses a GitHub App to: +1. Receive webhook events (installations, PRs, CI failures) +2. Access repository code for cloning/syncing +3. Create commits and push fixes +4. Interact with PRs (comments, reviews) + +## Required Permissions + +### Repository Permissions + +| Permission | Access Level | Purpose | +|------------|--------------|---------| +| **Contents** | Read & Write | Clone repos, push commits, read files | +| **Pull requests** | Read & Write | Read PR details, create/update PRs, comment | +| **Checks** | Read | Receive check_run webhooks, read failure details | +| **Actions** | Read | Receive workflow_run webhooks, read logs | +| **Commit statuses** | Read | Read status checks, understand CI state | +| **Metadata** | Read | Basic repo info (required for all apps) | + +### Organization Permissions + +| Permission | Access Level | Purpose | +|------------|--------------|---------| +| **Members** | Read | Identify organization members for access control | + +### Account Permissions + +| Permission | Access Level | Purpose | +|------------|--------------|---------| +| **Email addresses** | Read | User identification, notifications | + +## Webhook Events + +The following webhook events should be enabled: + +### Required Events + +| Event | Purpose | +|-------|---------| +| `installation` | Track app installations/uninstallations | +| `installation_repositories` | Track repo access changes | +| `check_run` | **CI failure detection** - triggers agent spawn | +| `workflow_run` | Workflow-level failure tracking | +| `push` | Detect new commits for sync | +| `pull_request` | Track PR lifecycle | + +### Optional Events + +| Event | Purpose | +|-------|---------| +| `issues` | Future: issue-to-agent assignment | +| `issue_comment` | Future: agent @mentions | +| `pull_request_review` | Future: review request handling | +| `check_suite` | Aggregate check status | + +## Configuration Steps + +### 1. Create GitHub App + +1. Go to GitHub Settings > Developer settings > GitHub Apps +2. Click "New GitHub App" +3. Fill in basic info: + - **Name**: Agent Relay (or your instance name) + - **Homepage URL**: Your dashboard URL + - **Webhook URL**: `https://your-domain.com/api/webhooks/github` + +### 2. Set Permissions + +Under "Permissions & events": + +**Repository permissions:** +- Contents: Read and write +- Pull requests: Read and write +- Checks: Read-only +- Actions: Read-only +- Commit statuses: Read-only +- Metadata: Read-only (default) + +**Organization permissions:** +- Members: Read-only + +**Account permissions:** +- Email addresses: Read-only + +### 3. Subscribe to Events + +Check the following events: +- [x] Check run +- [x] Workflow run +- [x] Installation +- [x] Installation and repositories +- [x] Push +- [x] Pull request + +### 4. Generate Keys + +1. Generate a private key (downloads .pem file) +2. Note the App ID +3. Generate a client secret for OAuth + +### 5. Configure Agent Relay + +Set environment variables: + +```bash +# GitHub App credentials +GITHUB_APP_ID=123456 +GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\n..." +GITHUB_CLIENT_ID=Iv1.abc123 +GITHUB_CLIENT_SECRET=secret123 + +# Webhook secret (generate a random string) +GITHUB_WEBHOOK_SECRET=whsec_random_string_here +``` + +## Permission Rationale + +### Why Contents: Write? + +Agents need to push fixes to branches. This includes: +- Creating new commits +- Pushing to existing branches +- Creating new branches for fixes + +### Why Checks: Read (not Write)? + +We only receive check failure events and read results. We don't: +- Create our own checks +- Update check status + +CI runs in GitHub Actions and creates its own checks. + +### Why Pull Requests: Write? + +Agents may need to: +- Comment on PRs with fix summaries +- Request reviews after fixes +- Update PR descriptions + +### Why Actions: Read? + +For workflow_run events that provide: +- Workflow-level failure context +- Access to workflow logs (future) + +## Security Considerations + +### Webhook Secret + +Always configure a webhook secret: + +```typescript +function verifyGitHubSignature(payload: string, signature: string, secret: string): boolean { + const expected = `sha256=${crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex')}`; + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expected) + ); +} +``` + +### Private Key Storage + +- Never commit the private key to version control +- Use secure secret management (Vault, AWS Secrets Manager, etc.) +- Rotate keys periodically + +### Installation Scope + +When users install the app: +- Recommend "Only select repositories" over "All repositories" +- Document which repos will be monitored +- Allow easy un-installation + +### Token Expiry + +GitHub App installation tokens expire after 1 hour: +- Cache tokens with expiry tracking +- Refresh before expiration +- Handle 401 errors with token refresh + +## Minimal Permissions Option + +For users who want minimal permissions: + +| Permission | Access | Notes | +|------------|--------|-------| +| Contents | Read | Can clone, cannot push | +| Pull requests | Read | Can read PRs, cannot comment | +| Checks | Read | Receive failures | + +With minimal permissions: +- Agents can analyze failures but cannot push fixes +- Manual intervention required for commits +- Good for "notify only" mode + +## Events Flow + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” webhook โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ GitHub CI โ”‚ โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€>โ”‚ Agent Relay โ”‚ +โ”‚ (check_run โ”‚ โ”‚ /webhooks โ”‚ +โ”‚ failed) โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ + โ”‚ verify signature + โ”‚ parse payload + โ–ผ + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ Record failure โ”‚ + โ”‚ in database โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ + โ”‚ spawn agent + โ–ผ + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ Agent fixes โ”‚ + โ”‚ and pushes โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ + โ”‚ uses Contents:write + โ–ผ + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”‚ GitHub CI โ”‚ + โ”‚ re-runs โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +## Troubleshooting + +### Webhook Not Received + +1. Check webhook URL is correct and accessible +2. Verify webhook secret matches configuration +3. Check GitHub App webhook delivery logs +4. Ensure firewall allows GitHub IPs + +### Permission Denied Errors + +1. Verify app is installed on the repository +2. Check installation hasn't been suspended +3. Confirm required permissions are granted +4. Regenerate installation token + +### CI Events Not Triggering + +1. Verify `check_run` event is subscribed +2. Check check is from a GitHub Action (not external CI) +3. Ensure webhook URL is receiving events (check delivery logs) + +## References + +- [GitHub Apps documentation](https://docs.github.com/en/apps) +- [Webhook events and payloads](https://docs.github.com/en/webhooks/webhook-events-and-payloads) +- [GitHub App permissions](https://docs.github.com/en/rest/overview/permissions-required-for-github-apps) +- [Check runs API](https://docs.github.com/en/rest/checks/runs) diff --git a/src/cloud/api/webhooks.ts b/src/cloud/api/webhooks.ts index 6892b11c..5e95c2e2 100644 --- a/src/cloud/api/webhooks.ts +++ b/src/cloud/api/webhooks.ts @@ -75,6 +75,14 @@ webhooksRouter.post('/github', async (req: Request, res: Response) => { console.log(`[webhook] Issue ${req.body.action} on ${req.body.repository?.full_name}`); break; + case 'check_run': + await handleCheckRunEvent(req.body); + break; + + case 'workflow_run': + await handleWorkflowRunEvent(req.body); + break; + default: console.log(`[webhook] Unhandled event: ${event}`); } @@ -270,3 +278,190 @@ async function handleInstallationRepositoriesEvent(payload: { console.log(`[webhook] Removed access to ${repositories_removed.length} repositories`); } } + +// ============================================================================ +// CI Failure Webhook Handlers +// ============================================================================ + +/** + * Check run payload from GitHub webhook + */ +interface CheckRunPayload { + action: string; + check_run: { + id: number; + name: string; + status: string; + conclusion: string | null; + output: { + title: string | null; + summary: string | null; + text?: string | null; + annotations?: Array<{ + path: string; + start_line: number; + end_line: number; + annotation_level: string; + message: string; + }>; + }; + pull_requests: Array<{ + number: number; + head: { ref: string; sha: string }; + }>; + }; + repository: { + full_name: string; + clone_url: string; + }; +} + +/** + * Workflow run payload from GitHub webhook + */ +interface WorkflowRunPayload { + action: string; + workflow_run: { + id: number; + name: string; + status: string; + conclusion: string | null; + head_branch: string; + head_sha: string; + pull_requests: Array<{ + number: number; + }>; + }; + repository: { + full_name: string; + }; +} + +/** + * Handle check_run webhook events + * + * When a CI check fails on a PR, we: + * 1. Record the failure in our database + * 2. Check if an agent is already working on the PR + * 3. Either message the existing agent or spawn a new one + */ +async function handleCheckRunEvent(payload: CheckRunPayload): Promise { + const { action, check_run, repository } = payload; + + // Only handle completed checks + if (action !== 'completed') { + console.log(`[webhook] Ignoring check_run action: ${action}`); + return; + } + + // Only handle failures + if (check_run.conclusion !== 'failure') { + console.log(`[webhook] Check ${check_run.name} conclusion: ${check_run.conclusion} (not a failure)`); + return; + } + + // Only handle checks on PRs + if (check_run.pull_requests.length === 0) { + console.log(`[webhook] Check ${check_run.name} failed but not on a PR, skipping`); + return; + } + + const pr = check_run.pull_requests[0]; + + console.log( + `[webhook] CI failure: ${check_run.name} on ${repository.full_name}#${pr.number}` + ); + + // Build failure context + const failureContext = { + repository: repository.full_name, + prNumber: pr.number, + branch: pr.head.ref, + commitSha: pr.head.sha, + checkName: check_run.name, + checkId: check_run.id, + conclusion: check_run.conclusion, + failureTitle: check_run.output.title, + failureSummary: check_run.output.summary, + failureDetails: check_run.output.text, + annotations: (check_run.output.annotations || []).map(a => ({ + path: a.path, + startLine: a.start_line, + endLine: a.end_line, + annotationLevel: a.annotation_level, + message: a.message, + })), + }; + + // Record the failure in the database + try { + const failureEvent = await db.ciFailureEvents.create({ + repository: failureContext.repository, + prNumber: failureContext.prNumber, + branch: failureContext.branch, + commitSha: failureContext.commitSha, + checkName: failureContext.checkName, + checkId: failureContext.checkId, + conclusion: failureContext.conclusion, + failureTitle: failureContext.failureTitle, + failureSummary: failureContext.failureSummary, + failureDetails: failureContext.failureDetails, + annotations: failureContext.annotations, + }); + + console.log(`[webhook] Recorded CI failure event: ${failureEvent.id}`); + + // Check for existing active fix attempts on this repo + const activeAttempts = await db.ciFixAttempts.findActiveByRepository(repository.full_name); + + if (activeAttempts.length > 0) { + console.log(`[webhook] ${activeAttempts.length} active fix attempt(s) already exist, skipping spawn`); + await db.ciFailureEvents.markProcessed(failureEvent.id, false); + return; + } + + // Import and call the CI agent spawner (lazy import to avoid circular deps) + const { spawnCIFixAgent } = await import('../services/ci-agent-spawner.js'); + await spawnCIFixAgent(failureEvent); + + // Mark as processed with agent spawned + await db.ciFailureEvents.markProcessed(failureEvent.id, true); + console.log(`[webhook] Agent spawned for CI failure: ${failureEvent.id}`); + } catch (error) { + console.error(`[webhook] Failed to handle CI failure:`, error); + // Don't re-throw - we still want to return 200 to GitHub + } +} + +/** + * Handle workflow_run webhook events + * + * This handles the entire workflow completion. Useful for: + * - Waiting for all checks to complete before acting + * - Getting workflow-level context + */ +async function handleWorkflowRunEvent(payload: WorkflowRunPayload): Promise { + const { action, workflow_run, repository } = payload; + + // Only handle completed workflows + if (action !== 'completed') { + console.log(`[webhook] Ignoring workflow_run action: ${action}`); + return; + } + + // Only handle failures + if (workflow_run.conclusion !== 'failure') { + console.log(`[webhook] Workflow ${workflow_run.name} conclusion: ${workflow_run.conclusion}`); + return; + } + + // Log for now - we primarily handle individual check_runs + // but workflow_run events can be used for aggregate failure handling + console.log( + `[webhook] Workflow failed: ${workflow_run.name} on ${repository.full_name} ` + + `(branch: ${workflow_run.head_branch}, PRs: ${workflow_run.pull_requests.map(p => p.number).join(', ')})` + ); + + // Future: Could use this to trigger workflow-level actions + // For now, individual check_run events handle the actual failure processing +} diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index cb9c6736..42a94579 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -1156,6 +1156,173 @@ export const agentSummaryQueries: AgentSummaryQueries = { }, }; +// ============================================================================ +// CI Failure Event Queries +// ============================================================================ + +export interface CIFailureEventQueries { + findById(id: string): Promise; + findByRepository(repository: string, limit?: number): Promise; + findByPR(repository: string, prNumber: number): Promise; + findRecentUnprocessed(limit?: number): Promise; + create(data: schema.NewCIFailureEvent): Promise; + markProcessed(id: string, agentSpawned: boolean): Promise; + delete(id: string): Promise; +} + +export const ciFailureEventQueries: CIFailureEventQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.ciFailureEvents).where(eq(schema.ciFailureEvents.id, id)); + return result[0] ?? null; + }, + + async findByRepository(repository: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFailureEvents) + .where(eq(schema.ciFailureEvents.repository, repository)) + .orderBy(desc(schema.ciFailureEvents.createdAt)) + .limit(limit); + }, + + async findByPR(repository: string, prNumber: number): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFailureEvents) + .where( + and( + eq(schema.ciFailureEvents.repository, repository), + eq(schema.ciFailureEvents.prNumber, prNumber) + ) + ) + .orderBy(desc(schema.ciFailureEvents.createdAt)); + }, + + async findRecentUnprocessed(limit = 100): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFailureEvents) + .where(isNull(schema.ciFailureEvents.processedAt)) + .orderBy(schema.ciFailureEvents.createdAt) + .limit(limit); + }, + + async create(data: schema.NewCIFailureEvent): Promise { + const db = getDb(); + const result = await db.insert(schema.ciFailureEvents).values(data).returning(); + return result[0]; + }, + + async markProcessed(id: string, agentSpawned: boolean): Promise { + const db = getDb(); + await db + .update(schema.ciFailureEvents) + .set({ processedAt: new Date(), agentSpawned }) + .where(eq(schema.ciFailureEvents.id, id)); + }, + + async delete(id: string): Promise { + const db = getDb(); + await db.delete(schema.ciFailureEvents).where(eq(schema.ciFailureEvents.id, id)); + }, +}; + +// ============================================================================ +// CI Fix Attempt Queries +// ============================================================================ + +export interface CIFixAttemptQueries { + findById(id: string): Promise; + findByFailureEvent(failureEventId: string): Promise; + findActiveByRepository(repository: string): Promise; + create(data: schema.NewCIFixAttempt): Promise; + updateStatus(id: string, status: string, errorMessage?: string): Promise; + complete(id: string, status: 'success' | 'failed', commitSha?: string, errorMessage?: string): Promise; +} + +export const ciFixAttemptQueries: CIFixAttemptQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.ciFixAttempts).where(eq(schema.ciFixAttempts.id, id)); + return result[0] ?? null; + }, + + async findByFailureEvent(failureEventId: string): Promise { + const db = getDb(); + return db + .select() + .from(schema.ciFixAttempts) + .where(eq(schema.ciFixAttempts.failureEventId, failureEventId)) + .orderBy(desc(schema.ciFixAttempts.startedAt)); + }, + + async findActiveByRepository(repository: string): Promise { + const db = getDb(); + // Find active fix attempts by joining with failure events + const activeStatuses = ['pending', 'in_progress']; + return db + .select({ + id: schema.ciFixAttempts.id, + failureEventId: schema.ciFixAttempts.failureEventId, + agentId: schema.ciFixAttempts.agentId, + agentName: schema.ciFixAttempts.agentName, + status: schema.ciFixAttempts.status, + commitSha: schema.ciFixAttempts.commitSha, + errorMessage: schema.ciFixAttempts.errorMessage, + startedAt: schema.ciFixAttempts.startedAt, + completedAt: schema.ciFixAttempts.completedAt, + }) + .from(schema.ciFixAttempts) + .innerJoin(schema.ciFailureEvents, eq(schema.ciFixAttempts.failureEventId, schema.ciFailureEvents.id)) + .where( + and( + eq(schema.ciFailureEvents.repository, repository), + sql`${schema.ciFixAttempts.status} IN ('pending', 'in_progress')` + ) + ); + }, + + async create(data: schema.NewCIFixAttempt): Promise { + const db = getDb(); + const result = await db.insert(schema.ciFixAttempts).values(data).returning(); + return result[0]; + }, + + async updateStatus(id: string, status: string, errorMessage?: string): Promise { + const db = getDb(); + const updates: Record = { status }; + if (errorMessage) { + updates.errorMessage = errorMessage; + } + await db + .update(schema.ciFixAttempts) + .set(updates) + .where(eq(schema.ciFixAttempts.id, id)); + }, + + async complete( + id: string, + status: 'success' | 'failed', + commitSha?: string, + errorMessage?: string + ): Promise { + const db = getDb(); + await db + .update(schema.ciFixAttempts) + .set({ + status, + completedAt: new Date(), + commitSha: commitSha ?? null, + errorMessage: errorMessage ?? null, + }) + .where(eq(schema.ciFixAttempts.id, id)); + }, +}; + // ============================================================================ // Migration helper // ============================================================================ diff --git a/src/cloud/db/index.ts b/src/cloud/db/index.ts index 7cf52067..a5a63011 100644 --- a/src/cloud/db/index.ts +++ b/src/cloud/db/index.ts @@ -35,6 +35,14 @@ export type { NewSubscription, UsageRecord, NewUsageRecord, + // CI failure types + CIAnnotation, + CIFailureEvent, + NewCIFailureEvent, + CIFixAttempt, + NewCIFixAttempt, + CICheckStrategy, + CIWebhookConfig, } from './schema.js'; // Re-export schema tables for direct access if needed @@ -49,6 +57,8 @@ export { linkedDaemons as linkedDaemonsTable, subscriptions as subscriptionsTable, usageRecords as usageRecordsTable, + ciFailureEvents as ciFailureEventsTable, + ciFixAttempts as ciFixAttemptsTable, } from './schema.js'; // Import query modules @@ -64,6 +74,8 @@ import { linkedDaemonQueries, projectGroupQueries, repositoryQueries, + ciFailureEventQueries, + ciFixAttemptQueries, } from './drizzle.js'; // Legacy type aliases for backwards compatibility @@ -88,6 +100,9 @@ export const db = { repositories: repositoryQueries, // Linked daemon operations (for local agent-relay instances) linkedDaemons: linkedDaemonQueries, + // CI failure tracking + ciFailureEvents: ciFailureEventQueries, + ciFixAttempts: ciFixAttemptQueries, // Database utilities getDb, close: closeDb, @@ -104,6 +119,8 @@ export { projectGroupQueries, repositoryQueries, linkedDaemonQueries, + ciFailureEventQueries, + ciFixAttemptQueries, }; // Export database utilities diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index a5a5c7e5..be15958a 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -595,3 +595,111 @@ export type AgentCrash = typeof agentCrashes.$inferSelect; export type NewAgentCrash = typeof agentCrashes.$inferInsert; export type MemoryAlert = typeof memoryAlerts.$inferSelect; export type NewMemoryAlert = typeof memoryAlerts.$inferInsert; + +// ============================================================================ +// CI Failure Events (GitHub CI check failures) +// ============================================================================ + +export interface CIAnnotation { + path: string; + startLine: number; + endLine: number; + annotationLevel: string; + message: string; +} + +export const ciFailureEvents = pgTable('ci_failure_events', { + id: uuid('id').primaryKey().defaultRandom(), + repositoryId: uuid('repository_id').references(() => repositories.id, { onDelete: 'cascade' }), + repository: varchar('repository', { length: 255 }).notNull(), // org/repo format + prNumber: bigint('pr_number', { mode: 'number' }), + branch: varchar('branch', { length: 255 }), + commitSha: varchar('commit_sha', { length: 40 }), + checkName: varchar('check_name', { length: 255 }).notNull(), + checkId: bigint('check_id', { mode: 'number' }).notNull(), + conclusion: varchar('conclusion', { length: 50 }).notNull(), // failure, cancelled, timed_out, etc. + failureTitle: text('failure_title'), + failureSummary: text('failure_summary'), + failureDetails: text('failure_details'), + annotations: jsonb('annotations').$type().default([]), + workflowName: varchar('workflow_name', { length: 255 }), + workflowRunId: bigint('workflow_run_id', { mode: 'number' }), + // Processing state + processedAt: timestamp('processed_at'), + agentSpawned: boolean('agent_spawned').default(false), + createdAt: timestamp('created_at').defaultNow().notNull(), +}, (table) => ({ + repositoryIdx: index('idx_ci_failure_events_repository').on(table.repository), + prNumberIdx: index('idx_ci_failure_events_pr_number').on(table.prNumber), + checkNameIdx: index('idx_ci_failure_events_check_name').on(table.checkName), + createdAtIdx: index('idx_ci_failure_events_created_at').on(table.createdAt), + repoPrIdx: index('idx_ci_failure_events_repo_pr').on(table.repository, table.prNumber), +})); + +export const ciFailureEventsRelations = relations(ciFailureEvents, ({ one, many }) => ({ + repositoryRef: one(repositories, { + fields: [ciFailureEvents.repositoryId], + references: [repositories.id], + }), + fixAttempts: many(ciFixAttempts), +})); + +// ============================================================================ +// CI Fix Attempts (agent responses to failures) +// ============================================================================ + +export const ciFixAttempts = pgTable('ci_fix_attempts', { + id: uuid('id').primaryKey().defaultRandom(), + failureEventId: uuid('failure_event_id').notNull().references(() => ciFailureEvents.id, { onDelete: 'cascade' }), + agentId: varchar('agent_id', { length: 255 }).notNull(), + agentName: varchar('agent_name', { length: 255 }).notNull(), + status: varchar('status', { length: 50 }).notNull().default('pending'), // pending, in_progress, success, failed + commitSha: varchar('commit_sha', { length: 40 }), + errorMessage: text('error_message'), + // Timing + startedAt: timestamp('started_at').defaultNow().notNull(), + completedAt: timestamp('completed_at'), +}, (table) => ({ + failureEventIdx: index('idx_ci_fix_attempts_failure_event').on(table.failureEventId), + statusIdx: index('idx_ci_fix_attempts_status').on(table.status), + agentIdIdx: index('idx_ci_fix_attempts_agent_id').on(table.agentId), +})); + +export const ciFixAttemptsRelations = relations(ciFixAttempts, ({ one }) => ({ + failureEvent: one(ciFailureEvents, { + fields: [ciFixAttempts.failureEventId], + references: [ciFailureEvents.id], + }), +})); + +// ============================================================================ +// CI Webhook Configuration (per-repository settings) +// ============================================================================ + +export interface CICheckStrategy { + autoFix: boolean; + command?: string; + agentProfile?: string; + notifyOnly?: boolean; +} + +export interface CIWebhookConfig { + enabled: boolean; + autoFix?: { + lint?: boolean; + typecheck?: boolean; + test?: boolean; + build?: boolean; + }; + notifyExistingAgent?: boolean; + spawnNewAgent?: boolean; + maxConcurrentAgents?: number; + cooldownMinutes?: number; + checkStrategies?: Record; +} + +// Type exports for CI tables +export type CIFailureEvent = typeof ciFailureEvents.$inferSelect; +export type NewCIFailureEvent = typeof ciFailureEvents.$inferInsert; +export type CIFixAttempt = typeof ciFixAttempts.$inferSelect; +export type NewCIFixAttempt = typeof ciFixAttempts.$inferInsert; diff --git a/src/cloud/services/ci-agent-spawner.ts b/src/cloud/services/ci-agent-spawner.ts new file mode 100644 index 00000000..9434b8f4 --- /dev/null +++ b/src/cloud/services/ci-agent-spawner.ts @@ -0,0 +1,210 @@ +/** + * CI Agent Spawner Service + * + * Spawns agents to fix CI failures automatically. + * Called by the webhook handler when CI checks fail on PRs. + */ + +import { db, CIFailureEvent, CIAnnotation } from '../db/index.js'; + +/** + * Spawn an agent to fix CI failures + * + * This function: + * 1. Finds the workspace for the repository + * 2. Creates a fix attempt record + * 3. Spawns an agent with the failure context + * + * @param failureEvent - The CI failure event from the database + */ +export async function spawnCIFixAgent(failureEvent: CIFailureEvent): Promise { + console.log(`[ci-spawner] Spawning agent for failure: ${failureEvent.id}`); + console.log(`[ci-spawner] Repository: ${failureEvent.repository}`); + console.log(`[ci-spawner] Check: ${failureEvent.checkName}`); + console.log(`[ci-spawner] PR: #${failureEvent.prNumber}`); + + // Generate agent name and ID + const agentName = `ci-fix-${failureEvent.checkName}-${failureEvent.prNumber}`; + const agentId = `ci-${failureEvent.id}`; + + // Create fix attempt record + const fixAttempt = await db.ciFixAttempts.create({ + failureEventId: failureEvent.id, + agentId, + agentName, + status: 'pending', + }); + + console.log(`[ci-spawner] Created fix attempt: ${fixAttempt.id}`); + + try { + // Build the agent prompt + const prompt = buildAgentPrompt(failureEvent); + + // Update status to in_progress + await db.ciFixAttempts.updateStatus(fixAttempt.id, 'in_progress'); + + // TODO: Actually spawn the agent + // This will integrate with the workspace provisioner to: + // 1. Find or create workspace for the repository + // 2. Clone/checkout the correct branch + // 3. Spawn the agent with the prompt + // + // For now, we just log the intent + console.log(`[ci-spawner] Would spawn agent with prompt:`); + console.log(`[ci-spawner] --- BEGIN PROMPT ---`); + console.log(prompt); + console.log(`[ci-spawner] --- END PROMPT ---`); + + // In a real implementation: + // const workspace = await findOrCreateWorkspace(failureEvent.repository); + // await workspace.spawnAgent({ + // name: agentName, + // prompt, + // branch: failureEvent.branch, + // workingDirectory: `/workspace/repos/${failureEvent.repository}`, + // }); + + } catch (error) { + console.error(`[ci-spawner] Failed to spawn agent:`, error); + await db.ciFixAttempts.complete( + fixAttempt.id, + 'failed', + undefined, + error instanceof Error ? error.message : 'Unknown error' + ); + throw error; + } +} + +/** + * Build the prompt for the CI fix agent + */ +function buildAgentPrompt(failureEvent: CIFailureEvent): string { + const annotations = failureEvent.annotations as CIAnnotation[] | null; + const annotationsList = annotations && annotations.length > 0 + ? annotations + .slice(0, 20) // Limit to first 20 annotations + .map(a => `- ${a.path}:${a.startLine} - ${a.message}`) + .join('\n') + : null; + + return ` +# CI Failure Fix Task + +A CI check has failed on PR #${failureEvent.prNumber} in ${failureEvent.repository}. + +## Failure Details + +**Check Name:** ${failureEvent.checkName} +**Branch:** ${failureEvent.branch || 'unknown'} +**Commit:** ${failureEvent.commitSha || 'unknown'} + +${failureEvent.failureTitle ? `**Title:** ${failureEvent.failureTitle}` : ''} + +${failureEvent.failureSummary ? `**Summary:**\n${failureEvent.failureSummary}` : ''} + +${failureEvent.failureDetails ? `**Details:**\n${failureEvent.failureDetails}` : ''} + +${annotationsList ? `## Annotations\n\n${annotationsList}` : ''} + +## Your Task + +1. Checkout the branch: \`${failureEvent.branch || 'unknown'}\` +2. Analyze the failure based on the annotations and error messages +3. Fix the issues in the affected files +4. Run the relevant checks locally to verify the fix +5. Commit and push your changes with a clear commit message +6. Report back with a summary of what was fixed + +## Important + +- Only fix the specific issues causing the CI failure +- Do not refactor or improve unrelated code +- If you cannot fix the issue, explain why and what manual intervention is needed +- Keep your commit message descriptive and reference the CI check name +`.trim(); +} + +/** + * Notify an existing agent about a CI failure + * + * Used when an agent is already working on a PR and a new failure occurs. + * + * @param agentId - The ID of the existing agent + * @param failureEvent - The new CI failure event + */ +export async function notifyAgentOfCIFailure( + agentId: string, + failureEvent: CIFailureEvent +): Promise { + console.log(`[ci-spawner] Notifying agent ${agentId} of new failure`); + + // Build notification message + const annotations = failureEvent.annotations as CIAnnotation[] | null; + const annotationsList = annotations && annotations.length > 0 + ? annotations + .slice(0, 10) + .map(a => ` - ${a.path}:${a.startLine}: ${a.message}`) + .join('\n') + : null; + + const message = ` +CI FAILURE: ${failureEvent.checkName} + +${failureEvent.failureTitle || 'Check failed'} + +${failureEvent.failureSummary || ''} + +${annotationsList ? `Issues:\n${annotationsList}` : ''} + +Please investigate and fix these issues, then push your changes. +`.trim(); + + // TODO: Send message via relay + // This would use the agent-relay messaging system to send + // the failure notification to the existing agent + console.log(`[ci-spawner] Would send message to agent ${agentId}:`); + console.log(message); +} + +/** + * Mark a fix attempt as complete + * + * Called when an agent reports completion (success or failure) + */ +export async function completeFixAttempt( + fixAttemptId: string, + success: boolean, + commitSha?: string, + errorMessage?: string +): Promise { + console.log(`[ci-spawner] Completing fix attempt ${fixAttemptId}: ${success ? 'success' : 'failed'}`); + + await db.ciFixAttempts.complete( + fixAttemptId, + success ? 'success' : 'failed', + commitSha, + errorMessage + ); +} + +/** + * Get failure history for a repository + */ +export async function getFailureHistory( + repository: string, + limit = 50 +): Promise { + return db.ciFailureEvents.findByRepository(repository, limit); +} + +/** + * Get failure history for a specific PR + */ +export async function getPRFailureHistory( + repository: string, + prNumber: number +): Promise { + return db.ciFailureEvents.findByPR(repository, prNumber); +} diff --git a/src/cloud/services/index.ts b/src/cloud/services/index.ts index a1961ce8..f31c11f7 100644 --- a/src/cloud/services/index.ts +++ b/src/cloud/services/index.ts @@ -44,3 +44,12 @@ export { getScalingOrchestrator, createScalingOrchestrator, } from './scaling-orchestrator.js'; + +// CI failure handling +export { + spawnCIFixAgent, + notifyAgentOfCIFailure, + completeFixAttempt, + getFailureHistory, + getPRFailureHistory, +} from './ci-agent-spawner.js'; From 05892150843ddaf95388b9455f8ecdeaef5d5c51 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 11:04:26 +0000 Subject: [PATCH 036/103] Add issue and @mention webhook handling for agents - Add database schema for issue assignments and comment mentions - issueAssignments: tracks issues assigned to agents - commentMentions: tracks @mentions in comments - AgentTriggerConfig: per-repo configuration for agent triggers - Add webhook handlers for issues and comments - handleIssueEvent: records new issues, extracts priority from labels - handleIssueCommentEvent: detects @mentions in issue/PR comments - handlePRReviewCommentEvent: detects @mentions in review comments - Create mention-handler service - Known agent types: lead, developer, reviewer, ci-fix, etc. - Routes mentions to appropriate agents - Builds prompts with mention context - Update design doc with issue/comment handling section - Configuration options for agent triggers - Security considerations for rate limiting --- docs/design/ci-failure-webhooks.md | 110 +++++++++ src/cloud/api/webhooks.ts | 311 +++++++++++++++++++++++++- src/cloud/db/drizzle.ts | 210 +++++++++++++++++ src/cloud/db/index.ts | 15 ++ src/cloud/db/schema.ts | 109 +++++++++ src/cloud/services/index.ts | 12 + src/cloud/services/mention-handler.ts | 245 ++++++++++++++++++++ 7 files changed, 1010 insertions(+), 2 deletions(-) create mode 100644 src/cloud/services/mention-handler.ts diff --git a/docs/design/ci-failure-webhooks.md b/docs/design/ci-failure-webhooks.md index 0187076b..79179217 100644 --- a/docs/design/ci-failure-webhooks.md +++ b/docs/design/ci-failure-webhooks.md @@ -683,6 +683,112 @@ alerts: message: "Agents failing to fix CI issues" ``` +## Issue and Comment Handling + +In addition to CI failures, agents can respond to GitHub issues and @mentions in comments. + +### Supported Events + +| Event | Purpose | +|-------|---------| +| `issues` | Track new issues for agent assignment | +| `issue_comment` | Detect @mentions in issue/PR comments | +| `pull_request_review_comment` | Detect @mentions in PR review comments | + +### @Mention Detection + +When a comment contains `@agent-name`, the system: + +1. Extracts all @mentions from the comment text +2. Checks if the mentioned name is a known agent type +3. Creates a mention record in the database +4. Routes to the appropriate agent for response + +**Known Agent Types:** +- `@agent-relay` - General purpose agent +- `@lead` - Lead agent for coordination +- `@developer` - Developer agent for coding tasks +- `@reviewer` - Code review agent +- `@ci-fix` - CI failure fixing agent +- `@debugger` - Bug investigation agent +- `@docs` - Documentation agent +- `@test` - Test writing agent +- `@refactor` - Code refactoring agent + +### Issue Assignment + +When a new issue is opened: + +1. Record the issue in `issue_assignments` table +2. Extract priority from labels (p0-p3, critical/high/medium/low) +3. Optionally auto-assign based on label mapping +4. Agent receives issue context and works on a fix + +### Configuration + +Configure agent triggers per repository: + +```json +{ + "agentTriggers": { + "mentionableAgents": ["lead", "ci-fix", "reviewer"], + "defaultIssueAgent": "developer", + "autoAssignLabels": { + "bug": "debugger", + "enhancement": "developer", + "documentation": "docs" + }, + "autoRespondToMentions": true, + "maxResponsesPerHour": 20, + "allowedTriggerUsers": [] + } +} +``` + +### Database Schema + +```sql +-- Issue assignments +CREATE TABLE issue_assignments ( + id UUID PRIMARY KEY, + repository TEXT NOT NULL, + issue_number BIGINT NOT NULL, + issue_title TEXT NOT NULL, + issue_body TEXT, + agent_id TEXT, + agent_name TEXT, + status TEXT DEFAULT 'pending', + resolution TEXT, + linked_pr_number BIGINT, + labels TEXT[], + priority TEXT, + created_at TIMESTAMP DEFAULT NOW(), + UNIQUE(repository, issue_number) +); + +-- Comment mentions +CREATE TABLE comment_mentions ( + id UUID PRIMARY KEY, + repository TEXT NOT NULL, + source_type TEXT NOT NULL, -- issue_comment, pr_comment, pr_review + source_id BIGINT NOT NULL, + issue_or_pr_number BIGINT NOT NULL, + comment_body TEXT NOT NULL, + author_login TEXT NOT NULL, + mentioned_agent TEXT NOT NULL, + status TEXT DEFAULT 'pending', + response_comment_id BIGINT, + created_at TIMESTAMP DEFAULT NOW() +); +``` + +### Security + +- Rate limit @mentions to prevent abuse +- Optionally restrict which users can trigger agents +- Agents cannot respond to their own comments (prevent loops) +- Bot accounts are ignored by default + ## Future Enhancements 1. **Learning from Fixes**: Track successful fixes to build patterns for common errors @@ -695,6 +801,10 @@ alerts: 5. **Escalation Paths**: Auto-escalate to humans if agent can't fix after N attempts +6. **Slack/Discord Integration**: Notify team channels about agent activity + +7. **PR Review Automation**: Auto-request reviews from appropriate agents + ## References - [GitHub Webhooks Documentation](https://docs.github.com/en/webhooks) diff --git a/src/cloud/api/webhooks.ts b/src/cloud/api/webhooks.ts index 5e95c2e2..dddee56f 100644 --- a/src/cloud/api/webhooks.ts +++ b/src/cloud/api/webhooks.ts @@ -71,8 +71,15 @@ webhooksRouter.post('/github', async (req: Request, res: Response) => { break; case 'issues': - // Future: handle issue events - console.log(`[webhook] Issue ${req.body.action} on ${req.body.repository?.full_name}`); + await handleIssueEvent(req.body); + break; + + case 'issue_comment': + await handleIssueCommentEvent(req.body); + break; + + case 'pull_request_review_comment': + await handlePRReviewCommentEvent(req.body); break; case 'check_run': @@ -465,3 +472,303 @@ async function handleWorkflowRunEvent(payload: WorkflowRunPayload): Promise; + user: { login: string; id: number }; + assignees: Array<{ login: string; id: number }>; + }; + repository: { + full_name: string; + }; + sender: { + login: string; + id: number; + }; +} + +/** + * Issue comment payload from GitHub webhook + */ +interface IssueCommentPayload { + action: string; // created, edited, deleted + issue: { + number: number; + title: string; + pull_request?: { url: string }; // Present if this is a PR comment + }; + comment: { + id: number; + body: string; + html_url: string; + user: { login: string; id: number }; + }; + repository: { + full_name: string; + }; + sender: { + login: string; + id: number; + }; +} + +/** + * PR review comment payload from GitHub webhook + */ +interface PRReviewCommentPayload { + action: string; // created, edited, deleted + pull_request: { + number: number; + title: string; + }; + comment: { + id: number; + body: string; + html_url: string; + path: string; + line: number | null; + user: { login: string; id: number }; + }; + repository: { + full_name: string; + }; + sender: { + login: string; + id: number; + }; +} + +/** + * Extract @mentions from comment text + * Returns list of mentioned agent names (without @ prefix) + */ +function extractMentions(text: string): string[] { + // Match @agent-name patterns (alphanumeric, hyphens, underscores) + const mentionPattern = /@([a-zA-Z][a-zA-Z0-9_-]*)/g; + const mentions: string[] = []; + let match; + + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + + return [...new Set(mentions)]; // Remove duplicates +} + +/** + * Get context around a mention (for prompt building) + */ +function getMentionContext(text: string, mention: string, contextLength = 200): string { + const mentionIndex = text.toLowerCase().indexOf(`@${mention.toLowerCase()}`); + if (mentionIndex === -1) return text.slice(0, contextLength); + + const start = Math.max(0, mentionIndex - contextLength / 2); + const end = Math.min(text.length, mentionIndex + mention.length + 1 + contextLength / 2); + + let context = text.slice(start, end); + if (start > 0) context = '...' + context; + if (end < text.length) context = context + '...'; + + return context; +} + +/** + * Handle issues webhook events + * + * When a new issue is opened or labeled, we can: + * 1. Auto-assign an agent based on labels + * 2. Record the issue for later assignment + */ +async function handleIssueEvent(payload: IssuePayload): Promise { + const { action, issue, repository } = payload; + + console.log(`[webhook] Issue ${action}: #${issue.number} on ${repository.full_name}`); + + // Only handle opened issues for now + if (action !== 'opened' && action !== 'labeled') { + return; + } + + try { + // Check if we already have an assignment for this issue + const existing = await db.issueAssignments.findByIssue(repository.full_name, issue.number); + if (existing) { + console.log(`[webhook] Issue #${issue.number} already has an assignment`); + return; + } + + // Determine priority based on labels + const labels = issue.labels.map(l => l.name.toLowerCase()); + let priority: string | undefined; + if (labels.includes('critical') || labels.includes('p0')) priority = 'critical'; + else if (labels.includes('high') || labels.includes('p1')) priority = 'high'; + else if (labels.includes('medium') || labels.includes('p2')) priority = 'medium'; + else if (labels.includes('low') || labels.includes('p3')) priority = 'low'; + + // Create issue assignment record + const assignment = await db.issueAssignments.create({ + repository: repository.full_name, + issueNumber: issue.number, + issueTitle: issue.title, + issueBody: issue.body, + issueUrl: issue.html_url, + status: 'pending', + labels: issue.labels.map(l => l.name), + priority, + }); + + console.log(`[webhook] Created issue assignment: ${assignment.id}`); + + // Check if we should auto-assign an agent + // TODO: Load repo configuration for auto-assign settings + // For now, issues remain in 'pending' status for manual assignment + + } catch (error) { + console.error(`[webhook] Failed to handle issue event:`, error); + } +} + +/** + * Handle issue_comment webhook events + * + * When someone @mentions an agent in a comment: + * 1. Detect the mention + * 2. Record it for agent processing + * 3. Route to appropriate agent + */ +async function handleIssueCommentEvent(payload: IssueCommentPayload): Promise { + const { action, issue, comment, repository, sender } = payload; + + // Only handle new comments + if (action !== 'created') { + return; + } + + const isPR = !!issue.pull_request; + const sourceType = isPR ? 'pr_comment' : 'issue_comment'; + + console.log( + `[webhook] ${sourceType} on ${repository.full_name}#${issue.number} by @${sender.login}` + ); + + // Extract @mentions from comment + const mentions = extractMentions(comment.body); + if (mentions.length === 0) { + return; // No mentions to process + } + + console.log(`[webhook] Found mentions: ${mentions.join(', ')}`); + + try { + for (const mention of mentions) { + // Check if this is a known agent mention + // TODO: Load configured agents from repo/workspace settings + // For now, we accept any mention that looks like an agent name + + const context = getMentionContext(comment.body, mention); + + // Create mention record + const mentionRecord = await db.commentMentions.create({ + repository: repository.full_name, + sourceType, + sourceId: comment.id, + issueOrPrNumber: issue.number, + commentBody: comment.body, + commentUrl: comment.html_url, + authorLogin: sender.login, + authorId: sender.id, + mentionedAgent: mention, + mentionContext: context, + status: 'pending', + }); + + console.log(`[webhook] Created mention record for @${mention}: ${mentionRecord.id}`); + + // Import and call the mention handler (lazy import) + try { + const { handleMention } = await import('../services/mention-handler.js'); + await handleMention(mentionRecord); + } catch (importError) { + // Handler not implemented yet - mentions will be processed later + console.log(`[webhook] Mention handler not available, mention queued for later processing`); + } + } + } catch (error) { + console.error(`[webhook] Failed to handle comment mentions:`, error); + } +} + +/** + * Handle pull_request_review_comment webhook events + * + * Similar to issue_comment, but for PR review comments (inline code comments) + */ +async function handlePRReviewCommentEvent(payload: PRReviewCommentPayload): Promise { + const { action, pull_request, comment, repository, sender } = payload; + + // Only handle new comments + if (action !== 'created') { + return; + } + + console.log( + `[webhook] PR review comment on ${repository.full_name}#${pull_request.number} ` + + `(${comment.path}:${comment.line}) by @${sender.login}` + ); + + // Extract @mentions from comment + const mentions = extractMentions(comment.body); + if (mentions.length === 0) { + return; // No mentions to process + } + + console.log(`[webhook] Found mentions in review comment: ${mentions.join(', ')}`); + + try { + for (const mention of mentions) { + const context = getMentionContext(comment.body, mention); + + // Create mention record + const mentionRecord = await db.commentMentions.create({ + repository: repository.full_name, + sourceType: 'pr_review', + sourceId: comment.id, + issueOrPrNumber: pull_request.number, + commentBody: comment.body, + commentUrl: comment.html_url, + authorLogin: sender.login, + authorId: sender.id, + mentionedAgent: mention, + mentionContext: `${comment.path}:${comment.line || '?'}\n\n${context}`, + status: 'pending', + }); + + console.log(`[webhook] Created review mention for @${mention}: ${mentionRecord.id}`); + + // Try to handle mention immediately + try { + const { handleMention } = await import('../services/mention-handler.js'); + await handleMention(mentionRecord); + } catch { + console.log(`[webhook] Mention handler not available, mention queued for later processing`); + } + } + } catch (error) { + console.error(`[webhook] Failed to handle PR review comment mentions:`, error); + } +} diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index 42a94579..f025d935 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -1323,6 +1323,216 @@ export const ciFixAttemptQueries: CIFixAttemptQueries = { }, }; +// ============================================================================ +// Issue Assignment Queries +// ============================================================================ + +export interface IssueAssignmentQueries { + findById(id: string): Promise; + findByRepository(repository: string, limit?: number): Promise; + findByIssue(repository: string, issueNumber: number): Promise; + findByAgent(agentId: string): Promise; + findPending(limit?: number): Promise; + create(data: schema.NewIssueAssignment): Promise; + assignAgent(id: string, agentId: string, agentName: string): Promise; + updateStatus(id: string, status: string, resolution?: string): Promise; + linkPR(id: string, prNumber: number): Promise; +} + +export const issueAssignmentQueries: IssueAssignmentQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.issueAssignments).where(eq(schema.issueAssignments.id, id)); + return result[0] ?? null; + }, + + async findByRepository(repository: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.issueAssignments) + .where(eq(schema.issueAssignments.repository, repository)) + .orderBy(desc(schema.issueAssignments.createdAt)) + .limit(limit); + }, + + async findByIssue(repository: string, issueNumber: number): Promise { + const db = getDb(); + const result = await db + .select() + .from(schema.issueAssignments) + .where( + and( + eq(schema.issueAssignments.repository, repository), + eq(schema.issueAssignments.issueNumber, issueNumber) + ) + ); + return result[0] ?? null; + }, + + async findByAgent(agentId: string): Promise { + const db = getDb(); + return db + .select() + .from(schema.issueAssignments) + .where(eq(schema.issueAssignments.agentId, agentId)) + .orderBy(desc(schema.issueAssignments.createdAt)); + }, + + async findPending(limit = 100): Promise { + const db = getDb(); + return db + .select() + .from(schema.issueAssignments) + .where(eq(schema.issueAssignments.status, 'pending')) + .orderBy(schema.issueAssignments.createdAt) + .limit(limit); + }, + + async create(data: schema.NewIssueAssignment): Promise { + const db = getDb(); + const result = await db.insert(schema.issueAssignments).values(data).returning(); + return result[0]; + }, + + async assignAgent(id: string, agentId: string, agentName: string): Promise { + const db = getDb(); + await db + .update(schema.issueAssignments) + .set({ + agentId, + agentName, + assignedAt: new Date(), + status: 'assigned', + updatedAt: new Date(), + }) + .where(eq(schema.issueAssignments.id, id)); + }, + + async updateStatus(id: string, status: string, resolution?: string): Promise { + const db = getDb(); + const updates: Record = { status, updatedAt: new Date() }; + if (resolution) { + updates.resolution = resolution; + } + await db + .update(schema.issueAssignments) + .set(updates) + .where(eq(schema.issueAssignments.id, id)); + }, + + async linkPR(id: string, prNumber: number): Promise { + const db = getDb(); + await db + .update(schema.issueAssignments) + .set({ linkedPrNumber: prNumber, updatedAt: new Date() }) + .where(eq(schema.issueAssignments.id, id)); + }, +}; + +// ============================================================================ +// Comment Mention Queries +// ============================================================================ + +export interface CommentMentionQueries { + findById(id: string): Promise; + findByRepository(repository: string, limit?: number): Promise; + findBySource(sourceType: string, sourceId: number): Promise; + findPending(limit?: number): Promise; + findByMentionedAgent(mentionedAgent: string, limit?: number): Promise; + create(data: schema.NewCommentMention): Promise; + markProcessing(id: string, agentId: string, agentName: string): Promise; + markResponded(id: string, responseCommentId: number, responseBody: string): Promise; + markIgnored(id: string): Promise; +} + +export const commentMentionQueries: CommentMentionQueries = { + async findById(id: string): Promise { + const db = getDb(); + const result = await db.select().from(schema.commentMentions).where(eq(schema.commentMentions.id, id)); + return result[0] ?? null; + }, + + async findByRepository(repository: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.commentMentions) + .where(eq(schema.commentMentions.repository, repository)) + .orderBy(desc(schema.commentMentions.createdAt)) + .limit(limit); + }, + + async findBySource(sourceType: string, sourceId: number): Promise { + const db = getDb(); + const result = await db + .select() + .from(schema.commentMentions) + .where( + and( + eq(schema.commentMentions.sourceType, sourceType), + eq(schema.commentMentions.sourceId, sourceId) + ) + ); + return result[0] ?? null; + }, + + async findPending(limit = 100): Promise { + const db = getDb(); + return db + .select() + .from(schema.commentMentions) + .where(eq(schema.commentMentions.status, 'pending')) + .orderBy(schema.commentMentions.createdAt) + .limit(limit); + }, + + async findByMentionedAgent(mentionedAgent: string, limit = 50): Promise { + const db = getDb(); + return db + .select() + .from(schema.commentMentions) + .where(eq(schema.commentMentions.mentionedAgent, mentionedAgent)) + .orderBy(desc(schema.commentMentions.createdAt)) + .limit(limit); + }, + + async create(data: schema.NewCommentMention): Promise { + const db = getDb(); + const result = await db.insert(schema.commentMentions).values(data).returning(); + return result[0]; + }, + + async markProcessing(id: string, agentId: string, agentName: string): Promise { + const db = getDb(); + await db + .update(schema.commentMentions) + .set({ status: 'processing', agentId, agentName }) + .where(eq(schema.commentMentions.id, id)); + }, + + async markResponded(id: string, responseCommentId: number, responseBody: string): Promise { + const db = getDb(); + await db + .update(schema.commentMentions) + .set({ + status: 'responded', + responseCommentId, + responseBody, + respondedAt: new Date(), + }) + .where(eq(schema.commentMentions.id, id)); + }, + + async markIgnored(id: string): Promise { + const db = getDb(); + await db + .update(schema.commentMentions) + .set({ status: 'ignored' }) + .where(eq(schema.commentMentions.id, id)); + }, +}; + // ============================================================================ // Migration helper // ============================================================================ diff --git a/src/cloud/db/index.ts b/src/cloud/db/index.ts index a5a63011..f83e8951 100644 --- a/src/cloud/db/index.ts +++ b/src/cloud/db/index.ts @@ -43,6 +43,12 @@ export type { NewCIFixAttempt, CICheckStrategy, CIWebhookConfig, + // Issue and comment types + IssueAssignment, + NewIssueAssignment, + CommentMention, + NewCommentMention, + AgentTriggerConfig, } from './schema.js'; // Re-export schema tables for direct access if needed @@ -59,6 +65,8 @@ export { usageRecords as usageRecordsTable, ciFailureEvents as ciFailureEventsTable, ciFixAttempts as ciFixAttemptsTable, + issueAssignments as issueAssignmentsTable, + commentMentions as commentMentionsTable, } from './schema.js'; // Import query modules @@ -76,6 +84,8 @@ import { repositoryQueries, ciFailureEventQueries, ciFixAttemptQueries, + issueAssignmentQueries, + commentMentionQueries, } from './drizzle.js'; // Legacy type aliases for backwards compatibility @@ -103,6 +113,9 @@ export const db = { // CI failure tracking ciFailureEvents: ciFailureEventQueries, ciFixAttempts: ciFixAttemptQueries, + // Issue and comment tracking + issueAssignments: issueAssignmentQueries, + commentMentions: commentMentionQueries, // Database utilities getDb, close: closeDb, @@ -121,6 +134,8 @@ export { linkedDaemonQueries, ciFailureEventQueries, ciFixAttemptQueries, + issueAssignmentQueries, + commentMentionQueries, }; // Export database utilities diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index be15958a..6c8fe0cc 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -703,3 +703,112 @@ export type CIFailureEvent = typeof ciFailureEvents.$inferSelect; export type NewCIFailureEvent = typeof ciFailureEvents.$inferInsert; export type CIFixAttempt = typeof ciFixAttempts.$inferSelect; export type NewCIFixAttempt = typeof ciFixAttempts.$inferInsert; + +// ============================================================================ +// GitHub Issue Assignments (agent handling of issues) +// ============================================================================ + +export const issueAssignments = pgTable('issue_assignments', { + id: uuid('id').primaryKey().defaultRandom(), + repositoryId: uuid('repository_id').references(() => repositories.id, { onDelete: 'cascade' }), + repository: varchar('repository', { length: 255 }).notNull(), // org/repo format + issueNumber: bigint('issue_number', { mode: 'number' }).notNull(), + issueTitle: text('issue_title').notNull(), + issueBody: text('issue_body'), + issueUrl: varchar('issue_url', { length: 512 }), + // Assignment details + agentId: varchar('agent_id', { length: 255 }), + agentName: varchar('agent_name', { length: 255 }), + assignedAt: timestamp('assigned_at'), + // Status tracking + status: varchar('status', { length: 50 }).notNull().default('pending'), // pending, assigned, in_progress, resolved, closed + resolution: text('resolution'), + // PR created to fix the issue + linkedPrNumber: bigint('linked_pr_number', { mode: 'number' }), + // Metadata + labels: text('labels').array(), + priority: varchar('priority', { length: 20 }), // low, medium, high, critical + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull(), +}, (table) => ({ + repositoryIdx: index('idx_issue_assignments_repository').on(table.repository), + issueNumberIdx: index('idx_issue_assignments_issue_number').on(table.issueNumber), + statusIdx: index('idx_issue_assignments_status').on(table.status), + agentIdIdx: index('idx_issue_assignments_agent_id').on(table.agentId), + repoIssueIdx: unique('issue_assignments_repo_issue_unique').on(table.repository, table.issueNumber), +})); + +export const issueAssignmentsRelations = relations(issueAssignments, ({ one }) => ({ + repositoryRef: one(repositories, { + fields: [issueAssignments.repositoryId], + references: [repositories.id], + }), +})); + +// ============================================================================ +// Comment Mentions (tracking @mentions to agents) +// ============================================================================ + +export const commentMentions = pgTable('comment_mentions', { + id: uuid('id').primaryKey().defaultRandom(), + repositoryId: uuid('repository_id').references(() => repositories.id, { onDelete: 'cascade' }), + repository: varchar('repository', { length: 255 }).notNull(), + // Source of the mention + sourceType: varchar('source_type', { length: 50 }).notNull(), // issue_comment, pr_comment, pr_review + sourceId: bigint('source_id', { mode: 'number' }).notNull(), // GitHub comment ID + issueOrPrNumber: bigint('issue_or_pr_number', { mode: 'number' }).notNull(), + // Comment details + commentBody: text('comment_body').notNull(), + commentUrl: varchar('comment_url', { length: 512 }), + authorLogin: varchar('author_login', { length: 255 }).notNull(), + authorId: bigint('author_id', { mode: 'number' }), + // Mention details + mentionedAgent: varchar('mentioned_agent', { length: 255 }).notNull(), // e.g., "agent-relay", "ci-fix", "lead" + mentionContext: text('mention_context'), // Text surrounding the mention + // Response tracking + agentId: varchar('agent_id', { length: 255 }), + agentName: varchar('agent_name', { length: 255 }), + status: varchar('status', { length: 50 }).notNull().default('pending'), // pending, processing, responded, ignored + responseCommentId: bigint('response_comment_id', { mode: 'number' }), + responseBody: text('response_body'), + respondedAt: timestamp('responded_at'), + // Metadata + createdAt: timestamp('created_at').defaultNow().notNull(), +}, (table) => ({ + repositoryIdx: index('idx_comment_mentions_repository').on(table.repository), + sourceIdx: index('idx_comment_mentions_source').on(table.sourceType, table.sourceId), + statusIdx: index('idx_comment_mentions_status').on(table.status), + mentionedAgentIdx: index('idx_comment_mentions_mentioned_agent').on(table.mentionedAgent), +})); + +export const commentMentionsRelations = relations(commentMentions, ({ one }) => ({ + repositoryRef: one(repositories, { + fields: [commentMentions.repositoryId], + references: [repositories.id], + }), +})); + +// ============================================================================ +// Agent Webhook Configuration (per-repo settings for agent triggers) +// ============================================================================ + +export interface AgentTriggerConfig { + // Which agents can be mentioned + mentionableAgents?: string[]; // e.g., ["lead", "ci-fix", "reviewer"] + // Default agent for issue handling + defaultIssueAgent?: string; + // Labels that trigger agent assignment + autoAssignLabels?: Record; // e.g., { "bug": "debugger", "enhancement": "developer" } + // Whether to auto-respond to mentions + autoRespondToMentions?: boolean; + // Rate limiting + maxResponsesPerHour?: number; + // Who can trigger agents + allowedTriggerUsers?: string[]; // Empty = everyone, list = only these users +} + +// Type exports for issue/comment tables +export type IssueAssignment = typeof issueAssignments.$inferSelect; +export type NewIssueAssignment = typeof issueAssignments.$inferInsert; +export type CommentMention = typeof commentMentions.$inferSelect; +export type NewCommentMention = typeof commentMentions.$inferInsert; diff --git a/src/cloud/services/index.ts b/src/cloud/services/index.ts index f31c11f7..53cb1b73 100644 --- a/src/cloud/services/index.ts +++ b/src/cloud/services/index.ts @@ -53,3 +53,15 @@ export { getFailureHistory, getPRFailureHistory, } from './ci-agent-spawner.js'; + +// Issue and mention handling +export { + handleMention, + handleIssueAssignment, + getPendingMentions, + getPendingIssueAssignments, + processPendingMentions, + processPendingIssueAssignments, + KNOWN_AGENTS, + isKnownAgent, +} from './mention-handler.js'; diff --git a/src/cloud/services/mention-handler.ts b/src/cloud/services/mention-handler.ts new file mode 100644 index 00000000..69dd246b --- /dev/null +++ b/src/cloud/services/mention-handler.ts @@ -0,0 +1,245 @@ +/** + * Mention Handler Service + * + * Handles @mentions of agents in GitHub issues and PR comments. + * Routes mentions to appropriate agents for response. + */ + +import { db, CommentMention, IssueAssignment } from '../db/index.js'; + +/** + * Known agent types that can be mentioned + */ +export const KNOWN_AGENTS = { + // Generic agents + 'agent-relay': 'General purpose agent for any task', + 'lead': 'Lead agent for coordination and delegation', + 'developer': 'Developer agent for coding tasks', + 'reviewer': 'Code review agent', + + // Specialized agents + 'ci-fix': 'CI failure fixing agent', + 'debugger': 'Bug investigation and fixing agent', + 'docs': 'Documentation agent', + 'test': 'Test writing agent', + 'refactor': 'Code refactoring agent', +} as const; + +export type KnownAgentType = keyof typeof KNOWN_AGENTS; + +/** + * Check if a mention is for a known agent type + */ +export function isKnownAgent(mention: string): mention is KnownAgentType { + return mention in KNOWN_AGENTS; +} + +/** + * Handle a mention record + * + * This function: + * 1. Validates the mention is for a known agent + * 2. Routes to the appropriate agent handler + * 3. Spawns or messages the agent + */ +export async function handleMention(mention: CommentMention): Promise { + console.log(`[mention-handler] Processing mention: @${mention.mentionedAgent} in ${mention.repository}`); + + // Check if this is a known agent type + if (!isKnownAgent(mention.mentionedAgent)) { + console.log(`[mention-handler] Unknown agent: @${mention.mentionedAgent}, checking workspace config`); + // TODO: Check workspace configuration for custom agent names + // For now, ignore unknown agents + await db.commentMentions.markIgnored(mention.id); + return; + } + + // Update status to processing + const agentId = `mention-${mention.id}`; + const agentName = mention.mentionedAgent; + await db.commentMentions.markProcessing(mention.id, agentId, agentName); + + // Build the prompt for the agent + const prompt = buildMentionPrompt(mention); + + console.log(`[mention-handler] Built prompt for @${mention.mentionedAgent}:`); + console.log(`[mention-handler] --- BEGIN PROMPT ---`); + console.log(prompt); + console.log(`[mention-handler] --- END PROMPT ---`); + + // TODO: Actually spawn or message the agent + // This will integrate with the workspace/agent system to: + // 1. Find an existing agent working on this PR/issue + // 2. Message them if they exist + // 3. Spawn a new agent if needed + // + // For now, we just log the intent + console.log(`[mention-handler] Would spawn/message agent @${mention.mentionedAgent}`); +} + +/** + * Build a prompt for handling a mention + */ +function buildMentionPrompt(mention: CommentMention): string { + const agentDescription = isKnownAgent(mention.mentionedAgent) + ? KNOWN_AGENTS[mention.mentionedAgent] + : 'Custom agent'; + + const sourceTypeDescription = { + issue_comment: 'GitHub issue comment', + pr_comment: 'GitHub PR comment', + pr_review: 'GitHub PR review comment', + }[mention.sourceType] || 'GitHub comment'; + + return ` +# Agent Mention Task + +You (@${mention.mentionedAgent}) have been mentioned in a ${sourceTypeDescription}. + +## Your Role +${agentDescription} + +## Context + +**Repository:** ${mention.repository} +**Issue/PR:** #${mention.issueOrPrNumber} +**Comment by:** @${mention.authorLogin} +**Comment URL:** ${mention.commentUrl || 'N/A'} + +## Comment + +${mention.commentBody} + +## Your Task + +Analyze the comment and respond appropriately: + +1. If a question was asked, provide a helpful answer +2. If a task was requested, either complete it or explain what's needed +3. If feedback was given, acknowledge it and act on it if needed +4. Reply to the comment on GitHub with your response + +## Important + +- Be concise and helpful +- If you need to make code changes, create a commit and push +- If the request is unclear, ask for clarification +- Reference specific files and line numbers when relevant +`.trim(); +} + +/** + * Handle an issue assignment + * + * Called when an issue should be assigned to an agent + */ +export async function handleIssueAssignment(assignment: IssueAssignment): Promise { + console.log(`[mention-handler] Processing issue assignment: #${assignment.issueNumber} in ${assignment.repository}`); + + // Build prompt for the issue + const prompt = buildIssuePrompt(assignment); + + console.log(`[mention-handler] Built prompt for issue #${assignment.issueNumber}:`); + console.log(`[mention-handler] --- BEGIN PROMPT ---`); + console.log(prompt); + console.log(`[mention-handler] --- END PROMPT ---`); + + // TODO: Spawn agent for the issue + console.log(`[mention-handler] Would spawn agent for issue #${assignment.issueNumber}`); +} + +/** + * Build a prompt for an issue assignment + */ +function buildIssuePrompt(assignment: IssueAssignment): string { + const priorityNote = assignment.priority + ? `\n**Priority:** ${assignment.priority.toUpperCase()}` + : ''; + + const labelsNote = assignment.labels && assignment.labels.length > 0 + ? `\n**Labels:** ${assignment.labels.join(', ')}` + : ''; + + return ` +# Issue Assignment + +You have been assigned to work on GitHub issue #${assignment.issueNumber}. + +## Issue Details + +**Repository:** ${assignment.repository} +**Title:** ${assignment.issueTitle}${priorityNote}${labelsNote} +**URL:** ${assignment.issueUrl || 'N/A'} + +## Description + +${assignment.issueBody || 'No description provided.'} + +## Your Task + +1. Analyze the issue and understand what needs to be done +2. Investigate the codebase to find relevant files +3. Implement a solution if possible +4. Create a PR with your changes +5. Link the PR to this issue + +## Important + +- Start with a comment on the issue acknowledging you're working on it +- If you need clarification, ask in the issue comments +- Create a draft PR early if the fix is complex +- Reference the issue number in your commit messages (e.g., "Fix #${assignment.issueNumber}") +`.trim(); +} + +/** + * Get pending mentions for processing + */ +export async function getPendingMentions(limit = 50): Promise { + return db.commentMentions.findPending(limit); +} + +/** + * Get pending issue assignments for processing + */ +export async function getPendingIssueAssignments(limit = 50): Promise { + return db.issueAssignments.findPending(limit); +} + +/** + * Process all pending mentions (background job) + */ +export async function processPendingMentions(): Promise { + const pending = await getPendingMentions(); + let processed = 0; + + for (const mention of pending) { + try { + await handleMention(mention); + processed++; + } catch (error) { + console.error(`[mention-handler] Failed to process mention ${mention.id}:`, error); + } + } + + return processed; +} + +/** + * Process all pending issue assignments (background job) + */ +export async function processPendingIssueAssignments(): Promise { + const pending = await getPendingIssueAssignments(); + let processed = 0; + + for (const assignment of pending) { + try { + await handleIssueAssignment(assignment); + processed++; + } catch (error) { + console.error(`[mention-handler] Failed to process assignment ${assignment.id}:`, error); + } + } + + return processed; +} From 66320939dde69185ac3099d9f8d3d4369960ca2c Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 11:17:13 +0000 Subject: [PATCH 037/103] Implement full CI and mention handling with GitHub comments - Add findByFullName query for repositories - Replace stubs with working agent spawning logic - Post acknowledgment comments via GitHub App before spawning - Queue spawn commands for linked daemons - Add completion comments for CI fix results - Handle null PR numbers gracefully - Fix updateStatus call signature for issue assignments --- src/cloud/db/drizzle.ts | 10 + src/cloud/services/ci-agent-spawner.ts | 356 ++++++++++++++++++++++--- src/cloud/services/mention-handler.ts | 321 +++++++++++++++++++--- 3 files changed, 619 insertions(+), 68 deletions(-) diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index f025d935..c82e58ac 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -944,6 +944,7 @@ export const projectGroupQueries: ProjectGroupQueries = { export interface RepositoryQueries { findById(id: string): Promise; + findByFullName(fullName: string): Promise; findByUserId(userId: string): Promise; findByWorkspaceId(workspaceId: string): Promise; findByProjectGroupId(projectGroupId: string): Promise; @@ -962,6 +963,15 @@ export const repositoryQueries: RepositoryQueries = { return result[0] ?? null; }, + async findByFullName(fullName: string): Promise { + const db = getDb(); + const result = await db + .select() + .from(schema.repositories) + .where(eq(schema.repositories.githubFullName, fullName)); + return result[0] ?? null; + }, + async findByUserId(userId: string): Promise { const db = getDb(); return db diff --git a/src/cloud/services/ci-agent-spawner.ts b/src/cloud/services/ci-agent-spawner.ts index 9434b8f4..d94aaf2f 100644 --- a/src/cloud/services/ci-agent-spawner.ts +++ b/src/cloud/services/ci-agent-spawner.ts @@ -3,17 +3,182 @@ * * Spawns agents to fix CI failures automatically. * Called by the webhook handler when CI checks fail on PRs. + * + * Flow: + * 1. App posts acknowledgment comment on the PR + * 2. Finds a linked daemon for the repository + * 3. Queues spawn command for the daemon + * 4. Agent works and posts response comment */ -import { db, CIFailureEvent, CIAnnotation } from '../db/index.js'; +import { db, CIFailureEvent, CIAnnotation, Repository } from '../db/index.js'; +import { nangoService } from './nango.js'; + +/** + * Get the GitHub App name for comments + */ +function getAppName(): string { + return process.env.GITHUB_APP_NAME || 'Agent Relay'; +} + +/** + * Post a CI failure acknowledgment comment on GitHub + */ +async function postCIAcknowledgmentComment( + repository: Repository, + prNumber: number, + checkName: string, + failureTitle: string | null +): Promise<{ id: number; url: string } | null> { + if (!repository.nangoConnectionId) { + console.warn(`[ci-spawner] Repository ${repository.githubFullName} has no Nango connection`); + return null; + } + + const [owner, repo] = repository.githubFullName.split('/'); + const appName = getAppName(); + + const body = `๐Ÿ”ด **CI Failure Detected** + +The \`${checkName}\` check has failed${failureTitle ? `: ${failureTitle}` : ''}. + +I'm spawning an agent to investigate and fix this issue. The **@ci-fix** agent will analyze the failure and attempt to resolve it. + +You'll be notified when the fix is ready or if manual intervention is needed. + +_โ€” ${appName}_`; + + try { + const result = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + prNumber, + body + ); + console.log(`[ci-spawner] Posted CI acknowledgment comment: ${result.html_url}`); + return { id: result.id, url: result.html_url }; + } catch (error) { + console.error(`[ci-spawner] Failed to post CI acknowledgment comment:`, error); + return null; + } +} + +/** + * Post a completion comment on GitHub + */ +async function postCompletionComment( + repository: Repository, + prNumber: number, + success: boolean, + summary: string, + commitSha?: string +): Promise { + if (!repository.nangoConnectionId) { + return; + } + + const [owner, repo] = repository.githubFullName.split('/'); + const appName = getAppName(); + + let body: string; + if (success) { + body = `โœ… **CI Fix Applied** + +${summary} + +${commitSha ? `**Commit:** ${commitSha.substring(0, 7)}` : ''} + +Please review the changes and re-run the CI checks. + +_โ€” ${appName}_`; + } else { + body = `โš ๏ธ **CI Fix Unsuccessful** + +${summary} + +Manual intervention may be required. Please check the failure details and fix the issue manually. + +_โ€” ${appName}_`; + } + + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + prNumber, + body + ); + console.log(`[ci-spawner] Posted completion comment for PR #${prNumber}`); + } catch (error) { + console.error(`[ci-spawner] Failed to post completion comment:`, error); + } +} + +/** + * Find a linked daemon that can handle this repository + */ +async function findAvailableDaemon(repository: Repository): Promise<{ id: string; userId: string } | null> { + if (!repository.userId) { + console.warn(`[ci-spawner] Repository ${repository.githubFullName} has no userId`); + return null; + } + + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + console.warn(`[ci-spawner] No online daemon found for user ${repository.userId}`); + return null; + } + + return { id: onlineDaemon.id, userId: repository.userId }; +} + +/** + * Queue a spawn command for a linked daemon + */ +async function queueSpawnCommand( + daemonId: string, + agentName: string, + prompt: string, + metadata: { + failureEventId: string; + fixAttemptId: string; + repository: string; + prNumber: number; + checkName: string; + } +): Promise { + const command = { + type: 'spawn_agent', + agentName, + cli: 'claude', + task: prompt, + metadata, + timestamp: new Date().toISOString(), + }; + + await db.linkedDaemons.queueMessage(daemonId, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: '__spawner__', + content: JSON.stringify(command), + metadata: { type: 'spawn_command' }, + timestamp: new Date().toISOString(), + }); + + console.log(`[ci-spawner] Queued spawn command for daemon ${daemonId}`); +} /** * Spawn an agent to fix CI failures * * This function: * 1. Finds the workspace for the repository - * 2. Creates a fix attempt record - * 3. Spawns an agent with the failure context + * 2. Posts acknowledgment comment + * 3. Creates a fix attempt record + * 4. Queues spawn command for a linked daemon * * @param failureEvent - The CI failure event from the database */ @@ -23,8 +188,23 @@ export async function spawnCIFixAgent(failureEvent: CIFailureEvent): Promise 0 ? annotations @@ -89,6 +304,27 @@ function buildAgentPrompt(failureEvent: CIFailureEvent): string { .join('\n') : null; + const responseInstructions = ` +## Response Instructions + +When you complete your work: +1. Commit and push your changes +2. Post a comment on the PR summarizing what you fixed + +Use the GitHub CLI (\`gh\`) to post your response: +\`\`\`bash +gh pr comment ${failureEvent.prNumber} --repo ${failureEvent.repository} --body "## CI Fix Applied + +Summary of changes... + +**Files modified:** +- file1.ts +- file2.ts + +Please re-run the CI checks to verify the fix." +\`\`\` +`; + return ` # CI Failure Fix Task @@ -117,6 +353,8 @@ ${annotationsList ? `## Annotations\n\n${annotationsList}` : ''} 5. Commit and push your changes with a clear commit message 6. Report back with a summary of what was fixed +${responseInstructions} + ## Important - Only fix the specific issues causing the CI failure @@ -140,6 +378,22 @@ export async function notifyAgentOfCIFailure( ): Promise { console.log(`[ci-spawner] Notifying agent ${agentId} of new failure`); + // Find the repository + const repository = await db.repositories.findByFullName(failureEvent.repository); + if (!repository || !repository.userId) { + console.warn(`[ci-spawner] Repository not found or has no userId: ${failureEvent.repository}`); + return; + } + + // Find the daemon that should have this agent + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + console.warn(`[ci-spawner] No online daemon to notify agent ${agentId}`); + return; + } + // Build notification message const annotations = failureEvent.annotations as CIAnnotation[] | null; const annotationsList = annotations && annotations.length > 0 @@ -161,11 +415,16 @@ ${annotationsList ? `Issues:\n${annotationsList}` : ''} Please investigate and fix these issues, then push your changes. `.trim(); - // TODO: Send message via relay - // This would use the agent-relay messaging system to send - // the failure notification to the existing agent - console.log(`[ci-spawner] Would send message to agent ${agentId}:`); - console.log(message); + // Queue message for the agent via daemon + await db.linkedDaemons.queueMessage(onlineDaemon.id, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: agentId, + content: message, + metadata: { type: 'ci_failure_notification', failureEventId: failureEvent.id }, + timestamp: new Date().toISOString(), + }); + + console.log(`[ci-spawner] Queued CI failure notification for agent ${agentId}`); } /** @@ -181,12 +440,43 @@ export async function completeFixAttempt( ): Promise { console.log(`[ci-spawner] Completing fix attempt ${fixAttemptId}: ${success ? 'success' : 'failed'}`); + // Update the fix attempt record await db.ciFixAttempts.complete( fixAttemptId, success ? 'success' : 'failed', commitSha, errorMessage ); + + // Get the fix attempt to find the failure event + const fixAttempt = await db.ciFixAttempts.findById(fixAttemptId); + if (!fixAttempt) { + console.warn(`[ci-spawner] Fix attempt not found: ${fixAttemptId}`); + return; + } + + // Get the failure event to find the repository and PR + const failureEvent = await db.ciFailureEvents.findById(fixAttempt.failureEventId); + if (!failureEvent) { + console.warn(`[ci-spawner] Failure event not found: ${fixAttempt.failureEventId}`); + return; + } + + // Find the repository to post completion comment + const repository = await db.repositories.findByFullName(failureEvent.repository); + if (repository && failureEvent.prNumber !== null) { + const summary = success + ? `The @ci-fix agent has fixed the \`${failureEvent.checkName}\` check failure.` + : errorMessage || 'The @ci-fix agent was unable to fix the issue.'; + + await postCompletionComment( + repository, + failureEvent.prNumber, + success, + summary, + commitSha + ); + } } /** diff --git a/src/cloud/services/mention-handler.ts b/src/cloud/services/mention-handler.ts index 69dd246b..1342e7fc 100644 --- a/src/cloud/services/mention-handler.ts +++ b/src/cloud/services/mention-handler.ts @@ -3,9 +3,16 @@ * * Handles @mentions of agents in GitHub issues and PR comments. * Routes mentions to appropriate agents for response. + * + * Flow: + * 1. App posts acknowledgment comment + * 2. Finds a linked daemon for the repository + * 3. Queues spawn command for the daemon + * 4. Agent works and posts response comment */ -import { db, CommentMention, IssueAssignment } from '../db/index.js'; +import { db, CommentMention, IssueAssignment, Repository } from '../db/index.js'; +import { nangoService } from './nango.js'; /** * Known agent types that can be mentioned @@ -34,13 +41,118 @@ export function isKnownAgent(mention: string): mention is KnownAgentType { return mention in KNOWN_AGENTS; } +/** + * Get the GitHub App name for comments + */ +function getAppName(): string { + return process.env.GITHUB_APP_NAME || 'Agent Relay'; +} + +/** + * Post an acknowledgment comment on GitHub + */ +async function postAcknowledgmentComment( + repository: Repository, + issueNumber: number, + mentionedAgent: string, + authorLogin: string +): Promise<{ id: number; url: string } | null> { + if (!repository.nangoConnectionId) { + console.warn(`[mention-handler] Repository ${repository.githubFullName} has no Nango connection`); + return null; + } + + const [owner, repo] = repository.githubFullName.split('/'); + const appName = getAppName(); + const agentDescription = isKnownAgent(mentionedAgent) + ? KNOWN_AGENTS[mentionedAgent] + : 'Custom agent'; + + const body = `๐Ÿ‘‹ @${authorLogin}, I've received your request and am routing it to **@${mentionedAgent}** (${agentDescription}). + +The agent will respond shortly. You can track progress in this thread. + +_โ€” ${appName}_`; + + try { + const result = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + issueNumber, + body + ); + console.log(`[mention-handler] Posted acknowledgment comment: ${result.html_url}`); + return { id: result.id, url: result.html_url }; + } catch (error) { + console.error(`[mention-handler] Failed to post acknowledgment comment:`, error); + return null; + } +} + +/** + * Find a linked daemon that can handle this repository + */ +async function findAvailableDaemon(repository: Repository): Promise<{ id: string; userId: string } | null> { + // The daemon must belong to the repository owner + if (!repository.userId) { + console.warn(`[mention-handler] Repository ${repository.githubFullName} has no userId`); + return null; + } + + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + console.warn(`[mention-handler] No online daemon found for user ${repository.userId}`); + return null; + } + + return { id: onlineDaemon.id, userId: repository.userId }; +} + +/** + * Queue a spawn command for a linked daemon + */ +async function queueSpawnCommand( + daemonId: string, + agentName: string, + prompt: string, + metadata: { + mentionId: string; + repository: string; + issueNumber: number; + authorLogin: string; + } +): Promise { + const command = { + type: 'spawn_agent', + agentName, + cli: 'claude', // Default to Claude CLI + task: prompt, + metadata, + timestamp: new Date().toISOString(), + }; + + await db.linkedDaemons.queueMessage(daemonId, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: '__spawner__', + content: JSON.stringify(command), + metadata: { type: 'spawn_command' }, + timestamp: new Date().toISOString(), + }); + + console.log(`[mention-handler] Queued spawn command for daemon ${daemonId}`); +} + /** * Handle a mention record * * This function: * 1. Validates the mention is for a known agent - * 2. Routes to the appropriate agent handler - * 3. Spawns or messages the agent + * 2. Posts an acknowledgment comment + * 3. Finds a linked daemon + * 4. Queues a spawn command for the agent */ export async function handleMention(mention: CommentMention): Promise { console.log(`[mention-handler] Processing mention: @${mention.mentionedAgent} in ${mention.repository}`); @@ -49,38 +161,83 @@ export async function handleMention(mention: CommentMention): Promise { if (!isKnownAgent(mention.mentionedAgent)) { console.log(`[mention-handler] Unknown agent: @${mention.mentionedAgent}, checking workspace config`); // TODO: Check workspace configuration for custom agent names - // For now, ignore unknown agents + // For now, mark as ignored await db.commentMentions.markIgnored(mention.id); return; } - // Update status to processing + // Find the repository to get Nango connection + const repository = await db.repositories.findByFullName(mention.repository); + if (!repository) { + console.error(`[mention-handler] Repository not found: ${mention.repository}`); + await db.commentMentions.markIgnored(mention.id); + return; + } + + // Generate agent info const agentId = `mention-${mention.id}`; - const agentName = mention.mentionedAgent; + const agentName = `${mention.mentionedAgent}-${mention.issueOrPrNumber}`; + + // Update status to processing await db.commentMentions.markProcessing(mention.id, agentId, agentName); - // Build the prompt for the agent - const prompt = buildMentionPrompt(mention); - - console.log(`[mention-handler] Built prompt for @${mention.mentionedAgent}:`); - console.log(`[mention-handler] --- BEGIN PROMPT ---`); - console.log(prompt); - console.log(`[mention-handler] --- END PROMPT ---`); - - // TODO: Actually spawn or message the agent - // This will integrate with the workspace/agent system to: - // 1. Find an existing agent working on this PR/issue - // 2. Message them if they exist - // 3. Spawn a new agent if needed - // - // For now, we just log the intent - console.log(`[mention-handler] Would spawn/message agent @${mention.mentionedAgent}`); + // Step 1: Post acknowledgment comment + const ackResult = await postAcknowledgmentComment( + repository, + mention.issueOrPrNumber, + mention.mentionedAgent, + mention.authorLogin + ); + + if (!ackResult) { + console.warn(`[mention-handler] Could not post acknowledgment, continuing anyway`); + } + + // Step 2: Find a linked daemon + const daemon = await findAvailableDaemon(repository); + + if (!daemon) { + console.warn(`[mention-handler] No available daemon for ${mention.repository}`); + // Post a comment explaining the situation + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + mention.issueOrPrNumber, + `โš ๏ธ @${mention.authorLogin}, I couldn't find an available agent to handle this request. Please ensure you have a linked Agent Relay daemon running. + +You can set this up by running \`agent-relay cloud link\` on your development machine. + +_โ€” ${getAppName()}_` + ); + } catch (error) { + console.error(`[mention-handler] Failed to post error comment:`, error); + } + } + return; + } + + // Step 3: Build the prompt for the agent + const prompt = buildMentionPrompt(mention, repository); + + // Step 4: Queue spawn command for the daemon + await queueSpawnCommand(daemon.id, agentName, prompt, { + mentionId: mention.id, + repository: mention.repository, + issueNumber: mention.issueOrPrNumber, + authorLogin: mention.authorLogin, + }); + + console.log(`[mention-handler] Spawned agent @${mention.mentionedAgent} for mention ${mention.id}`); } /** * Build a prompt for handling a mention */ -function buildMentionPrompt(mention: CommentMention): string { +function buildMentionPrompt(mention: CommentMention, repository: Repository): string { const agentDescription = isKnownAgent(mention.mentionedAgent) ? KNOWN_AGENTS[mention.mentionedAgent] : 'Custom agent'; @@ -91,6 +248,25 @@ function buildMentionPrompt(mention: CommentMention): string { pr_review: 'GitHub PR review comment', }[mention.sourceType] || 'GitHub comment'; + const responseInstructions = ` +## Response Instructions + +When you complete your work: +1. Post a comment on GitHub to notify @${mention.authorLogin} +2. Reference specific files and line numbers when relevant +3. If you made code changes, push them and reference the commit + +Use the GitHub CLI (\`gh\`) to post your response: +\`\`\`bash +gh issue comment ${mention.issueOrPrNumber} --repo ${mention.repository} --body "Your response here @${mention.authorLogin}" +\`\`\` + +Or for PR comments: +\`\`\`bash +gh pr comment ${mention.issueOrPrNumber} --repo ${mention.repository} --body "Your response here @${mention.authorLogin}" +\`\`\` +`; + return ` # Agent Mention Task @@ -117,14 +293,15 @@ Analyze the comment and respond appropriately: 1. If a question was asked, provide a helpful answer 2. If a task was requested, either complete it or explain what's needed 3. If feedback was given, acknowledge it and act on it if needed -4. Reply to the comment on GitHub with your response + +${responseInstructions} ## Important - Be concise and helpful - If you need to make code changes, create a commit and push -- If the request is unclear, ask for clarification -- Reference specific files and line numbers when relevant +- If the request is unclear, ask for clarification in your response +- Always @mention ${mention.authorLogin} in your response so they get notified `.trim(); } @@ -136,22 +313,82 @@ Analyze the comment and respond appropriately: export async function handleIssueAssignment(assignment: IssueAssignment): Promise { console.log(`[mention-handler] Processing issue assignment: #${assignment.issueNumber} in ${assignment.repository}`); - // Build prompt for the issue - const prompt = buildIssuePrompt(assignment); + // Find the repository + const repository = await db.repositories.findByFullName(assignment.repository); + if (!repository) { + console.error(`[mention-handler] Repository not found: ${assignment.repository}`); + return; + } + + // Post acknowledgment comment + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + assignment.issueNumber, + `๐Ÿค– I've been assigned to work on this issue. I'll analyze the problem and get started. + +You can track my progress in this thread. I'll update you when I have a solution or need more information. + +_โ€” ${getAppName()}_` + ); + } catch (error) { + console.error(`[mention-handler] Failed to post assignment comment:`, error); + } + } - console.log(`[mention-handler] Built prompt for issue #${assignment.issueNumber}:`); - console.log(`[mention-handler] --- BEGIN PROMPT ---`); - console.log(prompt); - console.log(`[mention-handler] --- END PROMPT ---`); + // Find a linked daemon + const daemon = await findAvailableDaemon(repository); + + if (!daemon) { + console.warn(`[mention-handler] No available daemon for ${assignment.repository}`); + if (repository.nangoConnectionId) { + const [owner, repo] = repository.githubFullName.split('/'); + try { + await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + assignment.issueNumber, + `โš ๏ธ I couldn't start working on this issue because no Agent Relay daemon is available. + +Please ensure you have a linked daemon running by executing \`agent-relay cloud link\` on your development machine. + +_โ€” ${getAppName()}_` + ); + } catch (error) { + console.error(`[mention-handler] Failed to post error comment:`, error); + } + } + return; + } - // TODO: Spawn agent for the issue - console.log(`[mention-handler] Would spawn agent for issue #${assignment.issueNumber}`); + // Build prompt for the issue + const prompt = buildIssuePrompt(assignment, repository); + + // Queue spawn command + const agentName = `issue-${assignment.issueNumber}`; + await queueSpawnCommand(daemon.id, agentName, prompt, { + mentionId: assignment.id, + repository: assignment.repository, + issueNumber: assignment.issueNumber, + authorLogin: 'issue-author', // TODO: Get from issue + }); + + // Update assignment status and assign agent + await db.issueAssignments.assignAgent(assignment.id, agentName, agentName); + await db.issueAssignments.updateStatus(assignment.id, 'in_progress'); + + console.log(`[mention-handler] Spawned agent for issue #${assignment.issueNumber}`); } /** * Build a prompt for an issue assignment */ -function buildIssuePrompt(assignment: IssueAssignment): string { +function buildIssuePrompt(assignment: IssueAssignment, repository: Repository): string { const priorityNote = assignment.priority ? `\n**Priority:** ${assignment.priority.toUpperCase()}` : ''; @@ -183,6 +420,20 @@ ${assignment.issueBody || 'No description provided.'} 4. Create a PR with your changes 5. Link the PR to this issue +## Response Instructions + +Keep the issue updated with your progress: +\`\`\`bash +gh issue comment ${assignment.issueNumber} --repo ${assignment.repository} --body "Your update here" +\`\`\` + +When you create a PR: +\`\`\`bash +gh pr create --repo ${assignment.repository} --title "Fix #${assignment.issueNumber}: Brief description" --body "Fixes #${assignment.issueNumber} + +Description of changes..." +\`\`\` + ## Important - Start with a comment on the issue acknowledging you're working on it From a135df1e05a45ca8b946ddb9ed4c0d89d427d590 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 12:26:56 +0000 Subject: [PATCH 038/103] Add generic webhook system with GitHub, Linear, and Slack support - Create configurable webhook router supporting any source - Implement parsers for GitHub, Linear, and Slack events - Implement responders for posting back to each platform - Add rules engine with JSONPath conditions for event matching - Support native integrations (assign Linear issue to agent) - Add API endpoints: POST /api/webhooks/:source - Add comprehensive integrations design doc for future work --- docs/design/comprehensive-integrations.md | 238 +++++++++ src/cloud/api/generic-webhooks.ts | 145 ++++++ src/cloud/webhooks/index.ts | 42 ++ src/cloud/webhooks/parsers/github.ts | 249 +++++++++ src/cloud/webhooks/parsers/index.ts | 35 ++ src/cloud/webhooks/parsers/linear.ts | 275 ++++++++++ src/cloud/webhooks/parsers/slack.ts | 234 +++++++++ src/cloud/webhooks/responders/github.ts | 94 ++++ src/cloud/webhooks/responders/index.ts | 35 ++ src/cloud/webhooks/responders/linear.ts | 181 +++++++ src/cloud/webhooks/responders/slack.ts | 220 ++++++++ src/cloud/webhooks/router.ts | 592 ++++++++++++++++++++++ src/cloud/webhooks/rules-engine.ts | 291 +++++++++++ src/cloud/webhooks/types.ts | 198 ++++++++ 14 files changed, 2829 insertions(+) create mode 100644 docs/design/comprehensive-integrations.md create mode 100644 src/cloud/api/generic-webhooks.ts create mode 100644 src/cloud/webhooks/index.ts create mode 100644 src/cloud/webhooks/parsers/github.ts create mode 100644 src/cloud/webhooks/parsers/index.ts create mode 100644 src/cloud/webhooks/parsers/linear.ts create mode 100644 src/cloud/webhooks/parsers/slack.ts create mode 100644 src/cloud/webhooks/responders/github.ts create mode 100644 src/cloud/webhooks/responders/index.ts create mode 100644 src/cloud/webhooks/responders/linear.ts create mode 100644 src/cloud/webhooks/responders/slack.ts create mode 100644 src/cloud/webhooks/router.ts create mode 100644 src/cloud/webhooks/rules-engine.ts create mode 100644 src/cloud/webhooks/types.ts diff --git a/docs/design/comprehensive-integrations.md b/docs/design/comprehensive-integrations.md new file mode 100644 index 00000000..c5cf2a30 --- /dev/null +++ b/docs/design/comprehensive-integrations.md @@ -0,0 +1,238 @@ +# Comprehensive External Integrations + +This document outlines the plan for bidirectional integrations with external systems. + +## Current State + +We have a generic webhook system that can: +- Receive webhooks from GitHub, Linear, Slack +- Parse events into normalized format +- Match events to rules and spawn agents +- Send basic responses (comments) + +## Required Enhancements + +### 1. Linear Integration (Priority: High) + +**Inbound (Webhooks โ†’ Agents):** +- [x] Issue created +- [x] Issue assigned to agent +- [x] Comment with @mention +- [ ] Issue state changed +- [ ] Due date approaching +- [ ] Cycle started/ended + +**Outbound (Agents โ†’ Linear):** +- [x] Create comment on issue +- [ ] Update issue state +- [ ] Update issue assignee +- [ ] Add/remove labels +- [ ] Update issue description +- [ ] Create new issue +- [ ] Link issues + +**Agent Actions Needed:** +```typescript +// src/cloud/services/linear-integration.ts +interface LinearIntegration { + // Comments + createComment(issueId: string, body: string): Promise; + + // Issues + createIssue(teamId: string, data: CreateIssueInput): Promise; + updateIssue(issueId: string, data: UpdateIssueInput): Promise; + + // State management + setIssueState(issueId: string, stateId: string): Promise; + getAvailableStates(teamId: string): Promise; + + // Assignments + assignIssue(issueId: string, userId: string | null): Promise; + + // Labels + addLabel(issueId: string, labelId: string): Promise; + removeLabel(issueId: string, labelId: string): Promise; + + // Relations + linkIssues(issueId: string, relatedIssueId: string, type: RelationType): Promise; +} +``` + +### 2. Slack Integration (Priority: High) + +**Inbound:** +- [x] App mentioned +- [x] Direct message to bot +- [ ] Slash commands +- [ ] Interactive components (buttons, modals) +- [ ] File shared +- [ ] Scheduled message triggers + +**Outbound:** +- [x] Post message to channel +- [x] Reply in thread +- [ ] Update message +- [ ] Delete message +- [ ] Post with blocks (rich formatting) +- [ ] Upload file +- [ ] Create scheduled message +- [ ] Open modal/dialog + +**Agent Actions Needed:** +```typescript +// src/cloud/services/slack-integration.ts +interface SlackIntegration { + // Messages + postMessage(channel: string, text: string, options?: MessageOptions): Promise; + postBlocks(channel: string, blocks: Block[], text: string): Promise; + updateMessage(channel: string, ts: string, text: string): Promise; + replyInThread(channel: string, threadTs: string, text: string): Promise; + + // Reactions + addReaction(channel: string, ts: string, emoji: string): Promise; + + // Files + uploadFile(channels: string[], file: Buffer, filename: string): Promise; + + // Modals + openModal(triggerId: string, view: View): Promise; + updateModal(viewId: string, view: View): Promise; + + // Users + getUserInfo(userId: string): Promise; + lookupByEmail(email: string): Promise; +} +``` + +### 3. GitHub Integration (Priority: High) + +**Inbound:** +- [x] CI failure +- [x] Issue/PR comments with @mention +- [x] Issue created +- [ ] PR opened/updated +- [ ] PR review requested +- [ ] Release created +- [ ] Deployment status + +**Outbound:** +- [x] Post comment on issue/PR +- [ ] Create issue +- [ ] Create PR +- [ ] Request/dismiss review +- [ ] Merge PR +- [ ] Create/update check run +- [ ] Add labels +- [ ] Assign users +- [ ] Update PR description + +### 4. Jira Integration (Priority: Medium) + +**Inbound:** +- [ ] Issue created +- [ ] Issue assigned +- [ ] Issue transitioned +- [ ] Comment added + +**Outbound:** +- [ ] Create issue +- [ ] Update issue +- [ ] Transition issue +- [ ] Add comment +- [ ] Link issues + +### 5. GitLab Integration (Priority: Medium) + +Similar to GitHub with GitLab-specific events. + +### 6. Discord Integration (Priority: Low) + +Similar to Slack with Discord-specific features. + +## Implementation Plan + +### Phase 1: Core Linear Integration (This Week) +1. Create `LinearIntegration` service with full CRUD +2. Add Linear API key management in workspace settings +3. Create agent tools for Linear actions +4. Test bidirectional flow + +### Phase 2: Enhanced Slack Integration +1. Add slash command support +2. Add interactive components (buttons) +3. Add rich message formatting +4. Add modal support + +### Phase 3: Enhanced GitHub Integration +1. Add PR management +2. Add check run creation +3. Add deployment tracking + +### Phase 4: Additional Integrations +1. Jira +2. GitLab +3. Discord + +## Configuration + +### Workspace-Level Settings + +```typescript +interface WorkspaceIntegrations { + github?: { + enabled: boolean; + webhookSecret: string; + appInstallationId?: string; + }; + linear?: { + enabled: boolean; + apiKey: string; + webhookSecret: string; + teamId?: string; + }; + slack?: { + enabled: boolean; + botToken: string; + signingSecret: string; + appId?: string; + }; +} +``` + +### Agent Permissions + +```typescript +interface AgentIntegrationPermissions { + linear?: { + canComment: boolean; + canUpdateIssues: boolean; + canCreateIssues: boolean; + canAssign: boolean; + }; + slack?: { + canPost: boolean; + canUploadFiles: boolean; + channels?: string[]; // Allowed channels + }; + github?: { + canComment: boolean; + canMergePRs: boolean; + canCreateIssues: boolean; + }; +} +``` + +## Security Considerations + +1. **API Key Storage**: All API keys encrypted at rest +2. **Scope Limiting**: Agents only get permissions they need +3. **Audit Logging**: All external API calls logged +4. **Rate Limiting**: Respect external API rate limits +5. **Webhook Verification**: Always verify signatures + +## Testing Strategy + +1. Unit tests for parsers and responders +2. Integration tests with mock servers +3. E2E tests with sandbox accounts +4. Load testing for webhook handling diff --git a/src/cloud/api/generic-webhooks.ts b/src/cloud/api/generic-webhooks.ts new file mode 100644 index 00000000..bd68159a --- /dev/null +++ b/src/cloud/api/generic-webhooks.ts @@ -0,0 +1,145 @@ +/** + * Generic Webhooks API Routes + * + * Provides endpoints for receiving webhooks from any configured source. + * Routes: POST /api/webhooks/:source + */ + +import { Router, Request, Response } from 'express'; +import { processWebhook, getWebhookConfig } from '../webhooks/index.js'; + +export const genericWebhooksRouter = Router(); + +/** + * POST /api/webhooks/:source + * Receive a webhook from any configured source + */ +genericWebhooksRouter.post('/:source', async (req: Request, res: Response) => { + const { source } = req.params; + + // For Slack URL verification challenge + if (source === 'slack' && req.body?.type === 'url_verification') { + return res.json({ challenge: req.body.challenge }); + } + + try { + // Get raw body for signature verification + // Note: This requires express.raw() middleware or similar + const rawBody = typeof req.body === 'string' + ? req.body + : JSON.stringify(req.body); + + const result = await processWebhook( + source, + rawBody, + req.headers as Record + ); + + if (!result.success && result.responses[0]?.error === 'Invalid signature') { + return res.status(401).json({ error: 'Invalid signature' }); + } + + if (!result.success && result.responses[0]?.error?.includes('Unknown webhook source')) { + return res.status(404).json({ error: `Unknown webhook source: ${source}` }); + } + + console.log(`[webhooks] Processed ${source} webhook: ${result.eventType} (${result.matchedRules.length} rules matched)`); + + res.json({ + success: result.success, + eventId: result.eventId, + eventType: result.eventType, + matchedRules: result.matchedRules, + actionsExecuted: result.actions.length, + }); + } catch (error) { + console.error(`[webhooks] Error processing ${source} webhook:`, error); + res.status(500).json({ + error: error instanceof Error ? error.message : 'Unknown error', + }); + } +}); + +/** + * GET /api/webhooks/config + * Get the current webhook configuration (for debugging) + */ +genericWebhooksRouter.get('/config', (_req: Request, res: Response) => { + const config = getWebhookConfig(); + + res.json({ + sources: Object.entries(config.sources).map(([id, source]) => ({ + id, + name: source.name, + enabled: source.enabled, + parser: source.parser, + responder: source.responder, + })), + rules: config.rules.map(rule => ({ + id: rule.id, + name: rule.name, + enabled: rule.enabled, + source: rule.source, + eventType: rule.eventType, + condition: rule.condition, + actionType: rule.action.type, + priority: rule.priority, + })), + }); +}); + +/** + * GET /api/webhooks/sources + * List available webhook sources with their setup instructions + */ +genericWebhooksRouter.get('/sources', (_req: Request, res: Response) => { + const baseUrl = process.env.PUBLIC_URL || 'https://your-domain.com'; + + res.json({ + sources: [ + { + id: 'github', + name: 'GitHub', + webhookUrl: `${baseUrl}/api/webhooks/github`, + setupInstructions: [ + '1. Go to your repository Settings > Webhooks > Add webhook', + `2. Set Payload URL to: ${baseUrl}/api/webhooks/github`, + '3. Set Content type to: application/json', + '4. Set Secret to your GITHUB_WEBHOOK_SECRET value', + '5. Select events: Check runs, Issues, Issue comments, Pull request review comments', + ], + requiredEnvVars: ['GITHUB_WEBHOOK_SECRET'], + events: ['check_run', 'issues', 'issue_comment', 'pull_request_review_comment'], + }, + { + id: 'linear', + name: 'Linear', + webhookUrl: `${baseUrl}/api/webhooks/linear`, + setupInstructions: [ + '1. Go to Linear Settings > API > Webhooks', + '2. Create a new webhook', + `3. Set URL to: ${baseUrl}/api/webhooks/linear`, + '4. Copy the signing secret to LINEAR_WEBHOOK_SECRET', + '5. Select events: Issues, Comments', + ], + requiredEnvVars: ['LINEAR_WEBHOOK_SECRET', 'LINEAR_API_KEY'], + events: ['Issue', 'Comment', 'IssueLabel'], + }, + { + id: 'slack', + name: 'Slack', + webhookUrl: `${baseUrl}/api/webhooks/slack`, + setupInstructions: [ + '1. Create a Slack App at api.slack.com/apps', + '2. Enable Event Subscriptions', + `3. Set Request URL to: ${baseUrl}/api/webhooks/slack`, + '4. Subscribe to bot events: app_mention, message.channels', + '5. Copy Signing Secret to SLACK_SIGNING_SECRET', + '6. Install the app to your workspace', + ], + requiredEnvVars: ['SLACK_SIGNING_SECRET', 'SLACK_BOT_TOKEN'], + events: ['app_mention', 'message', 'reaction_added'], + }, + ], + }); +}); diff --git a/src/cloud/webhooks/index.ts b/src/cloud/webhooks/index.ts new file mode 100644 index 00000000..de883ab4 --- /dev/null +++ b/src/cloud/webhooks/index.ts @@ -0,0 +1,42 @@ +/** + * Generic Webhook System + * + * A configurable webhook system that can handle events from any source + * (GitHub, GitLab, Linear, Slack, Jira, etc.) and route them to agents. + * + * Components: + * - Parsers: Transform source-specific payloads to normalized events + * - Responders: Send responses back to source systems + * - Rules Engine: Match events to actions based on configuration + * - Router: Orchestrates the full webhook processing pipeline + */ + +// Types +export * from './types.js'; + +// Parsers +export { getParser, registerParser, parsers } from './parsers/index.js'; +export { githubParser } from './parsers/github.js'; +export { linearParser } from './parsers/linear.js'; +export { slackParser } from './parsers/slack.js'; + +// Responders +export { getResponder, registerResponder, responders } from './responders/index.js'; +export { githubResponder } from './responders/github.js'; +export { linearResponder } from './responders/linear.js'; +export { slackResponder, formatSlackBlocks } from './responders/slack.js'; + +// Rules Engine +export { + matchesRule, + findMatchingRules, + resolveActionTemplate, + defaultRules, +} from './rules-engine.js'; + +// Router +export { + processWebhook, + getWebhookConfig, + defaultSources, +} from './router.js'; diff --git a/src/cloud/webhooks/parsers/github.ts b/src/cloud/webhooks/parsers/github.ts new file mode 100644 index 00000000..3fab490c --- /dev/null +++ b/src/cloud/webhooks/parsers/github.ts @@ -0,0 +1,249 @@ +/** + * GitHub Webhook Parser + * + * Transforms GitHub webhook payloads into normalized events. + */ + +import type { NormalizedEvent, WebhookParser } from '../types.js'; + +/** + * Extract @mentions from text + */ +function extractMentions(text: string | null | undefined): string[] { + if (!text) return []; + const mentionPattern = /@([a-zA-Z][a-zA-Z0-9_-]*)/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + return [...new Set(mentions)]; +} + +/** + * Map GitHub priority labels to normalized priority + */ +function extractPriority(labels: Array<{ name: string }>): 'critical' | 'high' | 'medium' | 'low' | undefined { + const labelNames = labels.map(l => l.name.toLowerCase()); + if (labelNames.includes('critical') || labelNames.includes('p0')) return 'critical'; + if (labelNames.includes('high') || labelNames.includes('p1')) return 'high'; + if (labelNames.includes('medium') || labelNames.includes('p2')) return 'medium'; + if (labelNames.includes('low') || labelNames.includes('p3')) return 'low'; + return undefined; +} + +export const githubParser: WebhookParser = { + id: 'github', + + parse(payload: unknown, headers: Record): NormalizedEvent[] { + const eventType = headers['x-github-event'] as string; + const deliveryId = headers['x-github-delivery'] as string; + const data = payload as Record; + + const events: NormalizedEvent[] = []; + const repository = data.repository as Record | undefined; + const sender = data.sender as Record | undefined; + + const baseEvent: Partial = { + id: deliveryId || `github-${Date.now()}`, + source: 'github', + timestamp: new Date(), + actor: { + id: String(sender?.id || 'unknown'), + name: String(sender?.login || 'unknown'), + }, + context: { + name: String(repository?.full_name || 'unknown'), + url: String(repository?.html_url || ''), + }, + labels: [], + mentions: [], + metadata: {}, + rawPayload: payload, + }; + + switch (eventType) { + case 'check_run': { + const checkRun = data.check_run as Record; + const action = data.action as string; + const conclusion = checkRun?.conclusion as string | null; + const pullRequests = checkRun?.pull_requests as Array> | undefined; + + if (action === 'completed' && conclusion === 'failure' && pullRequests?.length) { + const pr = pullRequests[0]; + const output = checkRun.output as Record | undefined; + const annotations = output?.annotations as Array> | undefined; + + events.push({ + ...baseEvent, + type: 'ci_failure', + item: { + type: 'check', + id: String(checkRun.id), + number: pr.number as number, + title: String(checkRun.name), + body: String(output?.summary || ''), + url: String(checkRun.html_url || ''), + state: 'failure', + }, + metadata: { + checkName: checkRun.name, + conclusion, + branch: (pr.head as Record)?.ref, + commitSha: (pr.head as Record)?.sha, + failureTitle: output?.title, + failureSummary: output?.summary, + failureDetails: output?.text, + annotations: annotations?.map(a => ({ + path: a.path, + startLine: a.start_line, + endLine: a.end_line, + level: a.annotation_level, + message: a.message, + })), + }, + } as NormalizedEvent); + } + break; + } + + case 'issues': { + const issue = data.issue as Record; + const action = data.action as string; + const labels = (issue?.labels || []) as Array<{ name: string }>; + + if (action === 'opened' || action === 'labeled') { + events.push({ + ...baseEvent, + type: 'issue_created', + item: { + type: 'issue', + id: String(issue.id), + number: issue.number as number, + title: String(issue.title), + body: String(issue.body || ''), + url: String(issue.html_url), + state: String(issue.state), + }, + labels: labels.map(l => l.name), + priority: extractPriority(labels), + mentions: extractMentions(issue.body as string), + metadata: { + action, + assignees: (issue.assignees as Array> || []).map(a => a.login), + }, + } as NormalizedEvent); + } + break; + } + + case 'issue_comment': { + const issue = data.issue as Record; + const comment = data.comment as Record; + const action = data.action as string; + const isPR = !!(issue?.pull_request); + + if (action === 'created') { + const mentions = extractMentions(comment.body as string); + if (mentions.length > 0) { + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: isPR ? 'pull_request' : 'issue', + id: String(comment.id), + number: issue.number as number, + title: String(issue.title), + body: String(comment.body), + url: String(comment.html_url), + }, + mentions, + metadata: { + commentId: comment.id, + commentUrl: comment.html_url, + isPR, + }, + } as NormalizedEvent); + } + } + break; + } + + case 'pull_request_review_comment': { + const pr = data.pull_request as Record; + const comment = data.comment as Record; + const action = data.action as string; + + if (action === 'created') { + const mentions = extractMentions(comment.body as string); + if (mentions.length > 0) { + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: 'pull_request', + id: String(comment.id), + number: pr.number as number, + title: String(pr.title), + body: String(comment.body), + url: String(comment.html_url), + }, + mentions, + metadata: { + commentId: comment.id, + commentUrl: comment.html_url, + filePath: comment.path, + line: comment.line, + isPR: true, + isReviewComment: true, + }, + } as NormalizedEvent); + } + } + break; + } + + case 'pull_request': { + const pr = data.pull_request as Record; + const action = data.action as string; + const labels = (pr?.labels || []) as Array<{ name: string }>; + + if (action === 'opened') { + events.push({ + ...baseEvent, + type: 'pr_opened', + item: { + type: 'pull_request', + id: String(pr.id), + number: pr.number as number, + title: String(pr.title), + body: String(pr.body || ''), + url: String(pr.html_url), + state: String(pr.state), + }, + labels: labels.map(l => l.name), + priority: extractPriority(labels), + mentions: extractMentions(pr.body as string), + metadata: { + action, + head: (pr.head as Record)?.ref, + base: (pr.base as Record)?.ref, + draft: pr.draft, + }, + } as NormalizedEvent); + } + break; + } + + default: + // Unknown event type - create a generic event + events.push({ + ...baseEvent, + type: `github.${eventType}`, + metadata: { action: data.action }, + } as NormalizedEvent); + } + + return events; + }, +}; diff --git a/src/cloud/webhooks/parsers/index.ts b/src/cloud/webhooks/parsers/index.ts new file mode 100644 index 00000000..1ca4bf17 --- /dev/null +++ b/src/cloud/webhooks/parsers/index.ts @@ -0,0 +1,35 @@ +/** + * Webhook Parsers Index + * + * Registry of all available parsers. + */ + +import type { WebhookParser } from '../types.js'; +import { githubParser } from './github.js'; +import { linearParser } from './linear.js'; +import { slackParser } from './slack.js'; + +/** + * Registry of all available parsers + */ +export const parsers: Record = { + github: githubParser, + linear: linearParser, + slack: slackParser, +}; + +/** + * Get a parser by ID + */ +export function getParser(id: string): WebhookParser | undefined { + return parsers[id]; +} + +/** + * Register a custom parser + */ +export function registerParser(parser: WebhookParser): void { + parsers[parser.id] = parser; +} + +export { githubParser, linearParser, slackParser }; diff --git a/src/cloud/webhooks/parsers/linear.ts b/src/cloud/webhooks/parsers/linear.ts new file mode 100644 index 00000000..03b8908d --- /dev/null +++ b/src/cloud/webhooks/parsers/linear.ts @@ -0,0 +1,275 @@ +/** + * Linear Webhook Parser + * + * Transforms Linear webhook payloads into normalized events. + * Linear webhooks: https://developers.linear.app/docs/graphql/webhooks + */ + +import type { NormalizedEvent, WebhookParser } from '../types.js'; + +/** + * Extract @mentions from text (Linear uses @username format) + */ +function extractMentions(text: string | null | undefined): string[] { + if (!text) return []; + const mentionPattern = /@([a-zA-Z][a-zA-Z0-9_-]*)/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + return [...new Set(mentions)]; +} + +/** + * Map Linear priority to normalized priority + * Linear: 0 = No priority, 1 = Urgent, 2 = High, 3 = Normal, 4 = Low + */ +function mapPriority(priority: number | undefined): 'critical' | 'high' | 'medium' | 'low' | undefined { + switch (priority) { + case 1: return 'critical'; + case 2: return 'high'; + case 3: return 'medium'; + case 4: return 'low'; + default: return undefined; + } +} + +export const linearParser: WebhookParser = { + id: 'linear', + + parse(payload: unknown): NormalizedEvent[] { + const data = payload as Record; + const events: NormalizedEvent[] = []; + + const action = data.action as string; // create, update, remove + const type = data.type as string; // Issue, Comment, Project, etc. + const webhookData = data.data as Record | undefined; + const webhookId = data.webhookId as string | undefined; + const createdAt = data.createdAt as string | undefined; + + if (!webhookData) return events; + + const baseEvent: Partial = { + id: webhookId || `linear-${Date.now()}`, + source: 'linear', + timestamp: createdAt ? new Date(createdAt) : new Date(), + actor: { + id: 'unknown', + name: 'unknown', + }, + context: { + name: 'unknown', + }, + labels: [], + mentions: [], + metadata: {}, + rawPayload: payload, + }; + + // Extract actor from various fields + const creator = webhookData.creator as Record | undefined; + const user = webhookData.user as Record | undefined; + const actor = creator || user; + if (actor) { + baseEvent.actor = { + id: String(actor.id || 'unknown'), + name: String(actor.name || actor.email || 'unknown'), + email: actor.email as string | undefined, + }; + } + + // Extract team/project context + const team = webhookData.team as Record | undefined; + const project = webhookData.project as Record | undefined; + if (team) { + baseEvent.context = { + name: String(team.key || team.name || 'unknown'), + url: `https://linear.app/team/${team.key}`, + }; + } else if (project) { + baseEvent.context = { + name: String(project.name || 'unknown'), + url: project.url as string | undefined, + }; + } + + switch (type) { + case 'Issue': { + const issue = webhookData; + const labels = (issue.labels as Array> || []); + const labelNames = labels.map(l => String(l.name)); + const assignee = issue.assignee as Record | undefined; + + if (action === 'create') { + events.push({ + ...baseEvent, + type: 'issue_created', + item: { + type: 'ticket', + id: String(issue.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(issue.description || ''), + url: String(issue.url || ''), + state: String((issue.state as Record)?.name || issue.state || 'unknown'), + }, + labels: labelNames, + priority: mapPriority(issue.priority as number | undefined), + mentions: extractMentions(issue.description as string), + metadata: { + action, + identifier: issue.identifier, // e.g., "ENG-123" + estimate: issue.estimate, + dueDate: issue.dueDate, + assignee: assignee?.name, + assigneeEmail: assignee?.email, + cycle: (issue.cycle as Record)?.name, + }, + } as NormalizedEvent); + } else if (action === 'update') { + // Check for assignment changes + const updatedFrom = data.updatedFrom as Record | undefined; + const wasAssigned = updatedFrom?.assigneeId !== undefined && + !updatedFrom?.assigneeId && + assignee?.id; + + // Check if assigned to an agent (name matches agent pattern) + const assigneeName = String(assignee?.name || '').toLowerCase(); + const agentPatterns = ['agent', 'bot', 'lead', 'developer', 'reviewer', 'debugger', 'ci-fix', 'test', 'docs', 'refactor']; + const isAgentAssignment = wasAssigned && agentPatterns.some(p => assigneeName.includes(p)); + + if (isAgentAssignment) { + // Extract the agent type from the assignee name + const matchedAgent = agentPatterns.find(p => assigneeName.includes(p)) || 'developer'; + + events.push({ + ...baseEvent, + type: 'issue_assigned', + item: { + type: 'ticket', + id: String(issue.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(issue.description || ''), + url: String(issue.url || ''), + state: String((issue.state as Record)?.name || issue.state || 'unknown'), + }, + labels: labelNames, + priority: mapPriority(issue.priority as number | undefined), + mentions: [matchedAgent], // The assigned agent type + metadata: { + action: 'assigned', + identifier: issue.identifier, + assignee: assignee?.name, + assigneeEmail: assignee?.email, + previousAssignee: updatedFrom?.assigneeId, + }, + } as NormalizedEvent); + } else { + // Regular update event + events.push({ + ...baseEvent, + type: 'issue_updated', + item: { + type: 'ticket', + id: String(issue.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(issue.description || ''), + url: String(issue.url || ''), + state: String((issue.state as Record)?.name || issue.state || 'unknown'), + }, + labels: labelNames, + priority: mapPriority(issue.priority as number | undefined), + metadata: { + action, + identifier: issue.identifier, + updatedFrom, + }, + } as NormalizedEvent); + } + } + break; + } + + case 'Comment': { + const comment = webhookData; + const issue = comment.issue as Record | undefined; + + if (action === 'create' && issue) { + const mentions = extractMentions(comment.body as string); + + events.push({ + ...baseEvent, + type: mentions.length > 0 ? 'mention' : 'comment_created', + item: { + type: 'comment', + id: String(comment.id), + number: issue.number as number | undefined, + title: String(issue.title || ''), + body: String(comment.body || ''), + url: String(comment.url || issue.url || ''), + }, + mentions, + metadata: { + action, + issueId: issue.id, + issueIdentifier: issue.identifier, + parentCommentId: (comment.parent as Record)?.id, + }, + } as NormalizedEvent); + } + break; + } + + case 'Project': { + const project = webhookData; + + if (action === 'create') { + events.push({ + ...baseEvent, + type: 'project_created', + context: { + name: String(project.name || 'unknown'), + url: String(project.url || ''), + }, + metadata: { + action, + projectId: project.id, + description: project.description, + targetDate: project.targetDate, + }, + } as NormalizedEvent); + } + break; + } + + case 'IssueLabel': { + // Label added/removed from issue + const label = webhookData; + events.push({ + ...baseEvent, + type: 'label_change', + labels: [String(label.name || '')], + metadata: { + action, + labelId: label.id, + color: label.color, + }, + } as NormalizedEvent); + break; + } + + default: + // Unknown type - create generic event + events.push({ + ...baseEvent, + type: `linear.${type?.toLowerCase() || 'unknown'}.${action || 'unknown'}`, + metadata: { action, type }, + } as NormalizedEvent); + } + + return events; + }, +}; diff --git a/src/cloud/webhooks/parsers/slack.ts b/src/cloud/webhooks/parsers/slack.ts new file mode 100644 index 00000000..11ae3ec5 --- /dev/null +++ b/src/cloud/webhooks/parsers/slack.ts @@ -0,0 +1,234 @@ +/** + * Slack Webhook Parser + * + * Transforms Slack Events API payloads into normalized events. + * https://api.slack.com/apis/connections/events-api + */ + +import type { NormalizedEvent, WebhookParser } from '../types.js'; + +/** + * Extract user mentions from Slack message text + * Slack format: <@U12345678> or <@U12345678|username> + */ +function extractSlackMentions(text: string | null | undefined): string[] { + if (!text) return []; + const mentionPattern = /<@([A-Z0-9]+)(?:\|([^>]+))?>/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + // Prefer display name if available, otherwise use ID + mentions.push(match[2] || match[1]); + } + return [...new Set(mentions)]; +} + +/** + * Extract agent mentions from text (our custom @agent-name format) + */ +function extractAgentMentions(text: string | null | undefined): string[] { + if (!text) return []; + // Match @agent-name patterns that aren't Slack user mentions + const mentionPattern = /(?])/g; + const mentions: string[] = []; + let match; + while ((match = mentionPattern.exec(text)) !== null) { + mentions.push(match[1].toLowerCase()); + } + return [...new Set(mentions)]; +} + +/** + * Clean Slack message text (remove user mention formatting) + */ +function cleanSlackText(text: string | null | undefined): string { + if (!text) return ''; + // Replace <@U12345678|username> with @username + return text.replace(/<@[A-Z0-9]+\|([^>]+)>/g, '@$1') + .replace(/<@[A-Z0-9]+>/g, '@user') + // Replace with text + .replace(/<([^|>]+)\|([^>]+)>/g, '$2') + // Replace with URL + .replace(/<([^>]+)>/g, '$1'); +} + +export const slackParser: WebhookParser = { + id: 'slack', + + parse(payload: unknown): NormalizedEvent[] { + const data = payload as Record; + const events: NormalizedEvent[] = []; + + // Handle URL verification challenge + if (data.type === 'url_verification') { + // This is handled separately in the router + return []; + } + + // Events API wrapper + if (data.type !== 'event_callback') { + return []; + } + + const event = data.event as Record | undefined; + if (!event) return []; + + const eventType = event.type as string; + const teamId = data.team_id as string || 'unknown'; + const eventId = data.event_id as string || `slack-${Date.now()}`; + const eventTime = data.event_time as number | undefined; + + const baseEvent: Partial = { + id: eventId, + source: 'slack', + timestamp: eventTime ? new Date(eventTime * 1000) : new Date(), + actor: { + id: String(event.user || 'unknown'), + name: String(event.user || 'unknown'), + }, + context: { + name: teamId, + }, + labels: [], + mentions: [], + metadata: { + teamId, + channelId: event.channel, + channelType: event.channel_type, + }, + rawPayload: payload, + }; + + switch (eventType) { + case 'app_mention': { + // Bot was mentioned in a channel + const text = event.text as string; + const agentMentions = extractAgentMentions(text); + + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: 'message', + id: String(event.ts), + body: cleanSlackText(text), + }, + mentions: agentMentions.length > 0 ? agentMentions : ['lead'], // Default to lead if no specific agent + metadata: { + ...baseEvent.metadata, + ts: event.ts, + threadTs: event.thread_ts, + userMentions: extractSlackMentions(text), + }, + } as NormalizedEvent); + break; + } + + case 'message': { + // Regular message in channel + const text = event.text as string; + const subtype = event.subtype as string | undefined; + + // Ignore bot messages, message changes, etc. + if (subtype && subtype !== 'thread_broadcast') { + break; + } + + const agentMentions = extractAgentMentions(text); + + // Only create event if there are agent mentions + if (agentMentions.length > 0) { + events.push({ + ...baseEvent, + type: 'mention', + item: { + type: 'message', + id: String(event.ts), + body: cleanSlackText(text), + }, + mentions: agentMentions, + metadata: { + ...baseEvent.metadata, + ts: event.ts, + threadTs: event.thread_ts, + userMentions: extractSlackMentions(text), + }, + } as NormalizedEvent); + } + break; + } + + case 'reaction_added': { + // Reaction added to a message + const reaction = event.reaction as string; + const item = event.item as Record; + + events.push({ + ...baseEvent, + type: 'reaction_added', + item: { + type: 'message', + id: String(item?.ts || 'unknown'), + }, + labels: [reaction], + metadata: { + ...baseEvent.metadata, + reaction, + itemType: item?.type, + itemChannel: item?.channel, + itemTs: item?.ts, + }, + } as NormalizedEvent); + break; + } + + case 'channel_created': { + const channel = event.channel as Record; + + events.push({ + ...baseEvent, + type: 'channel_created', + context: { + name: String(channel?.name || 'unknown'), + }, + metadata: { + ...baseEvent.metadata, + channelId: channel?.id, + channelName: channel?.name, + creator: channel?.creator, + }, + } as NormalizedEvent); + break; + } + + case 'member_joined_channel': { + events.push({ + ...baseEvent, + type: 'member_joined', + actor: { + id: String(event.user), + name: String(event.user), + }, + metadata: { + ...baseEvent.metadata, + inviter: event.inviter, + }, + } as NormalizedEvent); + break; + } + + default: + // Unknown event type + events.push({ + ...baseEvent, + type: `slack.${eventType}`, + metadata: { + ...baseEvent.metadata, + subtype: event.subtype, + }, + } as NormalizedEvent); + } + + return events; + }, +}; diff --git a/src/cloud/webhooks/responders/github.ts b/src/cloud/webhooks/responders/github.ts new file mode 100644 index 00000000..e718045b --- /dev/null +++ b/src/cloud/webhooks/responders/github.ts @@ -0,0 +1,94 @@ +/** + * GitHub Responder + * + * Sends responses back to GitHub via the GitHub App API. + */ + +import type { NormalizedEvent, WebhookResponder, WebhookResponse } from '../types.js'; +import { nangoService } from '../../services/nango.js'; +import { db } from '../../db/index.js'; + +export const githubResponder: WebhookResponder = { + id: 'github', + + async respond( + event: NormalizedEvent, + response: WebhookResponse, + _config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }> { + try { + // Get repository info from event context + const repoFullName = event.context.name; + const [owner, repo] = repoFullName.split('/'); + + if (!owner || !repo) { + return { success: false, error: `Invalid repository name: ${repoFullName}` }; + } + + // Find the repository in our database to get the Nango connection + const repository = await db.repositories.findByFullName(repoFullName); + if (!repository?.nangoConnectionId) { + return { + success: false, + error: `Repository ${repoFullName} not found or has no Nango connection`, + }; + } + + switch (response.type) { + case 'comment': { + // Post a comment on an issue or PR + const issueNumber = typeof response.target === 'number' + ? response.target + : parseInt(String(response.target), 10); + + if (isNaN(issueNumber)) { + return { success: false, error: `Invalid issue number: ${response.target}` }; + } + + const result = await nangoService.addGithubIssueComment( + repository.nangoConnectionId, + owner, + repo, + issueNumber, + response.body + ); + + return { + success: true, + id: String(result.id), + url: result.html_url, + }; + } + + case 'reaction': { + // Add a reaction to a comment or issue + // Note: This would need to be added to NangoService + return { + success: false, + error: 'Reactions not yet implemented for GitHub', + }; + } + + case 'status': { + // Update a check run status + // Note: This would need to be added to NangoService + return { + success: false, + error: 'Status updates not yet implemented for GitHub', + }; + } + + default: + return { + success: false, + error: `Unknown response type: ${response.type}`, + }; + } + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }, +}; diff --git a/src/cloud/webhooks/responders/index.ts b/src/cloud/webhooks/responders/index.ts new file mode 100644 index 00000000..89b0f4c0 --- /dev/null +++ b/src/cloud/webhooks/responders/index.ts @@ -0,0 +1,35 @@ +/** + * Webhook Responders Index + * + * Registry of all available responders. + */ + +import type { WebhookResponder } from '../types.js'; +import { githubResponder } from './github.js'; +import { linearResponder } from './linear.js'; +import { slackResponder, formatSlackBlocks } from './slack.js'; + +/** + * Registry of all available responders + */ +export const responders: Record = { + github: githubResponder, + linear: linearResponder, + slack: slackResponder, +}; + +/** + * Get a responder by ID + */ +export function getResponder(id: string): WebhookResponder | undefined { + return responders[id]; +} + +/** + * Register a custom responder + */ +export function registerResponder(responder: WebhookResponder): void { + responders[responder.id] = responder; +} + +export { githubResponder, linearResponder, slackResponder, formatSlackBlocks }; diff --git a/src/cloud/webhooks/responders/linear.ts b/src/cloud/webhooks/responders/linear.ts new file mode 100644 index 00000000..a64936ed --- /dev/null +++ b/src/cloud/webhooks/responders/linear.ts @@ -0,0 +1,181 @@ +/** + * Linear Responder + * + * Sends responses back to Linear via their GraphQL API. + * https://developers.linear.app/docs/graphql/working-with-the-graphql-api + */ + +import type { NormalizedEvent, WebhookResponder, WebhookResponse } from '../types.js'; + +/** + * Execute a Linear GraphQL mutation + */ +async function linearGraphQL( + apiKey: string, + query: string, + variables: Record +): Promise<{ data?: Record; errors?: Array<{ message: string }> }> { + const response = await fetch('https://api.linear.app/graphql', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': apiKey, + }, + body: JSON.stringify({ query, variables }), + }); + + return response.json() as Promise<{ data?: Record; errors?: Array<{ message: string }> }>; +} + +export const linearResponder: WebhookResponder = { + id: 'linear', + + async respond( + event: NormalizedEvent, + response: WebhookResponse, + config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }> { + const apiKey = config?.apiKey as string || process.env.LINEAR_API_KEY; + + if (!apiKey) { + return { + success: false, + error: 'Linear API key not configured', + }; + } + + try { + switch (response.type) { + case 'comment': { + // Create a comment on an issue + const issueId = String(response.target); + + const mutation = ` + mutation CreateComment($issueId: String!, $body: String!) { + commentCreate(input: { issueId: $issueId, body: $body }) { + success + comment { + id + url + } + } + } + `; + + const result = await linearGraphQL(apiKey, mutation, { + issueId, + body: response.body, + }); + + if (result.errors?.length) { + return { + success: false, + error: result.errors.map(e => e.message).join(', '), + }; + } + + const commentCreate = result.data?.commentCreate as Record; + const comment = commentCreate?.comment as Record; + + return { + success: !!commentCreate?.success, + id: comment?.id as string, + url: comment?.url as string, + }; + } + + case 'reaction': { + // Add a reaction/emoji to a comment + const commentId = String(response.target); + const emoji = response.metadata?.emoji as string || '๐Ÿ‘'; + + const mutation = ` + mutation CreateReaction($commentId: String!, $emoji: String!) { + reactionCreate(input: { commentId: $commentId, emoji: $emoji }) { + success + reaction { + id + } + } + } + `; + + const result = await linearGraphQL(apiKey, mutation, { + commentId, + emoji, + }); + + if (result.errors?.length) { + return { + success: false, + error: result.errors.map(e => e.message).join(', '), + }; + } + + const reactionCreate = result.data?.reactionCreate as Record; + return { + success: !!reactionCreate?.success, + id: (reactionCreate?.reaction as Record)?.id as string, + }; + } + + case 'status': { + // Update issue state + const issueId = String(response.target); + const stateId = response.metadata?.stateId as string; + + if (!stateId) { + return { + success: false, + error: 'State ID required for status update', + }; + } + + const mutation = ` + mutation UpdateIssue($issueId: String!, $stateId: String!) { + issueUpdate(id: $issueId, input: { stateId: $stateId }) { + success + issue { + id + url + } + } + } + `; + + const result = await linearGraphQL(apiKey, mutation, { + issueId, + stateId, + }); + + if (result.errors?.length) { + return { + success: false, + error: result.errors.map(e => e.message).join(', '), + }; + } + + const issueUpdate = result.data?.issueUpdate as Record; + const issue = issueUpdate?.issue as Record; + + return { + success: !!issueUpdate?.success, + id: issue?.id as string, + url: issue?.url as string, + }; + } + + default: + return { + success: false, + error: `Unknown response type: ${response.type}`, + }; + } + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }, +}; diff --git a/src/cloud/webhooks/responders/slack.ts b/src/cloud/webhooks/responders/slack.ts new file mode 100644 index 00000000..20560ced --- /dev/null +++ b/src/cloud/webhooks/responders/slack.ts @@ -0,0 +1,220 @@ +/** + * Slack Responder + * + * Sends responses back to Slack via their Web API. + * https://api.slack.com/methods + */ + +import type { NormalizedEvent, WebhookResponder, WebhookResponse } from '../types.js'; + +/** + * Call a Slack Web API method + */ +async function slackAPI( + token: string, + method: string, + body: Record +): Promise<{ ok: boolean; error?: string; ts?: string; channel?: string; message?: Record }> { + const response = await fetch(`https://slack.com/api/${method}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json; charset=utf-8', + 'Authorization': `Bearer ${token}`, + }, + body: JSON.stringify(body), + }); + + return response.json() as Promise<{ ok: boolean; error?: string; ts?: string; channel?: string; message?: Record }>; +} + +export const slackResponder: WebhookResponder = { + id: 'slack', + + async respond( + event: NormalizedEvent, + response: WebhookResponse, + config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }> { + const botToken = config?.botToken as string || process.env.SLACK_BOT_TOKEN; + + if (!botToken) { + return { + success: false, + error: 'Slack bot token not configured', + }; + } + + try { + // Get channel from event metadata or response target + const channelId = response.metadata?.channel as string + || event.metadata?.channelId as string + || String(response.target); + + if (!channelId) { + return { + success: false, + error: 'Channel ID required', + }; + } + + switch (response.type) { + case 'message': { + // Post a message to a channel + const threadTs = response.metadata?.threadTs as string + || event.metadata?.threadTs as string + || event.metadata?.ts as string; + + const result = await slackAPI(botToken, 'chat.postMessage', { + channel: channelId, + text: response.body, + thread_ts: threadTs, // Reply in thread if available + unfurl_links: false, + unfurl_media: false, + }); + + if (!result.ok) { + return { + success: false, + error: result.error || 'Failed to post message', + }; + } + + return { + success: true, + id: result.ts, + // Construct Slack message URL + url: `https://slack.com/archives/${channelId}/p${result.ts?.replace('.', '')}`, + }; + } + + case 'comment': { + // Same as message, but explicitly in a thread + const threadTs = String(response.target); + + const result = await slackAPI(botToken, 'chat.postMessage', { + channel: channelId, + text: response.body, + thread_ts: threadTs, + reply_broadcast: response.metadata?.broadcast === true, + }); + + if (!result.ok) { + return { + success: false, + error: result.error || 'Failed to post reply', + }; + } + + return { + success: true, + id: result.ts, + }; + } + + case 'reaction': { + // Add a reaction to a message + const ts = String(response.target); + const emoji = response.metadata?.emoji as string || response.body.replace(/:/g, ''); + + const result = await slackAPI(botToken, 'reactions.add', { + channel: channelId, + timestamp: ts, + name: emoji, + }); + + if (!result.ok && result.error !== 'already_reacted') { + return { + success: false, + error: result.error || 'Failed to add reaction', + }; + } + + return { + success: true, + }; + } + + case 'status': { + // Update bot status/presence (not commonly used) + return { + success: false, + error: 'Status updates not implemented for Slack', + }; + } + + default: + return { + success: false, + error: `Unknown response type: ${response.type}`, + }; + } + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }; + } + }, +}; + +/** + * Helper to format a message with blocks for richer formatting + */ +export function formatSlackBlocks( + text: string, + options?: { + header?: string; + context?: string; + actions?: Array<{ text: string; url: string }>; + } +): Array> { + const blocks: Array> = []; + + if (options?.header) { + blocks.push({ + type: 'header', + text: { + type: 'plain_text', + text: options.header, + emoji: true, + }, + }); + } + + blocks.push({ + type: 'section', + text: { + type: 'mrkdwn', + text, + }, + }); + + if (options?.context) { + blocks.push({ + type: 'context', + elements: [ + { + type: 'mrkdwn', + text: options.context, + }, + ], + }); + } + + if (options?.actions?.length) { + blocks.push({ + type: 'actions', + elements: options.actions.map(action => ({ + type: 'button', + text: { + type: 'plain_text', + text: action.text, + emoji: true, + }, + url: action.url, + })), + }); + } + + return blocks; +} diff --git a/src/cloud/webhooks/router.ts b/src/cloud/webhooks/router.ts new file mode 100644 index 00000000..59340214 --- /dev/null +++ b/src/cloud/webhooks/router.ts @@ -0,0 +1,592 @@ +/** + * Generic Webhook Router + * + * Routes incoming webhooks from any source through the configurable pipeline: + * 1. Verify signature + * 2. Parse payload into normalized events + * 3. Match events against rules + * 4. Execute actions + * 5. Send responses + */ + +import crypto from 'crypto'; +import type { + WebhookConfig, + WebhookSourceConfig, + NormalizedEvent, + WebhookAction, + WebhookResult, + WebhookResponse, +} from './types.js'; +import { getParser } from './parsers/index.js'; +import { getResponder } from './responders/index.js'; +import { findMatchingRules, resolveActionTemplate, defaultRules } from './rules-engine.js'; +import { db } from '../db/index.js'; + +/** + * Default webhook source configurations + */ +export const defaultSources: Record = { + github: { + id: 'github', + name: 'GitHub', + enabled: true, + signature: { + header: 'x-hub-signature-256', + algorithm: 'sha256', + secretEnvVar: 'GITHUB_WEBHOOK_SECRET', + signaturePrefix: 'sha256=', + }, + parser: 'github', + responder: 'github', + }, + linear: { + id: 'linear', + name: 'Linear', + enabled: true, + signature: { + header: 'linear-signature', + algorithm: 'sha256', + secretEnvVar: 'LINEAR_WEBHOOK_SECRET', + }, + parser: 'linear', + responder: 'linear', + }, + slack: { + id: 'slack', + name: 'Slack', + enabled: true, + signature: { + header: 'x-slack-signature', + algorithm: 'slack-v0', + secretEnvVar: 'SLACK_SIGNING_SECRET', + }, + parser: 'slack', + responder: 'slack', + }, +}; + +/** + * Get webhook configuration + * In the future, this could load from database per-workspace + */ +export function getWebhookConfig(): WebhookConfig { + return { + sources: defaultSources, + rules: defaultRules, + }; +} + +/** + * Verify webhook signature + */ +function verifySignature( + payload: string, + signature: string | undefined, + config: WebhookSourceConfig, + headers?: Record +): boolean { + if (config.signature.algorithm === 'none') { + return true; + } + + if (!signature) { + return false; + } + + const secret = process.env[config.signature.secretEnvVar]; + if (!secret) { + console.warn(`[webhook-router] Secret not configured: ${config.signature.secretEnvVar}`); + return false; + } + + try { + let expectedSignature: string; + let actualSignature = signature; + + // Remove prefix if configured + if (config.signature.signaturePrefix && actualSignature.startsWith(config.signature.signaturePrefix)) { + actualSignature = actualSignature.slice(config.signature.signaturePrefix.length); + } + + switch (config.signature.algorithm) { + case 'sha256': + expectedSignature = crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + break; + + case 'sha1': + expectedSignature = crypto + .createHmac('sha1', secret) + .update(payload) + .digest('hex'); + break; + + case 'token': + // Direct token comparison + return actualSignature === secret; + + case 'slack-v0': { + // Slack signature verification + // Format: v0= + const timestamp = headers?.['x-slack-request-timestamp'] as string; + if (!timestamp) return false; + + // Check timestamp is within 5 minutes + const now = Math.floor(Date.now() / 1000); + if (Math.abs(now - parseInt(timestamp, 10)) > 300) { + console.warn('[webhook-router] Slack request timestamp too old'); + return false; + } + + const sigBasestring = `v0:${timestamp}:${payload}`; + expectedSignature = 'v0=' + crypto + .createHmac('sha256', secret) + .update(sigBasestring) + .digest('hex'); + + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expectedSignature) + ); + } + + default: + console.warn(`[webhook-router] Unknown signature algorithm: ${config.signature.algorithm}`); + return false; + } + + return crypto.timingSafeEqual( + Buffer.from(actualSignature), + Buffer.from(expectedSignature) + ); + } catch (error) { + console.error('[webhook-router] Signature verification error:', error); + return false; + } +} + +/** + * Execute an action for an event + */ +async function executeAction( + action: WebhookAction, + event: NormalizedEvent, + responder: ReturnType, + responderConfig?: Record +): Promise<{ success: boolean; error?: string }> { + const resolvedAction = resolveActionTemplate(action, event); + + switch (resolvedAction.type) { + case 'spawn_agent': { + const agentType = resolvedAction.agentType || 'lead'; + const prompt = buildPrompt(resolvedAction.prompt || 'default', event); + + // Find the repository and queue spawn command + const repository = await db.repositories.findByFullName(event.context.name); + if (!repository?.userId) { + return { success: false, error: 'Repository not found or not linked' }; + } + + // Find an available daemon + const daemons = await db.linkedDaemons.findByUserId(repository.userId); + const onlineDaemon = daemons.find(d => d.status === 'online'); + + if (!onlineDaemon) { + // Post a response indicating no daemon available + if (responder && event.item?.number) { + await responder.respond(event, { + type: 'comment', + target: event.item.number, + body: `โš ๏ธ No Agent Relay daemon is available to handle this request. Please ensure you have a linked daemon running.`, + }, responderConfig); + } + return { success: false, error: 'No available daemon' }; + } + + // Post acknowledgment + if (responder && event.item?.number) { + await responder.respond(event, { + type: 'comment', + target: event.item.number, + body: `๐Ÿ‘‹ Routing to **@${agentType}** agent. The agent will respond shortly.`, + }, responderConfig); + } + + // Queue spawn command + const agentName = `${agentType}-${event.id.slice(0, 8)}`; + await db.linkedDaemons.queueMessage(onlineDaemon.id, { + from: { daemonId: 'cloud', daemonName: 'Agent Relay Cloud', agent: 'system' }, + to: '__spawner__', + content: JSON.stringify({ + type: 'spawn_agent', + agentName, + cli: 'claude', + task: prompt, + metadata: { + eventId: event.id, + source: event.source, + eventType: event.type, + repository: event.context.name, + itemNumber: event.item?.number, + }, + }), + metadata: { type: 'spawn_command' }, + timestamp: new Date().toISOString(), + }); + + console.log(`[webhook-router] Queued spawn command for ${agentName}`); + return { success: true }; + } + + case 'message_agent': { + // Send message to existing agent + return { success: false, error: 'message_agent not yet implemented' }; + } + + case 'post_comment': { + if (!responder) { + return { success: false, error: 'No responder available' }; + } + + const body = resolvedAction.config?.body as string || 'Action received.'; + const target = event.item?.number || event.item?.id || ''; + + const result = await responder.respond(event, { + type: 'comment', + target, + body, + }, responderConfig); + + return { success: result.success, error: result.error }; + } + + case 'create_issue': { + return { success: false, error: 'create_issue not yet implemented' }; + } + + case 'custom': { + // Custom action handler + const handler = resolvedAction.config?.handler as ((event: NormalizedEvent) => Promise) | undefined; + if (handler) { + await handler(event); + return { success: true }; + } + return { success: false, error: 'No custom handler defined' }; + } + + default: + return { success: false, error: `Unknown action type: ${resolvedAction.type}` }; + } +} + +/** + * Build a prompt from a template name and event + */ +function buildPrompt(templateName: string, event: NormalizedEvent): string { + const templates: Record string> = { + 'ci-failure': (e) => ` +# CI Failure Fix Task + +A CI check has failed in ${e.context.name}. + +## Failure Details + +**Check Name:** ${e.item?.title || 'Unknown'} +**Branch:** ${e.metadata?.branch || 'unknown'} +**Commit:** ${e.metadata?.commitSha || 'unknown'} + +${e.metadata?.failureSummary ? `**Summary:**\n${e.metadata.failureSummary}` : ''} + +${e.metadata?.annotations ? `## Annotations\n\n${formatAnnotations(e.metadata.annotations as Array>)}` : ''} + +## Your Task + +1. Analyze the failure +2. Fix the issues +3. Push your changes +4. Report back with a summary +`.trim(), + + 'mention': (e) => ` +# Agent Mention Task + +You were mentioned in ${e.source} in ${e.context.name}. + +## Context + +**Item:** ${e.item?.title || 'N/A'} (#${e.item?.number || e.item?.id || 'N/A'}) +**Author:** @${e.actor.name} + +## Message + +${e.item?.body || 'No message content'} + +## Your Task + +Respond helpfully to the mention. If code changes are needed, make them and push. +`.trim(), + + 'issue': (e) => ` +# Issue Assignment + +You've been assigned to work on an issue in ${e.context.name}. + +## Issue Details + +**Title:** ${e.item?.title} +**Priority:** ${e.priority || 'normal'} +**Labels:** ${e.labels.join(', ') || 'none'} + +## Description + +${e.item?.body || 'No description provided.'} + +## Your Task + +1. Analyze the issue +2. Implement a solution +3. Create a PR +`.trim(), + + 'linear-issue': (e) => ` +# Linear Issue + +A new issue was created in ${e.context.name}. + +## Issue Details + +**Identifier:** ${e.metadata?.identifier || 'N/A'} +**Title:** ${e.item?.title} +**Priority:** ${e.priority || 'normal'} +**State:** ${e.item?.state || 'unknown'} + +## Description + +${e.item?.body || 'No description provided.'} + +## Your Task + +Analyze and work on this issue if appropriate. +`.trim(), + + 'slack-request': (e) => ` +# Slack Request + +Someone mentioned you in Slack. + +## Message + +${e.item?.body || 'No message content'} + +## Your Task + +Respond to the request. Use the Slack API to post your response. +`.trim(), + + 'default': (e) => ` +# Webhook Event + +A webhook event was received from ${e.source}. + +## Event Details + +**Type:** ${e.type} +**Context:** ${e.context.name} +**Actor:** ${e.actor.name} + +## Item + +${e.item ? `**${e.item.type}:** ${e.item.title || e.item.id}` : 'No item'} + +## Body + +${e.item?.body || 'No content'} +`.trim(), + }; + + const template = templates[templateName] || templates['default']; + return template(event); +} + +/** + * Format annotations for prompt + */ +function formatAnnotations(annotations: Array>): string { + return annotations + .slice(0, 20) + .map(a => `- ${a.path}:${a.startLine} - ${a.message}`) + .join('\n'); +} + +/** + * Process a webhook from any source + */ +export async function processWebhook( + source: string, + payload: string, + headers: Record, + config?: WebhookConfig +): Promise { + const webhookConfig = config || getWebhookConfig(); + const sourceConfig = webhookConfig.sources[source]; + + if (!sourceConfig) { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: `Unknown webhook source: ${source}`, + }], + }; + } + + if (!sourceConfig.enabled) { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: `Webhook source disabled: ${source}`, + }], + }; + } + + // Verify signature + const signature = headers[sourceConfig.signature.header] as string | undefined; + if (!verifySignature(payload, signature, sourceConfig, headers)) { + console.error(`[webhook-router] Invalid signature for source: ${source}`); + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: 'Invalid signature', + }], + }; + } + + // Parse payload + const parser = getParser(sourceConfig.parser); + if (!parser) { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: `Parser not found: ${sourceConfig.parser}`, + }], + }; + } + + let parsedPayload: unknown; + try { + parsedPayload = JSON.parse(payload); + } catch { + return { + success: false, + eventId: 'unknown', + source, + eventType: 'unknown', + matchedRules: [], + actions: [], + responses: [{ + type: 'error', + success: false, + error: 'Invalid JSON payload', + }], + }; + } + + const events = parser.parse(parsedPayload, headers, sourceConfig.parserConfig); + + if (events.length === 0) { + return { + success: true, + eventId: 'none', + source, + eventType: 'none', + matchedRules: [], + actions: [], + responses: [], + }; + } + + // Get responder + const responder = getResponder(sourceConfig.responder); + + // Process each event + const results: WebhookResult[] = []; + + for (const event of events) { + const matchedRules = findMatchingRules(webhookConfig.rules, event); + const actionResults: WebhookResult['actions'] = []; + const responseResults: WebhookResult['responses'] = []; + + console.log(`[webhook-router] Event ${event.id}: type=${event.type}, matched ${matchedRules.length} rules`); + + for (const rule of matchedRules) { + const result = await executeAction( + rule.action, + event, + responder, + sourceConfig.responderConfig + ); + + actionResults.push({ + ruleId: rule.id, + action: rule.action, + success: result.success, + error: result.error, + }); + } + + results.push({ + success: actionResults.every(a => a.success), + eventId: event.id, + source: event.source, + eventType: event.type, + matchedRules: matchedRules.map(r => r.id), + actions: actionResults, + responses: responseResults, + }); + } + + // Return combined result + if (results.length === 1) { + return results[0]; + } + + return { + success: results.every(r => r.success), + eventId: events[0].id, + source, + eventType: events.map(e => e.type).join(','), + matchedRules: results.flatMap(r => r.matchedRules), + actions: results.flatMap(r => r.actions), + responses: results.flatMap(r => r.responses), + }; +} diff --git a/src/cloud/webhooks/rules-engine.ts b/src/cloud/webhooks/rules-engine.ts new file mode 100644 index 00000000..f2a9f98f --- /dev/null +++ b/src/cloud/webhooks/rules-engine.ts @@ -0,0 +1,291 @@ +/** + * Webhook Rules Engine + * + * Matches normalized events against configured rules and determines actions to take. + */ + +import type { NormalizedEvent, WebhookRule, WebhookAction } from './types.js'; + +/** + * Simple JSONPath-like evaluator for conditions + * Supports: $.field, $.field.subfield, comparisons (==, !=, in, contains) + */ +function evaluateCondition(condition: string, event: NormalizedEvent): boolean { + if (!condition || condition.trim() === '') return true; + + try { + // Parse condition: $.path operator value + const conditionPattern = /^\$\.([a-zA-Z0-9_.]+)\s*(==|!=|in|contains|>|<|>=|<=)\s*(.+)$/; + const match = condition.match(conditionPattern); + + if (!match) { + console.warn(`[rules-engine] Invalid condition format: ${condition}`); + return false; + } + + const [, path, operator, rawValue] = match; + const value = rawValue.trim(); + + // Get the value from the event + const eventValue = getValueByPath(event, path); + + // Parse the comparison value + let compareValue: unknown; + if (value.startsWith('[') && value.endsWith(']')) { + // Array literal + compareValue = JSON.parse(value); + } else if (value.startsWith('"') && value.endsWith('"')) { + // String literal + compareValue = value.slice(1, -1); + } else if (value === 'true') { + compareValue = true; + } else if (value === 'false') { + compareValue = false; + } else if (value === 'null') { + compareValue = null; + } else if (!isNaN(Number(value))) { + compareValue = Number(value); + } else { + // Treat as string + compareValue = value; + } + + switch (operator) { + case '==': + return eventValue === compareValue; + case '!=': + return eventValue !== compareValue; + case 'in': + return Array.isArray(compareValue) && compareValue.includes(eventValue); + case 'contains': + if (Array.isArray(eventValue)) { + return eventValue.includes(compareValue); + } + if (typeof eventValue === 'string' && typeof compareValue === 'string') { + return eventValue.includes(compareValue); + } + return false; + case '>': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue > compareValue; + case '<': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue < compareValue; + case '>=': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue >= compareValue; + case '<=': + return typeof eventValue === 'number' && typeof compareValue === 'number' && eventValue <= compareValue; + default: + return false; + } + } catch (error) { + console.error(`[rules-engine] Error evaluating condition: ${condition}`, error); + return false; + } +} + +/** + * Get a value from an object by dot-separated path + */ +function getValueByPath(obj: unknown, path: string): unknown { + const parts = path.split('.'); + let current: unknown = obj; + + for (const part of parts) { + if (current === null || current === undefined) return undefined; + if (typeof current !== 'object') return undefined; + current = (current as Record)[part]; + } + + return current; +} + +/** + * Check if a rule matches an event + */ +export function matchesRule(rule: WebhookRule, event: NormalizedEvent): boolean { + // Check if rule is enabled + if (!rule.enabled) return false; + + // Check source match + if (rule.source !== '*' && rule.source !== event.source) { + return false; + } + + // Check event type match + if (rule.eventType !== '*' && rule.eventType !== event.type) { + // Support wildcard prefix matching (e.g., 'ci_*' matches 'ci_failure') + if (rule.eventType.endsWith('*')) { + const prefix = rule.eventType.slice(0, -1); + if (!event.type.startsWith(prefix)) { + return false; + } + } else { + return false; + } + } + + // Check condition if present + if (rule.condition && !evaluateCondition(rule.condition, event)) { + return false; + } + + return true; +} + +/** + * Find all matching rules for an event, sorted by priority + */ +export function findMatchingRules(rules: WebhookRule[], event: NormalizedEvent): WebhookRule[] { + return rules + .filter(rule => matchesRule(rule, event)) + .sort((a, b) => a.priority - b.priority); +} + +/** + * Resolve template variables in action configuration + * Supports: ${event.field}, ${event.field.subfield} + */ +export function resolveActionTemplate(action: WebhookAction, event: NormalizedEvent): WebhookAction { + const resolvedAction = { ...action }; + + // Resolve agentType if it references an event field + if (resolvedAction.agentType?.startsWith('$.')) { + const path = resolvedAction.agentType.slice(2); + const value = getValueByPath(event, path); + if (typeof value === 'string') { + resolvedAction.agentType = value; + } else if (Array.isArray(value) && value.length > 0) { + // Use first mentioned agent + resolvedAction.agentType = String(value[0]); + } + } + + // Resolve prompt template references + if (resolvedAction.prompt?.startsWith('${') && resolvedAction.prompt?.endsWith('}')) { + const path = resolvedAction.prompt.slice(2, -1); + const value = getValueByPath(event, path); + if (typeof value === 'string') { + resolvedAction.prompt = value; + } + } + + return resolvedAction; +} + +/** + * Default rules for common patterns + */ +export const defaultRules: WebhookRule[] = [ + // CI Failures + { + id: 'ci-failure', + name: 'CI Failure Handler', + enabled: true, + source: 'github', + eventType: 'ci_failure', + action: { + type: 'spawn_agent', + agentType: 'ci-fix', + prompt: 'ci-failure', + }, + priority: 10, + }, + // GitHub Mentions + { + id: 'github-mention', + name: 'GitHub Mention Handler', + enabled: true, + source: 'github', + eventType: 'mention', + action: { + type: 'spawn_agent', + agentType: '$.mentions', // Use first mentioned agent + prompt: 'mention', + }, + priority: 20, + }, + // GitHub Issues + { + id: 'github-issue', + name: 'GitHub Issue Handler', + enabled: true, + source: 'github', + eventType: 'issue_created', + condition: '$.priority in ["critical", "high"]', + action: { + type: 'spawn_agent', + agentType: 'developer', + prompt: 'issue', + }, + priority: 30, + }, + // Linear Issues + { + id: 'linear-issue', + name: 'Linear Issue Handler', + enabled: true, + source: 'linear', + eventType: 'issue_created', + action: { + type: 'spawn_agent', + agentType: 'developer', + prompt: 'linear-issue', + }, + priority: 20, + }, + // Linear Mentions + { + id: 'linear-mention', + name: 'Linear Mention Handler', + enabled: true, + source: 'linear', + eventType: 'mention', + action: { + type: 'spawn_agent', + agentType: '$.mentions', + prompt: 'mention', + }, + priority: 20, + }, + // Slack App Mentions + { + id: 'slack-mention', + name: 'Slack App Mention Handler', + enabled: true, + source: 'slack', + eventType: 'mention', + action: { + type: 'spawn_agent', + agentType: '$.mentions', + prompt: 'slack-request', + }, + priority: 20, + }, + // Linear Issue Assignments (native integration) + { + id: 'linear-assignment', + name: 'Linear Issue Assignment Handler', + enabled: true, + source: 'linear', + eventType: 'issue_assigned', + action: { + type: 'spawn_agent', + agentType: '$.mentions', // Use the assigned agent type + prompt: 'linear-issue', + }, + priority: 15, + }, + // GitHub Issue Assignments + { + id: 'github-assignment', + name: 'GitHub Issue Assignment Handler', + enabled: true, + source: 'github', + eventType: 'issue_assigned', + action: { + type: 'spawn_agent', + agentType: '$.mentions', + prompt: 'issue', + }, + priority: 15, + }, +]; diff --git a/src/cloud/webhooks/types.ts b/src/cloud/webhooks/types.ts new file mode 100644 index 00000000..2f78f339 --- /dev/null +++ b/src/cloud/webhooks/types.ts @@ -0,0 +1,198 @@ +/** + * Generic Webhook System - Type Definitions + * + * Defines the core types for a configurable webhook system + * that can handle events from any source (GitHub, GitLab, Linear, Slack, etc.) + */ + +/** + * Normalized event format that all parsers produce + */ +export interface NormalizedEvent { + /** Unique event ID */ + id: string; + /** Source system (github, gitlab, linear, slack, etc.) */ + source: string; + /** Event type (e.g., 'ci_failure', 'mention', 'issue_created') */ + type: string; + /** Timestamp of the event */ + timestamp: Date; + /** Actor who triggered the event */ + actor: { + id: string; + name: string; + email?: string; + }; + /** Repository or project context */ + context: { + /** Full name (e.g., 'owner/repo' or project ID) */ + name: string; + /** URL to the repository/project */ + url?: string; + }; + /** The item this event relates to (issue, PR, ticket, message) */ + item?: { + type: 'issue' | 'pull_request' | 'ticket' | 'message' | 'comment' | 'check'; + id: string | number; + number?: number; + title?: string; + body?: string; + url?: string; + state?: string; + }; + /** Mentioned agents or users */ + mentions: string[]; + /** Labels, tags, or categories */ + labels: string[]; + /** Priority level if applicable */ + priority?: 'critical' | 'high' | 'medium' | 'low'; + /** Additional source-specific data */ + metadata: Record; + /** Raw payload for debugging */ + rawPayload: unknown; +} + +/** + * Action to take in response to an event + */ +export interface WebhookAction { + type: 'spawn_agent' | 'message_agent' | 'post_comment' | 'create_issue' | 'custom'; + /** Agent type or name to spawn/message */ + agentType?: string; + /** Prompt template name or inline prompt */ + prompt?: string; + /** Additional action-specific config */ + config?: Record; +} + +/** + * Signature verification configuration + */ +export interface SignatureConfig { + /** Header containing the signature */ + header: string; + /** Algorithm to use for verification */ + algorithm: 'sha256' | 'sha1' | 'token' | 'slack-v0' | 'none'; + /** Environment variable containing the secret */ + secretEnvVar: string; + /** Optional prefix to strip from signature (e.g., 'sha256=') */ + signaturePrefix?: string; +} + +/** + * Webhook source configuration + */ +export interface WebhookSourceConfig { + /** Source identifier */ + id: string; + /** Display name */ + name: string; + /** Whether this source is enabled */ + enabled: boolean; + /** Signature verification config */ + signature: SignatureConfig; + /** Parser to use for this source */ + parser: string; + /** Responder to use for sending responses */ + responder: string; + /** Parser-specific configuration */ + parserConfig?: Record; + /** Responder-specific configuration */ + responderConfig?: Record; +} + +/** + * Event routing rule + */ +export interface WebhookRule { + /** Rule identifier */ + id: string; + /** Display name */ + name: string; + /** Whether this rule is enabled */ + enabled: boolean; + /** Source to match (* for any) */ + source: string; + /** Event type to match (* for any) */ + eventType: string; + /** JSONPath condition (optional) */ + condition?: string; + /** Action to take when matched */ + action: WebhookAction; + /** Priority (lower = higher priority) */ + priority: number; +} + +/** + * Complete webhook configuration + */ +export interface WebhookConfig { + sources: Record; + rules: WebhookRule[]; +} + +/** + * Parser interface - transforms source-specific payloads to normalized events + */ +export interface WebhookParser { + /** Parser identifier */ + id: string; + /** Parse raw payload into normalized event(s) */ + parse( + payload: unknown, + headers: Record, + config?: Record + ): NormalizedEvent[]; +} + +/** + * Response to send back to the source system + */ +export interface WebhookResponse { + /** Type of response */ + type: 'comment' | 'message' | 'reaction' | 'status'; + /** Target (issue number, channel ID, etc.) */ + target: string | number; + /** Response body/content */ + body: string; + /** Additional response metadata */ + metadata?: Record; +} + +/** + * Responder interface - sends responses back to source systems + */ +export interface WebhookResponder { + /** Responder identifier */ + id: string; + /** Send a response to the source system */ + respond( + event: NormalizedEvent, + response: WebhookResponse, + config?: Record + ): Promise<{ success: boolean; id?: string; url?: string; error?: string }>; +} + +/** + * Result of processing a webhook + */ +export interface WebhookResult { + success: boolean; + eventId: string; + source: string; + eventType: string; + matchedRules: string[]; + actions: Array<{ + ruleId: string; + action: WebhookAction; + success: boolean; + error?: string; + }>; + responses: Array<{ + type: string; + success: boolean; + id?: string; + url?: string; + error?: string; + }>; +} From 852ade9c2b4060e8a47e14de895dab194e299abc Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 12:39:31 +0000 Subject: [PATCH 039/103] Add comprehensive webhook tests and fix parser/engine bugs Tests (94 passing): - GitHub parser: CI failures, mentions, issues, PRs, review comments - Linear parser: Issues, comments, assignments, agent detection - Slack parser: App mentions, messages, reactions, channels - Rules engine: Conditions, matching, priority sorting, templates - Router: Signature verification, event processing, Slack URL verification Bug fixes: - Rules engine: Reorder regex to match >= and <= before > and < - Rules engine: Handle undefined as equivalent to null in comparisons - Linear parser: Reorder agent patterns to match specific before generic Also adds task breakdown for future webhook integration iterations. --- docs/tasks/webhook-integrations.tasks.md | 184 ++++++++++ src/cloud/webhooks/parsers/github.test.ts | 412 ++++++++++++++++++++++ src/cloud/webhooks/parsers/linear.test.ts | 405 +++++++++++++++++++++ src/cloud/webhooks/parsers/linear.ts | 5 +- src/cloud/webhooks/parsers/slack.test.ts | 383 ++++++++++++++++++++ src/cloud/webhooks/router.test.ts | 391 ++++++++++++++++++++ src/cloud/webhooks/rules-engine.test.ts | 346 ++++++++++++++++++ src/cloud/webhooks/rules-engine.ts | 7 +- 8 files changed, 2130 insertions(+), 3 deletions(-) create mode 100644 docs/tasks/webhook-integrations.tasks.md create mode 100644 src/cloud/webhooks/parsers/github.test.ts create mode 100644 src/cloud/webhooks/parsers/linear.test.ts create mode 100644 src/cloud/webhooks/parsers/slack.test.ts create mode 100644 src/cloud/webhooks/router.test.ts create mode 100644 src/cloud/webhooks/rules-engine.test.ts diff --git a/docs/tasks/webhook-integrations.tasks.md b/docs/tasks/webhook-integrations.tasks.md new file mode 100644 index 00000000..ff6177fc --- /dev/null +++ b/docs/tasks/webhook-integrations.tasks.md @@ -0,0 +1,184 @@ +# Webhook Integrations - Task Breakdown + +Tasks for future iterations of the webhook and integrations system. +Convert to beads tasks with: `bd import docs/tasks/webhook-integrations.tasks.md` + +## Phase 1: Linear Full Integration [priority: high] + +### linear-outbound-comments +- [ ] Create LinearIntegration service class +- [ ] Implement createComment() with Linear GraphQL API +- [ ] Add Linear API key management to workspace settings +- [ ] Add encryption for stored API keys + +Dependencies: none +Estimate: 2 story points + +### linear-outbound-state +- [ ] Implement setIssueState() for state transitions +- [ ] Implement getAvailableStates() to fetch team states +- [ ] Add state ID caching with TTL + +Dependencies: linear-outbound-comments +Estimate: 1 story point + +### linear-outbound-issues +- [ ] Implement createIssue() with full CreateIssueInput +- [ ] Implement updateIssue() for editing +- [ ] Implement assignIssue() for assignment changes +- [ ] Add label operations (add/remove) + +Dependencies: linear-outbound-state +Estimate: 3 story points + +### linear-webhook-state-change +- [ ] Parse issue state change webhooks +- [ ] Add `issue_state_changed` event type +- [ ] Create rule for auto-responding to state changes + +Dependencies: none +Estimate: 1 story point + +### linear-webhook-due-dates +- [ ] Parse due date approaching events +- [ ] Add `issue_due_soon` event type with configurable threshold +- [ ] Create reminder rule for approaching due dates + +Dependencies: linear-webhook-state-change +Estimate: 1 story point + +## Phase 2: Slack Enhanced [priority: high] + +### slack-slash-commands +- [ ] Create slash command handler endpoint +- [ ] Parse slash command payloads +- [ ] Add slash_command event type +- [ ] Create agent spawning from slash commands + +Dependencies: none +Estimate: 2 story points + +### slack-interactive-components +- [ ] Handle button click callbacks +- [ ] Handle modal submission callbacks +- [ ] Add interactive_message event type +- [ ] Implement openModal() and updateModal() + +Dependencies: slack-slash-commands +Estimate: 3 story points + +### slack-rich-messages +- [ ] Implement postBlocks() with Block Kit +- [ ] Add common block templates (code, error, success) +- [ ] Add file upload support +- [ ] Add scheduled message support + +Dependencies: none +Estimate: 2 story points + +## Phase 3: GitHub Enhanced [priority: high] + +### github-pr-management +- [ ] Parse PR opened/updated webhooks +- [ ] Add pr_opened, pr_updated event types +- [ ] Implement PR review request parsing +- [ ] Add createPR() outbound action + +Dependencies: none +Estimate: 3 story points + +### github-check-runs +- [ ] Implement createCheckRun() for CI status +- [ ] Implement updateCheckRun() for progress +- [ ] Add annotations support for inline errors +- [ ] Parse deployment status webhooks + +Dependencies: github-pr-management +Estimate: 2 story points + +### github-issue-management +- [ ] Implement createIssue() +- [ ] Implement addLabels() and removeLabels() +- [ ] Implement assignUsers() +- [ ] Add issue linking support + +Dependencies: none +Estimate: 2 story points + +## Phase 4: Agent Tools [priority: medium] + +### agent-integration-tools +- [ ] Create integration tools accessible to agents +- [ ] Add LinearTool for agent actions +- [ ] Add SlackTool for agent messages +- [ ] Add GitHubTool for agent operations + +Dependencies: linear-outbound-issues, slack-rich-messages, github-issue-management +Estimate: 4 story points + +### agent-permissions +- [ ] Implement AgentIntegrationPermissions type +- [ ] Add permission checking before actions +- [ ] Create permission UI in spawn modal +- [ ] Add audit logging for all external calls + +Dependencies: agent-integration-tools +Estimate: 2 story points + +## Phase 5: Additional Integrations [priority: low] + +### jira-integration +- [ ] Create Jira webhook parser +- [ ] Implement JiraIntegration service +- [ ] Add Jira responder +- [ ] Add workspace settings for Jira + +Dependencies: agent-integration-tools +Estimate: 4 story points + +### gitlab-integration +- [ ] Create GitLab webhook parser +- [ ] Implement GitLabIntegration service +- [ ] Add GitLab responder +- [ ] Map GitLab events to normalized format + +Dependencies: agent-integration-tools +Estimate: 3 story points + +### discord-integration +- [ ] Create Discord webhook parser +- [ ] Implement DiscordIntegration service +- [ ] Add Discord responder +- [ ] Handle Discord-specific message formatting + +Dependencies: agent-integration-tools +Estimate: 3 story points + +## Testing & Infrastructure + +### webhook-load-testing +- [ ] Create load test suite for webhook endpoint +- [ ] Measure p50/p95/p99 latencies +- [ ] Test concurrent webhook handling +- [ ] Add rate limiting if needed + +Dependencies: none +Estimate: 2 story points + +### integration-mocks +- [ ] Create mock Linear API server for tests +- [ ] Create mock Slack API server for tests +- [ ] Create mock GitHub API server for tests +- [ ] Add E2E test suite with mocks + +Dependencies: none +Estimate: 3 story points + +### sandbox-testing +- [ ] Set up Linear sandbox workspace +- [ ] Set up Slack test workspace +- [ ] Set up GitHub test repository +- [ ] Create E2E test suite with real APIs + +Dependencies: integration-mocks +Estimate: 2 story points diff --git a/src/cloud/webhooks/parsers/github.test.ts b/src/cloud/webhooks/parsers/github.test.ts new file mode 100644 index 00000000..cb103787 --- /dev/null +++ b/src/cloud/webhooks/parsers/github.test.ts @@ -0,0 +1,412 @@ +/** + * GitHub Parser Tests + */ + +import { describe, it, expect } from 'vitest'; +import { githubParser } from './github.js'; + +describe('githubParser', () => { + describe('check_run events', () => { + it('should parse CI failure event', () => { + const payload = { + action: 'completed', + check_run: { + id: 12345, + name: 'build', + conclusion: 'failure', + html_url: 'https://github.com/owner/repo/runs/12345', + pull_requests: [ + { + number: 42, + head: { ref: 'feature-branch', sha: 'abc123' }, + }, + ], + output: { + title: 'Build failed', + summary: 'TypeScript compilation errors', + text: 'Error details here', + annotations: [ + { + path: 'src/index.ts', + start_line: 10, + end_line: 10, + annotation_level: 'failure', + message: "Cannot find name 'foo'", + }, + ], + }, + }, + repository: { + full_name: 'owner/repo', + html_url: 'https://github.com/owner/repo', + }, + sender: { + id: 123, + login: 'github-actions', + }, + }; + + const headers = { + 'x-github-event': 'check_run', + 'x-github-delivery': 'delivery-123', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('ci_failure'); + expect(events[0].source).toBe('github'); + expect(events[0].context.name).toBe('owner/repo'); + expect(events[0].item?.type).toBe('check'); + expect(events[0].item?.number).toBe(42); + expect(events[0].metadata?.checkName).toBe('build'); + expect(events[0].metadata?.annotations).toHaveLength(1); + }); + + it('should not create CI failure event for successful check run', () => { + const payload = { + action: 'completed', + check_run: { + id: 12345, + name: 'build', + conclusion: 'success', + pull_requests: [{ number: 42, head: { ref: 'main', sha: 'abc' } }], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'check_run', + 'x-github-delivery': 'delivery-123', + }; + + const events = githubParser.parse(payload, headers); + + // Should not create a ci_failure event (may create generic event or none) + const ciFailureEvents = events.filter(e => e.type === 'ci_failure'); + expect(ciFailureEvents).toHaveLength(0); + }); + + it('should not create CI failure event for check run without PR', () => { + const payload = { + action: 'completed', + check_run: { + id: 12345, + name: 'build', + conclusion: 'failure', + pull_requests: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'check_run', + 'x-github-delivery': 'delivery-123', + }; + + const events = githubParser.parse(payload, headers); + + // Should not create a ci_failure event (may create generic event or none) + const ciFailureEvents = events.filter(e => e.type === 'ci_failure'); + expect(ciFailureEvents).toHaveLength(0); + }); + }); + + describe('issue_comment events', () => { + it('should parse mention in issue comment', () => { + const payload = { + action: 'created', + issue: { + number: 42, + title: 'Bug report', + html_url: 'https://github.com/owner/repo/issues/42', + }, + comment: { + id: 789, + body: '@developer please fix this bug', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { + full_name: 'owner/repo', + html_url: 'https://github.com/owner/repo', + }, + sender: { + id: 123, + login: 'reporter', + }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].item?.number).toBe(42); + expect(events[0].item?.body).toBe('@developer please fix this bug'); + }); + + it('should extract multiple mentions', () => { + const payload = { + action: 'created', + issue: { number: 42, title: 'Issue' }, + comment: { + id: 789, + body: '@lead please assign this to @developer or @reviewer', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].mentions).toContain('reviewer'); + }); + + it('should not create mention event if no mentions', () => { + const payload = { + action: 'created', + issue: { number: 42, title: 'Issue' }, + comment: { + id: 789, + body: 'This is a regular comment', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(0); + }); + + it('should identify PR comments vs issue comments', () => { + const payload = { + action: 'created', + issue: { + number: 42, + title: 'Fix bug', + pull_request: { url: 'https://api.github.com/repos/owner/repo/pulls/42' }, + }, + comment: { + id: 789, + body: '@reviewer please check this', + html_url: 'https://github.com/owner/repo/pull/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'developer' }, + }; + + const headers = { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'delivery-456', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].item?.type).toBe('pull_request'); + expect(events[0].metadata?.isPR).toBe(true); + }); + }); + + describe('issues events', () => { + it('should parse issue created event', () => { + const payload = { + action: 'opened', + issue: { + id: 123, + number: 42, + title: 'Critical bug in production', + body: 'The app crashes when users try to login', + html_url: 'https://github.com/owner/repo/issues/42', + state: 'open', + labels: [ + { name: 'bug' }, + { name: 'critical' }, + ], + assignees: [], + }, + repository: { + full_name: 'owner/repo', + html_url: 'https://github.com/owner/repo', + }, + sender: { + id: 123, + login: 'reporter', + }, + }; + + const headers = { + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-789', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_created'); + expect(events[0].item?.title).toBe('Critical bug in production'); + expect(events[0].labels).toContain('bug'); + expect(events[0].labels).toContain('critical'); + expect(events[0].priority).toBe('critical'); + }); + + it('should extract mentions from issue body', () => { + const payload = { + action: 'opened', + issue: { + id: 123, + number: 42, + title: 'Feature request', + body: 'Hey @lead, can we add this feature? cc @developer', + html_url: 'https://github.com/owner/repo/issues/42', + state: 'open', + labels: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-789', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + }); + + it('should map priority labels correctly', () => { + const testCases = [ + { labels: [{ name: 'p0' }], expected: 'critical' }, + { labels: [{ name: 'p1' }], expected: 'high' }, + { labels: [{ name: 'high' }], expected: 'high' }, + { labels: [{ name: 'p2' }], expected: 'medium' }, + { labels: [{ name: 'medium' }], expected: 'medium' }, + { labels: [{ name: 'p3' }], expected: 'low' }, + { labels: [{ name: 'low' }], expected: 'low' }, + { labels: [{ name: 'enhancement' }], expected: undefined }, + ]; + + for (const { labels, expected } of testCases) { + const payload = { + action: 'opened', + issue: { + id: 123, + number: 42, + title: 'Test', + body: '', + html_url: 'https://github.com/owner/repo/issues/42', + state: 'open', + labels, + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + const headers = { + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-789', + }; + + const events = githubParser.parse(payload, headers); + expect(events[0].priority).toBe(expected); + } + }); + }); + + describe('pull_request_review_comment events', () => { + it('should parse review comment with mention', () => { + const payload = { + action: 'created', + pull_request: { + number: 42, + title: 'Add feature', + }, + comment: { + id: 789, + body: '@developer this needs to be refactored', + html_url: 'https://github.com/owner/repo/pull/42#discussion_r789', + path: 'src/index.ts', + line: 25, + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'reviewer' }, + }; + + const headers = { + 'x-github-event': 'pull_request_review_comment', + 'x-github-delivery': 'delivery-abc', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].metadata?.filePath).toBe('src/index.ts'); + expect(events[0].metadata?.line).toBe(25); + expect(events[0].metadata?.isReviewComment).toBe(true); + }); + }); + + describe('pull_request events', () => { + it('should parse PR opened event', () => { + const payload = { + action: 'opened', + pull_request: { + id: 123, + number: 42, + title: 'Add new feature', + body: 'This PR adds the requested feature', + html_url: 'https://github.com/owner/repo/pull/42', + state: 'open', + draft: false, + head: { ref: 'feature-branch' }, + base: { ref: 'main' }, + labels: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'developer' }, + }; + + const headers = { + 'x-github-event': 'pull_request', + 'x-github-delivery': 'delivery-def', + }; + + const events = githubParser.parse(payload, headers); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('pr_opened'); + expect(events[0].item?.title).toBe('Add new feature'); + expect(events[0].metadata?.head).toBe('feature-branch'); + expect(events[0].metadata?.base).toBe('main'); + }); + }); +}); diff --git a/src/cloud/webhooks/parsers/linear.test.ts b/src/cloud/webhooks/parsers/linear.test.ts new file mode 100644 index 00000000..21a02028 --- /dev/null +++ b/src/cloud/webhooks/parsers/linear.test.ts @@ -0,0 +1,405 @@ +/** + * Linear Parser Tests + */ + +import { describe, it, expect } from 'vitest'; +import { linearParser } from './linear.js'; + +describe('linearParser', () => { + describe('Issue events', () => { + it('should parse issue created event', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-123', + createdAt: '2024-01-15T10:00:00Z', + data: { + id: 'issue-123', + number: 42, + title: 'Implement new feature', + description: 'We need to add a new dashboard component', + url: 'https://linear.app/team/issue/ENG-42', + identifier: 'ENG-42', + priority: 2, // High + estimate: 3, + dueDate: '2024-01-30', + state: { name: 'Todo' }, + labels: [ + { name: 'feature' }, + { name: 'frontend' }, + ], + assignee: { + id: 'user-1', + name: 'John Developer', + email: 'john@example.com', + }, + creator: { + id: 'user-2', + name: 'Jane PM', + email: 'jane@example.com', + }, + team: { + key: 'ENG', + name: 'Engineering', + }, + cycle: { + name: 'Sprint 5', + }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_created'); + expect(events[0].source).toBe('linear'); + expect(events[0].item?.type).toBe('ticket'); + expect(events[0].item?.title).toBe('Implement new feature'); + expect(events[0].item?.number).toBe(42); + expect(events[0].priority).toBe('high'); + expect(events[0].labels).toContain('feature'); + expect(events[0].labels).toContain('frontend'); + expect(events[0].metadata?.identifier).toBe('ENG-42'); + expect(events[0].metadata?.assignee).toBe('John Developer'); + expect(events[0].actor.name).toBe('Jane PM'); + expect(events[0].context.name).toBe('ENG'); + }); + + it('should map Linear priority correctly', () => { + const testCases = [ + { priority: 1, expected: 'critical' }, + { priority: 2, expected: 'high' }, + { priority: 3, expected: 'medium' }, + { priority: 4, expected: 'low' }, + { priority: 0, expected: undefined }, + { priority: undefined, expected: undefined }, + ]; + + for (const { priority, expected } of testCases) { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-123', + data: { + id: 'issue-123', + title: 'Test', + priority, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + expect(events[0].priority).toBe(expected); + } + }); + + it('should detect agent assignment', () => { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-456', + createdAt: '2024-01-15T11:00:00Z', + updatedFrom: { + assigneeId: null, // Was unassigned + }, + data: { + id: 'issue-123', + number: 42, + title: 'Fix authentication bug', + description: 'Users cannot log in', + url: 'https://linear.app/team/issue/ENG-42', + identifier: 'ENG-42', + state: { name: 'In Progress' }, + labels: [], + assignee: { + id: 'agent-developer-1', + name: 'Developer Agent', + email: 'developer@agents.local', + }, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_assigned'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].metadata?.action).toBe('assigned'); + }); + + it('should detect various agent name patterns', () => { + const agentNames = [ + { name: 'Lead Agent', expectedAgent: 'lead' }, + { name: 'Developer Bot', expectedAgent: 'developer' }, + { name: 'Code Reviewer', expectedAgent: 'reviewer' }, + { name: 'CI-Fix Agent', expectedAgent: 'ci-fix' }, + { name: 'Test Bot', expectedAgent: 'test' }, + { name: 'Docs Agent', expectedAgent: 'docs' }, + { name: 'Refactor Bot', expectedAgent: 'refactor' }, + { name: 'Debugger', expectedAgent: 'debugger' }, + ]; + + for (const { name, expectedAgent } of agentNames) { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-456', + updatedFrom: { assigneeId: null }, + data: { + id: 'issue-123', + title: 'Test issue', + state: { name: 'Todo' }, + labels: [], + assignee: { id: 'agent-1', name }, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + expect(events[0].type).toBe('issue_assigned'); + expect(events[0].mentions).toContain(expectedAgent); + } + }); + + it('should not treat regular user assignment as agent assignment', () => { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-456', + updatedFrom: { assigneeId: null }, + data: { + id: 'issue-123', + title: 'Test issue', + state: { name: 'Todo' }, + labels: [], + assignee: { id: 'user-1', name: 'John Smith' }, + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + expect(events[0].type).toBe('issue_updated'); + expect(events[0].type).not.toBe('issue_assigned'); + }); + + it('should parse regular issue update', () => { + const payload = { + action: 'update', + type: 'Issue', + webhookId: 'webhook-789', + updatedFrom: { stateId: 'state-1' }, + data: { + id: 'issue-123', + title: 'Test issue', + state: { name: 'In Progress' }, + labels: [], + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('issue_updated'); + }); + + it('should extract mentions from issue description', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-123', + data: { + id: 'issue-123', + title: 'Review request', + description: 'Hey @lead, please review this. cc @developer', + state: { name: 'Todo' }, + labels: [], + team: { key: 'ENG' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + }); + }); + + describe('Comment events', () => { + it('should parse comment created event', () => { + const payload = { + action: 'create', + type: 'Comment', + webhookId: 'webhook-comment-123', + createdAt: '2024-01-15T12:00:00Z', + data: { + id: 'comment-1', + body: 'I found the root cause of this issue', + url: 'https://linear.app/team/issue/ENG-42#comment-1', + issue: { + id: 'issue-123', + number: 42, + title: 'Bug report', + identifier: 'ENG-42', + }, + user: { + id: 'user-1', + name: 'Developer', + email: 'dev@example.com', + }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('comment_created'); + expect(events[0].item?.type).toBe('comment'); + expect(events[0].item?.body).toBe('I found the root cause of this issue'); + expect(events[0].metadata?.issueIdentifier).toBe('ENG-42'); + }); + + it('should parse comment with mentions', () => { + const payload = { + action: 'create', + type: 'Comment', + webhookId: 'webhook-comment-456', + data: { + id: 'comment-2', + body: '@reviewer please take a look at this fix', + url: 'https://linear.app/team/issue/ENG-42#comment-2', + issue: { + id: 'issue-123', + number: 42, + title: 'Bug report', + identifier: 'ENG-42', + }, + user: { id: 'user-1', name: 'Developer' }, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('reviewer'); + }); + + it('should not create event for comment without issue context', () => { + const payload = { + action: 'create', + type: 'Comment', + webhookId: 'webhook-comment-789', + data: { + id: 'comment-3', + body: 'Orphan comment', + // No issue field + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + }); + + describe('Project events', () => { + it('should parse project created event', () => { + const payload = { + action: 'create', + type: 'Project', + webhookId: 'webhook-project-123', + data: { + id: 'project-1', + name: 'Q1 Roadmap', + description: 'Features for Q1 2024', + url: 'https://linear.app/team/project/q1-roadmap', + targetDate: '2024-03-31', + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('project_created'); + expect(events[0].context.name).toBe('Q1 Roadmap'); + }); + }); + + describe('IssueLabel events', () => { + it('should parse label change event', () => { + const payload = { + action: 'create', + type: 'IssueLabel', + webhookId: 'webhook-label-123', + data: { + id: 'label-1', + name: 'bug', + color: '#ff0000', + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('label_change'); + expect(events[0].labels).toContain('bug'); + }); + }); + + describe('Unknown events', () => { + it('should create generic event for unknown types', () => { + const payload = { + action: 'create', + type: 'Workflow', + webhookId: 'webhook-unknown-123', + data: { + id: 'workflow-1', + name: 'Custom workflow', + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('linear.workflow.create'); + }); + }); + + describe('Edge cases', () => { + it('should handle missing data gracefully', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-edge-1', + // Missing data field + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should handle null/undefined fields', () => { + const payload = { + action: 'create', + type: 'Issue', + webhookId: 'webhook-edge-2', + data: { + id: 'issue-123', + title: null, + description: undefined, + state: null, + labels: null, + team: null, + }, + }; + + const events = linearParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].item?.title).toBe(''); + }); + }); +}); diff --git a/src/cloud/webhooks/parsers/linear.ts b/src/cloud/webhooks/parsers/linear.ts index 03b8908d..eb8cf38a 100644 --- a/src/cloud/webhooks/parsers/linear.ts +++ b/src/cloud/webhooks/parsers/linear.ts @@ -136,11 +136,12 @@ export const linearParser: WebhookParser = { // Check if assigned to an agent (name matches agent pattern) const assigneeName = String(assignee?.name || '').toLowerCase(); - const agentPatterns = ['agent', 'bot', 'lead', 'developer', 'reviewer', 'debugger', 'ci-fix', 'test', 'docs', 'refactor']; + // Order matters: more specific patterns first, generic 'agent' and 'bot' last + const agentPatterns = ['developer', 'reviewer', 'debugger', 'ci-fix', 'refactor', 'lead', 'test', 'docs', 'agent', 'bot']; const isAgentAssignment = wasAssigned && agentPatterns.some(p => assigneeName.includes(p)); if (isAgentAssignment) { - // Extract the agent type from the assignee name + // Extract the agent type from the assignee name (finds first/most-specific match) const matchedAgent = agentPatterns.find(p => assigneeName.includes(p)) || 'developer'; events.push({ diff --git a/src/cloud/webhooks/parsers/slack.test.ts b/src/cloud/webhooks/parsers/slack.test.ts new file mode 100644 index 00000000..9bdcb94f --- /dev/null +++ b/src/cloud/webhooks/parsers/slack.test.ts @@ -0,0 +1,383 @@ +/** + * Slack Parser Tests + */ + +import { describe, it, expect } from 'vitest'; +import { slackParser } from './slack.js'; + +describe('slackParser', () => { + describe('URL verification', () => { + it('should return empty array for url_verification', () => { + const payload = { + type: 'url_verification', + challenge: 'test-challenge-token', + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + }); + + describe('Non-event payloads', () => { + it('should return empty array for non-event_callback type', () => { + const payload = { + type: 'interactive_message', + callback_id: 'some-callback', + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + }); + + describe('app_mention events', () => { + it('should parse app mention event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12345', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> can you help me with this?', + ts: '1705320000.000100', + channel: 'C12345', + channel_type: 'channel', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].source).toBe('slack'); + expect(events[0].actor.id).toBe('U12345'); + expect(events[0].item?.type).toBe('message'); + // Should default to 'lead' when no specific agent mentioned + expect(events[0].mentions).toContain('lead'); + expect(events[0].metadata?.channelId).toBe('C12345'); + }); + + it('should extract agent mentions from message', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12346', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> @developer please help with this bug', + ts: '1705320000.000200', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('developer'); + }); + + it('should clean Slack user mentions from text', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12347', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> hey <@U67890|john> check this', + ts: '1705320000.000300', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + // Body should have cleaned text + expect(events[0].item?.body).toContain('@john'); + expect(events[0].item?.body).not.toContain('<@'); + }); + + it('should capture thread context', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12348', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT_ID> replying in thread', + ts: '1705320000.000400', + thread_ts: '1705310000.000100', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events[0].metadata?.threadTs).toBe('1705310000.000100'); + }); + }); + + describe('message events', () => { + it('should parse message with agent mention', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12349', + event_time: 1705320000, + event: { + type: 'message', + user: 'U12345', + text: '@reviewer can you check this PR?', + ts: '1705320000.000500', + channel: 'C12345', + channel_type: 'channel', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('mention'); + expect(events[0].mentions).toContain('reviewer'); + }); + + it('should not create event for regular message without agent mention', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12350', + event_time: 1705320000, + event: { + type: 'message', + user: 'U12345', + text: 'Just a regular message', + ts: '1705320000.000600', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should ignore bot messages', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12351', + event_time: 1705320000, + event: { + type: 'message', + subtype: 'bot_message', + user: 'U_BOT', + text: '@developer check this', + ts: '1705320000.000700', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should ignore message_changed subtypes', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12352', + event_time: 1705320000, + event: { + type: 'message', + subtype: 'message_changed', + user: 'U12345', + text: '@developer check this', + ts: '1705320000.000800', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(0); + }); + + it('should allow thread_broadcast subtype', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12353', + event_time: 1705320000, + event: { + type: 'message', + subtype: 'thread_broadcast', + user: 'U12345', + text: '@lead important update', + ts: '1705320000.000900', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].mentions).toContain('lead'); + }); + }); + + describe('reaction_added events', () => { + it('should parse reaction added event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12354', + event_time: 1705320000, + event: { + type: 'reaction_added', + user: 'U12345', + reaction: 'thumbsup', + item: { + type: 'message', + channel: 'C12345', + ts: '1705310000.000100', + }, + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('reaction_added'); + expect(events[0].labels).toContain('thumbsup'); + expect(events[0].metadata?.reaction).toBe('thumbsup'); + }); + }); + + describe('channel_created events', () => { + it('should parse channel created event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12355', + event_time: 1705320000, + event: { + type: 'channel_created', + channel: { + id: 'C_NEW', + name: 'project-alpha', + creator: 'U12345', + }, + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('channel_created'); + expect(events[0].context.name).toBe('project-alpha'); + }); + }); + + describe('member_joined_channel events', () => { + it('should parse member joined event', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12356', + event_time: 1705320000, + event: { + type: 'member_joined_channel', + user: 'U_NEW', + channel: 'C12345', + inviter: 'U12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('member_joined'); + expect(events[0].actor.id).toBe('U_NEW'); + expect(events[0].metadata?.inviter).toBe('U12345'); + }); + }); + + describe('Unknown events', () => { + it('should create generic event for unknown types', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12357', + event_time: 1705320000, + event: { + type: 'file_shared', + user: 'U12345', + file_id: 'F12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('slack.file_shared'); + }); + }); + + describe('Text cleaning', () => { + it('should clean URLs from text', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12358', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT> check and ', + ts: '1705320000.001000', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events[0].item?.body).toContain('this link'); + expect(events[0].item?.body).toContain('https://other.com'); + expect(events[0].item?.body).not.toContain(' { + it('should extract all mentioned agents', () => { + const payload = { + type: 'event_callback', + team_id: 'T12345', + event_id: 'Ev12359', + event_time: 1705320000, + event: { + type: 'app_mention', + user: 'U12345', + text: '<@U_BOT> @lead please assign @developer to review this with @reviewer', + ts: '1705320000.001100', + channel: 'C12345', + }, + }; + + const events = slackParser.parse(payload, {}); + + expect(events[0].mentions).toContain('lead'); + expect(events[0].mentions).toContain('developer'); + expect(events[0].mentions).toContain('reviewer'); + }); + }); +}); diff --git a/src/cloud/webhooks/router.test.ts b/src/cloud/webhooks/router.test.ts new file mode 100644 index 00000000..6c88d0cb --- /dev/null +++ b/src/cloud/webhooks/router.test.ts @@ -0,0 +1,391 @@ +/** + * Webhook Router Tests + */ + +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import crypto from 'crypto'; +import { processWebhook, getWebhookConfig, defaultSources } from './router.js'; +import type { WebhookConfig } from './types.js'; + +// Mock the db module +vi.mock('../db/index.js', () => ({ + db: { + repositories: { + findByFullName: vi.fn().mockResolvedValue({ + id: 'repo-1', + userId: 'user-1', + nangoConnectionId: 'nango-conn-1', + githubFullName: 'owner/repo', + }), + }, + linkedDaemons: { + findByUserId: vi.fn().mockResolvedValue([ + { id: 'daemon-1', userId: 'user-1', status: 'online' }, + ]), + queueMessage: vi.fn().mockResolvedValue(undefined), + }, + }, +})); + +// Mock the responders +vi.mock('./responders/index.js', () => ({ + getResponder: vi.fn().mockReturnValue({ + id: 'github', + respond: vi.fn().mockResolvedValue({ success: true, id: '123', url: 'https://example.com' }), + }), +})); + +describe('getWebhookConfig', () => { + it('should return default configuration', () => { + const config = getWebhookConfig(); + + expect(config.sources).toBeDefined(); + expect(config.rules).toBeDefined(); + expect(config.sources.github).toBeDefined(); + expect(config.sources.linear).toBeDefined(); + expect(config.sources.slack).toBeDefined(); + }); +}); + +describe('defaultSources', () => { + it('should have GitHub source configured', () => { + const github = defaultSources.github; + + expect(github.id).toBe('github'); + expect(github.enabled).toBe(true); + expect(github.signature.header).toBe('x-hub-signature-256'); + expect(github.signature.algorithm).toBe('sha256'); + expect(github.parser).toBe('github'); + expect(github.responder).toBe('github'); + }); + + it('should have Linear source configured', () => { + const linear = defaultSources.linear; + + expect(linear.id).toBe('linear'); + expect(linear.enabled).toBe(true); + expect(linear.signature.algorithm).toBe('sha256'); + expect(linear.parser).toBe('linear'); + expect(linear.responder).toBe('linear'); + }); + + it('should have Slack source configured', () => { + const slack = defaultSources.slack; + + expect(slack.id).toBe('slack'); + expect(slack.enabled).toBe(true); + expect(slack.signature.algorithm).toBe('slack-v0'); + expect(slack.parser).toBe('slack'); + expect(slack.responder).toBe('slack'); + }); +}); + +describe('processWebhook', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe('unknown source', () => { + it('should return error for unknown source', async () => { + const result = await processWebhook( + 'unknown-source', + '{}', + {} + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toContain('Unknown webhook source'); + }); + }); + + describe('disabled source', () => { + it('should return error for disabled source', async () => { + const config: WebhookConfig = { + sources: { + github: { + ...defaultSources.github, + enabled: false, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'github', + '{}', + {}, + config + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toContain('disabled'); + }); + }); + + describe('signature verification', () => { + const secret = 'test-secret'; + const payload = JSON.stringify({ test: true }); + + beforeEach(() => { + vi.stubEnv('GITHUB_WEBHOOK_SECRET', secret); + }); + + afterEach(() => { + vi.unstubAllEnvs(); + }); + + it('should reject invalid signature', async () => { + const result = await processWebhook( + 'github', + payload, + { 'x-hub-signature-256': 'sha256=invalid' } + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toBe('Invalid signature'); + }); + + it('should accept valid signature', async () => { + const signature = 'sha256=' + crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + + const result = await processWebhook( + 'github', + payload, + { + 'x-hub-signature-256': signature, + 'x-github-event': 'ping', + 'x-github-delivery': 'test-delivery', + } + ); + + // May not be fully successful depending on mock setup, but shouldn't fail signature + expect(result.responses[0]?.error).not.toBe('Invalid signature'); + }); + + it('should reject missing signature', async () => { + const result = await processWebhook( + 'github', + payload, + {} // No signature header + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toBe('Invalid signature'); + }); + }); + + describe('invalid payload', () => { + beforeEach(() => { + vi.stubEnv('GITHUB_WEBHOOK_SECRET', ''); + }); + + afterEach(() => { + vi.unstubAllEnvs(); + }); + + it('should handle non-JSON payload', async () => { + // Create a config that skips signature verification + const config: WebhookConfig = { + sources: { + github: { + ...defaultSources.github, + signature: { + ...defaultSources.github.signature, + algorithm: 'none', + }, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'github', + 'not valid json', + {}, + config + ); + + expect(result.success).toBe(false); + expect(result.responses[0].error).toBe('Invalid JSON payload'); + }); + }); + + describe('event processing', () => { + const mentionPayload = { + action: 'created', + issue: { number: 42, title: 'Test' }, + comment: { + id: 789, + body: '@developer please fix this', + html_url: 'https://github.com/owner/repo/issues/42#issuecomment-789', + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'user' }, + }; + + beforeEach(() => { + vi.stubEnv('GITHUB_WEBHOOK_SECRET', ''); + }); + + afterEach(() => { + vi.unstubAllEnvs(); + }); + + it('should process GitHub mention event', async () => { + const payload = JSON.stringify(mentionPayload); + const config: WebhookConfig = { + sources: { + github: { + ...defaultSources.github, + signature: { ...defaultSources.github.signature, algorithm: 'none' }, + }, + }, + rules: [ + { + id: 'test-mention', + name: 'Test Mention', + enabled: true, + source: 'github', + eventType: 'mention', + action: { type: 'spawn_agent', agentType: '$.mentions' }, + priority: 10, + }, + ], + }; + + const result = await processWebhook( + 'github', + payload, + { + 'x-github-event': 'issue_comment', + 'x-github-delivery': 'test-delivery', + }, + config + ); + + expect(result.eventType).toBe('mention'); + expect(result.matchedRules).toContain('test-mention'); + }); + + it('should return empty result for no matching events', async () => { + const payload = JSON.stringify({ + action: 'completed', + check_run: { + id: 123, + name: 'build', + conclusion: 'success', // Not a failure + pull_requests: [], + }, + repository: { full_name: 'owner/repo' }, + sender: { id: 123, login: 'github-actions' }, + }); + + const config: WebhookConfig = { + sources: { + github: { + ...defaultSources.github, + signature: { ...defaultSources.github.signature, algorithm: 'none' }, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'github', + payload, + { + 'x-github-event': 'check_run', + 'x-github-delivery': 'test-delivery', + }, + config + ); + + // Should have processed but with no specific events + expect(result.matchedRules).toHaveLength(0); + }); + }); + + describe('Slack URL verification', () => { + it('should handle Slack URL verification (handled at API level)', async () => { + // Note: URL verification is actually handled at the API level, + // but the parser should return empty events for it + const payload = JSON.stringify({ + type: 'url_verification', + challenge: 'test-challenge', + }); + + const config: WebhookConfig = { + sources: { + slack: { + ...defaultSources.slack, + signature: { ...defaultSources.slack.signature, algorithm: 'none' }, + }, + }, + rules: [], + }; + + const result = await processWebhook( + 'slack', + payload, + {}, + config + ); + + // Parser returns empty for url_verification + expect(result.success).toBe(true); + expect(result.matchedRules).toHaveLength(0); + }); + }); +}); + +describe('signature verification algorithms', () => { + describe('sha256', () => { + it('should verify SHA256 HMAC signature', () => { + const secret = 'test-secret'; + const payload = '{"test": true}'; + const signature = crypto + .createHmac('sha256', secret) + .update(payload) + .digest('hex'); + + // Signature should match expected format + expect(signature).toMatch(/^[a-f0-9]{64}$/); + }); + }); + + describe('sha1', () => { + it('should verify SHA1 HMAC signature', () => { + const secret = 'test-secret'; + const payload = '{"test": true}'; + const signature = crypto + .createHmac('sha1', secret) + .update(payload) + .digest('hex'); + + // Signature should match expected format + expect(signature).toMatch(/^[a-f0-9]{40}$/); + }); + }); + + describe('slack-v0', () => { + it('should create Slack-format signature', () => { + const secret = 'test-secret'; + const timestamp = Math.floor(Date.now() / 1000); + const payload = '{"test": true}'; + const sigBasestring = `v0:${timestamp}:${payload}`; + const signature = 'v0=' + crypto + .createHmac('sha256', secret) + .update(sigBasestring) + .digest('hex'); + + expect(signature).toMatch(/^v0=[a-f0-9]{64}$/); + }); + }); +}); diff --git a/src/cloud/webhooks/rules-engine.test.ts b/src/cloud/webhooks/rules-engine.test.ts new file mode 100644 index 00000000..76f8ef78 --- /dev/null +++ b/src/cloud/webhooks/rules-engine.test.ts @@ -0,0 +1,346 @@ +/** + * Rules Engine Tests + */ + +import { describe, it, expect } from 'vitest'; +import { + matchesRule, + findMatchingRules, + resolveActionTemplate, + defaultRules, +} from './rules-engine.js'; +import type { NormalizedEvent, WebhookRule } from './types.js'; + +const createEvent = (overrides: Partial = {}): NormalizedEvent => ({ + id: 'test-event-1', + source: 'github', + type: 'mention', + timestamp: new Date(), + actor: { id: 'user-1', name: 'testuser' }, + context: { name: 'owner/repo' }, + mentions: ['developer'], + labels: [], + metadata: {}, + rawPayload: {}, + ...overrides, +}); + +const createRule = (overrides: Partial = {}): WebhookRule => ({ + id: 'test-rule', + name: 'Test Rule', + enabled: true, + source: '*', + eventType: '*', + action: { type: 'spawn_agent', agentType: 'developer' }, + priority: 10, + ...overrides, +}); + +describe('matchesRule', () => { + describe('enabled/disabled', () => { + it('should not match disabled rules', () => { + const rule = createRule({ enabled: false }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(false); + }); + + it('should match enabled rules', () => { + const rule = createRule({ enabled: true }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(true); + }); + }); + + describe('source matching', () => { + it('should match wildcard source', () => { + const rule = createRule({ source: '*' }); + const event = createEvent({ source: 'github' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match exact source', () => { + const rule = createRule({ source: 'github' }); + const event = createEvent({ source: 'github' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should not match different source', () => { + const rule = createRule({ source: 'linear' }); + const event = createEvent({ source: 'github' }); + + expect(matchesRule(rule, event)).toBe(false); + }); + }); + + describe('eventType matching', () => { + it('should match wildcard eventType', () => { + const rule = createRule({ eventType: '*' }); + const event = createEvent({ type: 'ci_failure' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match exact eventType', () => { + const rule = createRule({ eventType: 'mention' }); + const event = createEvent({ type: 'mention' }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match prefix wildcard', () => { + const rule = createRule({ eventType: 'ci_*' }); + + expect(matchesRule(rule, createEvent({ type: 'ci_failure' }))).toBe(true); + expect(matchesRule(rule, createEvent({ type: 'ci_success' }))).toBe(true); + expect(matchesRule(rule, createEvent({ type: 'issue_created' }))).toBe(false); + }); + + it('should not match different eventType', () => { + const rule = createRule({ eventType: 'ci_failure' }); + const event = createEvent({ type: 'mention' }); + + expect(matchesRule(rule, event)).toBe(false); + }); + }); + + describe('condition evaluation', () => { + it('should match without condition', () => { + const rule = createRule({ condition: undefined }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should match empty condition', () => { + const rule = createRule({ condition: '' }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should evaluate == condition', () => { + const rule = createRule({ condition: '$.priority == "high"' }); + + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'low' }))).toBe(false); + }); + + it('should evaluate != condition', () => { + const rule = createRule({ condition: '$.priority != "low"' }); + + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'low' }))).toBe(false); + }); + + it('should evaluate "in" condition with array', () => { + const rule = createRule({ condition: '$.priority in ["critical", "high"]' }); + + expect(matchesRule(rule, createEvent({ priority: 'critical' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'medium' }))).toBe(false); + }); + + it('should evaluate "contains" condition for arrays', () => { + const rule = createRule({ condition: '$.labels contains "bug"' }); + + expect(matchesRule(rule, createEvent({ labels: ['bug', 'critical'] }))).toBe(true); + expect(matchesRule(rule, createEvent({ labels: ['feature'] }))).toBe(false); + }); + + it('should evaluate "contains" condition for strings', () => { + const rule = createRule({ condition: '$.actor.name contains "test"' }); + + expect(matchesRule(rule, createEvent({ actor: { id: '1', name: 'testuser' } }))).toBe(true); + expect(matchesRule(rule, createEvent({ actor: { id: '1', name: 'admin' } }))).toBe(false); + }); + + it('should evaluate numeric comparisons', () => { + const event = createEvent({ metadata: { count: 5 } }); + + expect(matchesRule(createRule({ condition: '$.metadata.count > 3' }), event)).toBe(true); + expect(matchesRule(createRule({ condition: '$.metadata.count < 3' }), event)).toBe(false); + expect(matchesRule(createRule({ condition: '$.metadata.count >= 5' }), event)).toBe(true); + expect(matchesRule(createRule({ condition: '$.metadata.count <= 5' }), event)).toBe(true); + }); + + it('should evaluate boolean conditions', () => { + const rule = createRule({ condition: '$.metadata.urgent == true' }); + + expect(matchesRule(rule, createEvent({ metadata: { urgent: true } }))).toBe(true); + expect(matchesRule(rule, createEvent({ metadata: { urgent: false } }))).toBe(false); + }); + + it('should evaluate null conditions', () => { + const rule = createRule({ condition: '$.priority == null' }); + + expect(matchesRule(rule, createEvent({ priority: undefined }))).toBe(true); + expect(matchesRule(rule, createEvent({ priority: 'high' }))).toBe(false); + }); + + it('should handle nested path access', () => { + const rule = createRule({ condition: '$.metadata.check.name == "build"' }); + const event = createEvent({ + metadata: { check: { name: 'build' } }, + }); + + expect(matchesRule(rule, event)).toBe(true); + }); + + it('should handle invalid condition gracefully', () => { + const rule = createRule({ condition: 'invalid condition syntax' }); + const event = createEvent(); + + expect(matchesRule(rule, event)).toBe(false); + }); + }); +}); + +describe('findMatchingRules', () => { + it('should return matching rules sorted by priority', () => { + const rules: WebhookRule[] = [ + createRule({ id: 'rule-3', priority: 30 }), + createRule({ id: 'rule-1', priority: 10 }), + createRule({ id: 'rule-2', priority: 20 }), + ]; + const event = createEvent(); + + const matched = findMatchingRules(rules, event); + + expect(matched).toHaveLength(3); + expect(matched[0].id).toBe('rule-1'); + expect(matched[1].id).toBe('rule-2'); + expect(matched[2].id).toBe('rule-3'); + }); + + it('should filter out non-matching rules', () => { + const rules: WebhookRule[] = [ + createRule({ id: 'match-1', source: 'github' }), + createRule({ id: 'no-match', source: 'linear' }), + createRule({ id: 'match-2', source: '*' }), + ]; + const event = createEvent({ source: 'github' }); + + const matched = findMatchingRules(rules, event); + + expect(matched).toHaveLength(2); + expect(matched.map(r => r.id)).toContain('match-1'); + expect(matched.map(r => r.id)).toContain('match-2'); + }); + + it('should return empty array if no rules match', () => { + const rules: WebhookRule[] = [ + createRule({ source: 'linear' }), + createRule({ eventType: 'ci_failure' }), + ]; + const event = createEvent({ source: 'github', type: 'mention' }); + + const matched = findMatchingRules(rules, event); + + expect(matched).toHaveLength(0); + }); +}); + +describe('resolveActionTemplate', () => { + it('should resolve $.mentions to first mention', () => { + const action = { type: 'spawn_agent' as const, agentType: '$.mentions' }; + const event = createEvent({ mentions: ['developer', 'reviewer'] }); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.agentType).toBe('developer'); + }); + + it('should resolve nested path', () => { + const action = { type: 'spawn_agent' as const, agentType: '$.metadata.agentType' }; + const event = createEvent({ metadata: { agentType: 'ci-fix' } }); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.agentType).toBe('ci-fix'); + }); + + it('should keep literal agent type', () => { + const action = { type: 'spawn_agent' as const, agentType: 'developer' }; + const event = createEvent(); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.agentType).toBe('developer'); + }); + + it('should resolve prompt template references', () => { + const action = { type: 'spawn_agent' as const, prompt: '${item.body}' }; + const event = createEvent({ item: { type: 'issue', id: '1', body: 'Fix the bug' } }); + + const resolved = resolveActionTemplate(action, event); + + expect(resolved.prompt).toBe('Fix the bug'); + }); +}); + +describe('defaultRules', () => { + it('should have CI failure rule for GitHub', () => { + const ciRule = defaultRules.find(r => r.id === 'ci-failure'); + + expect(ciRule).toBeDefined(); + expect(ciRule?.source).toBe('github'); + expect(ciRule?.eventType).toBe('ci_failure'); + expect(ciRule?.action.agentType).toBe('ci-fix'); + }); + + it('should have mention rules for all sources', () => { + const githubMention = defaultRules.find(r => r.id === 'github-mention'); + const linearMention = defaultRules.find(r => r.id === 'linear-mention'); + const slackMention = defaultRules.find(r => r.id === 'slack-mention'); + + expect(githubMention).toBeDefined(); + expect(linearMention).toBeDefined(); + expect(slackMention).toBeDefined(); + }); + + it('should have assignment rules', () => { + const linearAssignment = defaultRules.find(r => r.id === 'linear-assignment'); + const githubAssignment = defaultRules.find(r => r.id === 'github-assignment'); + + expect(linearAssignment).toBeDefined(); + expect(githubAssignment).toBeDefined(); + expect(linearAssignment?.eventType).toBe('issue_assigned'); + }); + + it('should have all rules enabled by default', () => { + for (const rule of defaultRules) { + expect(rule.enabled).toBe(true); + } + }); + + it('should match CI failure event', () => { + const ciRule = defaultRules.find(r => r.id === 'ci-failure')!; + const event = createEvent({ + source: 'github', + type: 'ci_failure', + }); + + expect(matchesRule(ciRule, event)).toBe(true); + }); + + it('should match GitHub high priority issue', () => { + const issueRule = defaultRules.find(r => r.id === 'github-issue')!; + const highPriorityEvent = createEvent({ + source: 'github', + type: 'issue_created', + priority: 'high', + }); + const lowPriorityEvent = createEvent({ + source: 'github', + type: 'issue_created', + priority: 'low', + }); + + expect(matchesRule(issueRule, highPriorityEvent)).toBe(true); + expect(matchesRule(issueRule, lowPriorityEvent)).toBe(false); + }); +}); diff --git a/src/cloud/webhooks/rules-engine.ts b/src/cloud/webhooks/rules-engine.ts index f2a9f98f..464ab033 100644 --- a/src/cloud/webhooks/rules-engine.ts +++ b/src/cloud/webhooks/rules-engine.ts @@ -15,7 +15,8 @@ function evaluateCondition(condition: string, event: NormalizedEvent): boolean { try { // Parse condition: $.path operator value - const conditionPattern = /^\$\.([a-zA-Z0-9_.]+)\s*(==|!=|in|contains|>|<|>=|<=)\s*(.+)$/; + // Note: >= and <= must come before > and < in the alternation to match correctly + const conditionPattern = /^\$\.([a-zA-Z0-9_.]+)\s*(==|!=|>=|<=|>|<|in|contains)\s*(.+)$/; const match = condition.match(conditionPattern); if (!match) { @@ -52,6 +53,10 @@ function evaluateCondition(condition: string, event: NormalizedEvent): boolean { switch (operator) { case '==': + // Handle null/undefined equivalence + if (compareValue === null) { + return eventValue === null || eventValue === undefined; + } return eventValue === compareValue; case '!=': return eventValue !== compareValue; From 90fb86b34bce288cb295fe4e20ab976d14bbccc6 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 12:50:24 +0000 Subject: [PATCH 040/103] Fix CLI prompt config to match pre-seeded Docker setup The dark mode prompt was configured but never appeared because Dockerfile.real pre-seeds Claude's settings with theme: "dark". Removed from expected prompts to match actual behavior (2/2 handled). Added comment explaining the pre-seeding approach. --- src/cloud/api/cli-pty-runner.ts | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/cloud/api/cli-pty-runner.ts b/src/cloud/api/cli-pty-runner.ts index 0c386b01..ee0ee076 100644 --- a/src/cloud/api/cli-pty-runner.ts +++ b/src/cloud/api/cli-pty-runner.ts @@ -66,12 +66,8 @@ export const CLI_AUTH_CONFIG: Record = { displayName: 'Claude', waitTimeout: 5000, prompts: [ - { - pattern: /dark\s*(mode|theme)/i, - response: '\r', // Press enter to accept default - delay: 100, - description: 'Dark mode prompt', - }, + // Note: Dark mode prompt is pre-seeded in Dockerfile.real to avoid interactive setup + // If running without pre-seeding, add: { pattern: /dark\s*(mode|theme)/i, response: '\r', description: 'Dark mode prompt' } { pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, response: '\r', // Press enter for first option (subscription) From 8acd3bf3d16d88e809c27cfea11abf3a73932855 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 12:50:39 +0000 Subject: [PATCH 041/103] Revert "Fix CLI prompt config to match pre-seeded Docker setup" This reverts commit 90fb86b34bce288cb295fe4e20ab976d14bbccc6. --- src/cloud/api/cli-pty-runner.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/cloud/api/cli-pty-runner.ts b/src/cloud/api/cli-pty-runner.ts index ee0ee076..0c386b01 100644 --- a/src/cloud/api/cli-pty-runner.ts +++ b/src/cloud/api/cli-pty-runner.ts @@ -66,8 +66,12 @@ export const CLI_AUTH_CONFIG: Record = { displayName: 'Claude', waitTimeout: 5000, prompts: [ - // Note: Dark mode prompt is pre-seeded in Dockerfile.real to avoid interactive setup - // If running without pre-seeding, add: { pattern: /dark\s*(mode|theme)/i, response: '\r', description: 'Dark mode prompt' } + { + pattern: /dark\s*(mode|theme)/i, + response: '\r', // Press enter to accept default + delay: 100, + description: 'Dark mode prompt', + }, { pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, response: '\r', // Press enter for first option (subscription) From 227090c5eaa06222586eeaf51b2c62e0f190ab8a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 12:51:01 +0000 Subject: [PATCH 042/103] Remove Claude config pre-seeding to test full interactive flow Previously pre-seeded theme:dark which caused the dark mode prompt to be skipped (2/3 prompts handled). Now the test exercises the complete interactive flow including all configured prompts. --- scripts/test-cli-auth/Dockerfile.real | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index 4fcdd423..fcab07e0 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -42,9 +42,8 @@ USER testuser # Claude - uses official install script RUN curl -fsSL https://claude.ai/install.sh | bash || echo "Claude install failed" -# Pre-seed Claude config to skip interactive onboarding -RUN mkdir -p /home/testuser/.claude && \ - echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/testuser/.claude/settings.local.json +# Note: We don't pre-seed Claude config - we want to test the full interactive flow +# including the dark mode and auth method prompts # Note: OpenCode is installed as root above via npm From 27e4b1abef1b5d0c454adadf123bfbfe2d377965 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 12:59:52 +0000 Subject: [PATCH 043/103] Add E2B sandbox integration design with advanced capabilities Comprehensive design doc covering: - E2B basic sandboxes (~150ms startup, microVM isolation) - E2B Desktop (full GUI, Chrome/Firefox/VS Code, VNC) - Browserbase integration (serverless browsers, Playwright) - Docker MCP Catalog (200+ tool integrations) - Hybrid architecture for different agent types - Configuration schema for workspace execution backends Agents can now be configured for: - Code execution (E2B Sandbox) - GUI/browser testing (E2B Desktop) - Web automation at scale (Browserbase) --- docs/design/e2b-sandbox-integration.md | 504 +++++++++++++++++++++++++ 1 file changed, 504 insertions(+) create mode 100644 docs/design/e2b-sandbox-integration.md diff --git a/docs/design/e2b-sandbox-integration.md b/docs/design/e2b-sandbox-integration.md new file mode 100644 index 00000000..15e804f1 --- /dev/null +++ b/docs/design/e2b-sandbox-integration.md @@ -0,0 +1,504 @@ +# E2B Sandbox Integration + +## Overview + +[E2B](https://e2b.dev) provides secure, isolated cloud sandboxes for running AI-generated code. This document outlines how we can leverage E2B to improve agent execution in Agent Relay. + +## Current Architecture + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Cloud Service โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ Webhooks โ”‚โ”€โ”€โ”€โ–ถโ”‚ Spawner โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ spawn command + โ–ผ +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Docker Workspace Container โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ Daemon โ”‚โ”€โ”€โ”€โ–ถโ”‚ Agent โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ - Node.js, Python, Git, gh โ”‚ +โ”‚ - AI CLIs (Claude, Codex, Gemini, etc.) โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +**Pain Points:** +- Container startup time (~5-10s) +- Infrastructure management overhead +- Scaling requires container orchestration (K8s, ECS, etc.) +- No easy pause/resume for long-running agents + +## Proposed Architecture with E2B + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Cloud Service โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ Webhooks โ”‚โ”€โ”€โ”€โ–ถโ”‚ Spawner โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ E2B SDK + โ–ผ +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ E2B Cloud (Managed) โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ Custom Sandbox Template โ”‚ โ”‚ +โ”‚ โ”‚ - relay-workspace-v1 โ”‚ โ”‚ +โ”‚ โ”‚ - Pre-installed: Node, Python, Git, gh โ”‚ โ”‚ +โ”‚ โ”‚ - Pre-installed: Claude, Codex, Gemini โ”‚ โ”‚ +โ”‚ โ”‚ - ~150ms startup โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ Sandbox โ”‚ โ”‚ Sandbox โ”‚ โ”‚ Sandbox โ”‚ ... โ”‚ +โ”‚ โ”‚ Agent 1 โ”‚ โ”‚ Agent 2 โ”‚ โ”‚ Agent 3 โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +## Benefits + +| Aspect | Docker (Current) | E2B (Proposed) | +|--------|------------------|----------------| +| Startup time | ~5-10s | ~150ms | +| Infrastructure | Self-managed | Managed | +| Scaling | Manual/K8s | Automatic | +| Isolation | Container | Microvm | +| Pause/Resume | Not supported | Native | +| Cost model | Always-on | Pay per use | + +## Implementation Plan + +### Phase 1: E2B SDK Integration + +Add E2B SDK and create basic sandbox spawning: + +```typescript +// src/cloud/services/e2b-sandbox.ts +import { Sandbox } from '@e2b/sdk'; + +export interface SandboxConfig { + template: string; + timeout?: number; + envVars?: Record; +} + +export async function createAgentSandbox(config: SandboxConfig): Promise { + const sandbox = await Sandbox.create(config.template, { + timeoutMs: config.timeout || 60000, + envVars: config.envVars, + }); + + return sandbox; +} + +export async function runAgentInSandbox( + sandbox: Sandbox, + agentType: string, + prompt: string +): Promise<{ output: string; exitCode: number }> { + // Clone repo if needed + await sandbox.commands.run('git clone $REPO_URL /workspace/repo'); + + // Run the agent + const result = await sandbox.commands.run( + `claude --agent ${agentType} --prompt "${prompt}"`, + { cwd: '/workspace/repo' } + ); + + return { + output: result.stdout + result.stderr, + exitCode: result.exitCode, + }; +} +``` + +### Phase 2: Custom Sandbox Template + +Create a custom E2B template matching our workspace: + +```dockerfile +# e2b/templates/relay-workspace/Dockerfile +FROM e2b/base:latest + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + bash ca-certificates curl git python3 jq + +# Install GitHub CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt-get update && apt-get install -y gh + +# Install AI CLIs +RUN npm install -g @openai/codex @google/gemini-cli opencode-ai@latest +RUN curl -fsSL https://claude.ai/install.sh | bash +RUN curl -fsSL https://app.factory.ai/cli | sh + +ENV PATH="/root/.local/bin:$PATH" +``` + +```yaml +# e2b/templates/relay-workspace/e2b.toml +[template] +name = "relay-workspace" +dockerfile = "Dockerfile" + +[template.resources] +cpu = 2 +memory = 4096 +``` + +### Phase 3: Spawner Integration + +Update spawners to use E2B: + +```typescript +// src/cloud/services/ci-agent-spawner.ts +import { createAgentSandbox, runAgentInSandbox } from './e2b-sandbox.js'; + +export async function spawnCIFixAgent(event: CIFailureEvent): Promise { + // Create sandbox + const sandbox = await createAgentSandbox({ + template: 'relay-workspace', + timeout: 300000, // 5 minutes + envVars: { + REPO_URL: event.repository, + GITHUB_TOKEN: await getRepoToken(event.repositoryId), + CI_RUN_ID: event.checkRunId, + }, + }); + + try { + // Run CI fix agent + const result = await runAgentInSandbox( + sandbox, + 'ci-fix', + `Fix CI failure in ${event.checkName}: ${event.conclusion}` + ); + + // Post results back to GitHub + await postCIFixComment(event, result); + } finally { + // Always clean up + await sandbox.close(); + } +} +``` + +### Phase 4: Hybrid Mode + +Support both Docker (self-hosted) and E2B (cloud) execution: + +```typescript +// src/cloud/services/agent-executor.ts +export type ExecutionBackend = 'docker' | 'e2b'; + +export interface ExecutorConfig { + backend: ExecutionBackend; + e2bApiKey?: string; + dockerSocket?: string; +} + +export async function executeAgent( + config: ExecutorConfig, + agentType: string, + prompt: string, + context: ExecutionContext +): Promise { + switch (config.backend) { + case 'e2b': + return executeInE2B(agentType, prompt, context); + case 'docker': + return executeInDocker(agentType, prompt, context); + } +} +``` + +## Configuration + +Add E2B configuration to workspace settings: + +```typescript +// Workspace settings +interface WorkspaceSettings { + execution: { + backend: 'docker' | 'e2b' | 'hybrid'; + e2b?: { + apiKey: string; + template: string; + defaultTimeout: number; + }; + docker?: { + image: string; + socket: string; + }; + }; +} +``` + +## Cost Considerations + +E2B pricing is based on sandbox-seconds. Estimated costs: + +| Scenario | Docker (self-hosted) | E2B | +|----------|---------------------|-----| +| CI fix agent (5 min) | ~$0.01 compute | ~$0.05 | +| Code review (2 min) | ~$0.004 | ~$0.02 | +| Long task (30 min) | ~$0.06 | ~$0.30 | + +**Recommendation:** Use E2B for: +- Short-lived tasks (CI fixes, code review) +- Burst workloads (many concurrent agents) +- Teams without container infrastructure + +Use Docker for: +- Long-running agents +- High-volume workloads +- Self-hosted/air-gapped environments + +## Security + +E2B sandboxes provide: +- **Microvm isolation** - stronger than containers +- **Network isolation** - configurable internet access +- **Ephemeral by default** - no persistent state unless explicit +- **No host access** - sandboxes can't reach host systems + +## Migration Path + +1. **Week 1**: Add E2B SDK, create basic integration +2. **Week 2**: Build custom template, test with CI agents +3. **Week 3**: Add hybrid mode, workspace configuration +4. **Week 4**: Documentation, monitoring, rollout + +## Open Questions + +1. **Template caching**: How often do we need to rebuild templates? +2. **Secrets management**: How to inject API keys securely? +3. **Artifact persistence**: How to preserve agent outputs? +4. **Monitoring**: How to track sandbox usage and costs? + +## Advanced Capabilities + +### E2B Desktop - Full GUI/Browser Control + +[E2B Desktop](https://github.com/e2b-dev/desktop) provides complete Linux desktop environments: + +**Features:** +- Xfce4 desktop environment +- Pre-installed Chrome, Firefox, VS Code +- VNC streaming for real-time viewing +- Mouse/keyboard control via xdotool +- Screenshot capture for visual AI + +```typescript +// src/cloud/services/e2b-desktop.ts +import { Desktop } from '@e2b/desktop'; + +export async function runBrowserTest( + testScript: string, + url: string +): Promise<{ screenshots: string[]; result: string }> { + const desktop = await Desktop.create(); + + try { + // Open browser + await desktop.launch('google-chrome', [url]); + await desktop.wait(2000); + + // Take screenshot + const screenshot = await desktop.screenshot(); + + // Run test script with Playwright + const result = await desktop.commands.run(`npx playwright test ${testScript}`); + + return { + screenshots: [screenshot], + result: result.stdout, + }; + } finally { + await desktop.close(); + } +} +``` + +**Use cases:** +- Visual regression testing +- E2E browser tests +- GUI automation +- Screen recording for demos + +### Browserbase Integration - Serverless Browsers + +[Browserbase](https://browserbase.com) provides dedicated serverless browser infrastructure: + +**Features:** +- Spin up 1000s of browsers in milliseconds +- Native Playwright/Puppeteer/Selenium support +- Built-in captcha solving +- Residential proxies +- Session recording & debugging +- SOC-2 & HIPAA compliant + +```typescript +// src/cloud/services/browserbase.ts +import { chromium } from 'playwright'; + +export async function runWithBrowserbase( + script: (page: Page) => Promise +): Promise { + const browser = await chromium.connectOverCDP( + `wss://connect.browserbase.com?apiKey=${process.env.BROWSERBASE_API_KEY}` + ); + + try { + const context = browser.contexts()[0]; + const page = context.pages()[0]; + await script(page); + } finally { + await browser.close(); + } +} +``` + +**Use cases:** +- Web scraping agents +- Form automation +- Testing production sites +- Multi-browser testing + +### Docker MCP Catalog - 200+ Tools + +E2B sandboxes now include access to [Docker's MCP Catalog](https://www.docker.com/blog/docker-e2b-building-the-future-of-trusted-ai/): + +**Available tools include:** +- GitHub, GitLab +- Perplexity, Browserbase +- ElevenLabs, Stripe +- Slack, Discord +- And 200+ more + +```typescript +// Agents can use MCP tools within sandboxes +const sandbox = await Sandbox.create('relay-workspace-mcp'); +await sandbox.commands.run(` + # Use GitHub MCP tool + mcp-github create-issue --repo user/repo --title "Bug fix" +`); +``` + +### Hybrid Architecture for Advanced Agents + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Agent Relay Cloud โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ Spawner โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ โ”‚ +โ”‚ โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ–ผ โ–ผ โ–ผ โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ E2B Sandbox โ”‚ โ”‚ E2B Desktop โ”‚ โ”‚ Browserbase โ”‚ โ”‚ +โ”‚ โ”‚ (Code exec) โ”‚ โ”‚ (GUI/VNC) โ”‚ โ”‚ (Browsers) โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ โ”‚ โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ โ”‚ +โ”‚ MCP Tool Gateway โ”‚ +โ”‚ (200+ integrations) โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +### Agent Capabilities Matrix + +| Capability | E2B Sandbox | E2B Desktop | Browserbase | +|------------|-------------|-------------|-------------| +| Code execution | โœ… | โœ… | โŒ | +| Terminal/CLI | โœ… | โœ… | โŒ | +| File system | โœ… | โœ… | Limited | +| GUI apps | โŒ | โœ… | โŒ | +| Browser control | Limited | โœ… | โœ… | +| Visual testing | โŒ | โœ… | โœ… | +| Parallel scale | Good | Limited | Excellent | +| Cost | Low | Medium | Medium | + +### Workspace Configuration + +```typescript +interface WorkspaceExecutionConfig { + // Default execution backend + default: 'e2b' | 'e2b-desktop' | 'docker'; + + // E2B configuration + e2b?: { + apiKey: string; + template: string; + timeout: number; + enableMcp: boolean; + }; + + // E2B Desktop for GUI tasks + e2bDesktop?: { + apiKey: string; + resolution: { width: number; height: number }; + vncEnabled: boolean; + }; + + // Browserbase for web automation + browserbase?: { + apiKey: string; + proxy?: 'residential' | 'datacenter'; + captchaSolver: boolean; + }; + + // Agent-specific overrides + agentOverrides?: { + [agentType: string]: { + backend: 'e2b' | 'e2b-desktop' | 'browserbase' | 'docker'; + capabilities?: string[]; + }; + }; +} + +// Example configuration +const config: WorkspaceExecutionConfig = { + default: 'e2b', + e2b: { + apiKey: process.env.E2B_API_KEY!, + template: 'relay-workspace', + timeout: 300000, + enableMcp: true, + }, + e2bDesktop: { + apiKey: process.env.E2B_API_KEY!, + resolution: { width: 1920, height: 1080 }, + vncEnabled: true, + }, + browserbase: { + apiKey: process.env.BROWSERBASE_API_KEY!, + captchaSolver: true, + }, + agentOverrides: { + 'visual-tester': { backend: 'e2b-desktop' }, + 'web-scraper': { backend: 'browserbase' }, + 'ci-fix': { backend: 'e2b' }, + }, +}; +``` + +## References + +- [E2B Documentation](https://e2b.dev/docs) +- [E2B GitHub](https://github.com/e2b-dev/E2B) +- [E2B Desktop](https://github.com/e2b-dev/desktop) +- [Custom Templates Guide](https://e2b.dev/docs/sandbox-template) +- [Docker + E2B Partnership](https://www.docker.com/blog/docker-e2b-building-the-future-of-trusted-ai/) +- [Browserbase](https://browserbase.com) +- [How Manus Uses E2B](https://e2b.dev/blog/how-manus-uses-e2b-to-provide-agents-with-virtual-computers) From 84c3564dcfc374ca63f5325a643041a4578ef1ca Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 13:06:00 +0000 Subject: [PATCH 044/103] Add browser testing and container spawning for current infrastructure New capabilities without external dependencies: 1. Browser Testing (Dockerfile.browser): - Playwright with Chromium/Firefox pre-installed - Xvfb virtual display for headless operation - VNC + noVNC for real-time viewing/debugging - Screenshot capture via scrot - Helper scripts: take-screenshot, launch-browser, run-playwright 2. Container Spawning (container-spawner.ts): - Docker socket mounting for agent container control - Preset configs for Node, Python, Go, Rust, Playwright - Resource limits (memory, CPU) - runCode() helper for quick language execution 3. Browser Testing Service (browser-testing.ts): - Playwright test runner integration - Screenshot capture API - Inline script execution - VNC connection info 4. Docker Compose (docker-compose.browser.yml): - workspace-browser: Full browser testing stack - workspace-dind: Docker-in-Docker variant (requires sysbox) Usage: docker compose -f docker-compose.dev.yml -f docker-compose.browser.yml up # Access VNC at http://localhost:6080/vnc.html --- deploy/workspace/Dockerfile.browser | 154 +++++++++ deploy/workspace/entrypoint-browser.sh | 118 +++++++ docker-compose.browser.yml | 78 +++++ src/daemon/services/browser-testing.ts | 320 +++++++++++++++++ src/daemon/services/container-spawner.ts | 418 +++++++++++++++++++++++ 5 files changed, 1088 insertions(+) create mode 100644 deploy/workspace/Dockerfile.browser create mode 100644 deploy/workspace/entrypoint-browser.sh create mode 100644 docker-compose.browser.yml create mode 100644 src/daemon/services/browser-testing.ts create mode 100644 src/daemon/services/container-spawner.ts diff --git a/deploy/workspace/Dockerfile.browser b/deploy/workspace/Dockerfile.browser new file mode 100644 index 00000000..51a0cf53 --- /dev/null +++ b/deploy/workspace/Dockerfile.browser @@ -0,0 +1,154 @@ +# Agent Relay Workspace - Browser Testing Variant +# Adds Playwright, Xvfb, VNC for full browser testing capabilities +# +# Build: docker build -f Dockerfile.browser -t agent-relay-workspace:browser . +# Run: docker run -p 3888:3888 -p 6080:6080 agent-relay-workspace:browser + +FROM node:20-slim AS builder + +WORKDIR /app + +# Install build dependencies for native modules (node-pty, better-sqlite3) +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Copy package files and scripts (needed for postinstall) +COPY package*.json ./ +COPY scripts ./scripts + +# Install dependencies (production only, skip tmux in CI) +ENV CI=true +RUN npm ci --omit=dev + +# Copy pre-built dist (build before docker build) +COPY dist ./dist + +# --- + +FROM node:20-slim + +WORKDIR /app + +# ============================================================================ +# System Dependencies +# ============================================================================ +RUN apt-get update && apt-get install -y \ + # Basic tools + bash \ + ca-certificates \ + curl \ + git \ + python3 \ + jq \ + # Docker CLI (for agents to spawn containers) + docker.io \ + # Browser testing dependencies + xvfb \ + x11vnc \ + fluxbox \ + # noVNC for browser-based VNC access + novnc \ + websockify \ + # Playwright system dependencies + libnss3 \ + libatk1.0-0 \ + libatk-bridge2.0-0 \ + libcups2 \ + libdrm2 \ + libxkbcommon0 \ + libxcomposite1 \ + libxdamage1 \ + libxfixes3 \ + libxrandr2 \ + libgbm1 \ + libasound2 \ + libpango-1.0-0 \ + libcairo2 \ + # Screenshot tools + scrot \ + imagemagick \ + && rm -rf /var/lib/apt/lists/* + +# Install GitHub CLI +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt-get update \ + && apt-get install -y gh \ + && rm -rf /var/lib/apt/lists/* + +# Copy from builder +COPY --from=builder /app/dist ./dist +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package*.json ./ +COPY deploy/workspace/entrypoint.sh /entrypoint.sh +COPY deploy/workspace/entrypoint-browser.sh /entrypoint-browser.sh +COPY deploy/workspace/git-credential-relay /usr/local/bin/git-credential-relay +RUN chmod +x /entrypoint.sh /entrypoint-browser.sh /usr/local/bin/git-credential-relay + +# Install npm-based CLIs globally as root (npm -g requires root) +RUN npm install -g @openai/codex +RUN npm install -g @google/gemini-cli +RUN npm install -g opencode-ai@latest +# Install Playwright with browsers +RUN npm install -g playwright +RUN npx playwright install chromium firefox + +# Create workspace directory +RUN mkdir -p /workspace/repos /data + +# Create non-root user +RUN useradd -m -u 1001 workspace +RUN usermod -aG docker workspace # Allow docker access +RUN chown -R workspace:workspace /app /workspace /data + +USER workspace + +# Install AI CLIs as workspace user (they install to ~/.local/bin) +# Claude +RUN curl -fsSL https://claude.ai/install.sh | bash +# Pre-seed Claude config to skip interactive onboarding +RUN mkdir -p /home/workspace/.claude && \ + echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/workspace/.claude/settings.local.json +# Droid +RUN curl -fsSL https://app.factory.ai/cli | sh + +# ============================================================================ +# Environment +# ============================================================================ +ENV NODE_ENV=production +ENV PORT=3888 +ENV AGENT_RELAY_DATA_DIR=/data +ENV AGENT_RELAY_DASHBOARD_PORT=3888 +ENV PATH="/home/workspace/.local/bin:$PATH" + +# Display settings for Xvfb +ENV DISPLAY=:99 +ENV SCREEN_WIDTH=1920 +ENV SCREEN_HEIGHT=1080 +ENV SCREEN_DEPTH=24 + +# VNC settings +ENV VNC_PORT=5900 +ENV NOVNC_PORT=6080 + +# ============================================================================ +# Expose Ports +# ============================================================================ +# 3888 - Dashboard/API +# 3889 - WebSocket (optional) +# 5900 - VNC direct +# 6080 - noVNC web interface +EXPOSE 3888 3889 5900 6080 + +# Volume for persistent data +VOLUME ["/data", "/workspace"] + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=10s --retries=3 \ + CMD node -e "require('http').get('http://localhost:3888/health', (r) => process.exit(r.statusCode === 200 ? 0 : 1))" + +ENTRYPOINT ["/entrypoint-browser.sh"] diff --git a/deploy/workspace/entrypoint-browser.sh b/deploy/workspace/entrypoint-browser.sh new file mode 100644 index 00000000..457c360a --- /dev/null +++ b/deploy/workspace/entrypoint-browser.sh @@ -0,0 +1,118 @@ +#!/usr/bin/env bash + +set -euo pipefail + +log() { + echo "[workspace-browser] $*" +} + +# ============================================================================ +# Start Virtual Display (Xvfb) +# ============================================================================ +log "Starting Xvfb virtual display..." +Xvfb :99 -screen 0 "${SCREEN_WIDTH:-1920}x${SCREEN_HEIGHT:-1080}x${SCREEN_DEPTH:-24}" & +XVFB_PID=$! +sleep 1 + +# Verify Xvfb started +if ! kill -0 $XVFB_PID 2>/dev/null; then + log "ERROR: Xvfb failed to start" + exit 1 +fi +log "Xvfb started on display :99" + +# ============================================================================ +# Start Window Manager (Fluxbox) +# ============================================================================ +log "Starting Fluxbox window manager..." +fluxbox & +sleep 1 +log "Fluxbox started" + +# ============================================================================ +# Start VNC Server (optional, for debugging/viewing) +# ============================================================================ +if [[ "${VNC_ENABLED:-true}" == "true" ]]; then + log "Starting x11vnc server..." + x11vnc -display :99 -forever -shared -rfbport "${VNC_PORT:-5900}" -bg -nopw -xkb + log "VNC server started on port ${VNC_PORT:-5900}" + + # Start noVNC for browser-based access + if [[ "${NOVNC_ENABLED:-true}" == "true" ]]; then + log "Starting noVNC web interface..." + websockify --web=/usr/share/novnc/ "${NOVNC_PORT:-6080}" localhost:"${VNC_PORT:-5900}" & + log "noVNC available at http://localhost:${NOVNC_PORT:-6080}/vnc.html" + fi +fi + +# ============================================================================ +# Export browser testing utilities +# ============================================================================ + +# Create screenshot helper +cat > /usr/local/bin/take-screenshot <<'EOF' +#!/usr/bin/env bash +# Take a screenshot and save to specified path +# Usage: take-screenshot [output.png] +OUTPUT="${1:-/tmp/screenshot-$(date +%Y%m%d-%H%M%S).png}" +DISPLAY=:99 scrot "$OUTPUT" +echo "$OUTPUT" +EOF +chmod +x /usr/local/bin/take-screenshot + +# Create browser launcher helper +cat > /usr/local/bin/launch-browser <<'EOF' +#!/usr/bin/env bash +# Launch browser with optional URL +# Usage: launch-browser [url] +URL="${1:-about:blank}" +DISPLAY=:99 chromium --no-sandbox --disable-gpu --start-maximized "$URL" & +echo "Browser launched with PID $!" +EOF +chmod +x /usr/local/bin/launch-browser + +# Create Playwright test runner helper +cat > /usr/local/bin/run-playwright <<'EOF' +#!/usr/bin/env bash +# Run Playwright tests with proper display settings +# Usage: run-playwright [test-file.spec.ts] [additional args...] +export DISPLAY=:99 +npx playwright test "$@" +EOF +chmod +x /usr/local/bin/run-playwright + +# ============================================================================ +# Docker-in-Docker helper (if socket mounted) +# ============================================================================ +if [[ -S /var/run/docker.sock ]]; then + log "Docker socket detected - agents can spawn containers" + + # Create helper for agents to spawn isolated containers + cat > /usr/local/bin/spawn-container <<'EOF' +#!/usr/bin/env bash +# Spawn an isolated container for agent tasks +# Usage: spawn-container [command...] +IMAGE="${1:-ubuntu:22.04}" +shift +docker run --rm -it \ + --network=host \ + -v "$(pwd):/workspace" \ + -w /workspace \ + "$IMAGE" "$@" +EOF + chmod +x /usr/local/bin/spawn-container +else + log "WARN: Docker socket not mounted - container spawning disabled" +fi + +# ============================================================================ +# Continue with main entrypoint +# ============================================================================ +log "Browser testing environment ready" +log " - Display: $DISPLAY (${SCREEN_WIDTH}x${SCREEN_HEIGHT})" +log " - VNC: ${VNC_ENABLED:-true} (port ${VNC_PORT:-5900})" +log " - noVNC: ${NOVNC_ENABLED:-true} (http://localhost:${NOVNC_PORT:-6080})" +log " - Playwright: $(npx playwright --version 2>/dev/null || echo 'installed')" + +# Hand off to main entrypoint +exec /entrypoint.sh "$@" diff --git a/docker-compose.browser.yml b/docker-compose.browser.yml new file mode 100644 index 00000000..4c81e293 --- /dev/null +++ b/docker-compose.browser.yml @@ -0,0 +1,78 @@ +# Agent Relay - Browser Testing Workspace +# +# Extends docker-compose.dev.yml with browser testing capabilities. +# +# Usage: +# docker compose -f docker-compose.dev.yml -f docker-compose.browser.yml up +# +# Access: +# - Dashboard: http://localhost:3888 +# - VNC (web): http://localhost:6080/vnc.html +# - VNC (native): vnc://localhost:5900 + +version: '3.8' + +services: + # Browser-enabled workspace with full testing capabilities + workspace-browser: + build: + context: . + dockerfile: deploy/workspace/Dockerfile.browser + ports: + - "3888:3888" # Dashboard/API + - "3889:3889" # WebSocket + - "5900:5900" # VNC direct + - "6080:6080" # noVNC web interface + environment: + WORKSPACE_ID: browser-workspace + SUPERVISOR_ENABLED: "true" + MAX_AGENTS: "10" + # Browser display settings + DISPLAY: ":99" + SCREEN_WIDTH: "1920" + SCREEN_HEIGHT: "1080" + SCREEN_DEPTH: "24" + # VNC settings + VNC_ENABLED: "true" + VNC_PORT: "5900" + NOVNC_ENABLED: "true" + NOVNC_PORT: "6080" + volumes: + # Persistent data + - workspace_browser_data:/data + # Mount repos + - ./:/workspace/relay:ro + # Docker socket for spawning containers + - /var/run/docker.sock:/var/run/docker.sock + # Required for some browser operations + shm_size: '2gb' + # Security options for browser sandboxing + security_opt: + - seccomp:unconfined + depends_on: + - cloud + + # Alternative: Rootless Docker-in-Docker workspace + # Uses sysbox runtime for secure nested containers + workspace-dind: + build: + context: . + dockerfile: deploy/workspace/Dockerfile.browser + runtime: sysbox-runc # Requires sysbox installed on host + ports: + - "3898:3888" + - "6090:6080" + environment: + WORKSPACE_ID: dind-workspace + SUPERVISOR_ENABLED: "true" + MAX_AGENTS: "10" + # DinD mode - Docker daemon runs inside container + DOCKER_HOST: "unix:///var/run/docker.sock" + volumes: + - workspace_dind_data:/data + profiles: + - dind # Only start with: --profile dind + +volumes: + workspace_browser_data: + workspace_dind_data: diff --git a/src/daemon/services/browser-testing.ts b/src/daemon/services/browser-testing.ts new file mode 100644 index 00000000..31eb5493 --- /dev/null +++ b/src/daemon/services/browser-testing.ts @@ -0,0 +1,320 @@ +/** + * Browser Testing Service + * + * Provides browser automation capabilities for agents running in the workspace. + * Uses Playwright for browser control and Xvfb for headless display. + * + * Features: + * - Screenshot capture + * - Browser automation via Playwright + * - Visual regression testing + * - PDF generation + */ + +import { spawn, execSync } from 'child_process'; +import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs'; +import { join } from 'path'; + +export interface ScreenshotOptions { + /** Output path for screenshot (default: /tmp/screenshot-{timestamp}.png) */ + outputPath?: string; + /** Full page screenshot */ + fullPage?: boolean; + /** Clip region */ + clip?: { x: number; y: number; width: number; height: number }; +} + +export interface BrowserTestOptions { + /** Browser to use (chromium, firefox, webkit) */ + browser?: 'chromium' | 'firefox' | 'webkit'; + /** Headless mode (default: true in container, false with VNC) */ + headless?: boolean; + /** Viewport size */ + viewport?: { width: number; height: number }; + /** Timeout in ms */ + timeout?: number; +} + +/** + * Check if browser testing is available + */ +export function isBrowserTestingAvailable(): boolean { + try { + // Check if DISPLAY is set (Xvfb running) + if (!process.env.DISPLAY) { + return false; + } + + // Check if Playwright is installed + execSync('npx playwright --version', { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * Take a screenshot of the current display + */ +export async function takeDisplayScreenshot( + options: ScreenshotOptions = {} +): Promise { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const outputPath = options.outputPath || `/tmp/screenshot-${timestamp}.png`; + + // Ensure output directory exists + const dir = join(outputPath, '..'); + if (!existsSync(dir)) { + mkdirSync(dir, { recursive: true }); + } + + return new Promise((resolve, reject) => { + const args = [outputPath]; + if (options.fullPage) { + args.unshift('-u'); // Capture including window decorations + } + + const proc = spawn('scrot', args, { + env: { ...process.env, DISPLAY: process.env.DISPLAY || ':99' }, + }); + + proc.on('close', (code) => { + if (code === 0) { + resolve(outputPath); + } else { + reject(new Error(`Screenshot failed with code ${code}`)); + } + }); + + proc.on('error', reject); + }); +} + +/** + * Run a Playwright test file + */ +export async function runPlaywrightTest( + testFile: string, + options: BrowserTestOptions = {} +): Promise<{ success: boolean; output: string; screenshots: string[] }> { + const browser = options.browser || 'chromium'; + const timeout = options.timeout || 30000; + + return new Promise((resolve) => { + const args = ['playwright', 'test', testFile, `--project=${browser}`]; + + if (options.headless !== false) { + args.push('--headed=false'); + } + + const proc = spawn('npx', args, { + env: { + ...process.env, + DISPLAY: process.env.DISPLAY || ':99', + PLAYWRIGHT_BROWSERS_PATH: '/ms-playwright', + }, + timeout, + }); + + let output = ''; + const screenshots: string[] = []; + + proc.stdout.on('data', (data) => { + output += data.toString(); + // Parse screenshot paths from output + const matches = data.toString().match(/Screenshot saved: (.+\.png)/g); + if (matches) { + screenshots.push(...matches.map((m: string) => m.replace('Screenshot saved: ', ''))); + } + }); + + proc.stderr.on('data', (data) => { + output += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + output, + screenshots, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + output: err.message, + screenshots: [], + }); + }); + }); +} + +/** + * Launch a browser and navigate to a URL + * Returns the browser PID for later control + */ +export async function launchBrowser( + url: string, + options: { browser?: 'chromium' | 'firefox' } = {} +): Promise<{ pid: number }> { + const browser = options.browser || 'chromium'; + const command = browser === 'firefox' ? 'firefox' : 'chromium'; + + return new Promise((resolve, reject) => { + const args = + browser === 'chromium' + ? ['--no-sandbox', '--disable-gpu', '--start-maximized', url] + : ['--new-window', url]; + + const proc = spawn(command, args, { + env: { ...process.env, DISPLAY: process.env.DISPLAY || ':99' }, + detached: true, + stdio: 'ignore', + }); + + proc.unref(); + + // Give browser time to start + setTimeout(() => { + if (proc.pid) { + resolve({ pid: proc.pid }); + } else { + reject(new Error('Failed to launch browser')); + } + }, 1000); + }); +} + +/** + * Generate a Playwright test file from a description + */ +export function generatePlaywrightTest( + name: string, + steps: Array<{ + action: 'goto' | 'click' | 'fill' | 'screenshot' | 'wait'; + target?: string; + value?: string; + }> +): string { + const testCode = ` +import { test, expect } from '@playwright/test'; + +test('${name}', async ({ page }) => { +${steps + .map((step) => { + switch (step.action) { + case 'goto': + return ` await page.goto('${step.target}');`; + case 'click': + return ` await page.click('${step.target}');`; + case 'fill': + return ` await page.fill('${step.target}', '${step.value}');`; + case 'screenshot': + return ` await page.screenshot({ path: '${step.target || 'screenshot.png'}' });`; + case 'wait': + return ` await page.waitForTimeout(${step.value || 1000});`; + default: + return ` // Unknown action: ${step.action}`; + } + }) + .join('\n')} +}); +`.trim(); + + return testCode; +} + +/** + * Run inline Playwright script + */ +export async function runPlaywrightScript( + script: string, + options: BrowserTestOptions = {} +): Promise<{ success: boolean; output: string; result?: unknown }> { + const tempDir = '/tmp/playwright-scripts'; + if (!existsSync(tempDir)) { + mkdirSync(tempDir, { recursive: true }); + } + + const scriptPath = join(tempDir, `script-${Date.now()}.mjs`); + + // Wrap script with Playwright imports and browser launch + const wrappedScript = ` +import { chromium, firefox, webkit } from 'playwright'; + +async function run() { + const browser = await ${options.browser || 'chromium'}.launch({ + headless: ${options.headless !== false}, + }); + const context = await browser.newContext({ + viewport: ${JSON.stringify(options.viewport || { width: 1920, height: 1080 })}, + }); + const page = await context.newPage(); + + try { + ${script} + } finally { + await browser.close(); + } +} + +run().catch(console.error); +`; + + writeFileSync(scriptPath, wrappedScript); + + return new Promise((resolve) => { + const proc = spawn('node', [scriptPath], { + env: { + ...process.env, + DISPLAY: process.env.DISPLAY || ':99', + }, + timeout: options.timeout || 30000, + }); + + let output = ''; + + proc.stdout.on('data', (data) => { + output += data.toString(); + }); + + proc.stderr.on('data', (data) => { + output += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + output, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + output: err.message, + }); + }); + }); +} + +/** + * Get VNC connection info + */ +export function getVNCInfo(): { + available: boolean; + vncUrl?: string; + noVncUrl?: string; +} { + const vncEnabled = process.env.VNC_ENABLED !== 'false'; + const vncPort = process.env.VNC_PORT || '5900'; + const noVncPort = process.env.NOVNC_PORT || '6080'; + const hostname = process.env.HOSTNAME || 'localhost'; + + return { + available: vncEnabled, + vncUrl: vncEnabled ? `vnc://${hostname}:${vncPort}` : undefined, + noVncUrl: vncEnabled ? `http://${hostname}:${noVncPort}/vnc.html` : undefined, + }; +} diff --git a/src/daemon/services/container-spawner.ts b/src/daemon/services/container-spawner.ts new file mode 100644 index 00000000..130c7683 --- /dev/null +++ b/src/daemon/services/container-spawner.ts @@ -0,0 +1,418 @@ +/** + * Container Spawner Service + * + * Allows agents to spawn isolated Docker containers for specific tasks. + * Requires Docker socket to be mounted: -v /var/run/docker.sock:/var/run/docker.sock + * + * Use cases: + * - Running untrusted code in isolation + * - Testing against different environments (Node versions, OS variants) + * - Parallel task execution + * - Language-specific toolchains + */ + +import { spawn, execSync, ExecSyncOptions } from 'child_process'; +import { existsSync } from 'fs'; + +export interface ContainerConfig { + /** Docker image to use */ + image: string; + /** Command to run (default: shell) */ + command?: string[]; + /** Working directory inside container */ + workdir?: string; + /** Environment variables */ + env?: Record; + /** Volumes to mount (host:container format) */ + volumes?: string[]; + /** Port mappings (host:container format) */ + ports?: string[]; + /** Memory limit (e.g., '512m', '2g') */ + memory?: string; + /** CPU limit (e.g., '0.5', '2') */ + cpus?: string; + /** Network mode (bridge, host, none) */ + network?: 'bridge' | 'host' | 'none'; + /** Remove container after exit */ + autoRemove?: boolean; + /** Container name */ + name?: string; + /** Timeout in ms */ + timeout?: number; +} + +export interface ContainerResult { + success: boolean; + exitCode: number | null; + stdout: string; + stderr: string; + containerId?: string; +} + +/** + * Check if Docker is available + */ +export function isDockerAvailable(): boolean { + // Check if socket exists + if (!existsSync('/var/run/docker.sock')) { + return false; + } + + try { + execSync('docker info', { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * Build Docker command arguments from config + */ +function buildDockerArgs(config: ContainerConfig): string[] { + const args: string[] = ['run']; + + // Auto-remove + if (config.autoRemove !== false) { + args.push('--rm'); + } + + // Name + if (config.name) { + args.push('--name', config.name); + } + + // Working directory + if (config.workdir) { + args.push('-w', config.workdir); + } + + // Environment variables + if (config.env) { + for (const [key, value] of Object.entries(config.env)) { + args.push('-e', `${key}=${value}`); + } + } + + // Volumes + if (config.volumes) { + for (const vol of config.volumes) { + args.push('-v', vol); + } + } + + // Ports + if (config.ports) { + for (const port of config.ports) { + args.push('-p', port); + } + } + + // Resource limits + if (config.memory) { + args.push('--memory', config.memory); + } + if (config.cpus) { + args.push('--cpus', config.cpus); + } + + // Network + if (config.network) { + args.push('--network', config.network); + } + + // Image + args.push(config.image); + + // Command + if (config.command && config.command.length > 0) { + args.push(...config.command); + } + + return args; +} + +/** + * Run a command in a new container and wait for completion + */ +export async function runInContainer(config: ContainerConfig): Promise { + if (!isDockerAvailable()) { + return { + success: false, + exitCode: null, + stdout: '', + stderr: 'Docker is not available. Mount /var/run/docker.sock to enable container spawning.', + }; + } + + const args = buildDockerArgs(config); + + return new Promise((resolve) => { + const proc = spawn('docker', args, { + timeout: config.timeout || 60000, + }); + + let stdout = ''; + let stderr = ''; + + proc.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + proc.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + exitCode: code, + stdout, + stderr, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + exitCode: null, + stdout, + stderr: err.message, + }); + }); + }); +} + +/** + * Run a command in a container interactively (for TTY) + */ +export function runInteractive(config: ContainerConfig): { pid: number; containerId?: string } { + if (!isDockerAvailable()) { + throw new Error('Docker is not available'); + } + + const args = buildDockerArgs({ ...config, autoRemove: true }); + args.splice(1, 0, '-it'); // Add interactive + TTY flags + + const proc = spawn('docker', args, { + stdio: 'inherit', + detached: false, + }); + + return { pid: proc.pid || 0 }; +} + +/** + * Start a container in the background + */ +export async function startContainer(config: ContainerConfig): Promise<{ containerId: string }> { + if (!isDockerAvailable()) { + throw new Error('Docker is not available'); + } + + const args = buildDockerArgs({ ...config, autoRemove: false }); + args.splice(1, 0, '-d'); // Add detach flag + + const result = execSync(`docker ${args.join(' ')}`, { encoding: 'utf-8' }); + const containerId = result.trim(); + + return { containerId }; +} + +/** + * Stop a running container + */ +export async function stopContainer(containerId: string): Promise { + execSync(`docker stop ${containerId}`, { stdio: 'pipe' }); +} + +/** + * Execute a command in a running container + */ +export async function execInContainer( + containerId: string, + command: string[], + options: { workdir?: string; env?: Record } = {} +): Promise { + const args = ['exec']; + + if (options.workdir) { + args.push('-w', options.workdir); + } + + if (options.env) { + for (const [key, value] of Object.entries(options.env)) { + args.push('-e', `${key}=${value}`); + } + } + + args.push(containerId, ...command); + + return new Promise((resolve) => { + const proc = spawn('docker', args); + + let stdout = ''; + let stderr = ''; + + proc.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + proc.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + proc.on('close', (code) => { + resolve({ + success: code === 0, + exitCode: code, + stdout, + stderr, + containerId, + }); + }); + + proc.on('error', (err) => { + resolve({ + success: false, + exitCode: null, + stdout, + stderr: err.message, + containerId, + }); + }); + }); +} + +/** + * Pull a Docker image + */ +export async function pullImage(image: string): Promise { + if (!isDockerAvailable()) { + return false; + } + + try { + execSync(`docker pull ${image}`, { stdio: 'pipe' }); + return true; + } catch { + return false; + } +} + +/** + * List running containers + */ +export function listContainers(): Array<{ + id: string; + image: string; + name: string; + status: string; +}> { + if (!isDockerAvailable()) { + return []; + } + + try { + const output = execSync( + 'docker ps --format "{{.ID}}|{{.Image}}|{{.Names}}|{{.Status}}"', + { encoding: 'utf-8' } + ); + + return output + .trim() + .split('\n') + .filter(Boolean) + .map((line) => { + const [id, image, name, status] = line.split('|'); + return { id, image, name, status }; + }); + } catch { + return []; + } +} + +// ============================================================================ +// Predefined container configurations for common tasks +// ============================================================================ + +export const PRESET_CONTAINERS = { + /** Node.js 20 environment */ + node20: { + image: 'node:20-slim', + workdir: '/workspace', + }, + + /** Python 3.11 environment */ + python311: { + image: 'python:3.11-slim', + workdir: '/workspace', + }, + + /** Go 1.21 environment */ + go121: { + image: 'golang:1.21-alpine', + workdir: '/workspace', + }, + + /** Rust environment */ + rust: { + image: 'rust:slim', + workdir: '/workspace', + }, + + /** Ubuntu with common tools */ + ubuntu: { + image: 'ubuntu:22.04', + workdir: '/workspace', + }, + + /** Alpine minimal */ + alpine: { + image: 'alpine:3.18', + workdir: '/workspace', + }, + + /** Playwright with browsers */ + playwright: { + image: 'mcr.microsoft.com/playwright:latest', + workdir: '/workspace', + }, +} as const; + +/** + * Run code in a language-specific container + */ +export async function runCode( + language: 'node' | 'python' | 'go' | 'rust' | 'bash', + code: string, + options: { workspaceDir?: string; timeout?: number } = {} +): Promise { + const configs: Record = { + node: { image: 'node:20-slim', command: ['node', '-e', code] }, + python: { image: 'python:3.11-slim', command: ['python', '-c', code] }, + go: { image: 'golang:1.21-alpine', command: ['go', 'run', '-'] }, + rust: { image: 'rust:slim', command: ['rustc', '--edition', '2021', '-', '-o', '/tmp/a', '&&', '/tmp/a'] }, + bash: { image: 'ubuntu:22.04', command: ['bash', '-c', code] }, + }; + + const config = configs[language]; + if (!config) { + return { + success: false, + exitCode: null, + stdout: '', + stderr: `Unknown language: ${language}`, + }; + } + + return runInContainer({ + image: config.image, + command: config.command, + workdir: '/workspace', + volumes: options.workspaceDir ? [`${options.workspaceDir}:/workspace`] : [], + timeout: options.timeout, + memory: '512m', + cpus: '1', + }); +} From d619db6da45e4f9bbdb4b55fa88395bdd4171223 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 13:08:54 +0000 Subject: [PATCH 045/103] Add MCP tools and documentation for workspace capabilities Agents can now discover and use browser testing + container spawning: 1. MCP Server (workspace-tools-server.ts): - workspace_capabilities: Discover what's available - browser_screenshot: Capture display or URL - browser_navigate: Open browser (visible via VNC) - browser_script: Run Playwright automation - container_run: Execute in isolated containers - container_run_code: Quick language execution - vnc_info: Get VNC connection details 2. Skill (.openskills/workspace-capabilities/): - Full documentation with examples - Common patterns (visual testing, E2E, multi-env) - Troubleshooting guide 3. Agent Rule (.claude/rules/workspace-capabilities.md): - Auto-injected context about available tools - When to use each capability - Quick examples 4. Updated CLAUDE.md skills manifest Agents will see workspace capabilities via: - MCP tool discovery (ListTools) - Skills system (load on demand) - Rules injection (always visible) --- .claude/rules/workspace-capabilities.md | 62 +++ .openskills/workspace-capabilities/SKILL.md | 196 ++++++++ src/daemon/mcp/workspace-tools-server.ts | 473 ++++++++++++++++++++ 3 files changed, 731 insertions(+) create mode 100644 .claude/rules/workspace-capabilities.md create mode 100644 .openskills/workspace-capabilities/SKILL.md create mode 100644 src/daemon/mcp/workspace-tools-server.ts diff --git a/.claude/rules/workspace-capabilities.md b/.claude/rules/workspace-capabilities.md new file mode 100644 index 00000000..0d4bcd34 --- /dev/null +++ b/.claude/rules/workspace-capabilities.md @@ -0,0 +1,62 @@ +--- +paths: + - "**/*" +--- + +# Workspace Capabilities + +This workspace has additional tools available via MCP. Check what's available before attempting advanced operations. + +## Available MCP Tools + +### Discovery +- `workspace_capabilities` - **Call this first** to see what's available + +### Browser Testing (if enabled) +- `browser_screenshot` - Capture screenshots (display or URL) +- `browser_navigate` - Open URL in browser +- `browser_script` - Run Playwright automation scripts +- `vnc_info` - Get VNC URL to watch browser + +### Container Spawning (if Docker socket mounted) +- `container_run` - Run command in isolated container +- `container_run_code` - Quick code execution (node/python/go/rust/bash) +- `container_list` - List running containers + +## When to Use These Tools + +**Use browser testing for:** +- Visual verification of UI changes +- E2E testing with real browsers +- Screenshot documentation +- Debugging frontend issues + +**Use container spawning for:** +- Running untrusted or risky code safely +- Testing in different environments (Node versions, etc.) +- Parallel isolated task execution +- Language-specific toolchains + +## Example: Verify UI Change + +```typescript +// After making frontend changes, verify visually +const result = await mcp.browser_script({ + script: ` + await page.goto('http://localhost:3000'); + await page.screenshot({ path: '/tmp/ui-check.png' }); + ` +}); +``` + +## Example: Test Code Safely + +```typescript +// Run potentially dangerous code in isolation +const result = await mcp.container_run({ + image: 'python:3.11', + command: ['python', '-c', userCode], + memory: '256m', + timeout: 10000 +}); +``` diff --git a/.openskills/workspace-capabilities/SKILL.md b/.openskills/workspace-capabilities/SKILL.md new file mode 100644 index 00000000..9c7d83fe --- /dev/null +++ b/.openskills/workspace-capabilities/SKILL.md @@ -0,0 +1,196 @@ +# Workspace Capabilities + +This workspace has advanced capabilities for browser testing and container management. + +## Quick Reference + +### Check What's Available + +```typescript +// Call the workspace_capabilities tool first +const caps = await mcp.workspace_capabilities(); +console.log(caps); +// { +// browserTesting: { available: true, features: ['screenshot', 'navigation', 'playwright', 'vnc'] }, +// containerSpawning: { available: true, presets: ['node20', 'python311', 'go121', ...] } +// } +``` + +## Browser Testing + +### Take Screenshots + +```typescript +// Screenshot current display +await mcp.browser_screenshot(); + +// Screenshot a URL +await mcp.browser_screenshot({ url: 'https://example.com', fullPage: true }); +``` + +### Browser Automation with Playwright + +```typescript +// Run Playwright script +await mcp.browser_script({ + script: ` + await page.goto('https://example.com'); + await page.click('button#submit'); + await page.screenshot({ path: '/tmp/result.png' }); + ` +}); +``` + +### Open Browser for Visual Inspection + +```typescript +// Launch browser (visible via VNC) +await mcp.browser_navigate({ url: 'https://example.com' }); + +// Get VNC URL to watch +const vnc = await mcp.vnc_info(); +console.log(vnc.noVncUrl); // http://localhost:6080/vnc.html +``` + +## Container Spawning + +### Run Code in Isolated Containers + +```typescript +// Quick: Run Python code +await mcp.container_run_code({ + language: 'python', + code: 'print(2 + 2)' +}); + +// Quick: Run Node.js code +await mcp.container_run_code({ + language: 'node', + code: 'console.log(Date.now())' +}); +``` + +### Run Custom Container Commands + +```typescript +// Run in specific image +await mcp.container_run({ + image: 'python:3.11', + command: ['python', '-c', 'import sys; print(sys.version)'] +}); + +// With resource limits +await mcp.container_run({ + image: 'node:20', + command: ['npm', 'test'], + workdir: '/workspace', + volumes: ['./:/workspace'], + memory: '1g', + timeout: 60000 +}); +``` + +### Available Language Presets + +| Language | Image | Usage | +|----------|-------|-------| +| `node` | node:20-slim | JavaScript/TypeScript | +| `python` | python:3.11-slim | Python | +| `go` | golang:1.21-alpine | Go | +| `rust` | rust:slim | Rust | +| `bash` | ubuntu:22.04 | Shell scripts | + +## Common Patterns + +### Visual Regression Testing + +```typescript +// Take baseline screenshot +await mcp.browser_screenshot({ + url: 'http://localhost:3000', + outputPath: '/tmp/baseline.png' +}); + +// Make changes... + +// Take comparison screenshot +await mcp.browser_screenshot({ + url: 'http://localhost:3000', + outputPath: '/tmp/current.png' +}); + +// Compare with ImageMagick +await mcp.container_run({ + image: 'dpokidov/imagemagick', + command: ['compare', '-metric', 'RMSE', '/tmp/baseline.png', '/tmp/current.png', '/tmp/diff.png'], + volumes: ['/tmp:/tmp'] +}); +``` + +### E2E Test in Container + +```typescript +// Run Playwright tests in isolated container +await mcp.container_run({ + image: 'mcr.microsoft.com/playwright:latest', + command: ['npx', 'playwright', 'test'], + workdir: '/workspace', + volumes: ['./:/workspace'], + env: { CI: 'true' } +}); +``` + +### Multi-Environment Testing + +```typescript +// Test against multiple Node versions +for (const version of ['18', '20', '22']) { + const result = await mcp.container_run({ + image: `node:${version}`, + command: ['node', '--version'] + }); + console.log(`Node ${version}: ${result.stdout}`); +} +``` + +## VNC Viewing + +When browser testing is enabled, you can watch agent browser interactions: + +1. **Web interface**: Open `http://localhost:6080/vnc.html` +2. **Native VNC client**: Connect to `vnc://localhost:5900` + +The VNC URL is returned by `vnc_info` tool. + +## Troubleshooting + +### Browser testing not available + +The workspace needs Xvfb virtual display. Use the browser-enabled Dockerfile: + +```bash +docker compose -f docker-compose.dev.yml -f docker-compose.browser.yml up +``` + +### Container spawning not available + +Docker socket must be mounted: + +```yaml +volumes: + - /var/run/docker.sock:/var/run/docker.sock +``` + +### Screenshots are blank + +Wait for page load: + +```typescript +await mcp.browser_script({ + script: ` + await page.goto('https://example.com'); + await page.waitForLoadState('networkidle'); + await page.screenshot({ path: '/tmp/screenshot.png' }); + ` +}); +``` diff --git a/src/daemon/mcp/workspace-tools-server.ts b/src/daemon/mcp/workspace-tools-server.ts new file mode 100644 index 00000000..bd9c0df0 --- /dev/null +++ b/src/daemon/mcp/workspace-tools-server.ts @@ -0,0 +1,473 @@ +/** + * Workspace Tools MCP Server + * + * Exposes browser testing and container spawning capabilities to agents via MCP. + * Agents can discover and call these tools through the standard MCP protocol. + * + * Tools provided: + * - browser_screenshot: Capture screenshot of current display or URL + * - browser_navigate: Open URL in browser + * - browser_test: Run Playwright test + * - container_run: Run command in isolated container + * - container_exec: Execute in running container + * - workspace_capabilities: List available capabilities + */ + +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; +import { + CallToolRequestSchema, + ListToolsRequestSchema, + Tool, +} from '@modelcontextprotocol/sdk/types.js'; + +import { + isBrowserTestingAvailable, + takeDisplayScreenshot, + runPlaywrightScript, + launchBrowser, + getVNCInfo, +} from '../services/browser-testing.js'; + +import { + isDockerAvailable, + runInContainer, + runCode, + listContainers, + PRESET_CONTAINERS, +} from '../services/container-spawner.js'; + +// ============================================================================ +// Tool Definitions +// ============================================================================ + +const TOOLS: Tool[] = [ + { + name: 'workspace_capabilities', + description: + 'List all available workspace capabilities (browser testing, container spawning, etc.). Call this first to understand what tools are available.', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, + }, + { + name: 'browser_screenshot', + description: + 'Capture a screenshot. Can capture the current display or navigate to a URL first. Returns the path to the saved screenshot.', + inputSchema: { + type: 'object', + properties: { + url: { + type: 'string', + description: 'Optional URL to navigate to before taking screenshot', + }, + outputPath: { + type: 'string', + description: 'Optional path to save screenshot (default: /tmp/screenshot-{timestamp}.png)', + }, + fullPage: { + type: 'boolean', + description: 'Capture full page (for web pages)', + }, + }, + required: [], + }, + }, + { + name: 'browser_navigate', + description: + 'Open a URL in the browser. The browser window is visible via VNC if enabled.', + inputSchema: { + type: 'object', + properties: { + url: { + type: 'string', + description: 'URL to navigate to', + }, + browser: { + type: 'string', + enum: ['chromium', 'firefox'], + description: 'Browser to use (default: chromium)', + }, + }, + required: ['url'], + }, + }, + { + name: 'browser_script', + description: + 'Run a Playwright script for browser automation. The script has access to `page` object.', + inputSchema: { + type: 'object', + properties: { + script: { + type: 'string', + description: + 'Playwright script to execute. Has access to `page` object. Example: await page.goto("https://example.com"); await page.screenshot({path: "test.png"});', + }, + browser: { + type: 'string', + enum: ['chromium', 'firefox', 'webkit'], + description: 'Browser to use (default: chromium)', + }, + timeout: { + type: 'number', + description: 'Timeout in milliseconds (default: 30000)', + }, + }, + required: ['script'], + }, + }, + { + name: 'container_run', + description: + 'Run a command in an isolated Docker container. Useful for running untrusted code, testing in different environments, or using language-specific toolchains.', + inputSchema: { + type: 'object', + properties: { + image: { + type: 'string', + description: + 'Docker image to use. Examples: node:20, python:3.11, ubuntu:22.04, golang:1.21', + }, + command: { + type: 'array', + items: { type: 'string' }, + description: 'Command to run as array. Example: ["python", "-c", "print(1+1)"]', + }, + workdir: { + type: 'string', + description: 'Working directory inside container (default: /workspace)', + }, + volumes: { + type: 'array', + items: { type: 'string' }, + description: 'Volumes to mount in host:container format', + }, + env: { + type: 'object', + description: 'Environment variables', + }, + memory: { + type: 'string', + description: 'Memory limit (e.g., "512m", "2g")', + }, + timeout: { + type: 'number', + description: 'Timeout in milliseconds', + }, + }, + required: ['image', 'command'], + }, + }, + { + name: 'container_run_code', + description: + 'Quick helper to run code in a language-specific container. Automatically selects the right image and command.', + inputSchema: { + type: 'object', + properties: { + language: { + type: 'string', + enum: ['node', 'python', 'go', 'rust', 'bash'], + description: 'Programming language', + }, + code: { + type: 'string', + description: 'Code to execute', + }, + timeout: { + type: 'number', + description: 'Timeout in milliseconds', + }, + }, + required: ['language', 'code'], + }, + }, + { + name: 'container_list', + description: 'List running Docker containers', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, + }, + { + name: 'vnc_info', + description: 'Get VNC connection info for viewing the browser/display', + inputSchema: { + type: 'object', + properties: {}, + required: [], + }, + }, +]; + +// ============================================================================ +// Tool Handlers +// ============================================================================ + +async function handleToolCall( + name: string, + args: Record +): Promise<{ content: Array<{ type: string; text: string }> }> { + switch (name) { + case 'workspace_capabilities': { + const capabilities = { + browserTesting: { + available: isBrowserTestingAvailable(), + features: isBrowserTestingAvailable() + ? ['screenshot', 'navigation', 'playwright', 'vnc'] + : [], + vnc: getVNCInfo(), + }, + containerSpawning: { + available: isDockerAvailable(), + presets: isDockerAvailable() ? Object.keys(PRESET_CONTAINERS) : [], + }, + }; + return { + content: [{ type: 'text', text: JSON.stringify(capabilities, null, 2) }], + }; + } + + case 'browser_screenshot': { + if (!isBrowserTestingAvailable()) { + return { + content: [ + { + type: 'text', + text: 'Browser testing not available. Workspace may not have Xvfb/display configured.', + }, + ], + }; + } + + const url = args.url as string | undefined; + const outputPath = args.outputPath as string | undefined; + + // If URL provided, navigate first using Playwright + if (url) { + const result = await runPlaywrightScript( + ` + await page.goto('${url}'); + await page.waitForLoadState('networkidle'); + await page.screenshot({ path: '${outputPath || '/tmp/screenshot.png'}', fullPage: ${args.fullPage || false} }); + `, + { timeout: 30000 } + ); + return { + content: [ + { + type: 'text', + text: result.success + ? `Screenshot saved to ${outputPath || '/tmp/screenshot.png'}` + : `Failed: ${result.output}`, + }, + ], + }; + } + + // Otherwise just capture current display + const path = await takeDisplayScreenshot({ outputPath }); + return { + content: [{ type: 'text', text: `Screenshot saved to ${path}` }], + }; + } + + case 'browser_navigate': { + if (!isBrowserTestingAvailable()) { + return { + content: [{ type: 'text', text: 'Browser testing not available.' }], + }; + } + + const url = args.url as string; + const browser = (args.browser as 'chromium' | 'firefox') || 'chromium'; + const result = await launchBrowser(url, { browser }); + + return { + content: [ + { + type: 'text', + text: `Browser launched (PID: ${result.pid}). View via VNC: ${getVNCInfo().noVncUrl || 'not available'}`, + }, + ], + }; + } + + case 'browser_script': { + if (!isBrowserTestingAvailable()) { + return { + content: [{ type: 'text', text: 'Browser testing not available.' }], + }; + } + + const script = args.script as string; + const browser = args.browser as 'chromium' | 'firefox' | 'webkit' | undefined; + const timeout = args.timeout as number | undefined; + + const result = await runPlaywrightScript(script, { browser, timeout }); + + return { + content: [ + { + type: 'text', + text: result.success + ? `Script executed successfully.\n\nOutput:\n${result.output}` + : `Script failed.\n\nOutput:\n${result.output}`, + }, + ], + }; + } + + case 'container_run': { + if (!isDockerAvailable()) { + return { + content: [ + { + type: 'text', + text: 'Docker not available. Mount /var/run/docker.sock to enable container spawning.', + }, + ], + }; + } + + const result = await runInContainer({ + image: args.image as string, + command: args.command as string[], + workdir: args.workdir as string | undefined, + volumes: args.volumes as string[] | undefined, + env: args.env as Record | undefined, + memory: args.memory as string | undefined, + timeout: args.timeout as number | undefined, + }); + + return { + content: [ + { + type: 'text', + text: `Exit code: ${result.exitCode}\n\nStdout:\n${result.stdout}\n\nStderr:\n${result.stderr}`, + }, + ], + }; + } + + case 'container_run_code': { + if (!isDockerAvailable()) { + return { + content: [{ type: 'text', text: 'Docker not available.' }], + }; + } + + const language = args.language as 'node' | 'python' | 'go' | 'rust' | 'bash'; + const code = args.code as string; + const timeout = args.timeout as number | undefined; + + const result = await runCode(language, code, { timeout }); + + return { + content: [ + { + type: 'text', + text: `Exit code: ${result.exitCode}\n\nOutput:\n${result.stdout}${result.stderr}`, + }, + ], + }; + } + + case 'container_list': { + if (!isDockerAvailable()) { + return { + content: [{ type: 'text', text: 'Docker not available.' }], + }; + } + + const containers = listContainers(); + return { + content: [ + { + type: 'text', + text: + containers.length > 0 + ? containers.map((c) => `${c.name} (${c.image}): ${c.status}`).join('\n') + : 'No running containers', + }, + ], + }; + } + + case 'vnc_info': { + const info = getVNCInfo(); + return { + content: [ + { + type: 'text', + text: info.available + ? `VNC available:\n- Web interface: ${info.noVncUrl}\n- Native: ${info.vncUrl}` + : 'VNC not available in this workspace', + }, + ], + }; + } + + default: + return { + content: [{ type: 'text', text: `Unknown tool: ${name}` }], + }; + } +} + +// ============================================================================ +// Server Setup +// ============================================================================ + +export async function startWorkspaceToolsServer(): Promise { + const server = new Server( + { + name: 'workspace-tools', + version: '1.0.0', + }, + { + capabilities: { + tools: {}, + }, + } + ); + + // List available tools + server.setRequestHandler(ListToolsRequestSchema, async () => { + // Filter tools based on what's actually available + const availableTools = TOOLS.filter((tool) => { + if (tool.name.startsWith('browser_') && !isBrowserTestingAvailable()) { + return false; + } + if (tool.name.startsWith('container_') && !isDockerAvailable()) { + return false; + } + return true; + }); + + return { tools: availableTools }; + }); + + // Handle tool calls + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + return handleToolCall(name, (args as Record) || {}); + }); + + // Start server + const transport = new StdioServerTransport(); + await server.connect(transport); + + console.error('[workspace-tools] MCP server started'); +} + +// Run if executed directly +if (import.meta.url === `file://${process.argv[1]}`) { + startWorkspaceToolsServer().catch(console.error); +} From 268221c41bef5baacceb3cf4d97761cf8336a7c7 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 13:09:40 +0000 Subject: [PATCH 046/103] Add workspace-capabilities skill to AGENTS.md --- AGENTS.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index ca3c6ce8..812f7e0e 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -23,6 +23,12 @@ Usage notes: .openskills/frontend-design/SKILL.md + +workspace-capabilities +Browser testing (Playwright, screenshots, VNC) and container spawning (Docker) capabilities. Use when you need to run browser automation, take screenshots, or execute code in isolated containers. +.openskills/workspace-capabilities/SKILL.md + + From 63c73a4fc77eeb12c03e1b0d7cf8c4ac5a565053 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 13:15:28 +0000 Subject: [PATCH 047/103] Revert "Add workspace-capabilities skill to AGENTS.md" This reverts commit 268221c41bef5baacceb3cf4d97761cf8336a7c7. --- AGENTS.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 812f7e0e..ca3c6ce8 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -23,12 +23,6 @@ Usage notes: .openskills/frontend-design/SKILL.md - -workspace-capabilities -Browser testing (Playwright, screenshots, VNC) and container spawning (Docker) capabilities. Use when you need to run browser automation, take screenshots, or execute code in isolated containers. -.openskills/workspace-capabilities/SKILL.md - - From aea01c5d408bacb566e8141f40e2f7680ab7303e Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 13:15:28 +0000 Subject: [PATCH 048/103] Revert "Add MCP tools and documentation for workspace capabilities" This reverts commit d619db6da45e4f9bbdb4b55fa88395bdd4171223. --- .claude/rules/workspace-capabilities.md | 62 --- .openskills/workspace-capabilities/SKILL.md | 196 -------- src/daemon/mcp/workspace-tools-server.ts | 473 -------------------- 3 files changed, 731 deletions(-) delete mode 100644 .claude/rules/workspace-capabilities.md delete mode 100644 .openskills/workspace-capabilities/SKILL.md delete mode 100644 src/daemon/mcp/workspace-tools-server.ts diff --git a/.claude/rules/workspace-capabilities.md b/.claude/rules/workspace-capabilities.md deleted file mode 100644 index 0d4bcd34..00000000 --- a/.claude/rules/workspace-capabilities.md +++ /dev/null @@ -1,62 +0,0 @@ ---- -paths: - - "**/*" ---- - -# Workspace Capabilities - -This workspace has additional tools available via MCP. Check what's available before attempting advanced operations. - -## Available MCP Tools - -### Discovery -- `workspace_capabilities` - **Call this first** to see what's available - -### Browser Testing (if enabled) -- `browser_screenshot` - Capture screenshots (display or URL) -- `browser_navigate` - Open URL in browser -- `browser_script` - Run Playwright automation scripts -- `vnc_info` - Get VNC URL to watch browser - -### Container Spawning (if Docker socket mounted) -- `container_run` - Run command in isolated container -- `container_run_code` - Quick code execution (node/python/go/rust/bash) -- `container_list` - List running containers - -## When to Use These Tools - -**Use browser testing for:** -- Visual verification of UI changes -- E2E testing with real browsers -- Screenshot documentation -- Debugging frontend issues - -**Use container spawning for:** -- Running untrusted or risky code safely -- Testing in different environments (Node versions, etc.) -- Parallel isolated task execution -- Language-specific toolchains - -## Example: Verify UI Change - -```typescript -// After making frontend changes, verify visually -const result = await mcp.browser_script({ - script: ` - await page.goto('http://localhost:3000'); - await page.screenshot({ path: '/tmp/ui-check.png' }); - ` -}); -``` - -## Example: Test Code Safely - -```typescript -// Run potentially dangerous code in isolation -const result = await mcp.container_run({ - image: 'python:3.11', - command: ['python', '-c', userCode], - memory: '256m', - timeout: 10000 -}); -``` diff --git a/.openskills/workspace-capabilities/SKILL.md b/.openskills/workspace-capabilities/SKILL.md deleted file mode 100644 index 9c7d83fe..00000000 --- a/.openskills/workspace-capabilities/SKILL.md +++ /dev/null @@ -1,196 +0,0 @@ -# Workspace Capabilities - -This workspace has advanced capabilities for browser testing and container management. - -## Quick Reference - -### Check What's Available - -```typescript -// Call the workspace_capabilities tool first -const caps = await mcp.workspace_capabilities(); -console.log(caps); -// { -// browserTesting: { available: true, features: ['screenshot', 'navigation', 'playwright', 'vnc'] }, -// containerSpawning: { available: true, presets: ['node20', 'python311', 'go121', ...] } -// } -``` - -## Browser Testing - -### Take Screenshots - -```typescript -// Screenshot current display -await mcp.browser_screenshot(); - -// Screenshot a URL -await mcp.browser_screenshot({ url: 'https://example.com', fullPage: true }); -``` - -### Browser Automation with Playwright - -```typescript -// Run Playwright script -await mcp.browser_script({ - script: ` - await page.goto('https://example.com'); - await page.click('button#submit'); - await page.screenshot({ path: '/tmp/result.png' }); - ` -}); -``` - -### Open Browser for Visual Inspection - -```typescript -// Launch browser (visible via VNC) -await mcp.browser_navigate({ url: 'https://example.com' }); - -// Get VNC URL to watch -const vnc = await mcp.vnc_info(); -console.log(vnc.noVncUrl); // http://localhost:6080/vnc.html -``` - -## Container Spawning - -### Run Code in Isolated Containers - -```typescript -// Quick: Run Python code -await mcp.container_run_code({ - language: 'python', - code: 'print(2 + 2)' -}); - -// Quick: Run Node.js code -await mcp.container_run_code({ - language: 'node', - code: 'console.log(Date.now())' -}); -``` - -### Run Custom Container Commands - -```typescript -// Run in specific image -await mcp.container_run({ - image: 'python:3.11', - command: ['python', '-c', 'import sys; print(sys.version)'] -}); - -// With resource limits -await mcp.container_run({ - image: 'node:20', - command: ['npm', 'test'], - workdir: '/workspace', - volumes: ['./:/workspace'], - memory: '1g', - timeout: 60000 -}); -``` - -### Available Language Presets - -| Language | Image | Usage | -|----------|-------|-------| -| `node` | node:20-slim | JavaScript/TypeScript | -| `python` | python:3.11-slim | Python | -| `go` | golang:1.21-alpine | Go | -| `rust` | rust:slim | Rust | -| `bash` | ubuntu:22.04 | Shell scripts | - -## Common Patterns - -### Visual Regression Testing - -```typescript -// Take baseline screenshot -await mcp.browser_screenshot({ - url: 'http://localhost:3000', - outputPath: '/tmp/baseline.png' -}); - -// Make changes... - -// Take comparison screenshot -await mcp.browser_screenshot({ - url: 'http://localhost:3000', - outputPath: '/tmp/current.png' -}); - -// Compare with ImageMagick -await mcp.container_run({ - image: 'dpokidov/imagemagick', - command: ['compare', '-metric', 'RMSE', '/tmp/baseline.png', '/tmp/current.png', '/tmp/diff.png'], - volumes: ['/tmp:/tmp'] -}); -``` - -### E2E Test in Container - -```typescript -// Run Playwright tests in isolated container -await mcp.container_run({ - image: 'mcr.microsoft.com/playwright:latest', - command: ['npx', 'playwright', 'test'], - workdir: '/workspace', - volumes: ['./:/workspace'], - env: { CI: 'true' } -}); -``` - -### Multi-Environment Testing - -```typescript -// Test against multiple Node versions -for (const version of ['18', '20', '22']) { - const result = await mcp.container_run({ - image: `node:${version}`, - command: ['node', '--version'] - }); - console.log(`Node ${version}: ${result.stdout}`); -} -``` - -## VNC Viewing - -When browser testing is enabled, you can watch agent browser interactions: - -1. **Web interface**: Open `http://localhost:6080/vnc.html` -2. **Native VNC client**: Connect to `vnc://localhost:5900` - -The VNC URL is returned by `vnc_info` tool. - -## Troubleshooting - -### Browser testing not available - -The workspace needs Xvfb virtual display. Use the browser-enabled Dockerfile: - -```bash -docker compose -f docker-compose.dev.yml -f docker-compose.browser.yml up -``` - -### Container spawning not available - -Docker socket must be mounted: - -```yaml -volumes: - - /var/run/docker.sock:/var/run/docker.sock -``` - -### Screenshots are blank - -Wait for page load: - -```typescript -await mcp.browser_script({ - script: ` - await page.goto('https://example.com'); - await page.waitForLoadState('networkidle'); - await page.screenshot({ path: '/tmp/screenshot.png' }); - ` -}); -``` diff --git a/src/daemon/mcp/workspace-tools-server.ts b/src/daemon/mcp/workspace-tools-server.ts deleted file mode 100644 index bd9c0df0..00000000 --- a/src/daemon/mcp/workspace-tools-server.ts +++ /dev/null @@ -1,473 +0,0 @@ -/** - * Workspace Tools MCP Server - * - * Exposes browser testing and container spawning capabilities to agents via MCP. - * Agents can discover and call these tools through the standard MCP protocol. - * - * Tools provided: - * - browser_screenshot: Capture screenshot of current display or URL - * - browser_navigate: Open URL in browser - * - browser_test: Run Playwright test - * - container_run: Run command in isolated container - * - container_exec: Execute in running container - * - workspace_capabilities: List available capabilities - */ - -import { Server } from '@modelcontextprotocol/sdk/server/index.js'; -import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; -import { - CallToolRequestSchema, - ListToolsRequestSchema, - Tool, -} from '@modelcontextprotocol/sdk/types.js'; - -import { - isBrowserTestingAvailable, - takeDisplayScreenshot, - runPlaywrightScript, - launchBrowser, - getVNCInfo, -} from '../services/browser-testing.js'; - -import { - isDockerAvailable, - runInContainer, - runCode, - listContainers, - PRESET_CONTAINERS, -} from '../services/container-spawner.js'; - -// ============================================================================ -// Tool Definitions -// ============================================================================ - -const TOOLS: Tool[] = [ - { - name: 'workspace_capabilities', - description: - 'List all available workspace capabilities (browser testing, container spawning, etc.). Call this first to understand what tools are available.', - inputSchema: { - type: 'object', - properties: {}, - required: [], - }, - }, - { - name: 'browser_screenshot', - description: - 'Capture a screenshot. Can capture the current display or navigate to a URL first. Returns the path to the saved screenshot.', - inputSchema: { - type: 'object', - properties: { - url: { - type: 'string', - description: 'Optional URL to navigate to before taking screenshot', - }, - outputPath: { - type: 'string', - description: 'Optional path to save screenshot (default: /tmp/screenshot-{timestamp}.png)', - }, - fullPage: { - type: 'boolean', - description: 'Capture full page (for web pages)', - }, - }, - required: [], - }, - }, - { - name: 'browser_navigate', - description: - 'Open a URL in the browser. The browser window is visible via VNC if enabled.', - inputSchema: { - type: 'object', - properties: { - url: { - type: 'string', - description: 'URL to navigate to', - }, - browser: { - type: 'string', - enum: ['chromium', 'firefox'], - description: 'Browser to use (default: chromium)', - }, - }, - required: ['url'], - }, - }, - { - name: 'browser_script', - description: - 'Run a Playwright script for browser automation. The script has access to `page` object.', - inputSchema: { - type: 'object', - properties: { - script: { - type: 'string', - description: - 'Playwright script to execute. Has access to `page` object. Example: await page.goto("https://example.com"); await page.screenshot({path: "test.png"});', - }, - browser: { - type: 'string', - enum: ['chromium', 'firefox', 'webkit'], - description: 'Browser to use (default: chromium)', - }, - timeout: { - type: 'number', - description: 'Timeout in milliseconds (default: 30000)', - }, - }, - required: ['script'], - }, - }, - { - name: 'container_run', - description: - 'Run a command in an isolated Docker container. Useful for running untrusted code, testing in different environments, or using language-specific toolchains.', - inputSchema: { - type: 'object', - properties: { - image: { - type: 'string', - description: - 'Docker image to use. Examples: node:20, python:3.11, ubuntu:22.04, golang:1.21', - }, - command: { - type: 'array', - items: { type: 'string' }, - description: 'Command to run as array. Example: ["python", "-c", "print(1+1)"]', - }, - workdir: { - type: 'string', - description: 'Working directory inside container (default: /workspace)', - }, - volumes: { - type: 'array', - items: { type: 'string' }, - description: 'Volumes to mount in host:container format', - }, - env: { - type: 'object', - description: 'Environment variables', - }, - memory: { - type: 'string', - description: 'Memory limit (e.g., "512m", "2g")', - }, - timeout: { - type: 'number', - description: 'Timeout in milliseconds', - }, - }, - required: ['image', 'command'], - }, - }, - { - name: 'container_run_code', - description: - 'Quick helper to run code in a language-specific container. Automatically selects the right image and command.', - inputSchema: { - type: 'object', - properties: { - language: { - type: 'string', - enum: ['node', 'python', 'go', 'rust', 'bash'], - description: 'Programming language', - }, - code: { - type: 'string', - description: 'Code to execute', - }, - timeout: { - type: 'number', - description: 'Timeout in milliseconds', - }, - }, - required: ['language', 'code'], - }, - }, - { - name: 'container_list', - description: 'List running Docker containers', - inputSchema: { - type: 'object', - properties: {}, - required: [], - }, - }, - { - name: 'vnc_info', - description: 'Get VNC connection info for viewing the browser/display', - inputSchema: { - type: 'object', - properties: {}, - required: [], - }, - }, -]; - -// ============================================================================ -// Tool Handlers -// ============================================================================ - -async function handleToolCall( - name: string, - args: Record -): Promise<{ content: Array<{ type: string; text: string }> }> { - switch (name) { - case 'workspace_capabilities': { - const capabilities = { - browserTesting: { - available: isBrowserTestingAvailable(), - features: isBrowserTestingAvailable() - ? ['screenshot', 'navigation', 'playwright', 'vnc'] - : [], - vnc: getVNCInfo(), - }, - containerSpawning: { - available: isDockerAvailable(), - presets: isDockerAvailable() ? Object.keys(PRESET_CONTAINERS) : [], - }, - }; - return { - content: [{ type: 'text', text: JSON.stringify(capabilities, null, 2) }], - }; - } - - case 'browser_screenshot': { - if (!isBrowserTestingAvailable()) { - return { - content: [ - { - type: 'text', - text: 'Browser testing not available. Workspace may not have Xvfb/display configured.', - }, - ], - }; - } - - const url = args.url as string | undefined; - const outputPath = args.outputPath as string | undefined; - - // If URL provided, navigate first using Playwright - if (url) { - const result = await runPlaywrightScript( - ` - await page.goto('${url}'); - await page.waitForLoadState('networkidle'); - await page.screenshot({ path: '${outputPath || '/tmp/screenshot.png'}', fullPage: ${args.fullPage || false} }); - `, - { timeout: 30000 } - ); - return { - content: [ - { - type: 'text', - text: result.success - ? `Screenshot saved to ${outputPath || '/tmp/screenshot.png'}` - : `Failed: ${result.output}`, - }, - ], - }; - } - - // Otherwise just capture current display - const path = await takeDisplayScreenshot({ outputPath }); - return { - content: [{ type: 'text', text: `Screenshot saved to ${path}` }], - }; - } - - case 'browser_navigate': { - if (!isBrowserTestingAvailable()) { - return { - content: [{ type: 'text', text: 'Browser testing not available.' }], - }; - } - - const url = args.url as string; - const browser = (args.browser as 'chromium' | 'firefox') || 'chromium'; - const result = await launchBrowser(url, { browser }); - - return { - content: [ - { - type: 'text', - text: `Browser launched (PID: ${result.pid}). View via VNC: ${getVNCInfo().noVncUrl || 'not available'}`, - }, - ], - }; - } - - case 'browser_script': { - if (!isBrowserTestingAvailable()) { - return { - content: [{ type: 'text', text: 'Browser testing not available.' }], - }; - } - - const script = args.script as string; - const browser = args.browser as 'chromium' | 'firefox' | 'webkit' | undefined; - const timeout = args.timeout as number | undefined; - - const result = await runPlaywrightScript(script, { browser, timeout }); - - return { - content: [ - { - type: 'text', - text: result.success - ? `Script executed successfully.\n\nOutput:\n${result.output}` - : `Script failed.\n\nOutput:\n${result.output}`, - }, - ], - }; - } - - case 'container_run': { - if (!isDockerAvailable()) { - return { - content: [ - { - type: 'text', - text: 'Docker not available. Mount /var/run/docker.sock to enable container spawning.', - }, - ], - }; - } - - const result = await runInContainer({ - image: args.image as string, - command: args.command as string[], - workdir: args.workdir as string | undefined, - volumes: args.volumes as string[] | undefined, - env: args.env as Record | undefined, - memory: args.memory as string | undefined, - timeout: args.timeout as number | undefined, - }); - - return { - content: [ - { - type: 'text', - text: `Exit code: ${result.exitCode}\n\nStdout:\n${result.stdout}\n\nStderr:\n${result.stderr}`, - }, - ], - }; - } - - case 'container_run_code': { - if (!isDockerAvailable()) { - return { - content: [{ type: 'text', text: 'Docker not available.' }], - }; - } - - const language = args.language as 'node' | 'python' | 'go' | 'rust' | 'bash'; - const code = args.code as string; - const timeout = args.timeout as number | undefined; - - const result = await runCode(language, code, { timeout }); - - return { - content: [ - { - type: 'text', - text: `Exit code: ${result.exitCode}\n\nOutput:\n${result.stdout}${result.stderr}`, - }, - ], - }; - } - - case 'container_list': { - if (!isDockerAvailable()) { - return { - content: [{ type: 'text', text: 'Docker not available.' }], - }; - } - - const containers = listContainers(); - return { - content: [ - { - type: 'text', - text: - containers.length > 0 - ? containers.map((c) => `${c.name} (${c.image}): ${c.status}`).join('\n') - : 'No running containers', - }, - ], - }; - } - - case 'vnc_info': { - const info = getVNCInfo(); - return { - content: [ - { - type: 'text', - text: info.available - ? `VNC available:\n- Web interface: ${info.noVncUrl}\n- Native: ${info.vncUrl}` - : 'VNC not available in this workspace', - }, - ], - }; - } - - default: - return { - content: [{ type: 'text', text: `Unknown tool: ${name}` }], - }; - } -} - -// ============================================================================ -// Server Setup -// ============================================================================ - -export async function startWorkspaceToolsServer(): Promise { - const server = new Server( - { - name: 'workspace-tools', - version: '1.0.0', - }, - { - capabilities: { - tools: {}, - }, - } - ); - - // List available tools - server.setRequestHandler(ListToolsRequestSchema, async () => { - // Filter tools based on what's actually available - const availableTools = TOOLS.filter((tool) => { - if (tool.name.startsWith('browser_') && !isBrowserTestingAvailable()) { - return false; - } - if (tool.name.startsWith('container_') && !isDockerAvailable()) { - return false; - } - return true; - }); - - return { tools: availableTools }; - }); - - // Handle tool calls - server.setRequestHandler(CallToolRequestSchema, async (request) => { - const { name, arguments: args } = request.params; - return handleToolCall(name, (args as Record) || {}); - }); - - // Start server - const transport = new StdioServerTransport(); - await server.connect(transport); - - console.error('[workspace-tools] MCP server started'); -} - -// Run if executed directly -if (import.meta.url === `file://${process.argv[1]}`) { - startWorkspaceToolsServer().catch(console.error); -} From 9116c85cf0082440500897d38ce2af28b6764c9a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 13:16:25 +0000 Subject: [PATCH 049/103] Add task breakdown for workspace capability discovery design --- docs/tasks/workspace-capabilities.tasks.md | 121 +++++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 docs/tasks/workspace-capabilities.tasks.md diff --git a/docs/tasks/workspace-capabilities.tasks.md b/docs/tasks/workspace-capabilities.tasks.md new file mode 100644 index 00000000..abc19381 --- /dev/null +++ b/docs/tasks/workspace-capabilities.tasks.md @@ -0,0 +1,121 @@ +# Workspace Capabilities - Agent Discovery + +How should agents discover and use workspace capabilities (browser testing, container spawning, etc.)? + +## Problem Statement + +We have workspace capabilities: +- Browser testing (Playwright, Xvfb, VNC) +- Container spawning (Docker socket) +- Potentially more in the future (E2B, Browserbase) + +**Challenge:** How do agents know these exist without bloating context for every agent? + +Current implementations exist but are not wired up: +- `src/daemon/services/browser-testing.ts` +- `src/daemon/services/container-spawner.ts` +- `deploy/workspace/Dockerfile.browser` + +## Key Questions + +### 1. Static vs Dynamic Discovery +- [ ] Should capabilities be in rules/skills (static, always injected)? +- [ ] Should capabilities be discovered via MCP at runtime (dynamic)? +- [ ] Hybrid: minimal hint in rules, full discovery via MCP? + +### 2. Cloud vs Local +- [ ] Cloud workspaces: How are capabilities configured per workspace? +- [ ] Local daemons: How does the daemon know what's available? +- [ ] Should there be a "capability manifest" per workspace? + +### 3. Context Budget +- [ ] How much context is acceptable for capability hints? +- [ ] Should agents ask for capabilities only when needed? +- [ ] Can we use tool descriptions instead of injected prompts? + +### 4. Opt-in vs Opt-out +- [ ] Should capabilities be enabled by default? +- [ ] Per-workspace configuration? +- [ ] Per-agent configuration? + +## Design Options + +### Option A: MCP-Only Discovery +Agents call `workspace_capabilities` tool to discover what's available. +No static context injection. + +**Pros:** Zero context overhead, dynamic +**Cons:** Agents might not know to call it + +### Option B: Minimal Hint + MCP +One line in system prompt: "Call workspace_capabilities to check for browser/container tools" + +**Pros:** Tiny context, agents know to look +**Cons:** Still some static injection + +### Option C: Workspace Manifest +Each workspace has a capabilities.json that configures what's available. +Cloud provisions this, agents read at startup. + +**Pros:** Explicit configuration +**Cons:** More infrastructure + +### Option D: Auto-Detection +MCP server auto-detects capabilities (checks DISPLAY, docker.sock) and only exposes available tools. + +**Pros:** Zero configuration, just works +**Cons:** Magic behavior + +## Tasks + +### capability-discovery-design +- [ ] Decide on discovery mechanism +- [ ] Document decision rationale +- [ ] Create ADR (Architecture Decision Record) + +Dependencies: none +Priority: high + +### capability-manifest-schema +- [ ] Define WorkspaceCapabilities schema +- [ ] Define how cloud provisions capabilities +- [ ] Define how daemon reads capabilities + +Dependencies: capability-discovery-design +Priority: medium + +### mcp-capability-tools +- [ ] Create MCP server for workspace tools +- [ ] Only expose tools for available capabilities +- [ ] Add workspace_capabilities discovery tool + +Dependencies: capability-manifest-schema +Priority: medium + +### agent-prompting-strategy +- [ ] Determine minimal context for capability awareness +- [ ] Test with real agents +- [ ] Measure context overhead + +Dependencies: capability-discovery-design +Priority: medium + +### cloud-workspace-config +- [ ] Add capabilities to workspace provisioning +- [ ] UI for enabling/disabling capabilities +- [ ] Per-workspace capability billing (if applicable) + +Dependencies: capability-manifest-schema +Priority: low + +## Notes + +The core services are already implemented: +- Browser testing: `src/daemon/services/browser-testing.ts` +- Container spawning: `src/daemon/services/container-spawner.ts` +- Browser Dockerfile: `deploy/workspace/Dockerfile.browser` + +What's missing is the discovery/awareness layer that doesn't bloat context. + +See also: +- `docs/design/e2b-sandbox-integration.md` - E2B as alternative backend From eaaf6548a1d9012952e2a0b31c10b5e40f46ef1a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 14:20:54 +0000 Subject: [PATCH 050/103] Add task breakdown for global skills system design --- docs/tasks/global-skills-system.tasks.md | 265 +++++++++++++++++++++++ 1 file changed, 265 insertions(+) create mode 100644 docs/tasks/global-skills-system.tasks.md diff --git a/docs/tasks/global-skills-system.tasks.md b/docs/tasks/global-skills-system.tasks.md new file mode 100644 index 00000000..ee1eb5db --- /dev/null +++ b/docs/tasks/global-skills-system.tasks.md @@ -0,0 +1,265 @@ +# Agent Relay Global Skills System + +A system for distributing opt-in skills that are installed globally, not in project repos. + +## Problem Statement + +Skills that bloat project context: +- Workspace capabilities (browser testing, containers) +- Integration guides (Linear, Slack, GitHub) +- Agent patterns (debugging, refactoring, testing) +- Provider-specific knowledge (Claude, Codex, Gemini quirks) + +**We don't want these in every project's `.claude/` or `.openskills/`** + +## Proposed Architecture + +``` +~/.agent-relay/ +โ”œโ”€โ”€ skills/ # Global skills directory +โ”‚ โ”œโ”€โ”€ workspace-capabilities/ +โ”‚ โ”‚ โ””โ”€โ”€ SKILL.md +โ”‚ โ”œโ”€โ”€ browser-testing/ +โ”‚ โ”‚ โ””โ”€โ”€ SKILL.md +โ”‚ โ”œโ”€โ”€ linear-integration/ +โ”‚ โ”‚ โ””โ”€โ”€ SKILL.md +โ”‚ โ””โ”€โ”€ debugging-patterns/ +โ”‚ โ””โ”€โ”€ SKILL.md +โ”œโ”€โ”€ skills.json # Installed skills manifest +โ””โ”€โ”€ config.json # User preferences +``` + +## Key Questions + +### 1. Installation & Distribution +- [ ] How are skills installed? (`agent-relay skills install `?) +- [ ] Where do skills come from? (npm, git, registry?) +- [ ] Version management? +- [ ] Updates? + +### 2. Discovery by Agents +- [ ] How do agents know global skills exist? +- [ ] Merged with project skills in manifest? +- [ ] Separate namespace? (`@relay/browser-testing` vs `browser-testing`) + +### 3. Activation +- [ ] All installed skills available, or per-project activation? +- [ ] `agent-relay.json` in project root to enable specific global skills? +- [ ] Environment variable overrides? + +### 4. Context Loading +- [ ] Lazy load (agent requests) vs eager load (always injected)? +- [ ] How to hint at available skills without loading content? +- [ ] Skill metadata (description, size, dependencies)? + +### 5. Cloud vs Local +- [ ] Cloud workspaces: skills bundled in workspace image? +- [ ] Cloud workspaces: fetched on demand? +- [ ] User skill preferences synced to cloud? + +## Design Options + +### Option A: CLI-Managed Global Skills + +```bash +# Install a skill +agent-relay skills install @relay/browser-testing + +# List installed skills +agent-relay skills list + +# Enable for current project +agent-relay skills enable browser-testing + +# Skills available via same mechanism as project skills +``` + +**Pros:** Familiar pattern (npm-like), explicit control +**Cons:** Another thing to manage + +### Option B: Skills Registry + Auto-Discovery + +Skills published to a registry. Daemon auto-discovers based on workspace capabilities. + +```json +// ~/.agent-relay/skills.json +{ + "installed": ["@relay/browser-testing", "@relay/linear"], + "autoEnable": { + "browser-testing": { "when": "xvfb-available" }, + "linear": { "when": "linear-token-set" } + } +} +``` + +**Pros:** Smart activation, less manual work +**Cons:** Magic, harder to debug + +### Option C: Bundled Skill Packs + +Curated skill packs installed together: + +```bash +agent-relay skills install @relay/workspace-pack # browser, containers, etc. +agent-relay skills install @relay/integrations-pack # linear, slack, github +``` + +**Pros:** Simpler UX, curated combinations +**Cons:** Less granular control + +### Option D: Git-Based Skills + +Skills are git repos, installed via URL: + +```bash +agent-relay skills install https://github.com/agent-relay/skills-browser-testing +``` + +**Pros:** Easy to create/share custom skills +**Cons:** No central discovery + +## Skill Manifest Schema + +```typescript +interface GlobalSkill { + name: string; // e.g., "@relay/browser-testing" + version: string; + description: string; // Short description for listing + + // Activation conditions + activation: { + mode: 'lazy' | 'eager' | 'conditional'; + condition?: string; // e.g., "env.DISPLAY" or "file:/var/run/docker.sock" + }; + + // Context cost + estimatedTokens: number; + + // Dependencies + requires?: string[]; // Other skills or capabilities + + // Content + skillPath: string; // Path to SKILL.md + rulesPath?: string; // Optional rules to inject +} +``` + +## CLI Commands (Proposed) + +```bash +# Installation +agent-relay skills install # Install from registry +agent-relay skills install # Install from git +agent-relay skills uninstall +agent-relay skills update [name] + +# Discovery +agent-relay skills list # List installed +agent-relay skills search # Search registry +agent-relay skills info # Show details + +# Project-level +agent-relay skills enable # Enable in current project +agent-relay skills disable +agent-relay skills status # Show what's active + +# For agents +agent-relay skills manifest # Output JSON for agent consumption +``` + +## Tasks + +### global-skills-architecture +- [ ] Finalize directory structure +- [ ] Define skill manifest schema +- [ ] Define installation sources (registry, git, local) +- [ ] Document in ADR + +Dependencies: none +Priority: high + +### global-skills-cli +- [ ] Implement `skills install` command +- [ ] Implement `skills list` command +- [ ] Implement `skills enable/disable` for projects +- [ ] Add to existing CLI + +Dependencies: global-skills-architecture +Priority: high + +### global-skills-registry +- [ ] Decide on registry approach (npm? custom? github releases?) +- [ ] Implement registry client +- [ ] Create initial skill packages + +Dependencies: global-skills-architecture +Priority: medium + +### global-skills-agent-discovery +- [ ] How agents see global skills in manifest +- [ ] Namespace handling (@relay/ prefix?) +- [ ] Integration with existing skills system + +Dependencies: global-skills-cli +Priority: medium + +### global-skills-cloud-sync +- [ ] Sync user skill preferences to cloud +- [ ] Cloud workspace skill provisioning +- [ ] Per-workspace skill overrides + +Dependencies: global-skills-agent-discovery +Priority: low + +### initial-skill-pack +- [ ] Create @relay/workspace-capabilities skill +- [ ] Create @relay/browser-testing skill +- [ ] Create @relay/container-spawning skill +- [ ] Create @relay/debugging-patterns skill + +Dependencies: global-skills-cli +Priority: medium + +## Example User Flow + +```bash +# User installs agent-relay +npm install -g agent-relay + +# User wants browser testing capabilities +agent-relay skills search browser +# Found: @relay/browser-testing - Playwright, screenshots, VNC for browser automation + +agent-relay skills install @relay/browser-testing +# Installed @relay/browser-testing v1.0.0 to ~/.agent-relay/skills/ + +# In a project where they want it +cd my-project +agent-relay skills enable browser-testing +# Enabled @relay/browser-testing for this project + +# Agent now sees in skills manifest: +# - Project skills (from .openskills/) +# - Global skills (from ~/.agent-relay/skills/, filtered by enabled) +``` + +## Relationship to Workspace Capabilities + +This solves the "how do agents know" problem from workspace-capabilities.tasks.md: + +1. **Skills are documentation** - they tell agents what's possible +2. **Capabilities are runtime** - they're what's actually available +3. **Skills can check capabilities** - `activation.condition: "env.DISPLAY"` + +An agent loads the browser-testing skill โ†’ learns the APIs โ†’ calls MCP tools โ†’ tools check if Xvfb is running. + +## Notes + +- Skills are NOT MCP tools (those are separate) +- Skills are context/documentation that help agents use tools effectively +- Skills can reference MCP tools in their content +- Keep skills focused and small (estimate tokens) + +See also: +- `docs/tasks/workspace-capabilities.tasks.md` - Runtime capability discovery +- `docs/design/e2b-sandbox-integration.md` - Alternative execution backends From 1a86fd376319d10808a2b00ef06aeebbe03c553f Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 14:44:24 +0000 Subject: [PATCH 051/103] Update global skills task to leverage PRPM instead of custom system --- docs/tasks/global-skills-system.tasks.md | 363 ++++++++++------------- 1 file changed, 164 insertions(+), 199 deletions(-) diff --git a/docs/tasks/global-skills-system.tasks.md b/docs/tasks/global-skills-system.tasks.md index ee1eb5db..5360f4de 100644 --- a/docs/tasks/global-skills-system.tasks.md +++ b/docs/tasks/global-skills-system.tasks.md @@ -1,265 +1,230 @@ -# Agent Relay Global Skills System +# Agent Relay Skills via PRPM -A system for distributing opt-in skills that are installed globally, not in project repos. +Leverage PRPM (Prompt Package Manager) to distribute agent-relay skills that users can opt into. -## Problem Statement +## Overview -Skills that bloat project context: -- Workspace capabilities (browser testing, containers) -- Integration guides (Linear, Slack, GitHub) -- Agent patterns (debugging, refactoring, testing) -- Provider-specific knowledge (Claude, Codex, Gemini quirks) +PRPM already provides: +- Registry at `registry.prpm.dev` +- `prpm install @org/package` CLI +- Lockfile tracking (`prpm.lock`) +- Format conversion (claude, cursor, agents.md) +- Subtypes: skill, agent, rule, snippet +- Lazy loading (`eager: false`) -**We don't want these in every project's `.claude/` or `.openskills/`** +**We should publish `@agent-relay/*` packages to PRPM instead of building custom infrastructure.** -## Proposed Architecture +## Current State -``` -~/.agent-relay/ -โ”œโ”€โ”€ skills/ # Global skills directory -โ”‚ โ”œโ”€โ”€ workspace-capabilities/ -โ”‚ โ”‚ โ””โ”€โ”€ SKILL.md -โ”‚ โ”œโ”€โ”€ browser-testing/ -โ”‚ โ”‚ โ””โ”€โ”€ SKILL.md -โ”‚ โ”œโ”€โ”€ linear-integration/ -โ”‚ โ”‚ โ””โ”€โ”€ SKILL.md -โ”‚ โ””โ”€โ”€ debugging-patterns/ -โ”‚ โ””โ”€โ”€ SKILL.md -โ”œโ”€โ”€ skills.json # Installed skills manifest -โ””โ”€โ”€ config.json # User preferences -``` - -## Key Questions - -### 1. Installation & Distribution -- [ ] How are skills installed? (`agent-relay skills install `?) -- [ ] Where do skills come from? (npm, git, registry?) -- [ ] Version management? -- [ ] Updates? - -### 2. Discovery by Agents -- [ ] How do agents know global skills exist? -- [ ] Merged with project skills in manifest? -- [ ] Separate namespace? (`@relay/browser-testing` vs `browser-testing`) +Already using prpm in this repo (see `prpm.lock`): +- `@agent-relay/agent-relay-snippet` - Relay messaging syntax +- `@agent-relay/agent-relay-protocol` - Full protocol docs +- Various skills from `@prpm/*`, `@anthropic/*`, `@my-senior-dev/*` -### 3. Activation -- [ ] All installed skills available, or per-project activation? -- [ ] `agent-relay.json` in project root to enable specific global skills? -- [ ] Environment variable overrides? +## Problem: Global vs Project Skills -### 4. Context Loading -- [ ] Lazy load (agent requests) vs eager load (always injected)? -- [ ] How to hint at available skills without loading content? -- [ ] Skill metadata (description, size, dependencies)? +PRPM installs to project directories (`.claude/skills/`). We need: +- Skills NOT in project source control +- Skills available across all projects +- Per-user opt-in, not per-project -### 5. Cloud vs Local -- [ ] Cloud workspaces: skills bundled in workspace image? -- [ ] Cloud workspaces: fetched on demand? -- [ ] User skill preferences synced to cloud? - -## Design Options - -### Option A: CLI-Managed Global Skills +### Potential Solutions +**A. PRPM Global Flag (feature request)** ```bash -# Install a skill -agent-relay skills install @relay/browser-testing - -# List installed skills -agent-relay skills list - -# Enable for current project -agent-relay skills enable browser-testing +prpm install --global @agent-relay/browser-testing +# Installs to ~/.prpm/skills/ or ~/.config/prpm/skills/ +``` -# Skills available via same mechanism as project skills +**B. User-level prpm.lock** +``` +~/.agent-relay/ +โ”œโ”€โ”€ prpm.lock # User's global skills +โ””โ”€โ”€ .claude/skills/ # Installed skill content ``` +Agent reads both project and user prpm.lock. -**Pros:** Familiar pattern (npm-like), explicit control -**Cons:** Another thing to manage +**C. Workspace Bundle** +Cloud workspaces come with @agent-relay skills pre-installed. +Users don't manage - just available in cloud. -### Option B: Skills Registry + Auto-Discovery +## Proposed Skills to Publish -Skills published to a registry. Daemon auto-discovers based on workspace capabilities. +### @agent-relay/workspace-capabilities +Documentation for browser testing + container spawning. ```json -// ~/.agent-relay/skills.json { - "installed": ["@relay/browser-testing", "@relay/linear"], - "autoEnable": { - "browser-testing": { "when": "xvfb-available" }, - "linear": { "when": "linear-token-set" } - } + "name": "@agent-relay/workspace-capabilities", + "version": "1.0.0", + "description": "Browser testing (Playwright, VNC) and container spawning (Docker) for agent-relay workspaces", + "format": "claude", + "subtype": "skill", + "eager": false, + "tags": ["agent-relay", "browser-testing", "docker", "workspace"], + "files": [".claude/skills/workspace-capabilities/SKILL.md"] } ``` -**Pros:** Smart activation, less manual work -**Cons:** Magic, harder to debug +### @agent-relay/browser-testing +Focused Playwright/screenshot skill. -### Option C: Bundled Skill Packs +### @agent-relay/container-spawning +Focused Docker/container skill. -Curated skill packs installed together: +### @agent-relay/linear-integration +Linear webhook/API patterns. -```bash -agent-relay skills install @relay/workspace-pack # browser, containers, etc. -agent-relay skills install @relay/integrations-pack # linear, slack, github -``` +### @agent-relay/slack-integration +Slack bot patterns. -**Pros:** Simpler UX, curated combinations -**Cons:** Less granular control +### @agent-relay/workspace-pack (collection) +Bundle of all workspace skills. -### Option D: Git-Based Skills +```json +{ + "collections": [{ + "id": "workspace-pack", + "name": "Agent Relay Workspace Pack", + "description": "All workspace capability skills", + "packages": [ + { "packageId": "@agent-relay/workspace-capabilities" }, + { "packageId": "@agent-relay/browser-testing" }, + { "packageId": "@agent-relay/container-spawning" } + ] + }] +} +``` -Skills are git repos, installed via URL: +## Tasks -```bash -agent-relay skills install https://github.com/agent-relay/skills-browser-testing -``` +### prpm-global-research +- [ ] Check if prpm supports `--global` flag +- [ ] If not, evaluate: feature request vs workaround +- [ ] Document findings -**Pros:** Easy to create/share custom skills -**Cons:** No central discovery +Dependencies: none +Priority: high -## Skill Manifest Schema +### user-skills-directory +- [ ] Define `~/.agent-relay/skills/` structure +- [ ] Implement reading from user directory in daemon +- [ ] Merge user + project skills in agent manifest -```typescript -interface GlobalSkill { - name: string; // e.g., "@relay/browser-testing" - version: string; - description: string; // Short description for listing +Dependencies: prpm-global-research +Priority: high - // Activation conditions - activation: { - mode: 'lazy' | 'eager' | 'conditional'; - condition?: string; // e.g., "env.DISPLAY" or "file:/var/run/docker.sock" - }; +### publish-workspace-capabilities +- [ ] Create skill content (SKILL.md) +- [ ] Create prpm.json manifest +- [ ] Test locally with `prpm install .` +- [ ] Publish to registry.prpm.dev - // Context cost - estimatedTokens: number; +Dependencies: none (can do in parallel) +Priority: high - // Dependencies - requires?: string[]; // Other skills or capabilities +### publish-browser-testing +- [ ] Extract browser-specific content from workspace-capabilities +- [ ] Create focused SKILL.md +- [ ] Publish to registry - // Content - skillPath: string; // Path to SKILL.md - rulesPath?: string; // Optional rules to inject -} -``` +Dependencies: publish-workspace-capabilities +Priority: medium -## CLI Commands (Proposed) +### publish-container-spawning +- [ ] Extract container-specific content +- [ ] Create focused SKILL.md +- [ ] Publish to registry -```bash -# Installation -agent-relay skills install # Install from registry -agent-relay skills install # Install from git -agent-relay skills uninstall -agent-relay skills update [name] - -# Discovery -agent-relay skills list # List installed -agent-relay skills search # Search registry -agent-relay skills info # Show details - -# Project-level -agent-relay skills enable # Enable in current project -agent-relay skills disable -agent-relay skills status # Show what's active - -# For agents -agent-relay skills manifest # Output JSON for agent consumption -``` +Dependencies: publish-workspace-capabilities +Priority: medium -## Tasks +### workspace-pack-collection +- [ ] Create collection prpm.json +- [ ] Bundle all workspace skills +- [ ] Publish collection -### global-skills-architecture -- [ ] Finalize directory structure -- [ ] Define skill manifest schema -- [ ] Define installation sources (registry, git, local) -- [ ] Document in ADR +Dependencies: publish-browser-testing, publish-container-spawning +Priority: low -Dependencies: none -Priority: high +### cloud-workspace-provisioning +- [ ] Pre-install @agent-relay skills in cloud workspace images +- [ ] Or: fetch on workspace creation +- [ ] Make configurable per-workspace -### global-skills-cli -- [ ] Implement `skills install` command -- [ ] Implement `skills list` command -- [ ] Implement `skills enable/disable` for projects -- [ ] Add to existing CLI +Dependencies: publish-workspace-capabilities +Priority: medium -Dependencies: global-skills-architecture -Priority: high +## Example Skill Content -### global-skills-registry -- [ ] Decide on registry approach (npm? custom? github releases?) -- [ ] Implement registry client -- [ ] Create initial skill packages +```markdown +--- +name: workspace-capabilities +description: Browser testing and container spawning for agent-relay workspaces +--- -Dependencies: global-skills-architecture -Priority: medium +# Workspace Capabilities -### global-skills-agent-discovery -- [ ] How agents see global skills in manifest -- [ ] Namespace handling (@relay/ prefix?) -- [ ] Integration with existing skills system +This workspace may have additional capabilities available. -Dependencies: global-skills-cli -Priority: medium +## Checking Availability -### global-skills-cloud-sync -- [ ] Sync user skill preferences to cloud -- [ ] Cloud workspace skill provisioning -- [ ] Per-workspace skill overrides +Before using these features, verify they're available: -Dependencies: global-skills-agent-discovery -Priority: low +\`\`\`typescript +// Check for browser testing +const hasBrowser = process.env.DISPLAY !== undefined; -### initial-skill-pack -- [ ] Create @relay/workspace-capabilities skill -- [ ] Create @relay/browser-testing skill -- [ ] Create @relay/container-spawning skill -- [ ] Create @relay/debugging-patterns skill +// Check for container spawning +const hasDocker = existsSync('/var/run/docker.sock'); +\`\`\` -Dependencies: global-skills-cli -Priority: medium +## Browser Testing -## Example User Flow +[Content about Playwright, screenshots, VNC...] -```bash -# User installs agent-relay -npm install -g agent-relay +## Container Spawning -# User wants browser testing capabilities -agent-relay skills search browser -# Found: @relay/browser-testing - Playwright, screenshots, VNC for browser automation +[Content about Docker, presets, resource limits...] +``` -agent-relay skills install @relay/browser-testing -# Installed @relay/browser-testing v1.0.0 to ~/.agent-relay/skills/ +## User Flow -# In a project where they want it -cd my-project -agent-relay skills enable browser-testing -# Enabled @relay/browser-testing for this project +```bash +# Option A: Global install (if prpm supports it) +prpm install --global @agent-relay/workspace-pack -# Agent now sees in skills manifest: -# - Project skills (from .openskills/) -# - Global skills (from ~/.agent-relay/skills/, filtered by enabled) +# Option B: User directory workaround +cd ~/.agent-relay +prpm install @agent-relay/workspace-pack + +# Option C: Cloud workspace (automatic) +# Skills pre-installed, just use them ``` -## Relationship to Workspace Capabilities +## Why PRPM Over Custom + +| Custom System | PRPM | +|--------------|------| +| Build registry | โœ… Already exists | +| Build CLI | โœ… Already exists | +| Build lockfile | โœ… Already exists | +| Version management | โœ… Already exists | +| Format conversion | โœ… Already exists | -This solves the "how do agents know" problem from workspace-capabilities.tasks.md: +**PRPM gives us distribution for free. We just publish packages.** -1. **Skills are documentation** - they tell agents what's possible -2. **Capabilities are runtime** - they're what's actually available -3. **Skills can check capabilities** - `activation.condition: "env.DISPLAY"` +## Open Questions for PRPM -An agent loads the browser-testing skill โ†’ learns the APIs โ†’ calls MCP tools โ†’ tools check if Xvfb is running. +1. **Global installs** - `prpm install --global`? +2. **Multiple lockfile locations** - project + user? +3. **Conditional activation** - `activationCondition` field? -## Notes +May need to contribute these features or work around them. -- Skills are NOT MCP tools (those are separate) -- Skills are context/documentation that help agents use tools effectively -- Skills can reference MCP tools in their content -- Keep skills focused and small (estimate tokens) +## References -See also: +- `prpm.lock` - Current installed packages +- `.claude/skills/prpm-json-best-practices-skill/` - How to create packages - `docs/tasks/workspace-capabilities.tasks.md` - Runtime capability discovery -- `docs/design/e2b-sandbox-integration.md` - Alternative execution backends +- Implementation: `src/daemon/services/browser-testing.ts`, `container-spawner.ts` From 7d9b940b74c75bb8d198ddfb5f48460052db35b7 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 14:52:26 +0000 Subject: [PATCH 052/103] Add beads issue for PRPM global skills (agent-relay-350) --- .beads/issues.jsonl | 1 + 1 file changed, 1 insertion(+) diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index eaf28bb8..c15d018b 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -315,3 +315,4 @@ {"id":"agent-relay-yvf","title":"[Memory] Build task-based trajectory layer on Mem0","description":"Implement trajectory grouping on top of Mem0 observations. Map task_id to Mem0 user_id or metadata. Support chapter-based organization within trajectories.","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-21T15:00:00Z","updated_at":"2025-12-21T15:00:00Z"} {"id":"agent-relay-yvg","title":"[Memory] Implement fleet-wide knowledge workspace","description":"Build knowledge workspace layer: decisions log, pattern library, cross-agent context. Query interface for agents to access fleet knowledge. Uses Mem0 for storage.","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-21T15:00:00Z","updated_at":"2025-12-21T15:00:00Z"} {"id":"agent-relay-yvh","title":"[Memory] Abstract MemoryBackend for future flexibility","description":"Create MemoryBackend interface allowing swap between Mem0, Zep, or custom SQLite+Chroma. Default to Mem0. Enable migration path if needs change.","status":"open","priority":3,"issue_type":"task","created_at":"2025-12-21T15:00:00Z","updated_at":"2025-12-21T15:00:00Z"} +{"id":"agent-relay-350","title":"Global skills via PRPM","description":"Distribute @agent-relay/* skills via PRPM registry for opt-in workspace capabilities.\n\n## Goals\n- Publish skills to registry.prpm.dev\n- Users install globally (not per-project)\n- Zero context bloat until loaded\n\n## Key Tasks\n1. Research prpm --global support\n2. Define ~/.agent-relay/ user skills directory\n3. Publish @agent-relay/workspace-capabilities\n4. Publish @agent-relay/browser-testing\n5. Publish @agent-relay/container-spawning\n6. Create @agent-relay/workspace-pack collection\n7. Cloud workspace pre-installation\n\n## Skills to Publish\n- workspace-capabilities: Browser + container docs\n- browser-testing: Playwright, screenshots, VNC\n- container-spawning: Docker, presets, resource limits\n- linear-integration: Webhooks, API patterns\n- slack-integration: Bot patterns\n\n## Open Questions\n- Does prpm support --global flag?\n- Can daemon read user + project skills?\n- Conditional activation based on capabilities?\n\nSee: docs/tasks/global-skills-system.tasks.md","status":"open","priority":2,"issue_type":"epic","created_at":"2026-01-04T13:30:00.000000Z","updated_at":"2026-01-04T13:30:00.000000Z"} From 3504111fb408dfb46d6d22af65c296ee6fc35ac1 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 15:42:18 +0000 Subject: [PATCH 053/103] Fix Claude CLI auth flow showing false success Two issues were causing the auth flow to incorrectly report success: 1. Dockerfile pre-seeded hasCompletedOnboarding: true which caused Claude to skip the interactive auth flow entirely, never generating an auth URL 2. onboarding.ts would report "alreadyAuthenticated" without verifying that credentials were actually found - now it checks session.token and returns an error if no auth URL or credentials exist --- deploy/workspace/Dockerfile | 5 ++--- src/cloud/api/onboarding.ts | 27 +++++++++++++++++++-------- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index f2ec7a2d..612674fa 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -72,9 +72,8 @@ USER workspace # Install AI CLIs as workspace user (they install to ~/.local/bin) # Claude RUN curl -fsSL https://claude.ai/install.sh | bash -# Pre-seed Claude config to skip interactive onboarding -RUN mkdir -p /home/workspace/.claude && \ - echo '{"theme":"dark","hasCompletedOnboarding":true}' > /home/workspace/.claude/settings.local.json +# Note: We don't pre-seed Claude config - we want the full interactive auth flow +# to run so the PTY runner can capture the auth URL # Note: Codex, Gemini, and OpenCode are installed as root above via npm # Droid RUN curl -fsSL https://app.factory.ai/cli | sh diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 37f4e841..877cc081 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -137,14 +137,25 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response // Return session info based on current state if (session.status === 'success' && !session.authUrl) { - // Already authenticated - CLI exited successfully without auth URL - activeSessions.delete(sessionId); - res.json({ - sessionId, - status: 'success', - alreadyAuthenticated: true, - message: `Already authenticated with ${config.displayName}`, - }); + // CLI exited without auth URL - check if we have credentials + if (session.token) { + // Already authenticated - we found existing credentials + activeSessions.delete(sessionId); + res.json({ + sessionId, + status: 'success', + alreadyAuthenticated: true, + message: `Already authenticated with ${config.displayName}`, + }); + } else { + // No auth URL and no credentials - CLI didn't start auth flow properly + activeSessions.delete(sessionId); + console.error(`[onboarding] CLI exited without auth URL or credentials. Output:\n${session.output}`); + res.status(500).json({ + error: 'CLI auth failed - no auth URL generated. Please try again or check CLI installation.', + debug: process.env.NODE_ENV === 'development' ? session.output.slice(-500) : undefined, + }); + } } else if (session.authUrl) { res.json({ sessionId, From e818ecda8f6cbeebbb522ba12baafb570fbe990f Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 16:46:08 +0100 Subject: [PATCH 054/103] csrf fixes --- src/dashboard/lib/cloudApi.ts | 50 +++++++++++++++++-- src/dashboard/react-components/App.tsx | 1 + .../react-components/hooks/useSession.ts | 4 ++ 3 files changed, 51 insertions(+), 4 deletions(-) diff --git a/src/dashboard/lib/cloudApi.ts b/src/dashboard/lib/cloudApi.ts index 63f927f6..5b736e00 100644 --- a/src/dashboard/lib/cloudApi.ts +++ b/src/dashboard/lib/cloudApi.ts @@ -47,6 +47,26 @@ export type SessionExpiredCallback = (error: SessionError) => void; // Global session expiration listeners const sessionExpiredListeners = new Set(); +// Global CSRF token storage +let csrfToken: string | null = null; + +/** + * Get the current CSRF token + */ +export function getCsrfToken(): string | null { + return csrfToken; +} + +/** + * Capture CSRF token from response headers + */ +function captureCsrfToken(response: Response): void { + const token = response.headers.get('X-CSRF-Token'); + if (token) { + csrfToken = token; + } +} + /** * Register a callback for when session expires */ @@ -90,15 +110,26 @@ async function cloudFetch( options: RequestInit = {} ): Promise<{ success: true; data: T } | { success: false; error: string; sessionExpired?: boolean }> { try { + // Build headers, including CSRF token for non-GET requests + const headers: Record = { + 'Content-Type': 'application/json', + ...(options.headers as Record), + }; + + // Include CSRF token for state-changing requests + if (options.method && options.method !== 'GET' && csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + const response = await fetch(endpoint, { ...options, credentials: 'include', // Include cookies for session - headers: { - 'Content-Type': 'application/json', - ...options.headers, - }, + headers, }); + // Capture CSRF token from response + captureCsrfToken(response); + const data = await response.json(); if (isSessionError(response, data)) { @@ -175,6 +206,8 @@ export const cloudApi = { const response = await fetch('/api/auth/nango/login-session', { credentials: 'include', }); + // Capture CSRF token from response + captureCsrfToken(response); const data = await response.json(); if (!response.ok) { return { success: false, error: data.error || 'Failed to create login session' }; @@ -193,6 +226,8 @@ export const cloudApi = { const response = await fetch(`/api/auth/nango/login-status/${encodeURIComponent(connectionId)}`, { credentials: 'include', }); + // Capture CSRF token from response + captureCsrfToken(response); const data = await response.json(); if (!response.ok) { return { success: false, error: data.error || 'Failed to check login status' }; @@ -225,6 +260,8 @@ export const cloudApi = { const response = await fetch('/api/auth/session', { credentials: 'include', }); + // Capture CSRF token from response + captureCsrfToken(response); const data = await response.json(); return data as SessionStatus; } catch { @@ -248,9 +285,14 @@ export const cloudApi = { */ async logout(): Promise<{ success: boolean; error?: string }> { try { + const headers: Record = {}; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } const response = await fetch('/api/auth/logout', { method: 'POST', credentials: 'include', + headers, }); const data = await response.json(); return data as { success: boolean; error?: string }; diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 42bdfc58..5482a819 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -986,6 +986,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { settings={settings} onSettingsChange={setSettings} onResetSettings={() => setSettings(defaultSettings)} + csrfToken={cloudSession?.csrfToken} /> {/* Add Workspace Modal */} diff --git a/src/dashboard/react-components/hooks/useSession.ts b/src/dashboard/react-components/hooks/useSession.ts index a83aa9a2..7e75a09e 100644 --- a/src/dashboard/react-components/hooks/useSession.ts +++ b/src/dashboard/react-components/hooks/useSession.ts @@ -9,6 +9,7 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { cloudApi, onSessionExpired, + getCsrfToken, type CloudUser, type SessionError, type SessionStatus, @@ -34,6 +35,8 @@ export interface UseSessionReturn { isExpired: boolean; /** Session error if any */ error: SessionError | null; + /** CSRF token for API requests */ + csrfToken: string | null; /** Manually check session status */ checkSession: () => Promise; /** Clear the expired state (e.g., after dismissing modal) */ @@ -195,6 +198,7 @@ export function useSession(options: UseSessionOptions = {}): UseSessionReturn { isAuthenticated: user !== null, isExpired, error, + csrfToken: getCsrfToken(), checkSession, clearExpired, redirectToLogin, From 3c669fb621312ffa060233a0a9607b1d0f6de7fa Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 15:53:10 +0000 Subject: [PATCH 055/103] Fix CLI auth to run inside workspace container Cloud and workspace containers are separate. CLI tools (claude, codex, etc.) are installed in the workspace container, not the cloud server. Changes: - Add CLI auth endpoints to daemon API (runs in workspace) - POST /auth/cli/:provider/start - start auth via PTY - GET /auth/cli/:provider/status/:sessionId - check status - GET /auth/cli/:provider/creds/:sessionId - get token - POST /auth/cli/:provider/cancel/:sessionId - cancel - Update cloud onboarding to forward CLI auth to workspace - Finds user's running workspace - Forwards auth requests to workspace daemon - Polls workspace for status/credentials - Stores credentials in vault when complete - Returns clear error if no workspace running This fixes the issue where CLI auth was trying to run on the cloud server where CLI tools aren't installed. --- src/cloud/api/onboarding.ts | 212 +++++++++++--------- src/daemon/api.ts | 82 ++++++++ src/daemon/cli-auth.ts | 385 ++++++++++++++++++++++++++++++++++++ 3 files changed, 582 insertions(+), 97 deletions(-) create mode 100644 src/daemon/cli-auth.ts diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 877cc081..a8d87e17 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -83,11 +83,15 @@ setInterval(() => { /** * POST /api/onboarding/cli/:provider/start - * Start CLI-based auth - spawns the CLI and captures auth URL + * Start CLI-based auth - forwards to workspace daemon if available + * + * CLI auth requires a running workspace since CLI tools are installed there. + * For onboarding without a workspace, users should use the API key flow. */ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response) => { const { provider } = req.params; const userId = req.session.userId!; + const { workspaceId } = req.body; // Optional: specific workspace to use const config = CLI_AUTH_CONFIG[provider]; if (!config) { @@ -97,85 +101,74 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response }); } - // Create session - const sessionId = crypto.randomUUID(); - const session: CLIAuthSession = { - userId, - provider, - status: 'starting', - createdAt: new Date(), - output: '', - }; - activeSessions.set(sessionId, session); - try { - // Use shared PTY runner for CLI auth - const ptyResult = await runCLIAuthViaPTY(config, { - onAuthUrl: (url) => { - session.authUrl = url; - session.status = 'waiting_auth'; - }, - onPromptHandled: (description) => { - console.log(`[onboarding] Auto-responded to: ${description}`); - }, - onOutput: (data) => { - session.output += data; - if (matchesSuccessPattern(data, config.successPatterns)) { - session.status = 'success'; - } - }, - }); - - // Update session with result - if (ptyResult.success && !session.authUrl) { - session.status = 'success'; - await extractCredentials(session, config); - } else if (ptyResult.error && session.status === 'starting') { - session.status = 'error'; - session.error = ptyResult.error; + // Find a running workspace to use for CLI auth + let workspace; + if (workspaceId) { + workspace = await db.workspaces.findById(workspaceId); + if (!workspace || workspace.userId !== userId) { + return res.status(404).json({ error: 'Workspace not found' }); + } + } else { + // Find any running workspace for this user + const workspaces = await db.workspaces.findByUserId(userId); + workspace = workspaces.find(w => w.status === 'running' && w.publicUrl); } - // Return session info based on current state - if (session.status === 'success' && !session.authUrl) { - // CLI exited without auth URL - check if we have credentials - if (session.token) { - // Already authenticated - we found existing credentials - activeSessions.delete(sessionId); - res.json({ - sessionId, - status: 'success', - alreadyAuthenticated: true, - message: `Already authenticated with ${config.displayName}`, - }); - } else { - // No auth URL and no credentials - CLI didn't start auth flow properly - activeSessions.delete(sessionId); - console.error(`[onboarding] CLI exited without auth URL or credentials. Output:\n${session.output}`); - res.status(500).json({ - error: 'CLI auth failed - no auth URL generated. Please try again or check CLI installation.', - debug: process.env.NODE_ENV === 'development' ? session.output.slice(-500) : undefined, - }); - } - } else if (session.authUrl) { - res.json({ - sessionId, - status: 'waiting_auth', - authUrl: session.authUrl, - message: 'Open the auth URL to complete login', + if (!workspace || workspace.status !== 'running' || !workspace.publicUrl) { + return res.status(400).json({ + error: 'CLI auth requires a running workspace', + code: 'NO_RUNNING_WORKSPACE', + message: 'Please start a workspace first, or use the API key input to connect your provider.', + hint: 'You can create a workspace without providers and connect them afterward using CLI auth.', }); - } else if (session.status === 'error') { - activeSessions.delete(sessionId); - res.status(500).json({ error: session.error || 'CLI auth failed to start' }); - } else { - // Still starting, return session ID to poll - res.json({ - sessionId, - status: 'starting', - message: 'Auth session starting, poll for status', + } + + // Forward auth request to workspace daemon + const workspaceUrl = workspace.publicUrl.replace(/\/$/, ''); + const authResponse = await fetch(`${workspaceUrl}/auth/cli/${provider}/start`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + }); + + if (!authResponse.ok) { + const errorData = await authResponse.json().catch(() => ({})) as { error?: string }; + return res.status(authResponse.status).json({ + error: errorData.error || 'Failed to start CLI auth in workspace', }); } + + const workspaceSession = await authResponse.json() as { + sessionId: string; + status?: string; + authUrl?: string; + }; + + // Create cloud session to track this + const sessionId = crypto.randomUUID(); + const session: CLIAuthSession = { + userId, + provider, + status: (workspaceSession.status as CLIAuthSession['status']) || 'starting', + authUrl: workspaceSession.authUrl, + createdAt: new Date(), + output: '', + }; + + // Store workspace info for status polling + (session as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string }).workspaceUrl = workspaceUrl; + (session as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string }).workspaceSessionId = workspaceSession.sessionId; + + activeSessions.set(sessionId, session); + + res.json({ + sessionId, + status: session.status, + authUrl: session.authUrl, + workspaceId: workspace.id, + message: session.authUrl ? 'Open the auth URL to complete login' : 'Auth session starting, poll for status', + }); } catch (error) { - activeSessions.delete(sessionId); console.error(`Error starting CLI auth for ${provider}:`, error); res.status(500).json({ error: 'Failed to start CLI authentication' }); } @@ -183,13 +176,13 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response /** * GET /api/onboarding/cli/:provider/status/:sessionId - * Check status of CLI auth session + * Check status of CLI auth session - forwards to workspace daemon */ -onboardingRouter.get('/cli/:provider/status/:sessionId', (req: Request, res: Response) => { - const { sessionId } = req.params; +onboardingRouter.get('/cli/:provider/status/:sessionId', async (req: Request, res: Response) => { + const { provider, sessionId } = req.params; const userId = req.session.userId!; - const session = activeSessions.get(sessionId); + const session = activeSessions.get(sessionId) as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string } | undefined; if (!session) { return res.status(404).json({ error: 'Session not found or expired' }); } @@ -198,6 +191,28 @@ onboardingRouter.get('/cli/:provider/status/:sessionId', (req: Request, res: Res return res.status(403).json({ error: 'Unauthorized' }); } + // If we have workspace info, poll the workspace for status + if (session.workspaceUrl && session.workspaceSessionId) { + try { + const statusResponse = await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/status/${session.workspaceSessionId}` + ); + if (statusResponse.ok) { + const workspaceStatus = await statusResponse.json() as { + status?: string; + authUrl?: string; + error?: string; + }; + // Update local session with workspace status + session.status = (workspaceStatus.status as CLIAuthSession['status']) || session.status; + session.authUrl = workspaceStatus.authUrl || session.authUrl; + session.error = workspaceStatus.error; + } + } catch (err) { + console.error('[onboarding] Failed to poll workspace status:', err); + } + } + res.json({ status: session.status, authUrl: session.authUrl, @@ -214,7 +229,7 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, const userId = req.session.userId!; const { token } = req.body; // Optional: user can paste token directly - const session = activeSessions.get(sessionId); + const session = activeSessions.get(sessionId) as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string } | undefined; if (!session) { return res.status(404).json({ error: 'Session not found or expired' }); } @@ -227,12 +242,18 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, // If token provided directly, use it let accessToken = token || session.token; - // If no token yet, try to read from credentials file - if (!accessToken) { - const config = CLI_AUTH_CONFIG[provider]; - if (config) { - await extractCredentials(session, config); - accessToken = session.token; + // If no token yet, try to get from workspace + if (!accessToken && session.workspaceUrl && session.workspaceSessionId) { + try { + const credsResponse = await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/creds/${session.workspaceSessionId}` + ); + if (credsResponse.ok) { + const creds = await credsResponse.json() as { token?: string }; + accessToken = creds.token; + } + } catch (err) { + console.error('[onboarding] Failed to get credentials from workspace:', err); } } @@ -251,13 +272,6 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, }); // Clean up session - if (session.process) { - try { - session.process.kill(); - } catch { - // Process may already be dead - } - } activeSessions.delete(sessionId); res.json({ @@ -274,17 +288,21 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, * POST /api/onboarding/cli/:provider/cancel/:sessionId * Cancel a CLI auth session */ -onboardingRouter.post('/cli/:provider/cancel/:sessionId', (req: Request, res: Response) => { - const { sessionId } = req.params; +onboardingRouter.post('/cli/:provider/cancel/:sessionId', async (req: Request, res: Response) => { + const { provider, sessionId } = req.params; const userId = req.session.userId!; - const session = activeSessions.get(sessionId); + const session = activeSessions.get(sessionId) as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string } | undefined; if (session?.userId === userId) { - if (session.process) { + // Cancel on workspace side if applicable + if (session.workspaceUrl && session.workspaceSessionId) { try { - session.process.kill(); + await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/cancel/${session.workspaceSessionId}`, + { method: 'POST' } + ); } catch { - // Process may already be dead + // Ignore cancel errors } } activeSessions.delete(sessionId); diff --git a/src/daemon/api.ts b/src/daemon/api.ts index 74a20d18..838e7048 100644 --- a/src/daemon/api.ts +++ b/src/daemon/api.ts @@ -19,6 +19,12 @@ import type { AddWorkspaceRequest, SpawnAgentRequest, } from './types.js'; +import { + startCLIAuth, + getAuthSession, + cancelAuthSession, + getSupportedProviders, +} from './cli-auth.js'; const logger = createLogger('daemon-api'); @@ -308,6 +314,82 @@ export class DaemonApi extends EventEmitter { const agents = this.agentManager.getAll(); return { status: 200, body: { agents } }; }); + + // === CLI Auth (for cloud server to call) === + + // List supported providers + this.routes.set('GET /auth/providers', async (): Promise => { + return { status: 200, body: { providers: getSupportedProviders() } }; + }); + + // Start CLI auth flow + this.routes.set('POST /auth/cli/:provider/start', async (req): Promise => { + const { provider } = req.params; + try { + const session = startCLIAuth(provider); + return { + status: 200, + body: { + sessionId: session.id, + status: session.status, + authUrl: session.authUrl, + }, + }; + } catch (err) { + return { + status: 400, + body: { error: err instanceof Error ? err.message : 'Failed to start auth' }, + }; + } + }); + + // Get auth session status + this.routes.set('GET /auth/cli/:provider/status/:sessionId', async (req): Promise => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return { status: 404, body: { error: 'Session not found' } }; + } + return { + status: 200, + body: { + sessionId: session.id, + status: session.status, + authUrl: session.authUrl, + error: session.error, + promptsHandled: session.promptsHandled, + }, + }; + }); + + // Get credentials from completed auth + this.routes.set('GET /auth/cli/:provider/creds/:sessionId', async (req): Promise => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return { status: 404, body: { error: 'Session not found' } }; + } + if (session.status !== 'success') { + return { status: 400, body: { error: 'Auth not complete', status: session.status } }; + } + return { + status: 200, + body: { + token: session.token, + provider: session.provider, + }, + }; + }); + + // Cancel auth session + this.routes.set('POST /auth/cli/:provider/cancel/:sessionId', async (req): Promise => { + const { sessionId } = req.params; + const cancelled = cancelAuthSession(sessionId); + if (!cancelled) { + return { status: 404, body: { error: 'Session not found' } }; + } + return { status: 200, body: { success: true } }; + }); } /** diff --git a/src/daemon/cli-auth.ts b/src/daemon/cli-auth.ts new file mode 100644 index 00000000..14d2dff2 --- /dev/null +++ b/src/daemon/cli-auth.ts @@ -0,0 +1,385 @@ +/** + * CLI Auth Handler for Workspace Daemon + * + * Handles CLI-based authentication (claude, codex, etc.) via PTY. + * Runs inside the workspace container where CLI tools are installed. + */ + +import * as pty from 'node-pty'; +import * as crypto from 'crypto'; +import * as fs from 'fs/promises'; +import * as os from 'os'; +import { createLogger } from '../resiliency/logger.js'; + +const logger = createLogger('cli-auth'); + +/** + * CLI auth configuration for each provider + */ +interface CLIAuthConfig { + command: string; + args: string[]; + urlPattern: RegExp; + credentialPath?: string; + displayName: string; + prompts: PromptHandler[]; + successPatterns: RegExp[]; + waitTimeout: number; +} + +interface PromptHandler { + pattern: RegExp; + response: string; + delay?: number; + description: string; +} + +const CLI_AUTH_CONFIG: Record = { + anthropic: { + command: 'claude', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.claude/credentials.json', + displayName: 'Claude', + waitTimeout: 30000, + prompts: [ + { + pattern: /dark\s*(mode|theme)/i, + response: '\r', + delay: 100, + description: 'Dark mode prompt', + }, + { + pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, + response: '\r', + delay: 100, + description: 'Auth method prompt', + }, + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + openai: { + command: 'codex', + args: ['login'], + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.codex/credentials.json', + displayName: 'Codex', + waitTimeout: 30000, + prompts: [ + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + google: { + command: 'gemini', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Gemini', + waitTimeout: 30000, + prompts: [ + { + pattern: /login\s*with\s*google|google\s*account|choose.*auth/i, + response: '\r', + delay: 200, + description: 'Auth method selection', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + opencode: { + command: 'opencode', + args: ['auth', 'login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'OpenCode', + waitTimeout: 30000, + prompts: [ + { + pattern: /select.*provider|choose.*provider|which.*provider/i, + response: '\r', + delay: 200, + description: 'Provider selection', + }, + { + pattern: /claude\s*pro|anthropic|select.*auth/i, + response: '\r', + delay: 200, + description: 'Auth type selection', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + droid: { + command: 'droid', + args: ['--login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Droid', + waitTimeout: 30000, + prompts: [ + { + pattern: /sign\s*in|log\s*in|authenticate/i, + response: '\r', + delay: 200, + description: 'Login prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, +}; + +/** + * Auth session state + */ +interface AuthSession { + id: string; + provider: string; + status: 'starting' | 'waiting_auth' | 'success' | 'error'; + authUrl?: string; + token?: string; + error?: string; + output: string; + promptsHandled: string[]; + createdAt: Date; + process?: pty.IPty; +} + +// Active sessions +const sessions = new Map(); + +// Clean up old sessions periodically +setInterval(() => { + const now = Date.now(); + for (const [id, session] of sessions) { + if (now - session.createdAt.getTime() > 10 * 60 * 1000) { + if (session.process) { + try { + session.process.kill(); + } catch { + // Process may already be dead + } + } + sessions.delete(id); + } + } +}, 60000); + +/** + * Strip ANSI escape codes from text + */ +function stripAnsiCodes(text: string): string { + // eslint-disable-next-line no-control-regex + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +/** + * Check if text matches any success pattern + */ +function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { + const cleanText = stripAnsiCodes(text).toLowerCase(); + return patterns.some((p) => p.test(cleanText)); +} + +/** + * Find matching prompt handler + */ +function findMatchingPrompt( + text: string, + prompts: PromptHandler[], + respondedPrompts: Set +): PromptHandler | null { + const cleanText = stripAnsiCodes(text); + for (const prompt of prompts) { + if (respondedPrompts.has(prompt.description)) continue; + if (prompt.pattern.test(cleanText)) { + return prompt; + } + } + return null; +} + +/** + * Start CLI auth flow + */ +export function startCLIAuth(provider: string): AuthSession { + const config = CLI_AUTH_CONFIG[provider]; + if (!config) { + throw new Error(`Unknown provider: ${provider}`); + } + + const sessionId = crypto.randomUUID(); + const session: AuthSession = { + id: sessionId, + provider, + status: 'starting', + output: '', + promptsHandled: [], + createdAt: new Date(), + }; + sessions.set(sessionId, session); + + const respondedPrompts = new Set(); + + try { + const proc = pty.spawn(config.command, config.args, { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: process.cwd(), + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + BROWSER: 'echo', + DISPLAY: '', + } as Record, + }); + + session.process = proc; + + // Timeout handler + const timeout = setTimeout(() => { + if (session.status === 'starting' || session.status === 'waiting_auth') { + proc.kill(); + session.status = 'error'; + session.error = 'Timeout waiting for auth completion'; + } + }, config.waitTimeout + 60000); // Extra time for user to complete OAuth + + proc.onData((data: string) => { + session.output += data; + + // Handle prompts + const matchingPrompt = findMatchingPrompt(data, config.prompts, respondedPrompts); + if (matchingPrompt) { + respondedPrompts.add(matchingPrompt.description); + session.promptsHandled.push(matchingPrompt.description); + logger.info('Auto-responding to prompt', { description: matchingPrompt.description }); + + const delay = matchingPrompt.delay ?? 100; + setTimeout(() => { + try { + proc.write(matchingPrompt.response); + } catch { + // Process may have exited + } + }, delay); + } + + // Extract auth URL + const cleanText = stripAnsiCodes(data); + const match = cleanText.match(config.urlPattern); + if (match && match[1] && !session.authUrl) { + session.authUrl = match[1]; + session.status = 'waiting_auth'; + logger.info('Auth URL captured', { provider, url: session.authUrl }); + } + + // Check for success + if (matchesSuccessPattern(data, config.successPatterns)) { + session.status = 'success'; + } + }); + + proc.onExit(async ({ exitCode }) => { + clearTimeout(timeout); + logger.info('CLI process exited', { provider, exitCode }); + + // Try to extract credentials + if (session.authUrl || exitCode === 0) { + try { + const token = await extractCredentials(provider, config); + if (token) { + session.token = token; + session.status = 'success'; + } + } catch (err) { + logger.error('Failed to extract credentials', { error: String(err) }); + } + } + + if (!session.authUrl && !session.token && session.status !== 'error') { + session.status = 'error'; + session.error = 'CLI exited without auth URL or credentials'; + } + }); + } catch (err) { + session.status = 'error'; + session.error = err instanceof Error ? err.message : 'Failed to spawn CLI'; + logger.error('Failed to start CLI auth', { error: session.error }); + } + + return session; +} + +/** + * Get auth session status + */ +export function getAuthSession(sessionId: string): AuthSession | null { + return sessions.get(sessionId) || null; +} + +/** + * Cancel auth session + */ +export function cancelAuthSession(sessionId: string): boolean { + const session = sessions.get(sessionId); + if (!session) return false; + + if (session.process) { + try { + session.process.kill(); + } catch { + // Already dead + } + } + + sessions.delete(sessionId); + return true; +} + +/** + * Extract credentials from CLI credential file + */ +async function extractCredentials( + provider: string, + config: CLIAuthConfig +): Promise { + if (!config.credentialPath) return null; + + try { + const credPath = config.credentialPath.replace('~', os.homedir()); + const content = await fs.readFile(credPath, 'utf8'); + const creds = JSON.parse(content); + + // Extract token based on provider + if (provider === 'anthropic') { + return creds.oauth_token || creds.access_token || creds.api_key; + } else if (provider === 'openai') { + return creds.token || creds.access_token || creds.api_key; + } + + return creds.token || creds.access_token || creds.api_key || null; + } catch { + return null; + } +} + +/** + * Get supported providers + */ +export function getSupportedProviders(): { id: string; displayName: string }[] { + return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ + id, + displayName: config.displayName, + })); +} From 89cb490da1cef50c90716069d93f769f4d33e4ea Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 16:06:59 +0000 Subject: [PATCH 056/103] Add workspace auto-provision and status indicator - Auto-provision workspace when GitHub App OAuth repos are connected - Add workspace status polling endpoints (summary, primary, wakeup) - Add WorkspaceStatusIndicator component for dashboard - Add useWorkspaceStatus hook for status monitoring and auto-wakeup - Frontend shows workspace status with visual indicators: - Running (green): Active and ready - Stopped (amber): Idle, can wake up on demand - Provisioning (cyan): Being created - Error (red): Has an issue --- src/cloud/api/nango-auth.ts | 56 +++ src/cloud/api/workspaces.ts | 188 +++++++++ src/dashboard/lib/cloudApi.ts | 84 ++++ .../WorkspaceStatusIndicator.tsx | 367 ++++++++++++++++++ src/dashboard/react-components/hooks/index.ts | 6 + .../hooks/useWorkspaceStatus.ts | 223 +++++++++++ 6 files changed, 924 insertions(+) create mode 100644 src/dashboard/react-components/WorkspaceStatusIndicator.tsx create mode 100644 src/dashboard/react-components/hooks/useWorkspaceStatus.ts diff --git a/src/cloud/api/nango-auth.ts b/src/cloud/api/nango-auth.ts index c50e93fd..ca4f32b5 100644 --- a/src/cloud/api/nango-auth.ts +++ b/src/cloud/api/nango-auth.ts @@ -11,6 +11,7 @@ import { randomUUID } from 'crypto'; import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; import { nangoService, NANGO_INTEGRATIONS } from '../services/nango.js'; +import { getProvisioner } from '../provisioner/index.js'; export const nangoAuthRouter = Router(); @@ -150,6 +151,10 @@ nangoAuthRouter.get('/repo-status/:connectionId', requireAuth, async (req: Reque return res.json({ ready: false }); } + // Check workspace status for frontend visibility + const workspaces = await db.workspaces.findByUserId(userId); + const primaryWorkspace = workspaces[0]; + res.json({ ready: true, repos: reposFromConnection.map(r => ({ @@ -158,6 +163,13 @@ nangoAuthRouter.get('/repo-status/:connectionId', requireAuth, async (req: Reque isPrivate: r.isPrivate, defaultBranch: r.defaultBranch, })), + workspace: primaryWorkspace ? { + id: primaryWorkspace.id, + name: primaryWorkspace.name, + status: primaryWorkspace.status, + publicUrl: primaryWorkspace.publicUrl, + } : null, + workspaceProvisioning: primaryWorkspace?.status === 'provisioning', }); } catch (error) { console.error('Error checking repo status:', error); @@ -409,6 +421,11 @@ async function handleRepoAuthWebhook( console.log(`[nango-webhook] Synced ${repos.length} repos for ${user.githubUsername} (installation: ${githubInstallationId || 'unknown'})`); + // Auto-provision a workspace if user doesn't have one + if (repos.length > 0) { + await autoProvisionWorkspaceIfNeeded(user.id, user.githubUsername || 'user', repos.map(r => r.full_name)); + } + } catch (error: unknown) { const err = error as { message?: string }; if (err.message?.includes('403')) { @@ -420,3 +437,42 @@ async function handleRepoAuthWebhook( } } } + +/** + * Auto-provision a workspace for the user if they don't have one + * This is called after repos are connected to provide immediate workspace access + */ +async function autoProvisionWorkspaceIfNeeded( + userId: string, + username: string, + repositories: string[] +): Promise { + try { + // Check if user already has a workspace + const existingWorkspaces = await db.workspaces.findByUserId(userId); + if (existingWorkspaces.length > 0) { + console.log(`[auto-provision] User ${username} already has ${existingWorkspaces.length} workspace(s), skipping auto-provision`); + return; + } + + console.log(`[auto-provision] Starting workspace provision for ${username} with ${repositories.length} repos`); + + const provisioner = getProvisioner(); + const result = await provisioner.provision({ + userId, + name: `${username}'s Workspace`, + providers: [], // No AI providers yet - user can connect them later + repositories, + }); + + if (result.status === 'error') { + console.error(`[auto-provision] Failed to provision workspace for ${username}:`, result.error); + return; + } + + console.log(`[auto-provision] Workspace ${result.workspaceId} provisioned for ${username} (status: ${result.status})`); + } catch (error) { + console.error(`[auto-provision] Error provisioning workspace for ${username}:`, error); + // Non-fatal - user can still manually create a workspace + } +} diff --git a/src/cloud/api/workspaces.ts b/src/cloud/api/workspaces.ts index b2fd0256..3da92a52 100644 --- a/src/cloud/api/workspaces.ts +++ b/src/cloud/api/workspaces.ts @@ -729,3 +729,191 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R res.status(500).json({ error: 'Failed to provision workspace' }); } }); + +/** + * GET /api/workspaces/summary + * Get summary of all user workspaces for dashboard status indicator + */ +workspacesRouter.get('/summary', async (req: Request, res: Response) => { + const userId = req.session.userId!; + + try { + const workspaces = await db.workspaces.findByUserId(userId); + const provisioner = getProvisioner(); + + // Get live status for each workspace + const workspaceSummaries = await Promise.all( + workspaces.map(async (w) => { + let liveStatus = w.status; + try { + liveStatus = await provisioner.getStatus(w.id); + } catch { + // Fall back to DB status + } + + return { + id: w.id, + name: w.name, + status: liveStatus, + publicUrl: w.publicUrl, + isStopped: liveStatus === 'stopped', + isRunning: liveStatus === 'running', + isProvisioning: liveStatus === 'provisioning', + hasError: liveStatus === 'error', + }; + }) + ); + + // Overall status for quick dashboard indicator + const hasRunningWorkspace = workspaceSummaries.some(w => w.isRunning); + const hasStoppedWorkspace = workspaceSummaries.some(w => w.isStopped); + const hasProvisioningWorkspace = workspaceSummaries.some(w => w.isProvisioning); + + res.json({ + workspaces: workspaceSummaries, + summary: { + total: workspaceSummaries.length, + running: workspaceSummaries.filter(w => w.isRunning).length, + stopped: workspaceSummaries.filter(w => w.isStopped).length, + provisioning: workspaceSummaries.filter(w => w.isProvisioning).length, + error: workspaceSummaries.filter(w => w.hasError).length, + }, + overallStatus: hasRunningWorkspace + ? 'ready' + : hasProvisioningWorkspace + ? 'provisioning' + : hasStoppedWorkspace + ? 'stopped' + : workspaceSummaries.length === 0 + ? 'none' + : 'error', + }); + } catch (error) { + console.error('Error getting workspace summary:', error); + res.status(500).json({ error: 'Failed to get workspace summary' }); + } +}); + +/** + * POST /api/workspaces/:id/wakeup + * Check if workspace is stopped and auto-restart if needed + * Returns status and whether a restart was triggered + */ +workspacesRouter.post('/:id/wakeup', async (req: Request, res: Response) => { + const userId = req.session.userId!; + const { id } = req.params; + + try { + const workspace = await db.workspaces.findById(id); + + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + if (workspace.userId !== userId) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + const provisioner = getProvisioner(); + const currentStatus = await provisioner.getStatus(id); + + // If already running, no action needed + if (currentStatus === 'running') { + return res.json({ + status: 'running', + wasRestarted: false, + message: 'Workspace is already running', + publicUrl: workspace.publicUrl, + }); + } + + // If stopped, trigger restart + if (currentStatus === 'stopped') { + await provisioner.restart(id); + return res.json({ + status: 'starting', + wasRestarted: true, + message: 'Workspace is starting up. This typically takes 15-30 seconds.', + estimatedStartTime: 30, // seconds + publicUrl: workspace.publicUrl, + }); + } + + // Other states (provisioning, error) + res.json({ + status: currentStatus, + wasRestarted: false, + message: currentStatus === 'provisioning' + ? 'Workspace is still being provisioned' + : 'Workspace is in an error state. Please check the dashboard.', + publicUrl: workspace.publicUrl, + }); + } catch (error) { + console.error('Error waking up workspace:', error); + res.status(500).json({ error: 'Failed to wake up workspace' }); + } +}); + +/** + * GET /api/workspaces/primary + * Get the user's primary workspace (first/default) with live status + * Used by dashboard to show quick status indicator + */ +workspacesRouter.get('/primary', async (req: Request, res: Response) => { + const userId = req.session.userId!; + + try { + const workspaces = await db.workspaces.findByUserId(userId); + + if (workspaces.length === 0) { + return res.json({ + exists: false, + message: 'No workspace found. Connect a repository to auto-provision one.', + }); + } + + const primary = workspaces[0]; + const provisioner = getProvisioner(); + + let liveStatus = primary.status; + try { + liveStatus = await provisioner.getStatus(primary.id); + } catch { + // Fall back to DB status + } + + res.json({ + exists: true, + workspace: { + id: primary.id, + name: primary.name, + status: liveStatus, + publicUrl: primary.publicUrl, + isStopped: liveStatus === 'stopped', + isRunning: liveStatus === 'running', + isProvisioning: liveStatus === 'provisioning', + hasError: liveStatus === 'error', + config: { + providers: primary.config.providers || [], + repositories: primary.config.repositories || [], + }, + }, + // Quick messages for UI + statusMessage: liveStatus === 'running' + ? 'Workspace is running' + : liveStatus === 'stopped' + ? 'Workspace is idle (will start automatically when needed)' + : liveStatus === 'provisioning' + ? 'Workspace is being provisioned...' + : 'Workspace has an error', + actionNeeded: liveStatus === 'stopped' + ? 'wakeup' + : liveStatus === 'error' + ? 'check_error' + : null, + }); + } catch (error) { + console.error('Error getting primary workspace:', error); + res.status(500).json({ error: 'Failed to get primary workspace' }); + } +}); diff --git a/src/dashboard/lib/cloudApi.ts b/src/dashboard/lib/cloudApi.ts index 5b736e00..70ac46cb 100644 --- a/src/dashboard/lib/cloudApi.ts +++ b/src/dashboard/lib/cloudApi.ts @@ -340,6 +340,90 @@ export const cloudApi = { }); }, + /** + * Get primary workspace with live status + */ + async getPrimaryWorkspace() { + return cloudFetch<{ + exists: boolean; + message?: string; + workspace?: { + id: string; + name: string; + status: string; + publicUrl?: string; + isStopped: boolean; + isRunning: boolean; + isProvisioning: boolean; + hasError: boolean; + config: { + providers: string[]; + repositories: string[]; + }; + }; + statusMessage: string; + actionNeeded?: 'wakeup' | 'check_error' | null; + }>('/api/workspaces/primary'); + }, + + /** + * Get workspace summary (all workspaces with status) + */ + async getWorkspaceSummary() { + return cloudFetch<{ + workspaces: Array<{ + id: string; + name: string; + status: string; + publicUrl?: string; + isStopped: boolean; + isRunning: boolean; + isProvisioning: boolean; + hasError: boolean; + }>; + summary: { + total: number; + running: number; + stopped: number; + provisioning: number; + error: number; + }; + overallStatus: 'ready' | 'provisioning' | 'stopped' | 'none' | 'error'; + }>('/api/workspaces/summary'); + }, + + /** + * Get workspace status (live polling from compute provider) + */ + async getWorkspaceStatus(id: string) { + return cloudFetch<{ status: string }>(`/api/workspaces/${encodeURIComponent(id)}/status`); + }, + + /** + * Wake up a stopped workspace + */ + async wakeupWorkspace(id: string) { + return cloudFetch<{ + status: string; + wasRestarted: boolean; + message: string; + estimatedStartTime?: number; + publicUrl?: string; + }>(`/api/workspaces/${encodeURIComponent(id)}/wakeup`, { + method: 'POST', + }); + }, + + /** + * Restart a workspace + */ + async restartWorkspace(id: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(id)}/restart`, + { method: 'POST' } + ); + }, + // ===== Provider API ===== /** diff --git a/src/dashboard/react-components/WorkspaceStatusIndicator.tsx b/src/dashboard/react-components/WorkspaceStatusIndicator.tsx new file mode 100644 index 00000000..c4097442 --- /dev/null +++ b/src/dashboard/react-components/WorkspaceStatusIndicator.tsx @@ -0,0 +1,367 @@ +/** + * Workspace Status Indicator + * + * Shows workspace status in the dashboard with visual indicators: + * - Running (green): Workspace is active and ready + * - Stopped (amber): Workspace is idle, can be woken up + * - Provisioning (cyan): Workspace is being created + * - Error (red): Workspace has an issue + * - None (gray): No workspace exists + */ + +import React, { useCallback, useState } from 'react'; +import { useWorkspaceStatus } from './hooks/useWorkspaceStatus'; + +export interface WorkspaceStatusIndicatorProps { + /** Show expanded view with details (default: false) */ + expanded?: boolean; + /** Auto-wakeup when workspace is stopped (default: false) */ + autoWakeup?: boolean; + /** Callback when wakeup is triggered */ + onWakeup?: () => void; + /** Callback when status changes */ + onStatusChange?: (status: string) => void; + /** Custom class name */ + className?: string; +} + +export function WorkspaceStatusIndicator({ + expanded = false, + autoWakeup = false, + onWakeup, + onStatusChange, + className = '', +}: WorkspaceStatusIndicatorProps) { + const [showToast, setShowToast] = useState(false); + const [toastMessage, setToastMessage] = useState(''); + + const { + workspace, + exists, + isLoading, + isWakingUp, + statusMessage, + actionNeeded, + wakeup, + } = useWorkspaceStatus({ + autoWakeup, + onStatusChange: (status, wasRestarted) => { + onStatusChange?.(status); + if (wasRestarted) { + setToastMessage('Workspace is starting up...'); + setShowToast(true); + setTimeout(() => setShowToast(false), 5000); + } else if (status === 'running') { + setToastMessage('Workspace is ready!'); + setShowToast(true); + setTimeout(() => setShowToast(false), 3000); + } + }, + }); + + const handleWakeup = useCallback(async () => { + const result = await wakeup(); + if (result.success) { + onWakeup?.(); + setToastMessage(result.message); + setShowToast(true); + setTimeout(() => setShowToast(false), 5000); + } + }, [wakeup, onWakeup]); + + // Get status color and icon + const getStatusConfig = () => { + if (!exists) { + return { + color: 'text-text-muted', + bgColor: 'bg-bg-tertiary', + borderColor: 'border-border-subtle', + icon: , + label: 'No workspace', + pulseColor: null, + }; + } + + if (isLoading && !workspace) { + return { + color: 'text-text-muted', + bgColor: 'bg-bg-tertiary', + borderColor: 'border-border-subtle', + icon: , + label: 'Loading...', + pulseColor: null, + }; + } + + if (workspace?.isRunning) { + return { + color: 'text-success', + bgColor: 'bg-success/10', + borderColor: 'border-success/30', + icon: , + label: 'Running', + pulseColor: 'bg-success', + }; + } + + if (workspace?.isStopped) { + return { + color: 'text-amber-400', + bgColor: 'bg-amber-400/10', + borderColor: 'border-amber-400/30', + icon: , + label: 'Stopped', + pulseColor: null, + }; + } + + if (workspace?.isProvisioning || isWakingUp) { + return { + color: 'text-accent-cyan', + bgColor: 'bg-accent-cyan/10', + borderColor: 'border-accent-cyan/30', + icon: , + label: isWakingUp ? 'Starting...' : 'Provisioning', + pulseColor: 'bg-accent-cyan', + }; + } + + if (workspace?.hasError) { + return { + color: 'text-error', + bgColor: 'bg-error/10', + borderColor: 'border-error/30', + icon: , + label: 'Error', + pulseColor: null, + }; + } + + return { + color: 'text-text-muted', + bgColor: 'bg-bg-tertiary', + borderColor: 'border-border-subtle', + icon: , + label: 'Unknown', + pulseColor: null, + }; + }; + + const config = getStatusConfig(); + + // Compact indicator (for header) + if (!expanded) { + return ( +
+
+ {config.icon} + + {config.label} + + {config.pulseColor && ( + + )} +
+ + {/* Wakeup button for stopped state */} + {actionNeeded === 'wakeup' && !isWakingUp && ( + + )} + + {/* Toast notification */} + {showToast && ( +
+ {toastMessage} +
+ )} +
+ ); + } + + // Expanded view (for sidebar or dedicated panel) + return ( +
+
+
+ {config.icon} + + Workspace Status + +
+ {config.pulseColor && ( + + )} +
+ +
+
+ Name + + {workspace?.name || 'None'} + +
+ +
+ Status + + {config.label} + +
+ + {statusMessage && ( +

{statusMessage}

+ )} + + {/* Action buttons */} + {actionNeeded === 'wakeup' && !isWakingUp && ( + + )} + + {actionNeeded === 'check_error' && ( + + View error details + + )} +
+ + {/* Toast notification */} + {showToast && ( +
+ {toastMessage} +
+ )} +
+ ); +} + +// Icons +function RunningIcon() { + return ( + + + + ); +} + +function StoppedIcon() { + return ( + + + + ); +} + +function ProvisioningIcon() { + return ( + + + + ); +} + +function ErrorIcon() { + return ( + + + + + + ); +} + +function NoWorkspaceIcon() { + return ( + + + + + + ); +} + +function LoadingIcon() { + return ( + + + + + ); +} diff --git a/src/dashboard/react-components/hooks/index.ts b/src/dashboard/react-components/hooks/index.ts index cb77c719..549b64a8 100644 --- a/src/dashboard/react-components/hooks/index.ts +++ b/src/dashboard/react-components/hooks/index.ts @@ -27,3 +27,9 @@ export { type UseRecentReposReturn, type RecentRepo, } from './useRecentRepos'; +export { + useWorkspaceStatus, + type UseWorkspaceStatusOptions, + type UseWorkspaceStatusReturn, + type WorkspaceStatus, +} from './useWorkspaceStatus'; diff --git a/src/dashboard/react-components/hooks/useWorkspaceStatus.ts b/src/dashboard/react-components/hooks/useWorkspaceStatus.ts new file mode 100644 index 00000000..d4b70315 --- /dev/null +++ b/src/dashboard/react-components/hooks/useWorkspaceStatus.ts @@ -0,0 +1,223 @@ +/** + * useWorkspaceStatus Hook + * + * React hook for monitoring workspace status with auto-wakeup capability. + * Polls for status updates and can automatically restart stopped workspaces. + */ + +import { useState, useEffect, useCallback, useRef } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface WorkspaceStatus { + id: string; + name: string; + status: string; + publicUrl?: string; + isStopped: boolean; + isRunning: boolean; + isProvisioning: boolean; + hasError: boolean; + config: { + providers: string[]; + repositories: string[]; + }; +} + +export interface UseWorkspaceStatusOptions { + /** Poll for status updates (default: true) */ + autoRefresh?: boolean; + /** Interval to poll for status in ms (default: 30000) */ + refreshInterval?: number; + /** Auto-wakeup when workspace is stopped (default: false) */ + autoWakeup?: boolean; + /** Callback when workspace status changes */ + onStatusChange?: (status: string, wasRestarted: boolean) => void; +} + +export interface UseWorkspaceStatusReturn { + /** Current workspace data (null if no workspace) */ + workspace: WorkspaceStatus | null; + /** Whether workspace exists */ + exists: boolean; + /** Whether the status check is in progress */ + isLoading: boolean; + /** Whether a wakeup is in progress */ + isWakingUp: boolean; + /** Status message for display */ + statusMessage: string; + /** Action needed (wakeup, check_error, etc) */ + actionNeeded: 'wakeup' | 'check_error' | null; + /** Error if any */ + error: string | null; + /** Manually refresh status */ + refresh: () => Promise; + /** Manually wake up workspace */ + wakeup: () => Promise<{ success: boolean; message: string }>; +} + +const DEFAULT_OPTIONS: Required = { + autoRefresh: true, + refreshInterval: 30000, // 30 seconds + autoWakeup: false, + onStatusChange: () => {}, +}; + +export function useWorkspaceStatus( + options: UseWorkspaceStatusOptions = {} +): UseWorkspaceStatusReturn { + const opts = { ...DEFAULT_OPTIONS, ...options }; + + const [workspace, setWorkspace] = useState(null); + const [exists, setExists] = useState(false); + const [isLoading, setIsLoading] = useState(true); + const [isWakingUp, setIsWakingUp] = useState(false); + const [statusMessage, setStatusMessage] = useState(''); + const [actionNeeded, setActionNeeded] = useState<'wakeup' | 'check_error' | null>(null); + const [error, setError] = useState(null); + + const intervalRef = useRef(null); + const mountedRef = useRef(true); + const previousStatusRef = useRef(null); + + // Fetch workspace status + const refresh = useCallback(async () => { + try { + setIsLoading(true); + setError(null); + + const result = await cloudApi.getPrimaryWorkspace(); + + if (!mountedRef.current) return; + + if (result.success) { + setExists(result.data.exists); + setStatusMessage(result.data.statusMessage); + setActionNeeded(result.data.actionNeeded || null); + + if (result.data.workspace) { + const ws = result.data.workspace; + setWorkspace(ws); + + // Check for status change + if (previousStatusRef.current && previousStatusRef.current !== ws.status) { + opts.onStatusChange(ws.status, false); + } + previousStatusRef.current = ws.status; + } else { + setWorkspace(null); + } + } else { + setError(result.error); + } + } catch (_e) { + if (mountedRef.current) { + setError('Failed to fetch workspace status'); + } + } finally { + if (mountedRef.current) { + setIsLoading(false); + } + } + }, [opts]); + + // Wake up workspace + const wakeup = useCallback(async (): Promise<{ success: boolean; message: string }> => { + if (!workspace?.id) { + return { success: false, message: 'No workspace to wake up' }; + } + + try { + setIsWakingUp(true); + setError(null); + + const result = await cloudApi.wakeupWorkspace(workspace.id); + + if (!mountedRef.current) { + return { success: false, message: 'Component unmounted' }; + } + + if (result.success) { + // Update local state + if (result.data.wasRestarted) { + setStatusMessage(result.data.message); + setActionNeeded(null); + opts.onStatusChange('starting', true); + + // Start more frequent polling to catch when workspace is ready + if (intervalRef.current) { + clearInterval(intervalRef.current); + } + intervalRef.current = setInterval(refresh, 5000); // Poll every 5s during startup + + // Reset to normal interval after 2 minutes + setTimeout(() => { + if (mountedRef.current && intervalRef.current) { + clearInterval(intervalRef.current); + if (opts.autoRefresh) { + intervalRef.current = setInterval(refresh, opts.refreshInterval); + } + } + }, 120000); + } + + return { success: true, message: result.data.message }; + } else { + setError(result.error); + return { success: false, message: result.error }; + } + } catch (e) { + const message = e instanceof Error ? e.message : 'Failed to wake up workspace'; + if (mountedRef.current) { + setError(message); + } + return { success: false, message }; + } finally { + if (mountedRef.current) { + setIsWakingUp(false); + } + } + }, [workspace?.id, refresh, opts]); + + // Initial fetch + useEffect(() => { + mountedRef.current = true; + refresh(); + + return () => { + mountedRef.current = false; + }; + }, [refresh]); + + // Auto-refresh polling + useEffect(() => { + if (!opts.autoRefresh) return; + + intervalRef.current = setInterval(refresh, opts.refreshInterval); + + return () => { + if (intervalRef.current) { + clearInterval(intervalRef.current); + intervalRef.current = null; + } + }; + }, [opts.autoRefresh, opts.refreshInterval, refresh]); + + // Auto-wakeup when workspace is stopped + useEffect(() => { + if (opts.autoWakeup && workspace?.isStopped && !isWakingUp) { + wakeup(); + } + }, [opts.autoWakeup, workspace?.isStopped, isWakingUp, wakeup]); + + return { + workspace, + exists, + isLoading, + isWakingUp, + statusMessage, + actionNeeded, + error, + refresh, + wakeup, + }; +} From 7f48e7ec51e8ad2438bc0ffe4a7d183d9320e34a Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 16:11:08 +0000 Subject: [PATCH 057/103] Fix route ordering and integrate WorkspaceStatusIndicator - Move /summary and /primary routes BEFORE /:id to prevent Express from matching them as workspace IDs (critical bug fix) - Remove duplicate route definitions - Add WorkspaceStatusIndicator to Header component - Shows workspace status with visual feedback and wakeup capability --- src/cloud/api/workspaces.ts | 318 +++++++----------- .../react-components/layout/Header.tsx | 7 + 2 files changed, 137 insertions(+), 188 deletions(-) diff --git a/src/cloud/api/workspaces.ts b/src/cloud/api/workspaces.ts index 3da92a52..8456bfc2 100644 --- a/src/cloud/api/workspaces.ts +++ b/src/cloud/api/workspaces.ts @@ -104,6 +104,136 @@ workspacesRouter.post('/', checkWorkspaceLimit, async (req: Request, res: Respon } }); +/** + * GET /api/workspaces/summary + * Get summary of all user workspaces for dashboard status indicator + * NOTE: This route MUST be before /:id to avoid being caught by parameterized route + */ +workspacesRouter.get('/summary', async (req: Request, res: Response) => { + const userId = req.session.userId!; + + try { + const workspaces = await db.workspaces.findByUserId(userId); + const provisioner = getProvisioner(); + + // Get live status for each workspace + const workspaceSummaries = await Promise.all( + workspaces.map(async (w) => { + let liveStatus = w.status; + try { + liveStatus = await provisioner.getStatus(w.id); + } catch { + // Fall back to DB status + } + + return { + id: w.id, + name: w.name, + status: liveStatus, + publicUrl: w.publicUrl, + isStopped: liveStatus === 'stopped', + isRunning: liveStatus === 'running', + isProvisioning: liveStatus === 'provisioning', + hasError: liveStatus === 'error', + }; + }) + ); + + // Overall status for quick dashboard indicator + const hasRunningWorkspace = workspaceSummaries.some(w => w.isRunning); + const hasStoppedWorkspace = workspaceSummaries.some(w => w.isStopped); + const hasProvisioningWorkspace = workspaceSummaries.some(w => w.isProvisioning); + + res.json({ + workspaces: workspaceSummaries, + summary: { + total: workspaceSummaries.length, + running: workspaceSummaries.filter(w => w.isRunning).length, + stopped: workspaceSummaries.filter(w => w.isStopped).length, + provisioning: workspaceSummaries.filter(w => w.isProvisioning).length, + error: workspaceSummaries.filter(w => w.hasError).length, + }, + overallStatus: hasRunningWorkspace + ? 'ready' + : hasProvisioningWorkspace + ? 'provisioning' + : hasStoppedWorkspace + ? 'stopped' + : workspaceSummaries.length === 0 + ? 'none' + : 'error', + }); + } catch (error) { + console.error('Error getting workspace summary:', error); + res.status(500).json({ error: 'Failed to get workspace summary' }); + } +}); + +/** + * GET /api/workspaces/primary + * Get the user's primary workspace (first/default) with live status + * Used by dashboard to show quick status indicator + * NOTE: This route MUST be before /:id to avoid being caught by parameterized route + */ +workspacesRouter.get('/primary', async (req: Request, res: Response) => { + const userId = req.session.userId!; + + try { + const workspaces = await db.workspaces.findByUserId(userId); + + if (workspaces.length === 0) { + return res.json({ + exists: false, + message: 'No workspace found. Connect a repository to auto-provision one.', + }); + } + + const primary = workspaces[0]; + const provisioner = getProvisioner(); + + let liveStatus = primary.status; + try { + liveStatus = await provisioner.getStatus(primary.id); + } catch { + // Fall back to DB status + } + + res.json({ + exists: true, + workspace: { + id: primary.id, + name: primary.name, + status: liveStatus, + publicUrl: primary.publicUrl, + isStopped: liveStatus === 'stopped', + isRunning: liveStatus === 'running', + isProvisioning: liveStatus === 'provisioning', + hasError: liveStatus === 'error', + config: { + providers: primary.config.providers || [], + repositories: primary.config.repositories || [], + }, + }, + // Quick messages for UI + statusMessage: liveStatus === 'running' + ? 'Workspace is running' + : liveStatus === 'stopped' + ? 'Workspace is idle (will start automatically when needed)' + : liveStatus === 'provisioning' + ? 'Workspace is being provisioned...' + : 'Workspace has an error', + actionNeeded: liveStatus === 'stopped' + ? 'wakeup' + : liveStatus === 'error' + ? 'check_error' + : null, + }); + } catch (error) { + console.error('Error getting primary workspace:', error); + res.status(500).json({ error: 'Failed to get primary workspace' }); + } +}); + /** * GET /api/workspaces/:id * Get workspace details @@ -729,191 +859,3 @@ workspacesRouter.post('/quick', checkWorkspaceLimit, async (req: Request, res: R res.status(500).json({ error: 'Failed to provision workspace' }); } }); - -/** - * GET /api/workspaces/summary - * Get summary of all user workspaces for dashboard status indicator - */ -workspacesRouter.get('/summary', async (req: Request, res: Response) => { - const userId = req.session.userId!; - - try { - const workspaces = await db.workspaces.findByUserId(userId); - const provisioner = getProvisioner(); - - // Get live status for each workspace - const workspaceSummaries = await Promise.all( - workspaces.map(async (w) => { - let liveStatus = w.status; - try { - liveStatus = await provisioner.getStatus(w.id); - } catch { - // Fall back to DB status - } - - return { - id: w.id, - name: w.name, - status: liveStatus, - publicUrl: w.publicUrl, - isStopped: liveStatus === 'stopped', - isRunning: liveStatus === 'running', - isProvisioning: liveStatus === 'provisioning', - hasError: liveStatus === 'error', - }; - }) - ); - - // Overall status for quick dashboard indicator - const hasRunningWorkspace = workspaceSummaries.some(w => w.isRunning); - const hasStoppedWorkspace = workspaceSummaries.some(w => w.isStopped); - const hasProvisioningWorkspace = workspaceSummaries.some(w => w.isProvisioning); - - res.json({ - workspaces: workspaceSummaries, - summary: { - total: workspaceSummaries.length, - running: workspaceSummaries.filter(w => w.isRunning).length, - stopped: workspaceSummaries.filter(w => w.isStopped).length, - provisioning: workspaceSummaries.filter(w => w.isProvisioning).length, - error: workspaceSummaries.filter(w => w.hasError).length, - }, - overallStatus: hasRunningWorkspace - ? 'ready' - : hasProvisioningWorkspace - ? 'provisioning' - : hasStoppedWorkspace - ? 'stopped' - : workspaceSummaries.length === 0 - ? 'none' - : 'error', - }); - } catch (error) { - console.error('Error getting workspace summary:', error); - res.status(500).json({ error: 'Failed to get workspace summary' }); - } -}); - -/** - * POST /api/workspaces/:id/wakeup - * Check if workspace is stopped and auto-restart if needed - * Returns status and whether a restart was triggered - */ -workspacesRouter.post('/:id/wakeup', async (req: Request, res: Response) => { - const userId = req.session.userId!; - const { id } = req.params; - - try { - const workspace = await db.workspaces.findById(id); - - if (!workspace) { - return res.status(404).json({ error: 'Workspace not found' }); - } - - if (workspace.userId !== userId) { - return res.status(403).json({ error: 'Unauthorized' }); - } - - const provisioner = getProvisioner(); - const currentStatus = await provisioner.getStatus(id); - - // If already running, no action needed - if (currentStatus === 'running') { - return res.json({ - status: 'running', - wasRestarted: false, - message: 'Workspace is already running', - publicUrl: workspace.publicUrl, - }); - } - - // If stopped, trigger restart - if (currentStatus === 'stopped') { - await provisioner.restart(id); - return res.json({ - status: 'starting', - wasRestarted: true, - message: 'Workspace is starting up. This typically takes 15-30 seconds.', - estimatedStartTime: 30, // seconds - publicUrl: workspace.publicUrl, - }); - } - - // Other states (provisioning, error) - res.json({ - status: currentStatus, - wasRestarted: false, - message: currentStatus === 'provisioning' - ? 'Workspace is still being provisioned' - : 'Workspace is in an error state. Please check the dashboard.', - publicUrl: workspace.publicUrl, - }); - } catch (error) { - console.error('Error waking up workspace:', error); - res.status(500).json({ error: 'Failed to wake up workspace' }); - } -}); - -/** - * GET /api/workspaces/primary - * Get the user's primary workspace (first/default) with live status - * Used by dashboard to show quick status indicator - */ -workspacesRouter.get('/primary', async (req: Request, res: Response) => { - const userId = req.session.userId!; - - try { - const workspaces = await db.workspaces.findByUserId(userId); - - if (workspaces.length === 0) { - return res.json({ - exists: false, - message: 'No workspace found. Connect a repository to auto-provision one.', - }); - } - - const primary = workspaces[0]; - const provisioner = getProvisioner(); - - let liveStatus = primary.status; - try { - liveStatus = await provisioner.getStatus(primary.id); - } catch { - // Fall back to DB status - } - - res.json({ - exists: true, - workspace: { - id: primary.id, - name: primary.name, - status: liveStatus, - publicUrl: primary.publicUrl, - isStopped: liveStatus === 'stopped', - isRunning: liveStatus === 'running', - isProvisioning: liveStatus === 'provisioning', - hasError: liveStatus === 'error', - config: { - providers: primary.config.providers || [], - repositories: primary.config.repositories || [], - }, - }, - // Quick messages for UI - statusMessage: liveStatus === 'running' - ? 'Workspace is running' - : liveStatus === 'stopped' - ? 'Workspace is idle (will start automatically when needed)' - : liveStatus === 'provisioning' - ? 'Workspace is being provisioned...' - : 'Workspace has an error', - actionNeeded: liveStatus === 'stopped' - ? 'wakeup' - : liveStatus === 'error' - ? 'check_error' - : null, - }); - } catch (error) { - console.error('Error getting primary workspace:', error); - res.status(500).json({ error: 'Failed to get primary workspace' }); - } -}); diff --git a/src/dashboard/react-components/layout/Header.tsx b/src/dashboard/react-components/layout/Header.tsx index 6d27247c..001a6737 100644 --- a/src/dashboard/react-components/layout/Header.tsx +++ b/src/dashboard/react-components/layout/Header.tsx @@ -10,6 +10,7 @@ import type { Agent, Project } from '../../types'; import { getAgentColor, getAgentInitials } from '../../lib/colors'; import { getAgentBreadcrumb } from '../../lib/hierarchy'; import { RepoContextHeader } from './RepoContextHeader'; +import { WorkspaceStatusIndicator } from '../WorkspaceStatusIndicator'; export interface HeaderProps { currentChannel: string; @@ -95,6 +96,12 @@ export function Header({
)} + {/* Workspace Status Indicator */} + + + {/* Divider after workspace status */} +
+
{isGeneral ? ( <> From c1622e63a8e9ee813453c70601348683ba910319 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 16:38:35 +0000 Subject: [PATCH 058/103] Add unified settings UI with mobile-responsive design Create comprehensive settings experience with tabbed navigation: - Dashboard settings: theme, compact mode, timestamps, sounds - Workspace settings: repos, AI providers (moved from dashboard), domains - Team settings: member management, invitations, role permissions - Billing settings: subscription plans, checkout, invoice history All panels are fully mobile-responsive with bottom tab navigation on small screens and card-based layouts replacing tables where appropriate. --- src/dashboard/lib/cloudApi.ts | 273 ++++ src/dashboard/react-components/App.tsx | 19 +- .../settings/BillingSettingsPanel.tsx | 542 ++++++++ .../settings/SettingsPage.tsx | 573 ++++++++ .../settings/TeamSettingsPanel.tsx | 460 ++++++ .../settings/WorkspaceSettingsPanel.tsx | 1235 +++++++++++++++++ .../react-components/settings/index.ts | 10 + 7 files changed, 3109 insertions(+), 3 deletions(-) create mode 100644 src/dashboard/react-components/settings/BillingSettingsPanel.tsx create mode 100644 src/dashboard/react-components/settings/SettingsPage.tsx create mode 100644 src/dashboard/react-components/settings/TeamSettingsPanel.tsx create mode 100644 src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx create mode 100644 src/dashboard/react-components/settings/index.ts diff --git a/src/dashboard/lib/cloudApi.ts b/src/dashboard/lib/cloudApi.ts index 70ac46cb..29ffa0ea 100644 --- a/src/dashboard/lib/cloudApi.ts +++ b/src/dashboard/lib/cloudApi.ts @@ -513,6 +513,279 @@ export const cloudApi = { ); }, + /** + * Update member role + */ + async updateMemberRole(workspaceId: string, memberId: string, role: string) { + return cloudFetch<{ success: boolean; role: string }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/members/${encodeURIComponent(memberId)}`, + { + method: 'PATCH', + body: JSON.stringify({ role }), + } + ); + }, + + /** + * Remove member from workspace + */ + async removeMember(workspaceId: string, memberId: string) { + return cloudFetch<{ success: boolean }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/members/${encodeURIComponent(memberId)}`, + { method: 'DELETE' } + ); + }, + + // ===== Billing API ===== + + /** + * Get all billing plans + */ + async getBillingPlans() { + return cloudFetch<{ + plans: Array<{ + tier: string; + name: string; + description: string; + price: { monthly: number; yearly: number }; + features: string[]; + limits: Record; + recommended?: boolean; + }>; + publishableKey: string; + }>('/api/billing/plans'); + }, + + /** + * Get current subscription status + */ + async getSubscription() { + return cloudFetch<{ + tier: string; + subscription: { + id: string; + tier: string; + status: string; + currentPeriodStart: string; + currentPeriodEnd: string; + cancelAtPeriodEnd: boolean; + interval: 'month' | 'year'; + } | null; + customer: { + id: string; + email: string; + name?: string; + paymentMethods: Array<{ + id: string; + type: string; + last4?: string; + brand?: string; + isDefault: boolean; + }>; + invoices: Array<{ + id: string; + number: string; + amount: number; + status: string; + date: string; + pdfUrl?: string; + }>; + } | null; + }>('/api/billing/subscription'); + }, + + /** + * Create checkout session for new subscription + */ + async createCheckoutSession(tier: string, interval: 'month' | 'year' = 'month') { + return cloudFetch<{ + sessionId: string; + checkoutUrl: string; + }>('/api/billing/checkout', { + method: 'POST', + body: JSON.stringify({ tier, interval }), + }); + }, + + /** + * Create billing portal session + */ + async createBillingPortal() { + return cloudFetch<{ + sessionId: string; + portalUrl: string; + }>('/api/billing/portal', { + method: 'POST', + }); + }, + + /** + * Change subscription tier + */ + async changeSubscription(tier: string, interval: 'month' | 'year' = 'month') { + return cloudFetch<{ + subscription: { + tier: string; + status: string; + }; + }>('/api/billing/change', { + method: 'POST', + body: JSON.stringify({ tier, interval }), + }); + }, + + /** + * Cancel subscription at period end + */ + async cancelSubscription() { + return cloudFetch<{ + subscription: { cancelAtPeriodEnd: boolean; currentPeriodEnd: string }; + message: string; + }>('/api/billing/cancel', { + method: 'POST', + }); + }, + + /** + * Resume cancelled subscription + */ + async resumeSubscription() { + return cloudFetch<{ + subscription: { cancelAtPeriodEnd: boolean }; + message: string; + }>('/api/billing/resume', { + method: 'POST', + }); + }, + + /** + * Get invoices + */ + async getInvoices() { + return cloudFetch<{ + invoices: Array<{ + id: string; + number: string; + amount: number; + status: string; + date: string; + pdfUrl?: string; + }>; + }>('/api/billing/invoices'); + }, + + // ===== Workspace Management API ===== + + /** + * Stop workspace + */ + async stopWorkspace(id: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(id)}/stop`, + { method: 'POST' } + ); + }, + + /** + * Delete workspace + */ + async deleteWorkspace(id: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(id)}`, + { method: 'DELETE' } + ); + }, + + /** + * Add repositories to workspace + */ + async addReposToWorkspace(workspaceId: string, repositoryIds: string[]) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/repos`, + { + method: 'POST', + body: JSON.stringify({ repositoryIds }), + } + ); + }, + + /** + * Set custom domain for workspace + */ + async setCustomDomain(workspaceId: string, domain: string) { + return cloudFetch<{ + success: boolean; + domain: string; + status: string; + instructions: { + type: string; + name: string; + value: string; + ttl: number; + }; + verifyEndpoint: string; + message: string; + }>(`/api/workspaces/${encodeURIComponent(workspaceId)}/domain`, { + method: 'POST', + body: JSON.stringify({ domain }), + }); + }, + + /** + * Verify custom domain + */ + async verifyCustomDomain(workspaceId: string) { + return cloudFetch<{ + success: boolean; + status: string; + domain?: string; + message?: string; + error?: string; + }>(`/api/workspaces/${encodeURIComponent(workspaceId)}/domain/verify`, { + method: 'POST', + }); + }, + + /** + * Remove custom domain + */ + async removeCustomDomain(workspaceId: string) { + return cloudFetch<{ success: boolean; message: string }>( + `/api/workspaces/${encodeURIComponent(workspaceId)}/domain`, + { method: 'DELETE' } + ); + }, + + /** + * Get detailed workspace info + */ + async getWorkspaceDetails(id: string) { + return cloudFetch<{ + id: string; + name: string; + status: string; + publicUrl?: string; + computeProvider: string; + config: { + providers: string[]; + repositories: string[]; + supervisorEnabled?: boolean; + maxAgents?: number; + }; + customDomain?: string; + customDomainStatus?: string; + errorMessage?: string; + repositories: Array<{ + id: string; + fullName: string; + syncStatus: string; + lastSyncedAt?: string; + }>; + createdAt: string; + updatedAt: string; + }>(`/api/workspaces/${encodeURIComponent(id)}`); + }, + // ===== GitHub App API ===== /** diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 5482a819..c8b882cc 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -15,6 +15,7 @@ import { CommandPalette, type TaskCreateRequest, PRIORITY_CONFIG } from './Comma import { SpawnModal, type SpawnConfig } from './SpawnModal'; import { NewConversationModal } from './NewConversationModal'; import { SettingsPanel, defaultSettings, type Settings } from './SettingsPanel'; +import { SettingsPage } from './settings'; import { ConversationHistory } from './ConversationHistory'; import { MentionAutocomplete, getMentionQuery, completeMentionInValue, type HumanUser } from './MentionAutocomplete'; import { FileAutocomplete, getFileQuery, completeFileInValue } from './FileAutocomplete'; @@ -121,6 +122,9 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { const [isSettingsOpen, setIsSettingsOpen] = useState(false); const [settings, setSettings] = useState(defaultSettings); + // Full settings page state + const [isFullSettingsOpen, setIsFullSettingsOpen] = useState(false); + // Conversation history panel state const [isHistoryOpen, setIsHistoryOpen] = useState(false); @@ -467,9 +471,9 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { setIsSpawnModalOpen(true); }, []); - // Handle settings click + // Handle settings click - opens full settings page const handleSettingsClick = useCallback(() => { - setIsSettingsOpen(true); + setIsFullSettingsOpen(true); }, []); // Handle history click @@ -745,6 +749,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { setIsSpawnModalOpen(false); setIsNewConversationOpen(false); setIsTrajectoryOpen(false); + setIsFullSettingsOpen(false); } }; @@ -986,7 +991,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { settings={settings} onSettingsChange={setSettings} onResetSettings={() => setSettings(defaultSettings)} - csrfToken={cloudSession?.csrfToken} + csrfToken={cloudSession?.csrfToken ?? undefined} /> {/* Add Workspace Modal */} @@ -1147,6 +1152,14 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { setIsCoordinatorOpen(false); }} /> + + {/* Full Settings Page */} + {isFullSettingsOpen && ( + setIsFullSettingsOpen(false)} + /> + )}
); } diff --git a/src/dashboard/react-components/settings/BillingSettingsPanel.tsx b/src/dashboard/react-components/settings/BillingSettingsPanel.tsx new file mode 100644 index 00000000..6b950a69 --- /dev/null +++ b/src/dashboard/react-components/settings/BillingSettingsPanel.tsx @@ -0,0 +1,542 @@ +/** + * Billing Settings Panel + * + * Manage subscription, view plans, and access billing portal. + */ + +import React, { useState, useEffect, useCallback } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface BillingSettingsPanelProps { + onUpgrade?: () => void; +} + +interface Plan { + tier: string; + name: string; + description: string; + price: { monthly: number; yearly: number }; + features: string[]; + limits: Record; + recommended?: boolean; +} + +interface Subscription { + id: string; + tier: string; + status: string; + currentPeriodStart: string; + currentPeriodEnd: string; + cancelAtPeriodEnd: boolean; + interval: 'month' | 'year'; +} + +interface Invoice { + id: string; + number: string; + amount: number; + status: string; + date: string; + pdfUrl?: string; +} + +const TIER_COLORS: Record = { + free: 'bg-bg-tertiary border-border-subtle text-text-muted', + pro: 'bg-accent-cyan/10 border-accent-cyan/30 text-accent-cyan', + team: 'bg-accent-purple/10 border-accent-purple/30 text-accent-purple', + enterprise: 'bg-amber-400/10 border-amber-400/30 text-amber-400', +}; + +export function BillingSettingsPanel({ onUpgrade }: BillingSettingsPanelProps) { + const [plans, setPlans] = useState([]); + const [currentTier, setCurrentTier] = useState('free'); + const [subscription, setSubscription] = useState(null); + const [invoices, setInvoices] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + const [successMessage, setSuccessMessage] = useState(null); + + // Billing interval toggle + const [billingInterval, setBillingInterval] = useState<'month' | 'year'>('month'); + + // Action loading states + const [checkoutLoading, setCheckoutLoading] = useState(null); + const [portalLoading, setPortalLoading] = useState(false); + const [cancelLoading, setCancelLoading] = useState(false); + const [resumeLoading, setResumeLoading] = useState(false); + + // Load billing data + useEffect(() => { + async function loadBillingData() { + setIsLoading(true); + setError(null); + + const [plansResult, subscriptionResult, invoicesResult] = await Promise.all([ + cloudApi.getBillingPlans(), + cloudApi.getSubscription(), + cloudApi.getInvoices(), + ]); + + if (plansResult.success) { + setPlans(plansResult.data.plans); + } + + if (subscriptionResult.success) { + setCurrentTier(subscriptionResult.data.tier); + setSubscription(subscriptionResult.data.subscription); + if (subscriptionResult.data.subscription?.interval) { + setBillingInterval(subscriptionResult.data.subscription.interval); + } + } + + if (invoicesResult.success) { + setInvoices(invoicesResult.data.invoices); + } + + if (!plansResult.success) { + setError(plansResult.error); + } + + setIsLoading(false); + } + + loadBillingData(); + }, []); + + // Start checkout for plan upgrade + const handleCheckout = useCallback(async (tier: string) => { + setCheckoutLoading(tier); + + const result = await cloudApi.createCheckoutSession(tier, billingInterval); + + if (result.success && result.data.checkoutUrl) { + // Redirect to Stripe checkout + window.location.href = result.data.checkoutUrl; + } else if (!result.success) { + setError(result.error); + setCheckoutLoading(null); + } + }, [billingInterval]); + + // Open billing portal + const handleOpenPortal = useCallback(async () => { + setPortalLoading(true); + + const result = await cloudApi.createBillingPortal(); + + if (result.success && result.data.portalUrl) { + window.location.href = result.data.portalUrl; + } else if (!result.success) { + setError(result.error); + } + + setPortalLoading(false); + }, []); + + // Cancel subscription + const handleCancel = useCallback(async () => { + const confirmed = window.confirm( + 'Are you sure you want to cancel your subscription? You will retain access until the end of your billing period.' + ); + if (!confirmed) return; + + setCancelLoading(true); + + const result = await cloudApi.cancelSubscription(); + + if (result.success) { + setSubscription((prev) => + prev ? { ...prev, cancelAtPeriodEnd: true } : null + ); + setSuccessMessage(result.data.message); + setTimeout(() => setSuccessMessage(null), 5000); + } else { + setError(result.error); + } + + setCancelLoading(false); + }, []); + + // Resume subscription + const handleResume = useCallback(async () => { + setResumeLoading(true); + + const result = await cloudApi.resumeSubscription(); + + if (result.success) { + setSubscription((prev) => + prev ? { ...prev, cancelAtPeriodEnd: false } : null + ); + setSuccessMessage(result.data.message); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setError(result.error); + } + + setResumeLoading(false); + }, []); + + if (isLoading) { + return ( +
+ + Loading billing information... +
+ ); + } + + return ( +
+ {/* Messages */} + {error && ( +
+ {error} + +
+ )} + + {successMessage && ( +
+ {successMessage} +
+ )} + + {/* Current Plan */} +
+

+ Current Plan +

+
+
+
+

+ {currentTier} Plan +

+ {subscription ? ( +

+ {subscription.cancelAtPeriodEnd ? ( + + Cancels on {new Date(subscription.currentPeriodEnd).toLocaleDateString()} + + ) : ( + <> + Renews on {new Date(subscription.currentPeriodEnd).toLocaleDateString()} + + ({subscription.interval === 'year' ? 'Yearly' : 'Monthly'}) + + + )} +

+ ) : ( +

+ Free tier - upgrade to unlock more features +

+ )} +
+ +
+ {subscription && !subscription.cancelAtPeriodEnd && ( + + )} + {subscription?.cancelAtPeriodEnd && ( + + )} + {subscription && ( + + )} +
+
+
+
+ + {/* Billing Interval Toggle */} +
+ + Monthly + + + + Yearly + (Save 20%) + +
+ + {/* Available Plans */} +
+

+ Available Plans +

+
+ {plans + .filter((p) => p.tier !== 'free') + .map((plan) => ( +
+ {plan.recommended && ( +
+ Most Popular +
+ )} + +

{plan.name}

+

{plan.description}

+ +
+ + ${billingInterval === 'year' ? plan.price.yearly : plan.price.monthly} + + + /{billingInterval === 'year' ? 'year' : 'month'} + +
+ +
    + {plan.features.slice(0, 5).map((feature, i) => ( +
  • + + {feature} +
  • + ))} +
+ + {currentTier === plan.tier ? ( + + ) : ( + + )} +
+ ))} +
+
+ + {/* Enterprise CTA */} +
+
+
+

Enterprise

+

+ Custom solutions for large teams with dedicated support, SLA, and custom integrations. +

+
+ + Contact Sales + +
+
+ + {/* Invoices */} + {invoices.length > 0 && ( +
+

+ Billing History +

+ + {/* Desktop Table */} +
+ + + + + + + + + + + + {invoices.map((invoice) => ( + + + + + + + + ))} + +
+ Invoice + + Date + + Amount + + Status + + +
+ {invoice.number} + + {new Date(invoice.date).toLocaleDateString()} + + ${(invoice.amount / 100).toFixed(2)} + + + {invoice.status} + + + {invoice.pdfUrl && ( + + Download + + )} +
+
+ + {/* Mobile Card Layout */} +
+ {invoices.map((invoice) => ( +
+
+ {invoice.number} + + {invoice.status} + +
+
+ {new Date(invoice.date).toLocaleDateString()} + ${(invoice.amount / 100).toFixed(2)} +
+ {invoice.pdfUrl && ( + + Download PDF + + )} +
+ ))} +
+
+ )} +
+ ); +} + +// Icons +function LoadingSpinner() { + return ( + + + + ); +} + +function CheckIcon({ className = '' }: { className?: string }) { + return ( + + + + ); +} diff --git a/src/dashboard/react-components/settings/SettingsPage.tsx b/src/dashboard/react-components/settings/SettingsPage.tsx new file mode 100644 index 00000000..2845d154 --- /dev/null +++ b/src/dashboard/react-components/settings/SettingsPage.tsx @@ -0,0 +1,573 @@ +/** + * Unified Settings Page + * + * Full-page settings view with tabbed navigation for: + * - Dashboard Settings (personal preferences) + * - Workspace Settings (repos, providers, domains) + * - Team Settings (members, invitations) + * - Billing Settings (subscription, plans) + * + * Design: Mission Control theme - deep space aesthetic with cyan/purple accents + */ + +import React, { useState, useEffect } from 'react'; +import { cloudApi, getCsrfToken } from '../../lib/cloudApi'; +import { WorkspaceSettingsPanel } from './WorkspaceSettingsPanel'; +import { TeamSettingsPanel } from './TeamSettingsPanel'; +import { BillingSettingsPanel } from './BillingSettingsPanel'; + +export interface SettingsPageProps { + /** Current user ID for team membership checks */ + currentUserId?: string; + /** Initial tab to show */ + initialTab?: 'dashboard' | 'workspace' | 'team' | 'billing'; + /** Callback when settings page is closed */ + onClose?: () => void; +} + +interface WorkspaceSummary { + id: string; + name: string; + status: string; +} + +interface DashboardSettings { + theme: 'dark' | 'light' | 'system'; + compactMode: boolean; + showTimestamps: boolean; + soundEnabled: boolean; + notificationsEnabled: boolean; + autoScrollMessages: boolean; +} + +const DEFAULT_DASHBOARD_SETTINGS: DashboardSettings = { + theme: 'dark', + compactMode: false, + showTimestamps: true, + soundEnabled: true, + notificationsEnabled: true, + autoScrollMessages: true, +}; + +export function SettingsPage({ + currentUserId, + initialTab = 'dashboard', + onClose, +}: SettingsPageProps) { + const [activeTab, setActiveTab] = useState<'dashboard' | 'workspace' | 'team' | 'billing'>(initialTab); + const [workspaces, setWorkspaces] = useState([]); + const [selectedWorkspaceId, setSelectedWorkspaceId] = useState(null); + const [isLoadingWorkspaces, setIsLoadingWorkspaces] = useState(true); + const [dashboardSettings, setDashboardSettings] = useState(DEFAULT_DASHBOARD_SETTINGS); + + // Load workspaces + useEffect(() => { + async function loadWorkspaces() { + setIsLoadingWorkspaces(true); + const result = await cloudApi.getWorkspaceSummary(); + if (result.success && result.data.workspaces.length > 0) { + setWorkspaces(result.data.workspaces); + setSelectedWorkspaceId(result.data.workspaces[0].id); + } + setIsLoadingWorkspaces(false); + } + loadWorkspaces(); + }, []); + + // Load dashboard settings from localStorage + useEffect(() => { + const saved = localStorage.getItem('dashboard-settings'); + if (saved) { + try { + setDashboardSettings({ ...DEFAULT_DASHBOARD_SETTINGS, ...JSON.parse(saved) }); + } catch { + // Use defaults + } + } + }, []); + + // Save dashboard settings + const updateDashboardSetting = ( + key: K, + value: DashboardSettings[K] + ) => { + const newSettings = { ...dashboardSettings, [key]: value }; + setDashboardSettings(newSettings); + localStorage.setItem('dashboard-settings', JSON.stringify(newSettings)); + + // Apply theme immediately + if (key === 'theme') { + document.documentElement.setAttribute('data-theme', value as string); + } + }; + + const tabs = [ + { id: 'dashboard', label: 'Dashboard', icon: }, + { id: 'workspace', label: 'Workspace', icon: }, + { id: 'team', label: 'Team', icon: }, + { id: 'billing', label: 'Billing', icon: }, + ] as const; + + return ( +
+ {/* Background Pattern */} +
+
+
+
+ +
+ {/* Header */} +
+
+
+ +
+
+

Settings

+

Manage your workspace and preferences

+
+
+ + +
+ + {/* Mobile Tab Navigation */} +
+ {tabs.map((tab) => ( + + ))} +
+ + {/* Mobile Workspace Selector */} + {(activeTab === 'workspace' || activeTab === 'team') && workspaces.length > 1 && ( +
+ +
+ )} + + {/* Content */} +
+ {/* Desktop Sidebar Navigation */} + + + {/* Main Content */} +
+
+ {/* Dashboard Settings */} + {activeTab === 'dashboard' && ( +
+ + + {/* Appearance */} + }> + + + + + + updateDashboardSetting('compactMode', v)} + /> + + + + updateDashboardSetting('showTimestamps', v)} + /> + + + + {/* Notifications */} + }> + + updateDashboardSetting('soundEnabled', v)} + /> + + + + updateDashboardSetting('notificationsEnabled', v)} + /> + + + + {/* Behavior */} + }> + + updateDashboardSetting('autoScrollMessages', v)} + /> + + +
+ )} + + {/* Workspace Settings */} + {activeTab === 'workspace' && ( + <> + {isLoadingWorkspaces ? ( +
+
+
+
+ Loading workspaces... +
+ ) : selectedWorkspaceId ? ( + + ) : ( + } + title="No Workspace" + description="Create a workspace to get started with Agent Relay." + action={ + + } + /> + )} + + )} + + {/* Team Settings */} + {activeTab === 'team' && ( + <> + {selectedWorkspaceId ? ( +
+ + +
+ ) : ( + } + title="No Workspace Selected" + description="Select a workspace to manage team members." + /> + )} + + )} + + {/* Billing Settings */} + {activeTab === 'billing' && ( +
+ + +
+ )} +
+
+
+
+
+ ); +} + +// Utility Components +function PageHeader({ title, subtitle }: { title: string; subtitle: string }) { + return ( +
+

{title}

+

{subtitle}

+
+ ); +} + +function SettingsSection({ + title, + icon, + children, +}: { + title: string; + icon: React.ReactNode; + children: React.ReactNode; +}) { + return ( +
+
+ {icon} +

{title}

+
+
{children}
+
+ ); +} + +function SettingRow({ + label, + description, + children, +}: { + label: string; + description: string; + children: React.ReactNode; +}) { + return ( +
+
+

{label}

+

{description}

+
+ {children} +
+ ); +} + +function Toggle({ + checked, + onChange, +}: { + checked: boolean; + onChange: (value: boolean) => void; +}) { + return ( + + ); +} + +function EmptyState({ + icon, + title, + description, + action, +}: { + icon: React.ReactNode; + title: string; + description: string; + action?: React.ReactNode; +}) { + return ( +
+
+ {icon} +
+

{title}

+

{description}

+ {action} +
+ ); +} + +// Icons +function SettingsIcon({ className = '' }: { className?: string }) { + return ( + + + + + ); +} + +function DashboardIcon() { + return ( + + + + + + + ); +} + +function WorkspaceIcon() { + return ( + + + + ); +} + +function TeamIcon() { + return ( + + + + + + + ); +} + +function BillingIcon() { + return ( + + + + + ); +} + +function CloseIcon() { + return ( + + + + + ); +} + +function PaletteIcon() { + return ( + + + + + + + + ); +} + +function BellIcon() { + return ( + + + + + ); +} diff --git a/src/dashboard/react-components/settings/TeamSettingsPanel.tsx b/src/dashboard/react-components/settings/TeamSettingsPanel.tsx new file mode 100644 index 00000000..a75a4021 --- /dev/null +++ b/src/dashboard/react-components/settings/TeamSettingsPanel.tsx @@ -0,0 +1,460 @@ +/** + * Team Settings Panel + * + * Manage workspace team members, invitations, and roles. + */ + +import React, { useState, useEffect, useCallback } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface TeamSettingsPanelProps { + workspaceId: string; + currentUserId?: string; +} + +interface Member { + id: string; + userId: string; + role: 'owner' | 'admin' | 'member' | 'viewer'; + isPending: boolean; + user?: { + githubUsername: string; + email?: string; + avatarUrl?: string; + }; +} + +interface PendingInvite { + id: string; + workspaceId: string; + workspaceName: string; + role: string; + invitedAt: string; + invitedBy: string; +} + +const ROLE_COLORS: Record = { + owner: 'bg-accent-purple/20 text-accent-purple', + admin: 'bg-accent-cyan/20 text-accent-cyan', + member: 'bg-success/20 text-success', + viewer: 'bg-bg-hover text-text-muted', +}; + +const ROLE_DESCRIPTIONS: Record = { + owner: 'Full access, can delete workspace and transfer ownership', + admin: 'Can manage members, settings, and all workspace features', + member: 'Can use workspace, spawn agents, and send messages', + viewer: 'Read-only access to workspace activity', +}; + +export function TeamSettingsPanel({ + workspaceId, + currentUserId, +}: TeamSettingsPanelProps) { + const [members, setMembers] = useState([]); + const [pendingInvites, setPendingInvites] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + const [successMessage, setSuccessMessage] = useState(null); + + // Invite form + const [showInviteForm, setShowInviteForm] = useState(false); + const [inviteUsername, setInviteUsername] = useState(''); + const [inviteRole, setInviteRole] = useState<'admin' | 'member' | 'viewer'>('member'); + const [inviteLoading, setInviteLoading] = useState(false); + const [inviteError, setInviteError] = useState(null); + + // Role change + const [changingRoleFor, setChangingRoleFor] = useState(null); + + // Load members + useEffect(() => { + async function loadMembers() { + setIsLoading(true); + setError(null); + + const [membersResult, invitesResult] = await Promise.all([ + cloudApi.getWorkspaceMembers(workspaceId), + cloudApi.getPendingInvites(), + ]); + + if (membersResult.success) { + setMembers(membersResult.data.members as Member[]); + } else { + setError(membersResult.error); + } + + if (invitesResult.success) { + // Filter to invites for this workspace + setPendingInvites( + invitesResult.data.invites.filter((i) => i.workspaceId === workspaceId) + ); + } + + setIsLoading(false); + } + + loadMembers(); + }, [workspaceId]); + + // Invite member + const handleInvite = useCallback(async () => { + if (!inviteUsername.trim()) { + setInviteError('Please enter a GitHub username'); + return; + } + + setInviteLoading(true); + setInviteError(null); + + const result = await cloudApi.inviteMember(workspaceId, inviteUsername.trim(), inviteRole); + + if (result.success) { + // Refresh members + const membersResult = await cloudApi.getWorkspaceMembers(workspaceId); + if (membersResult.success) { + setMembers(membersResult.data.members as Member[]); + } + setInviteUsername(''); + setShowInviteForm(false); + setSuccessMessage(`Invitation sent to ${inviteUsername}`); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setInviteError(result.error); + } + + setInviteLoading(false); + }, [workspaceId, inviteUsername, inviteRole]); + + // Update member role + const handleUpdateRole = useCallback(async (memberId: string, newRole: string) => { + setChangingRoleFor(memberId); + + const result = await cloudApi.updateMemberRole(workspaceId, memberId, newRole); + + if (result.success) { + setMembers((prev) => + prev.map((m) => (m.id === memberId ? { ...m, role: newRole as Member['role'] } : m)) + ); + setSuccessMessage('Role updated successfully'); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setError(result.error); + } + + setChangingRoleFor(null); + }, [workspaceId]); + + // Remove member + const handleRemoveMember = useCallback(async (member: Member) => { + const confirmed = window.confirm( + `Are you sure you want to remove ${member.user?.githubUsername || 'this member'} from the workspace?` + ); + if (!confirmed) return; + + const result = await cloudApi.removeMember(workspaceId, member.id); + + if (result.success) { + setMembers((prev) => prev.filter((m) => m.id !== member.id)); + setSuccessMessage('Member removed successfully'); + setTimeout(() => setSuccessMessage(null), 3000); + } else { + setError(result.error); + } + }, [workspaceId]); + + // Get current user's role + const currentUserRole = members.find((m) => m.userId === currentUserId)?.role; + const canManageMembers = currentUserRole === 'owner' || currentUserRole === 'admin'; + + if (isLoading) { + return ( +
+ + Loading team members... +
+ ); + } + + return ( +
+ {/* Header */} +
+
+

+ Team Members +

+

+ {members.length} member{members.length !== 1 ? 's' : ''} +

+
+ {canManageMembers && ( + + )} +
+ + {/* Messages */} + {error && ( +
+ {error} + +
+ )} + + {successMessage && ( +
+ {successMessage} +
+ )} + + {/* Invite Form */} + {showInviteForm && ( +
+

Invite New Member

+ + {inviteError && ( +
+ {inviteError} +
+ )} + +
+
+ + setInviteUsername(e.target.value)} + placeholder="username" + className="w-full px-3 py-2 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan" + /> +
+
+ + +
+
+ +

+ {ROLE_DESCRIPTIONS[inviteRole]} +

+ +
+ + +
+
+ )} + + {/* Members List */} +
+ {members.map((member) => ( +
+
+ {member.user?.avatarUrl ? ( + {member.user.githubUsername} + ) : ( +
+ {member.user?.githubUsername?.[0]?.toUpperCase() || '?'} +
+ )} +
+
+

+ {member.user?.githubUsername || 'Unknown User'} +

+ {member.isPending && ( + + Pending + + )} + {member.userId === currentUserId && ( + (you) + )} +
+ {member.user?.email && ( +

{member.user.email}

+ )} +
+
+ +
+ {canManageMembers && member.role !== 'owner' && member.userId !== currentUserId ? ( + + ) : ( + + {member.role.charAt(0).toUpperCase() + member.role.slice(1)} + + )} + + {canManageMembers && member.role !== 'owner' && member.userId !== currentUserId && ( + + )} +
+
+ ))} +
+ + {/* Pending Invites for Current User */} + {pendingInvites.length > 0 && ( +
+

+ Your Pending Invitations +

+
+ {pendingInvites.map((invite) => ( +
+
+

+ {invite.workspaceName} +

+

+ Invited by {invite.invitedBy} as {invite.role} +

+
+
+ + +
+
+ ))} +
+
+ )} + + {/* Role Permissions Info */} +
+

+ Role Permissions +

+
+ {Object.entries(ROLE_DESCRIPTIONS).map(([role, description]) => ( +
+ + {role.charAt(0).toUpperCase() + role.slice(1)} + +

{description}

+
+ ))} +
+
+
+ ); +} + +// Icons +function LoadingSpinner() { + return ( + + + + ); +} + +function PlusIcon() { + return ( + + + + + ); +} + +function TrashIcon() { + return ( + + + + + ); +} diff --git a/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx new file mode 100644 index 00000000..227c7e8d --- /dev/null +++ b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx @@ -0,0 +1,1235 @@ +/** + * Workspace Settings Panel + * + * Manage workspace configuration including repositories, + * AI providers, custom domains, and agent policies. + * + * Design: Mission Control theme with deep space aesthetic + */ + +import React, { useState, useEffect, useCallback } from 'react'; +import { cloudApi } from '../../lib/cloudApi'; + +export interface WorkspaceSettingsPanelProps { + workspaceId: string; + csrfToken?: string; + onClose?: () => void; +} + +interface WorkspaceDetails { + id: string; + name: string; + status: string; + publicUrl?: string; + computeProvider: string; + config: { + providers: string[]; + repositories: string[]; + supervisorEnabled?: boolean; + maxAgents?: number; + }; + customDomain?: string; + customDomainStatus?: string; + errorMessage?: string; + repositories: Array<{ + id: string; + fullName: string; + syncStatus: string; + lastSyncedAt?: string; + }>; + createdAt: string; + updatedAt: string; +} + +interface AvailableRepo { + id: string; + fullName: string; + isPrivate: boolean; + defaultBranch: string; + syncStatus: string; + hasNangoConnection: boolean; + lastSyncedAt?: string; +} + +interface AIProvider { + id: string; + name: string; + displayName: string; + description: string; + color: string; + cliCommand: string; + apiKeyUrl?: string; + apiKeyName?: string; + supportsOAuth?: boolean; + isConnected?: boolean; +} + +const AI_PROVIDERS: AIProvider[] = [ + { + id: 'anthropic', + name: 'Anthropic', + displayName: 'Claude', + description: 'Claude Code - recommended for code tasks', + color: '#D97757', + cliCommand: 'claude', + apiKeyUrl: 'https://console.anthropic.com/settings/keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'openai', + name: 'OpenAI', + displayName: 'Codex', + description: 'Codex - OpenAI coding assistant', + color: '#10A37F', + cliCommand: 'codex login', + apiKeyUrl: 'https://platform.openai.com/api-keys', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'google', + name: 'Google', + displayName: 'Gemini', + description: 'Gemini - Google AI coding assistant', + color: '#4285F4', + cliCommand: 'gemini', + apiKeyUrl: 'https://aistudio.google.com/app/apikey', + apiKeyName: 'API key', + supportsOAuth: true, + }, + { + id: 'opencode', + name: 'OpenCode', + displayName: 'OpenCode', + description: 'OpenCode - AI coding assistant', + color: '#00D4AA', + cliCommand: 'opencode', + supportsOAuth: true, + }, + { + id: 'droid', + name: 'Factory', + displayName: 'Droid', + description: 'Droid - Factory AI coding agent', + color: '#6366F1', + cliCommand: 'droid', + supportsOAuth: true, + }, +]; + +interface OAuthSession { + providerId: string; + sessionId: string; + authUrl?: string; + status: 'starting' | 'waiting_auth' | 'success' | 'error'; + error?: string; +} + +export function WorkspaceSettingsPanel({ + workspaceId, + csrfToken, + onClose, +}: WorkspaceSettingsPanelProps) { + const [workspace, setWorkspace] = useState(null); + const [availableRepos, setAvailableRepos] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + const [activeSection, setActiveSection] = useState<'general' | 'providers' | 'repos' | 'domain' | 'danger'>('general'); + + // Provider connection state + const [providerStatus, setProviderStatus] = useState>({}); + const [connectingProvider, setConnectingProvider] = useState(null); + const [apiKeyInput, setApiKeyInput] = useState(''); + const [providerError, setProviderError] = useState(null); + const [oauthSession, setOauthSession] = useState(null); + const [showApiKeyFallback, setShowApiKeyFallback] = useState>({}); + + // Custom domain form + const [customDomain, setCustomDomain] = useState(''); + const [domainLoading, setDomainLoading] = useState(false); + const [domainError, setDomainError] = useState(null); + const [domainInstructions, setDomainInstructions] = useState<{ + type: string; + name: string; + value: string; + ttl: number; + } | null>(null); + + // Load workspace details + useEffect(() => { + async function loadWorkspace() { + setIsLoading(true); + setError(null); + + const [wsResult, reposResult] = await Promise.all([ + cloudApi.getWorkspaceDetails(workspaceId), + cloudApi.getRepos(), + ]); + + if (wsResult.success) { + setWorkspace(wsResult.data); + if (wsResult.data.customDomain) { + setCustomDomain(wsResult.data.customDomain); + } + // Mark connected providers + const connected: Record = {}; + wsResult.data.config.providers.forEach((p) => { + connected[p] = true; + }); + setProviderStatus(connected); + } else { + setError(wsResult.error); + } + + if (reposResult.success) { + setAvailableRepos(reposResult.data.repositories); + } + + setIsLoading(false); + } + + loadWorkspace(); + }, [workspaceId]); + + // Start CLI-based OAuth flow for a provider + const startOAuthFlow = async (provider: AIProvider) => { + setProviderError(null); + setConnectingProvider(provider.id); + setOauthSession({ providerId: provider.id, sessionId: '', status: 'starting' }); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${provider.id}/start`, { + method: 'POST', + credentials: 'include', + headers, + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start authentication'); + } + + if (data.status === 'success' || data.alreadyAuthenticated) { + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + const session: OAuthSession = { + providerId: provider.id, + sessionId: data.sessionId, + authUrl: data.authUrl, + status: data.status || 'starting', + }; + setOauthSession(session); + + if (data.authUrl) { + openAuthPopup(data.authUrl, provider.displayName); + pollAuthStatus(provider.id, data.sessionId); + } else if (data.status === 'starting') { + pollAuthStatus(provider.id, data.sessionId); + } + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to start OAuth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + const openAuthPopup = (url: string, providerName: string) => { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + url, + `${providerName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + }; + + const pollAuthStatus = async (providerId: string, sessionId: string) => { + const maxAttempts = 60; + let attempts = 0; + + const poll = async () => { + if (attempts >= maxAttempts) { + setProviderError('Authentication timed out. Please try again.'); + setOauthSession(null); + setConnectingProvider(null); + return; + } + + try { + const res = await fetch(`/api/onboarding/cli/${providerId}/status/${sessionId}`, { + credentials: 'include', + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to check status'); + } + + if (data.status === 'success') { + await completeAuthFlow(providerId, sessionId); + return; + } else if (data.status === 'error') { + throw new Error(data.error || 'Authentication failed'); + } else if (data.status === 'waiting_auth' && data.authUrl && !oauthSession?.authUrl) { + setOauthSession(prev => prev ? { ...prev, authUrl: data.authUrl, status: 'waiting_auth' } : null); + openAuthPopup(data.authUrl, AI_PROVIDERS.find(p => p.id === providerId)?.displayName || 'Provider'); + } + + attempts++; + setTimeout(poll, 5000); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Auth check failed'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + poll(); + }; + + const completeAuthFlow = async (providerId: string, sessionId: string) => { + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${providerId}/complete/${sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to complete authentication'); + } + + setProviderStatus(prev => ({ ...prev, [providerId]: true })); + setOauthSession(null); + setConnectingProvider(null); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to complete auth'); + setOauthSession(null); + setConnectingProvider(null); + } + }; + + const cancelOAuthFlow = async () => { + if (oauthSession?.sessionId) { + try { + await fetch(`/api/onboarding/cli/${oauthSession.providerId}/cancel/${oauthSession.sessionId}`, { + method: 'POST', + credentials: 'include', + }); + } catch { + // Ignore cancel errors + } + } + setOauthSession(null); + setConnectingProvider(null); + }; + + const submitApiKey = async (provider: AIProvider) => { + if (!apiKeyInput.trim()) { + setProviderError('Please enter an API key'); + return; + } + + setProviderError(null); + setConnectingProvider(provider.id); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/token/${provider.id}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ token: apiKeyInput.trim() }), + }); + + if (!res.ok) { + const data = await res.json(); + throw new Error(data.error || 'Failed to connect'); + } + + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setApiKeyInput(''); + setConnectingProvider(null); + setShowApiKeyFallback(prev => ({ ...prev, [provider.id]: false })); + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to connect'); + setConnectingProvider(null); + } + }; + + // Restart workspace + const handleRestart = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm('Are you sure you want to restart this workspace?'); + if (!confirmed) return; + + const result = await cloudApi.restartWorkspace(workspace.id); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setError(result.error); + } + }, [workspace, workspaceId]); + + // Stop workspace + const handleStop = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm('Are you sure you want to stop this workspace?'); + if (!confirmed) return; + + const result = await cloudApi.stopWorkspace(workspace.id); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setError(result.error); + } + }, [workspace, workspaceId]); + + // Add repository to workspace + const handleAddRepo = useCallback(async (repoId: string) => { + if (!workspace) return; + + const result = await cloudApi.addReposToWorkspace(workspace.id, [repoId]); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setError(result.error); + } + }, [workspace, workspaceId]); + + // Set custom domain + const handleSetDomain = useCallback(async () => { + if (!workspace || !customDomain.trim()) return; + + setDomainLoading(true); + setDomainError(null); + setDomainInstructions(null); + + const result = await cloudApi.setCustomDomain(workspace.id, customDomain.trim()); + if (result.success) { + setDomainInstructions(result.data.instructions); + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setDomainError(result.error); + } + + setDomainLoading(false); + }, [workspace, customDomain, workspaceId]); + + // Verify custom domain + const handleVerifyDomain = useCallback(async () => { + if (!workspace) return; + + setDomainLoading(true); + setDomainError(null); + + const result = await cloudApi.verifyCustomDomain(workspace.id); + if (result.success) { + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + if (result.data.status === 'active') { + setDomainInstructions(null); + } + } else { + setDomainError(result.error); + } + + setDomainLoading(false); + }, [workspace, workspaceId]); + + // Remove custom domain + const handleRemoveDomain = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm('Are you sure you want to remove the custom domain?'); + if (!confirmed) return; + + setDomainLoading(true); + const result = await cloudApi.removeCustomDomain(workspace.id); + if (result.success) { + setCustomDomain(''); + setDomainInstructions(null); + const wsResult = await cloudApi.getWorkspaceDetails(workspaceId); + if (wsResult.success) { + setWorkspace(wsResult.data); + } + } else { + setDomainError(result.error); + } + setDomainLoading(false); + }, [workspace, workspaceId]); + + // Delete workspace + const handleDelete = useCallback(async () => { + if (!workspace) return; + + const confirmed = window.confirm( + `Are you sure you want to delete "${workspace.name}"? This action cannot be undone.` + ); + if (!confirmed) return; + + const doubleConfirm = window.confirm( + 'This will permanently delete all workspace data. Are you absolutely sure?' + ); + if (!doubleConfirm) return; + + const result = await cloudApi.deleteWorkspace(workspace.id); + if (result.success) { + onClose?.(); + } else { + setError(result.error); + } + }, [workspace, onClose]); + + if (isLoading) { + return ( +
+
+
+
+
+
+
+ + LOADING WORKSPACE CONFIG... + +
+ ); + } + + if (error && !workspace) { + return ( +
+
+ + {error} +
+
+ ); + } + + if (!workspace) { + return null; + } + + const unassignedRepos = availableRepos.filter( + (r) => !workspace.repositories.some((wr) => wr.id === r.id) + ); + + const sections = [ + { id: 'general', label: 'General', icon: }, + { id: 'providers', label: 'AI Providers', icon: }, + { id: 'repos', label: 'Repositories', icon: }, + { id: 'domain', label: 'Domain', icon: }, + { id: 'danger', label: 'Danger', icon: }, + ]; + + return ( +
+ {/* Section Navigation */} +
+ {sections.map((section) => ( + + ))} +
+ + {/* Content */} +
+ {error && ( +
+ + {error} + +
+ )} + + {/* General Section */} + {activeSection === 'general' && ( +
+ + +
+ + + + +
+ +
+ +
+ {workspace.status === 'running' && ( + } + > + Stop Workspace + + )} + } + > + Restart Workspace + +
+
+
+ )} + + {/* AI Providers Section */} + {activeSection === 'providers' && ( +
+ + + {providerError && ( +
+ + {providerError} +
+ )} + +
+ {AI_PROVIDERS.map((provider) => ( +
+
+
+
+ {provider.displayName[0]} +
+
+

+ {provider.displayName} +

+

{provider.description}

+
+
+ + {providerStatus[provider.id] ? ( +
+
+ Connected +
+ ) : null} +
+ + {!providerStatus[provider.id] && ( +
+ {oauthSession?.providerId === provider.id ? ( +
+ {oauthSession.status === 'starting' && ( +
+
+ Starting authentication... +
+ )} + {oauthSession.status === 'waiting_auth' && ( + <> +
+ + Complete login in the popup window +
+ {oauthSession.authUrl && ( +

+ Popup didn't open?{' '} + +

+ )} + + )} + +
+ ) : showApiKeyFallback[provider.id] ? ( +
+
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 px-4 py-3 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all" + /> + +
+ {provider.apiKeyUrl && ( +

+ Get your API key from{' '} + + {new URL(provider.apiKeyUrl).hostname} + +

+ )} + +
+ ) : ( +
+ + {provider.apiKeyUrl && ( + + )} +
+ )} +
+ )} + +
+

+ CLI: {provider.cliCommand} +

+
+
+ ))} +
+
+ )} + + {/* Repositories Section */} + {activeSection === 'repos' && ( +
+ + +
+ {workspace.repositories.length > 0 ? ( + workspace.repositories.map((repo) => ( +
+
+
+ +
+
+

{repo.fullName}

+

+ {repo.lastSyncedAt + ? `Synced ${new Date(repo.lastSyncedAt).toLocaleDateString()}` + : 'Not synced'} +

+
+
+ +
+ )) + ) : ( +
+ +

No repositories connected

+
+ )} +
+ + {unassignedRepos.length > 0 && ( + <> + +
+ {unassignedRepos.map((repo) => ( +
+
+
+ +
+
+

{repo.fullName}

+

+ {repo.isPrivate ? 'Private' : 'Public'} +

+
+
+ +
+ ))} +
+ + )} +
+ )} + + {/* Custom Domain Section */} + {activeSection === 'domain' && ( +
+ + +
+
+
+ +
+
+

Premium Feature

+

Requires Team or Enterprise plan

+
+
+
+ + {workspace.customDomain ? ( +
+
+
+ + Current Domain + + +
+

{workspace.customDomain}

+
+ + {workspace.customDomainStatus === 'pending' && ( + } + fullWidth + > + {domainLoading ? 'Verifying...' : 'Verify DNS Configuration'} + + )} + + } + fullWidth + > + Remove Custom Domain + +
+ ) : ( +
+
+ + setCustomDomain(e.target.value)} + placeholder="workspace.yourdomain.com" + className="w-full px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-lg text-sm text-text-primary font-mono placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all" + /> +
+ + } + fullWidth + > + {domainLoading ? 'Setting up...' : 'Set Custom Domain'} + +
+ )} + + {domainError && ( +
+ {domainError} +
+ )} + + {domainInstructions && ( +
+

+ + DNS Configuration Required +

+

+ Add the following DNS record to your domain provider: +

+
+ + + +
+
+ )} +
+ )} + + {/* Danger Zone Section */} + {activeSection === 'danger' && ( +
+
+
+
+ +
+
+

Danger Zone

+

+ These actions are destructive and cannot be undone +

+
+
+ +
+
+
+

Delete Workspace

+

+ Permanently delete this workspace and all its data +

+
+ +
+
+
+
+ )} +
+
+ ); +} + +// Utility Components +function SectionHeader({ title, subtitle }: { title: string; subtitle: string }) { + return ( +
+

{title}

+

{subtitle}

+
+ ); +} + +function InfoCard({ + label, + value, + valueColor = 'text-text-primary', + mono = false, + indicator = false, +}: { + label: string; + value: string; + valueColor?: string; + mono?: boolean; + indicator?: boolean; +}) { + return ( +
+ +
+ {indicator &&
} +

+ {value} +

+
+
+ ); +} + +function ActionButton({ + children, + onClick, + disabled, + variant, + icon, + fullWidth, +}: { + children: React.ReactNode; + onClick: () => void; + disabled?: boolean; + variant: 'primary' | 'warning' | 'danger'; + icon?: React.ReactNode; + fullWidth?: boolean; +}) { + const variants = { + primary: 'bg-accent-cyan/10 border-accent-cyan/30 text-accent-cyan hover:bg-accent-cyan/20', + warning: 'bg-amber-400/10 border-amber-400/30 text-amber-400 hover:bg-amber-400/20', + danger: 'bg-error/10 border-error/30 text-error hover:bg-error/20', + }; + + return ( + + ); +} + +function StatusBadge({ status }: { status: string }) { + const styles: Record = { + synced: 'bg-success/15 text-success border-success/30', + active: 'bg-success/15 text-success border-success/30', + syncing: 'bg-accent-cyan/15 text-accent-cyan border-accent-cyan/30', + verifying: 'bg-accent-cyan/15 text-accent-cyan border-accent-cyan/30', + pending: 'bg-amber-400/15 text-amber-400 border-amber-400/30', + error: 'bg-error/15 text-error border-error/30', + }; + + return ( + + {status} + + ); +} + +function DNSField({ label, value }: { label: string; value: string }) { + return ( +
+ +

{value}

+
+ ); +} + +// Icons +function SettingsGearIcon() { + return ( + + + + + ); +} + +function ProviderIcon() { + return ( + + + + + + ); +} + +function RepoIcon({ className = '' }: { className?: string }) { + return ( + + + + ); +} + +function GlobeIcon({ className = '' }: { className?: string }) { + return ( + + + + + + ); +} + +function AlertIcon({ className = '' }: { className?: string }) { + return ( + + + + + + ); +} + +function LockIcon() { + return ( + + + + + ); +} + +function StopIcon() { + return ( + + + + ); +} + +function RestartIcon() { + return ( + + + + + ); +} + +function CheckIcon() { + return ( + + + + ); +} + +function TrashIcon() { + return ( + + + + + ); +} + +function CloseIcon() { + return ( + + + + + ); +} + +function InfoIcon() { + return ( + + + + + + ); +} diff --git a/src/dashboard/react-components/settings/index.ts b/src/dashboard/react-components/settings/index.ts new file mode 100644 index 00000000..cc1d45be --- /dev/null +++ b/src/dashboard/react-components/settings/index.ts @@ -0,0 +1,10 @@ +/** + * Settings Components + * + * Unified settings UI for dashboard, workspace, team, and billing. + */ + +export { SettingsPage, type SettingsPageProps } from './SettingsPage'; +export { WorkspaceSettingsPanel, type WorkspaceSettingsPanelProps } from './WorkspaceSettingsPanel'; +export { TeamSettingsPanel, type TeamSettingsPanelProps } from './TeamSettingsPanel'; +export { BillingSettingsPanel, type BillingSettingsPanelProps } from './BillingSettingsPanel'; From bf87bef2eea19fd06d00b4b8193c24867a6e2c8f Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 16:41:42 +0000 Subject: [PATCH 059/103] Update dashboard package-lock.json after npm install --- src/dashboard/package-lock.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dashboard/package-lock.json b/src/dashboard/package-lock.json index aede51dc..141ea4d6 100644 --- a/src/dashboard/package-lock.json +++ b/src/dashboard/package-lock.json @@ -8,7 +8,7 @@ "name": "@agent-relay/dashboard-v2", "version": "1.0.0", "dependencies": { - "@nangohq/frontend": "^0.69.14", + "@nangohq/frontend": "^0.69.20", "@tailwindcss/postcss": "^4.1.18", "@xterm/addon-fit": "^0.11.0", "@xterm/addon-search": "^0.16.0", From e13702a556aab2e4c4415c624918cc13f8f849cf Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 16:45:36 +0000 Subject: [PATCH 060/103] Fix mobile workspace selector to show with single workspace --- .../settings/SettingsPage.tsx | 35 +++++++++++++------ 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/src/dashboard/react-components/settings/SettingsPage.tsx b/src/dashboard/react-components/settings/SettingsPage.tsx index 2845d154..5524b8ef 100644 --- a/src/dashboard/react-components/settings/SettingsPage.tsx +++ b/src/dashboard/react-components/settings/SettingsPage.tsx @@ -158,17 +158,32 @@ export function SettingsPage({
{/* Mobile Workspace Selector */} - {(activeTab === 'workspace' || activeTab === 'team') && workspaces.length > 1 && ( + {(activeTab === 'workspace' || activeTab === 'team') && workspaces.length > 0 && (
- +
+
ws.id === selectedWorkspaceId)?.status === 'running' + ? 'bg-success' + : workspaces.find(ws => ws.id === selectedWorkspaceId)?.status === 'stopped' + ? 'bg-amber-400' + : 'bg-text-muted' + }`} + /> + {workspaces.length === 1 ? ( + {workspaces[0].name} + ) : ( + + )} +
)} From 5a5da4bed738629829b0b540345e52aa00c81b9c Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 16:59:57 +0000 Subject: [PATCH 061/103] Wire up cloud workspaces to dashboard WorkspaceSelector In cloud mode, fetch workspaces from cloudApi.getWorkspaceSummary() and display them in the sidebar WorkspaceSelector. Users can now see their active workspace and switch between workspaces. - Add cloud workspace state and fetching logic - Create effectiveWorkspaces that works for both cloud and local mode - Update WorkspaceSelector to use effective workspace values - Poll for workspace updates every 30 seconds in cloud mode --- src/dashboard/react-components/App.tsx | 76 ++++++++++++++++++++++++-- 1 file changed, 72 insertions(+), 4 deletions(-) diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index c8b882cc..3e82f9b3 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -39,6 +39,7 @@ import { useRecentRepos } from './hooks/useRecentRepos'; import { usePresence, type UserPresence } from './hooks/usePresence'; import { useCloudSessionOptional } from './CloudSessionProvider'; import { api, convertApiDecision } from '../lib/api'; +import { cloudApi } from '../lib/cloudApi'; import type { CurrentUser } from './MessageList'; /** @@ -88,6 +89,73 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { } : undefined; + // Cloud workspaces state (for cloud mode) + const [cloudWorkspaces, setCloudWorkspaces] = useState>([]); + const [activeCloudWorkspaceId, setActiveCloudWorkspaceId] = useState(null); + const [isLoadingCloudWorkspaces, setIsLoadingCloudWorkspaces] = useState(false); + + // Fetch cloud workspaces when in cloud mode + useEffect(() => { + if (!cloudSession?.user) return; + + const fetchCloudWorkspaces = async () => { + setIsLoadingCloudWorkspaces(true); + try { + const result = await cloudApi.getWorkspaceSummary(); + if (result.success && result.data.workspaces) { + setCloudWorkspaces(result.data.workspaces); + // Auto-select first workspace if none selected + if (!activeCloudWorkspaceId && result.data.workspaces.length > 0) { + setActiveCloudWorkspaceId(result.data.workspaces[0].id); + } + } + } catch (err) { + console.error('Failed to fetch cloud workspaces:', err); + } finally { + setIsLoadingCloudWorkspaces(false); + } + }; + + fetchCloudWorkspaces(); + // Poll for updates every 30 seconds + const interval = setInterval(fetchCloudWorkspaces, 30000); + return () => clearInterval(interval); + }, [cloudSession?.user, activeCloudWorkspaceId]); + + // Determine which workspaces to use (cloud mode or orchestrator) + const isCloudMode = Boolean(cloudSession?.user); + const effectiveWorkspaces = useMemo(() => { + if (isCloudMode && cloudWorkspaces.length > 0) { + // Convert cloud workspaces to the format expected by WorkspaceSelector + return cloudWorkspaces.map(ws => ({ + id: ws.id, + name: ws.name, + path: ws.path || `/workspace/${ws.name}`, + status: ws.status === 'running' ? 'active' as const : 'inactive' as const, + provider: 'claude' as const, + lastActiveAt: new Date(), + })); + } + return workspaces; + }, [isCloudMode, cloudWorkspaces, workspaces]); + + const effectiveActiveWorkspaceId = isCloudMode ? activeCloudWorkspaceId : activeWorkspaceId; + const effectiveIsLoading = isCloudMode ? isLoadingCloudWorkspaces : isOrchestratorLoading; + + // Handle workspace selection (works for both cloud and orchestrator) + const handleEffectiveWorkspaceSelect = useCallback(async (workspace: { id: string; name: string }) => { + if (isCloudMode) { + setActiveCloudWorkspaceId(workspace.id); + } else { + await switchWorkspace(workspace.id); + } + }, [isCloudMode, switchWorkspace]); + // Presence tracking for online users and typing indicators const { onlineUsers, typingUsers, sendTyping, isConnected: isPresenceConnected } = usePresence({ currentUser: currentUser @@ -779,11 +847,11 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { {/* Workspace Selector */}
setIsAddWorkspaceOpen(true)} - isLoading={isOrchestratorLoading} + isLoading={effectiveIsLoading} />
From 5a87ba7891076b1913298121512269468e61bd12 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 17:50:24 +0000 Subject: [PATCH 062/103] Add comprehensive Slack-competitive feature roadmap as beads ## P0 - Viral Growth (Priority 100-90) - bd-viral-001: Public Community Rooms with AI Agents - bd-agent-public-001: Deploy Always-On Community Agents - bd-landing-001: Landing Page with Live Community Embed - bd-channels-001: User Channels (Public & Private) ## P0 - Core Messaging (Priority 85-80) - bd-dm-001: User-to-User Direct Messages - bd-search-001: Full-Text Message Search - bd-integrations-001: GitHub Integration ## P1 - Enhanced Features (Priority 75-70) - bd-files-001: File Sharing & Attachments - bd-notifications-001: Push & Email Notifications - bd-reactions-001: Emoji Reactions - bd-mobile-001: Mobile App (React Native) ## P2 - Collaboration & Platform (Priority 65-50) - bd-integrations-002: Integration Platform (Apps) - bd-huddles-001: Voice Huddles - bd-workflows-001: Workflow Builder - bd-screen-share-001: Screen Sharing - bd-guest-001: Guest Access - bd-sso-001: SSO/SAML Authentication ## P3 - Nice to Have (Priority 45-40) - bd-audit-001: Audit Logs - bd-canvas-001: Canvas/Collaborative Docs - bd-bookmarks-001: Bookmarks & Saved Items Key differentiator: AI-native platform where agents are first-class citizens alongside humans, with public community as viral growth engine. --- .beads/beads.jsonl | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index 6ad275dd..cf9f66e0 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -1 +1,21 @@ {"id":"bd-log1","title":"Add DIY minimal logging to agent-relay daemon","description":"Implement a lightweight ~20-line logging approach for the agent-relay daemon. No external library needed.\n\nRequirements:\n- log_info(), log_warn(), log_error(), log_debug() functions\n- JSON output format for easy parsing with jq\n- Configurable via LOG_FILE and DEBUG env vars\n- Log daemon startup/shutdown\n- Log errors (delivery failures, connection issues)\n- Debug-only message delivery logging\n- Minimal performance impact (sub-5ms system)\n\nImplementation:\n```bash\n_log() {\n local level=\"$1\" msg=\"$2\"\n local ts=$(date -u +\"%Y-%m-%dT%H:%M:%SZ\")\n printf '{\"ts\":\"%s\",\"level\":\"%s\",\"msg\":\"%s\"}\\n' \"$ts\" \"$level\" \"$msg\" >> \"$LOG_FILE\"\n [[ \"$LOG_STDOUT\" == \"1\" ]] && echo \"[$level] $msg\"\n}\n\nlog_info() { _log \"INFO\" \"$1\"; }\nlog_warn() { _log \"WARN\" \"$1\"; }\nlog_error() { _log \"ERROR\" \"$1\"; }\nlog_debug() { [[ \"${DEBUG:-0}\" == \"1\" ]] && _log \"DEBUG\" \"$1\"; }\n```\n\nFiles modified:\n- src/utils/logger.ts (new file - TypeScript implementation)\n- src/daemon/server.ts (integrated logging)\n- src/utils/index.ts (exported logger)","priority":50,"status":"closed","created_at":"2026-01-01T07:40:00Z","closed_at":"2026-01-01T07:46:00Z","closed_reason":"Implemented TypeScript version with JSON output, configurable log levels, file logging support","tags":["logging","infrastructure","low-priority"]} +{"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create open, public rooms on Agent Relay where anyone can join and chat with AI agents. This is our viral growth mechanism - like Discord's open servers but with AI agents as first-class participants.\n\n## Concept\nAgent Relay's public landing page will feature open rooms:\n- #help - Get help from AI agent + community\n- #roadmap - Chat with roadmap agent about upcoming features\n- #docs - Documentation agent answers questions\n- #showcase - See live agent demos\n- #general - Community discussion with AI moderation\n\n## Requirements\n1. Public rooms accessible without workspace (just GitHub login)\n2. AI agents always present and responsive\n3. Rate limiting for free users\n4. Upgrade prompts for power users\n5. Show agent activity in real-time (demonstrates product)\n6. Shareable room links\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace to use agents on their code\n5. Invites team members\n\n## Implementation\n- Add 'public' flag to rooms/channels\n- Create dedicated community workspace\n- Deploy always-on agents for each room\n- Add guest/free tier with rate limits\n- Embed rooms on landing page","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} +{"id":"bd-channels-001","title":"User Channels (Public & Private)","description":"Add support for user-created channels beyond agent channels.\n\n## Features\n- Create public channels (visible to all workspace members)\n- Create private channels (invite-only)\n- Channel descriptions and topics\n- Pin messages in channels\n- Channel member list\n- Leave/archive channels\n\n## Data Model\n```typescript\ninterface Channel {\n id: string;\n name: string;\n description?: string;\n topic?: string;\n isPrivate: boolean;\n workspaceId: string;\n createdBy: string;\n members: string[];\n pinnedMessages: string[];\n createdAt: Date;\n}\n```\n\n## API Endpoints\n- POST /api/channels - Create channel\n- GET /api/channels - List workspace channels\n- PATCH /api/channels/:id - Update channel\n- DELETE /api/channels/:id - Archive channel\n- POST /api/channels/:id/members - Add member\n- DELETE /api/channels/:id/members/:userId - Remove member","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} +{"id":"bd-dm-001","title":"User-to-User Direct Messages","description":"Add support for direct messages between human users (not just agents).\n\n## Features\n- Start DM with any workspace member\n- DM history persisted\n- Unread indicators\n- DM list in sidebar\n- Block user option\n\n## Implementation\n- DMs are 2-person private channels\n- Reuse channel infrastructure\n- Add DM-specific UI in sidebar\n- Notification preferences for DMs","priority":85,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":["bd-channels-001"]} +{"id":"bd-search-001","title":"Full-Text Message Search","description":"Add comprehensive search across all messages.\n\n## Features\n- Search all messages in workspace\n- Filter by channel, user, date range\n- Search within files/attachments\n- Highlight search terms in results\n- Jump to message in context\n- Search shortcuts (from:user, in:channel, has:link)\n\n## Implementation\n- Add search index (consider SQLite FTS5 or Postgres full-text)\n- Search API endpoint\n- Search UI in header\n- Keyboard shortcut (Cmd+K already exists, add search mode)","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["search","core","p1"],"depends_on":[]} +{"id":"bd-files-001","title":"File Sharing & Attachments","description":"Add file upload, preview, and sharing capabilities.\n\n## Features\n- Drag & drop file upload\n- Image preview in chat\n- Code file syntax highlighting\n- PDF preview\n- File storage (S3/R2)\n- Download files\n- File size limits per plan\n\n## Implementation\n- Multipart upload endpoint\n- File storage service (Cloudflare R2)\n- Thumbnail generation for images\n- Virus scanning for uploads\n- CDN for fast delivery","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["files","core","p1"],"depends_on":[]} +{"id":"bd-reactions-001","title":"Emoji Reactions","description":"Add emoji reactions to messages.\n\n## Features\n- React with any emoji\n- See who reacted\n- Reaction counts\n- Custom workspace emojis\n- Keyboard shortcut to react\n\n## Data Model\n```typescript\ninterface Reaction {\n messageId: string;\n emoji: string;\n userId: string;\n createdAt: Date;\n}\n```","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","engagement","p1"],"depends_on":[]} +{"id":"bd-notifications-001","title":"Push & Email Notifications","description":"Add comprehensive notification system.\n\n## Features\n- Push notifications (web, mobile)\n- Email notifications (digest, mentions)\n- Per-channel notification settings\n- Do not disturb mode\n- Notification schedule\n- @here, @channel mentions\n\n## Implementation\n- Web Push API for browser notifications\n- Email service (SendGrid/Resend)\n- Notification preferences UI\n- Notification queue/worker","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["notifications","core","p1"],"depends_on":[]} +{"id":"bd-huddles-001","title":"Voice Huddles","description":"Add quick voice calls in channels (like Slack Huddles).\n\n## Features\n- Start huddle in any channel\n- See who's in huddle\n- Join/leave easily\n- Mute/unmute\n- Share screen (separate bead)\n- Huddle history\n\n## Implementation\n- WebRTC for audio\n- Consider LiveKit or Daily.co\n- SFU for multi-party calls\n- Huddle indicator in channel","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["voice","collaboration","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-screen-share-001","title":"Screen Sharing","description":"Add screen sharing capability during huddles.\n\n## Features\n- Share entire screen or window\n- Share with audio\n- Drawing/annotation tools\n- Request control (optional)\n\n## Implementation\n- getDisplayMedia API\n- Integrate with huddles WebRTC","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-huddles-001"]} +{"id":"bd-integrations-001","title":"GitHub Integration","description":"Deep GitHub integration beyond OAuth.\n\n## Features\n- Post to channel on PR/issue events\n- Link previews for GitHub URLs\n- Create issues from messages\n- PR review notifications\n- Commit notifications\n- Deploy notifications\n\n## Implementation\n- GitHub App webhooks\n- Message unfurling for GitHub URLs\n- Slash command: /github create issue","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["integrations","p1"],"depends_on":[]} +{"id":"bd-integrations-002","title":"Integration Platform (Apps)","description":"Build platform for third-party integrations.\n\n## Features\n- App directory\n- Install apps to workspace\n- OAuth for apps\n- Incoming webhooks\n- Outgoing webhooks\n- Slash commands from apps\n- Bot users\n\n## Implementation\n- App manifest format\n- App installation flow\n- Webhook infrastructure\n- App permissions model","priority":65,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["platform","integrations","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-workflows-001","title":"Workflow Builder","description":"No-code automation builder (like Slack Workflow Builder).\n\n## Features\n- Trigger: message, emoji, schedule, webhook\n- Actions: send message, create task, call API\n- Variables and conditions\n- Templates library\n- AI-assisted workflow creation\n\n## Differentiator\n- Agent actions: spawn agent, assign task to agent\n- AI can suggest workflows based on patterns","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["automation","platform","p2"],"depends_on":["bd-integrations-002"]} +{"id":"bd-mobile-001","title":"Mobile App (React Native)","description":"Native mobile apps for iOS and Android.\n\n## Features\n- Full messaging functionality\n- Push notifications\n- Share to Agent Relay\n- Quick actions\n- Offline support\n- Huddle join from mobile\n\n## Implementation\n- React Native (share code with web)\n- Expo for easier deployment\n- Native push notification handling","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["mobile","platform","p1"],"depends_on":["bd-notifications-001"]} +{"id":"bd-sso-001","title":"SSO/SAML Authentication","description":"Enterprise single sign-on support.\n\n## Features\n- SAML 2.0 support\n- OIDC support\n- Okta, Azure AD, Google Workspace\n- Just-in-time provisioning\n- SCIM for user sync\n\n## Implementation\n- SAML library integration\n- SSO configuration UI\n- Per-workspace SSO settings","priority":50,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","security","p2"],"depends_on":[]} +{"id":"bd-audit-001","title":"Audit Logs","description":"Comprehensive audit logging for compliance.\n\n## Features\n- Log all admin actions\n- Log message edits/deletes\n- Log file access\n- Export audit logs\n- Retention policies\n- SIEM integration\n\n## Implementation\n- Audit event schema\n- Audit log storage\n- Admin UI for viewing logs\n- Export API","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","compliance","p2"],"depends_on":[]} +{"id":"bd-guest-001","title":"Guest Access","description":"Allow external collaborators with limited access.\n\n## Features\n- Invite guests to specific channels\n- Guest badge/indicator\n- Limited permissions\n- Guest expiration\n- Convert guest to member\n\n## Implementation\n- Guest user type\n- Channel-scoped access\n- Guest invitation flow","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-bookmarks-001","title":"Bookmarks & Saved Items","description":"Save important messages for later.\n\n## Features\n- Bookmark any message\n- Saved items view\n- Remind me later\n- Share bookmark collections\n\n## Implementation\n- Bookmark data model\n- Saved items UI\n- Reminder scheduler","priority":40,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["productivity","p3"],"depends_on":[]} +{"id":"bd-canvas-001","title":"Canvas/Collaborative Docs","description":"Real-time collaborative documents within channels.\n\n## Features\n- Rich text editor\n- Real-time collaboration (CRDT)\n- Embed in channels\n- Version history\n- Export to markdown\n\n## Implementation\n- Tiptap or ProseMirror editor\n- Yjs for CRDT\n- Canvas data model","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p3"],"depends_on":[]} +{"id":"bd-agent-public-001","title":"Deploy Always-On Community Agents","description":"Deploy dedicated agents for public community rooms.\n\n## Agents to Deploy\n1. **DocsBot** - Answers documentation questions, links to relevant docs\n2. **RoadmapBot** - Discusses features, collects feedback, shows roadmap\n3. **HelpBot** - Troubleshooting, getting started, FAQ\n4. **ShowcaseBot** - Demos agent capabilities interactively\n5. **ModBot** - Community moderation, spam filtering\n\n## Requirements\n- Agents must be highly reliable (99.9% uptime)\n- Fast response times (<2s)\n- Trained on Agent Relay docs/codebase\n- Personality guidelines for each\n- Escalation path to humans\n\n## Implementation\n- Deploy on dedicated compute\n- Health monitoring\n- Auto-restart on failure\n- Usage analytics","priority":95,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","agents","community","p0"],"depends_on":["bd-viral-001"]} +{"id":"bd-landing-001","title":"Landing Page with Live Community Embed","description":"Update landing page to embed live community rooms.\n\n## Features\n- Show real-time activity in public rooms\n- 'Join the conversation' CTA\n- Agent activity ticker\n- User count/online indicator\n- Demo video alongside live chat\n\n## Implementation\n- WebSocket connection for visitors\n- Read-only view without login\n- Login prompt to participate\n- Responsive embed component","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","marketing","p0"],"depends_on":["bd-viral-001","bd-agent-public-001"]} From 88e9cd258e23135eb331e2bfbf86022f6734dace Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 17:53:11 +0000 Subject: [PATCH 063/103] Add 15 critical launch-blocker beads (P00 priority 115-150) ## LAUNCH BLOCKERS (Must fix before launch) - bd-critical-001: WebSocket Reconnection (150) - bd-critical-002: Rate Limiting & Abuse Prevention (145) - bd-critical-006: CSRF & Security Headers (145) - bd-critical-003: Error Boundaries (140) - bd-critical-007: Database Connection Pooling (140) - bd-critical-011: Billing Integration Complete (140) - bd-critical-004: Onboarding Flow (135) - bd-critical-008: Workspace Provisioning Reliability (135) - bd-critical-015: Backup & Disaster Recovery (135) - bd-critical-005: Loading States & Skeleton UI (130) - bd-critical-010: Analytics & Monitoring (130) - bd-critical-009: Mobile Responsive Polish (125) - bd-critical-012: Email Transactional System (125) - bd-critical-013: Terms of Service & Privacy Policy (120) - bd-critical-014: Health Check & Status Page (115) These are non-negotiable before any public launch. --- .beads/beads.jsonl | 57 +++++++++++++++++++++++++++++----------------- 1 file changed, 36 insertions(+), 21 deletions(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index cf9f66e0..e7779d9c 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -1,21 +1,36 @@ -{"id":"bd-log1","title":"Add DIY minimal logging to agent-relay daemon","description":"Implement a lightweight ~20-line logging approach for the agent-relay daemon. No external library needed.\n\nRequirements:\n- log_info(), log_warn(), log_error(), log_debug() functions\n- JSON output format for easy parsing with jq\n- Configurable via LOG_FILE and DEBUG env vars\n- Log daemon startup/shutdown\n- Log errors (delivery failures, connection issues)\n- Debug-only message delivery logging\n- Minimal performance impact (sub-5ms system)\n\nImplementation:\n```bash\n_log() {\n local level=\"$1\" msg=\"$2\"\n local ts=$(date -u +\"%Y-%m-%dT%H:%M:%SZ\")\n printf '{\"ts\":\"%s\",\"level\":\"%s\",\"msg\":\"%s\"}\\n' \"$ts\" \"$level\" \"$msg\" >> \"$LOG_FILE\"\n [[ \"$LOG_STDOUT\" == \"1\" ]] && echo \"[$level] $msg\"\n}\n\nlog_info() { _log \"INFO\" \"$1\"; }\nlog_warn() { _log \"WARN\" \"$1\"; }\nlog_error() { _log \"ERROR\" \"$1\"; }\nlog_debug() { [[ \"${DEBUG:-0}\" == \"1\" ]] && _log \"DEBUG\" \"$1\"; }\n```\n\nFiles modified:\n- src/utils/logger.ts (new file - TypeScript implementation)\n- src/daemon/server.ts (integrated logging)\n- src/utils/index.ts (exported logger)","priority":50,"status":"closed","created_at":"2026-01-01T07:40:00Z","closed_at":"2026-01-01T07:46:00Z","closed_reason":"Implemented TypeScript version with JSON output, configurable log levels, file logging support","tags":["logging","infrastructure","low-priority"]} -{"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create open, public rooms on Agent Relay where anyone can join and chat with AI agents. This is our viral growth mechanism - like Discord's open servers but with AI agents as first-class participants.\n\n## Concept\nAgent Relay's public landing page will feature open rooms:\n- #help - Get help from AI agent + community\n- #roadmap - Chat with roadmap agent about upcoming features\n- #docs - Documentation agent answers questions\n- #showcase - See live agent demos\n- #general - Community discussion with AI moderation\n\n## Requirements\n1. Public rooms accessible without workspace (just GitHub login)\n2. AI agents always present and responsive\n3. Rate limiting for free users\n4. Upgrade prompts for power users\n5. Show agent activity in real-time (demonstrates product)\n6. Shareable room links\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace to use agents on their code\n5. Invites team members\n\n## Implementation\n- Add 'public' flag to rooms/channels\n- Create dedicated community workspace\n- Deploy always-on agents for each room\n- Add guest/free tier with rate limits\n- Embed rooms on landing page","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} -{"id":"bd-channels-001","title":"User Channels (Public & Private)","description":"Add support for user-created channels beyond agent channels.\n\n## Features\n- Create public channels (visible to all workspace members)\n- Create private channels (invite-only)\n- Channel descriptions and topics\n- Pin messages in channels\n- Channel member list\n- Leave/archive channels\n\n## Data Model\n```typescript\ninterface Channel {\n id: string;\n name: string;\n description?: string;\n topic?: string;\n isPrivate: boolean;\n workspaceId: string;\n createdBy: string;\n members: string[];\n pinnedMessages: string[];\n createdAt: Date;\n}\n```\n\n## API Endpoints\n- POST /api/channels - Create channel\n- GET /api/channels - List workspace channels\n- PATCH /api/channels/:id - Update channel\n- DELETE /api/channels/:id - Archive channel\n- POST /api/channels/:id/members - Add member\n- DELETE /api/channels/:id/members/:userId - Remove member","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} -{"id":"bd-dm-001","title":"User-to-User Direct Messages","description":"Add support for direct messages between human users (not just agents).\n\n## Features\n- Start DM with any workspace member\n- DM history persisted\n- Unread indicators\n- DM list in sidebar\n- Block user option\n\n## Implementation\n- DMs are 2-person private channels\n- Reuse channel infrastructure\n- Add DM-specific UI in sidebar\n- Notification preferences for DMs","priority":85,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":["bd-channels-001"]} -{"id":"bd-search-001","title":"Full-Text Message Search","description":"Add comprehensive search across all messages.\n\n## Features\n- Search all messages in workspace\n- Filter by channel, user, date range\n- Search within files/attachments\n- Highlight search terms in results\n- Jump to message in context\n- Search shortcuts (from:user, in:channel, has:link)\n\n## Implementation\n- Add search index (consider SQLite FTS5 or Postgres full-text)\n- Search API endpoint\n- Search UI in header\n- Keyboard shortcut (Cmd+K already exists, add search mode)","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["search","core","p1"],"depends_on":[]} -{"id":"bd-files-001","title":"File Sharing & Attachments","description":"Add file upload, preview, and sharing capabilities.\n\n## Features\n- Drag & drop file upload\n- Image preview in chat\n- Code file syntax highlighting\n- PDF preview\n- File storage (S3/R2)\n- Download files\n- File size limits per plan\n\n## Implementation\n- Multipart upload endpoint\n- File storage service (Cloudflare R2)\n- Thumbnail generation for images\n- Virus scanning for uploads\n- CDN for fast delivery","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["files","core","p1"],"depends_on":[]} -{"id":"bd-reactions-001","title":"Emoji Reactions","description":"Add emoji reactions to messages.\n\n## Features\n- React with any emoji\n- See who reacted\n- Reaction counts\n- Custom workspace emojis\n- Keyboard shortcut to react\n\n## Data Model\n```typescript\ninterface Reaction {\n messageId: string;\n emoji: string;\n userId: string;\n createdAt: Date;\n}\n```","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","engagement","p1"],"depends_on":[]} -{"id":"bd-notifications-001","title":"Push & Email Notifications","description":"Add comprehensive notification system.\n\n## Features\n- Push notifications (web, mobile)\n- Email notifications (digest, mentions)\n- Per-channel notification settings\n- Do not disturb mode\n- Notification schedule\n- @here, @channel mentions\n\n## Implementation\n- Web Push API for browser notifications\n- Email service (SendGrid/Resend)\n- Notification preferences UI\n- Notification queue/worker","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["notifications","core","p1"],"depends_on":[]} -{"id":"bd-huddles-001","title":"Voice Huddles","description":"Add quick voice calls in channels (like Slack Huddles).\n\n## Features\n- Start huddle in any channel\n- See who's in huddle\n- Join/leave easily\n- Mute/unmute\n- Share screen (separate bead)\n- Huddle history\n\n## Implementation\n- WebRTC for audio\n- Consider LiveKit or Daily.co\n- SFU for multi-party calls\n- Huddle indicator in channel","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["voice","collaboration","p2"],"depends_on":["bd-channels-001"]} -{"id":"bd-screen-share-001","title":"Screen Sharing","description":"Add screen sharing capability during huddles.\n\n## Features\n- Share entire screen or window\n- Share with audio\n- Drawing/annotation tools\n- Request control (optional)\n\n## Implementation\n- getDisplayMedia API\n- Integrate with huddles WebRTC","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-huddles-001"]} -{"id":"bd-integrations-001","title":"GitHub Integration","description":"Deep GitHub integration beyond OAuth.\n\n## Features\n- Post to channel on PR/issue events\n- Link previews for GitHub URLs\n- Create issues from messages\n- PR review notifications\n- Commit notifications\n- Deploy notifications\n\n## Implementation\n- GitHub App webhooks\n- Message unfurling for GitHub URLs\n- Slash command: /github create issue","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["integrations","p1"],"depends_on":[]} -{"id":"bd-integrations-002","title":"Integration Platform (Apps)","description":"Build platform for third-party integrations.\n\n## Features\n- App directory\n- Install apps to workspace\n- OAuth for apps\n- Incoming webhooks\n- Outgoing webhooks\n- Slash commands from apps\n- Bot users\n\n## Implementation\n- App manifest format\n- App installation flow\n- Webhook infrastructure\n- App permissions model","priority":65,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["platform","integrations","p2"],"depends_on":["bd-channels-001"]} -{"id":"bd-workflows-001","title":"Workflow Builder","description":"No-code automation builder (like Slack Workflow Builder).\n\n## Features\n- Trigger: message, emoji, schedule, webhook\n- Actions: send message, create task, call API\n- Variables and conditions\n- Templates library\n- AI-assisted workflow creation\n\n## Differentiator\n- Agent actions: spawn agent, assign task to agent\n- AI can suggest workflows based on patterns","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["automation","platform","p2"],"depends_on":["bd-integrations-002"]} -{"id":"bd-mobile-001","title":"Mobile App (React Native)","description":"Native mobile apps for iOS and Android.\n\n## Features\n- Full messaging functionality\n- Push notifications\n- Share to Agent Relay\n- Quick actions\n- Offline support\n- Huddle join from mobile\n\n## Implementation\n- React Native (share code with web)\n- Expo for easier deployment\n- Native push notification handling","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["mobile","platform","p1"],"depends_on":["bd-notifications-001"]} -{"id":"bd-sso-001","title":"SSO/SAML Authentication","description":"Enterprise single sign-on support.\n\n## Features\n- SAML 2.0 support\n- OIDC support\n- Okta, Azure AD, Google Workspace\n- Just-in-time provisioning\n- SCIM for user sync\n\n## Implementation\n- SAML library integration\n- SSO configuration UI\n- Per-workspace SSO settings","priority":50,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","security","p2"],"depends_on":[]} -{"id":"bd-audit-001","title":"Audit Logs","description":"Comprehensive audit logging for compliance.\n\n## Features\n- Log all admin actions\n- Log message edits/deletes\n- Log file access\n- Export audit logs\n- Retention policies\n- SIEM integration\n\n## Implementation\n- Audit event schema\n- Audit log storage\n- Admin UI for viewing logs\n- Export API","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","compliance","p2"],"depends_on":[]} -{"id":"bd-guest-001","title":"Guest Access","description":"Allow external collaborators with limited access.\n\n## Features\n- Invite guests to specific channels\n- Guest badge/indicator\n- Limited permissions\n- Guest expiration\n- Convert guest to member\n\n## Implementation\n- Guest user type\n- Channel-scoped access\n- Guest invitation flow","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-channels-001"]} -{"id":"bd-bookmarks-001","title":"Bookmarks & Saved Items","description":"Save important messages for later.\n\n## Features\n- Bookmark any message\n- Saved items view\n- Remind me later\n- Share bookmark collections\n\n## Implementation\n- Bookmark data model\n- Saved items UI\n- Reminder scheduler","priority":40,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["productivity","p3"],"depends_on":[]} -{"id":"bd-canvas-001","title":"Canvas/Collaborative Docs","description":"Real-time collaborative documents within channels.\n\n## Features\n- Rich text editor\n- Real-time collaboration (CRDT)\n- Embed in channels\n- Version history\n- Export to markdown\n\n## Implementation\n- Tiptap or ProseMirror editor\n- Yjs for CRDT\n- Canvas data model","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p3"],"depends_on":[]} -{"id":"bd-agent-public-001","title":"Deploy Always-On Community Agents","description":"Deploy dedicated agents for public community rooms.\n\n## Agents to Deploy\n1. **DocsBot** - Answers documentation questions, links to relevant docs\n2. **RoadmapBot** - Discusses features, collects feedback, shows roadmap\n3. **HelpBot** - Troubleshooting, getting started, FAQ\n4. **ShowcaseBot** - Demos agent capabilities interactively\n5. **ModBot** - Community moderation, spam filtering\n\n## Requirements\n- Agents must be highly reliable (99.9% uptime)\n- Fast response times (<2s)\n- Trained on Agent Relay docs/codebase\n- Personality guidelines for each\n- Escalation path to humans\n\n## Implementation\n- Deploy on dedicated compute\n- Health monitoring\n- Auto-restart on failure\n- Usage analytics","priority":95,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","agents","community","p0"],"depends_on":["bd-viral-001"]} -{"id":"bd-landing-001","title":"Landing Page with Live Community Embed","description":"Update landing page to embed live community rooms.\n\n## Features\n- Show real-time activity in public rooms\n- 'Join the conversation' CTA\n- Agent activity ticker\n- User count/online indicator\n- Demo video alongside live chat\n\n## Implementation\n- WebSocket connection for visitors\n- Read-only view without login\n- Login prompt to participate\n- Responsive embed component","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","marketing","p0"],"depends_on":["bd-viral-001","bd-agent-public-001"]} +{"id":"bd-log1","title":"Add DIY minimal logging to agent-relay daemon","description":"Implement a lightweight ~20-line logging approach for the agent-relay daemon.","priority":50,"status":"closed","created_at":"2026-01-01T07:40:00Z","closed_at":"2026-01-01T07:46:00Z","closed_reason":"Implemented","tags":["logging","infrastructure"]} +{"id":"bd-critical-001","title":"[LAUNCH BLOCKER] Fix WebSocket Reconnection & Session Recovery","description":"WebSocket drops are losing user context and messages.\n\n## Problem\n- WebSocket disconnect = lost messages\n- No automatic reconnection with backoff\n- Session state not persisted\n- Users lose typing context on refresh\n\n## Requirements\n1. Exponential backoff reconnection (1s, 2s, 4s, 8s, max 30s)\n2. Session ID persistence in localStorage\n3. Message queue for offline sends\n4. Reconnect indicator in UI\n5. Sync missed messages on reconnect\n\n## Files\n- src/dashboard/react-components/hooks/useWebSocket.ts\n- src/daemon/server.ts","priority":150,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-002","title":"[LAUNCH BLOCKER] Rate Limiting & Abuse Prevention","description":"No rate limiting = vulnerable to abuse and cost explosion.\n\n## Requirements\n1. API rate limiting per user/IP\n2. Message rate limiting (prevent spam)\n3. Agent spawn rate limiting\n4. WebSocket connection limits\n5. Graceful degradation under load\n\n## Implementation\n- Redis-based rate limiter\n- Sliding window algorithm\n- Different limits per plan tier\n- 429 responses with Retry-After header\n\n## Limits (Free Tier)\n- 100 API requests/minute\n- 30 messages/minute\n- 5 agent spawns/hour\n- 3 concurrent WebSocket connections","priority":145,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","security","p00"],"depends_on":[]} +{"id":"bd-critical-003","title":"[LAUNCH BLOCKER] Error Boundaries & Graceful Degradation","description":"Uncaught errors crash the entire dashboard.\n\n## Requirements\n1. React Error Boundaries around major sections\n2. Fallback UI for crashed components\n3. Error reporting to backend (Sentry integration)\n4. User-friendly error messages\n5. Retry mechanisms for failed operations\n\n## Components to Wrap\n- MessageList\n- Sidebar\n- Settings panels\n- Agent cards\n- File uploads","priority":140,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","ux","p00"],"depends_on":[]} +{"id":"bd-critical-004","title":"[LAUNCH BLOCKER] Onboarding Flow for New Users","description":"New users land on empty dashboard with no guidance.\n\n## Requirements\n1. Welcome modal for first-time users\n2. Interactive tutorial/tooltips\n3. Sample workspace with demo agents\n4. Quick actions: Connect repo, spawn first agent\n5. Progress checklist\n\n## Flow\n1. Sign up โ†’ Welcome modal\n2. Connect GitHub โ†’ Show repo picker\n3. Create workspace โ†’ Auto-provision\n4. Spawn first agent โ†’ Guided prompt\n5. Send first message โ†’ Celebrate!\n\n## Metrics\n- Track funnel completion\n- Time to first agent spawn\n- Day 1 retention","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","onboarding","growth","p00"],"depends_on":[]} +{"id":"bd-critical-005","title":"[LAUNCH BLOCKER] Loading States & Skeleton UI","description":"Blank screens during loading feel broken.\n\n## Requirements\n1. Skeleton loaders for all lists\n2. Shimmer animations\n3. Progressive loading (show what we have)\n4. Optimistic updates for actions\n5. Loading indicators for async operations\n\n## Components Needing Skeletons\n- Message list\n- Agent sidebar\n- Workspace selector\n- Settings panels\n- History page","priority":130,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","ux","p00"],"depends_on":[]} +{"id":"bd-critical-006","title":"[LAUNCH BLOCKER] CSRF & Security Headers","description":"Missing security headers and CSRF protection.\n\n## Requirements\n1. CSRF tokens on all mutations\n2. Secure cookie settings (HttpOnly, SameSite, Secure)\n3. Content-Security-Policy header\n4. X-Frame-Options: DENY\n5. X-Content-Type-Options: nosniff\n6. Rate limit auth endpoints harder\n\n## Implementation\n- Middleware for security headers\n- CSRF token generation/validation\n- Audit existing endpoints","priority":145,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","security","p00"],"depends_on":[]} +{"id":"bd-critical-007","title":"[LAUNCH BLOCKER] Database Connection Pooling & Failover","description":"Single DB connection = single point of failure.\n\n## Requirements\n1. Connection pooling (PgBouncer or built-in)\n2. Read replicas for scalability\n3. Automatic failover\n4. Query timeout limits\n5. Dead connection detection\n\n## Implementation\n- Configure pool size based on load\n- Health check endpoint\n- Graceful degradation on DB issues","priority":140,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-008","title":"[LAUNCH BLOCKER] Workspace Provisioning Reliability","description":"Workspace creation sometimes fails silently.\n\n## Requirements\n1. Idempotent workspace creation\n2. Retry logic with exponential backoff\n3. Clear error messages on failure\n4. Cleanup on partial failure\n5. Status tracking (provisioning โ†’ ready โ†’ error)\n\n## Edge Cases\n- GitHub API rate limit during repo clone\n- Compute provisioning timeout\n- DNS propagation for custom domains\n- Quota exceeded","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-009","title":"[LAUNCH BLOCKER] Mobile Responsive Polish","description":"Dashboard unusable on phones - huge user segment.\n\n## Requirements\n1. Touch-friendly tap targets (44px min)\n2. Swipe gestures for navigation\n3. Mobile-optimized message input\n4. Responsive images/previews\n5. PWA support (installable)\n6. Test on actual devices\n\n## Priority Screens\n- Message view\n- Agent list\n- Settings\n- Onboarding","priority":125,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","mobile","ux","p00"],"depends_on":[]} +{"id":"bd-critical-010","title":"[LAUNCH BLOCKER] Analytics & Monitoring","description":"Flying blind without metrics.\n\n## Requirements\n1. Product analytics (Mixpanel/Amplitude/PostHog)\n2. Error tracking (Sentry)\n3. Performance monitoring (Core Web Vitals)\n4. Server metrics (CPU, memory, latency)\n5. Business metrics dashboard\n\n## Key Events to Track\n- Sign up, login\n- Workspace created\n- Agent spawned\n- Message sent\n- Feature usage\n- Errors\n\n## Alerts\n- Error rate spike\n- Latency p99 > 2s\n- Failed agent spawns","priority":130,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","launch-blocker","analytics","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-011","title":"[URGENT] Billing Integration Complete","description":"Can't charge users = no business.\n\n## Requirements\n1. Stripe checkout flow working end-to-end\n2. Subscription management (upgrade/downgrade/cancel)\n3. Usage-based billing for compute\n4. Invoice generation\n5. Failed payment handling\n6. Dunning emails\n\n## Test Scenarios\n- New subscription\n- Plan upgrade mid-cycle\n- Cancellation\n- Failed payment retry\n- Webhook reliability","priority":140,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","billing","p00"],"depends_on":[]} +{"id":"bd-critical-012","title":"[URGENT] Email Transactional System","description":"No email = users forget us.\n\n## Requirements\n1. Welcome email on signup\n2. Workspace invitation emails\n3. Password reset (if applicable)\n4. Agent activity digest (daily/weekly)\n5. Billing receipts\n6. Unsubscribe handling\n\n## Implementation\n- Resend or SendGrid\n- Email templates (React Email)\n- Queue for reliability\n- Delivery tracking","priority":125,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","email","p00"],"depends_on":[]} +{"id":"bd-critical-013","title":"[URGENT] Terms of Service & Privacy Policy","description":"Legal requirement before launch.\n\n## Requirements\n1. Terms of Service page\n2. Privacy Policy page\n3. Cookie consent banner\n4. Data processing agreement (for enterprise)\n5. Acceptable use policy\n6. GDPR compliance basics\n\n## Implementation\n- Legal review of AI-specific terms\n- Cookie consent mechanism\n- Data export/deletion flow","priority":120,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","legal","p00"],"depends_on":[]} +{"id":"bd-critical-014","title":"[URGENT] Health Check & Status Page","description":"Users need to know if we're down.\n\n## Requirements\n1. /health endpoint for all services\n2. Public status page (statuspage.io or custom)\n3. Uptime monitoring (every 1 min)\n4. Incident management process\n5. Status page subscription\n\n## Services to Monitor\n- API\n- WebSocket\n- Database\n- Agent compute\n- GitHub integration","priority":115,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} +{"id":"bd-critical-015","title":"[URGENT] Backup & Disaster Recovery","description":"Data loss = company death.\n\n## Requirements\n1. Automated daily database backups\n2. Point-in-time recovery (PITR)\n3. Cross-region backup replication\n4. Backup restoration testing (monthly)\n5. Message/file backup strategy\n6. RTO < 4 hours, RPO < 1 hour\n\n## Implementation\n- Postgres WAL archiving\n- S3/R2 for file backups\n- Documented recovery runbook","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} +{"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create open, public rooms on Agent Relay where anyone can join and chat with AI agents. This is our viral growth mechanism.\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace\n5. Invites team members","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} +{"id":"bd-channels-001","title":"User Channels (Public & Private)","description":"Add support for user-created channels beyond agent channels.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} +{"id":"bd-dm-001","title":"User-to-User Direct Messages","description":"Add support for direct messages between human users.","priority":85,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":["bd-channels-001"]} +{"id":"bd-search-001","title":"Full-Text Message Search","description":"Add comprehensive search across all messages.","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["search","core","p1"],"depends_on":[]} +{"id":"bd-files-001","title":"File Sharing & Attachments","description":"Add file upload, preview, and sharing capabilities.","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["files","core","p1"],"depends_on":[]} +{"id":"bd-reactions-001","title":"Emoji Reactions","description":"Add emoji reactions to messages.","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","engagement","p1"],"depends_on":[]} +{"id":"bd-notifications-001","title":"Push & Email Notifications","description":"Add comprehensive notification system.","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["notifications","core","p1"],"depends_on":[]} +{"id":"bd-huddles-001","title":"Voice Huddles","description":"Add quick voice calls in channels.","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["voice","collaboration","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-screen-share-001","title":"Screen Sharing","description":"Add screen sharing capability during huddles.","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-huddles-001"]} +{"id":"bd-integrations-001","title":"GitHub Integration","description":"Deep GitHub integration beyond OAuth.","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["integrations","p1"],"depends_on":[]} +{"id":"bd-integrations-002","title":"Integration Platform (Apps)","description":"Build platform for third-party integrations.","priority":65,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["platform","integrations","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-workflows-001","title":"Workflow Builder","description":"No-code automation builder.","priority":60,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["automation","platform","p2"],"depends_on":["bd-integrations-002"]} +{"id":"bd-mobile-001","title":"Mobile App (React Native)","description":"Native mobile apps for iOS and Android.","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["mobile","platform","p1"],"depends_on":["bd-notifications-001"]} +{"id":"bd-sso-001","title":"SSO/SAML Authentication","description":"Enterprise single sign-on support.","priority":50,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","security","p2"],"depends_on":[]} +{"id":"bd-audit-001","title":"Audit Logs","description":"Comprehensive audit logging for compliance.","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["enterprise","compliance","p2"],"depends_on":[]} +{"id":"bd-guest-001","title":"Guest Access","description":"Allow external collaborators with limited access.","priority":55,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p2"],"depends_on":["bd-channels-001"]} +{"id":"bd-bookmarks-001","title":"Bookmarks & Saved Items","description":"Save important messages for later.","priority":40,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["productivity","p3"],"depends_on":[]} +{"id":"bd-canvas-001","title":"Canvas/Collaborative Docs","description":"Real-time collaborative documents within channels.","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p3"],"depends_on":[]} +{"id":"bd-agent-public-001","title":"Deploy Always-On Community Agents","description":"Deploy dedicated agents for public community rooms (DocsBot, RoadmapBot, HelpBot, ShowcaseBot, ModBot).","priority":95,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","agents","community","p0"],"depends_on":["bd-viral-001"]} +{"id":"bd-landing-001","title":"Landing Page with Live Community Embed","description":"Update landing page to embed live community rooms with real-time activity.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","marketing","p0"],"depends_on":["bd-viral-001","bd-agent-public-001"]} From cb74ad89285e219f03b00f9f8ed65b1a22091473 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 17:54:00 +0000 Subject: [PATCH 064/103] Remove User DMs bead - focus on private channels only --- .beads/beads.jsonl | 1 - 1 file changed, 1 deletion(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index e7779d9c..dcca951b 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -16,7 +16,6 @@ {"id":"bd-critical-015","title":"[URGENT] Backup & Disaster Recovery","description":"Data loss = company death.\n\n## Requirements\n1. Automated daily database backups\n2. Point-in-time recovery (PITR)\n3. Cross-region backup replication\n4. Backup restoration testing (monthly)\n5. Message/file backup strategy\n6. RTO < 4 hours, RPO < 1 hour\n\n## Implementation\n- Postgres WAL archiving\n- S3/R2 for file backups\n- Documented recovery runbook","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} {"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create open, public rooms on Agent Relay where anyone can join and chat with AI agents. This is our viral growth mechanism.\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace\n5. Invites team members","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} {"id":"bd-channels-001","title":"User Channels (Public & Private)","description":"Add support for user-created channels beyond agent channels.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} -{"id":"bd-dm-001","title":"User-to-User Direct Messages","description":"Add support for direct messages between human users.","priority":85,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":["bd-channels-001"]} {"id":"bd-search-001","title":"Full-Text Message Search","description":"Add comprehensive search across all messages.","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["search","core","p1"],"depends_on":[]} {"id":"bd-files-001","title":"File Sharing & Attachments","description":"Add file upload, preview, and sharing capabilities.","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["files","core","p1"],"depends_on":[]} {"id":"bd-reactions-001","title":"Emoji Reactions","description":"Add emoji reactions to messages.","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","engagement","p1"],"depends_on":[]} From ea9d222fa7bcc279cdf34056d8bf01627f6f53d2 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 17:57:32 +0000 Subject: [PATCH 065/103] Update beads for public/private workspace model - Change viral bead to clarify PUBLIC WORKSPACE concept (not just rooms) - Update channels bead: access controlled at workspace level, not channel level - Simplified model: public workspaces anyone logged-in can join --- .beads/beads.jsonl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index dcca951b..ad8a486b 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -14,8 +14,8 @@ {"id":"bd-critical-013","title":"[URGENT] Terms of Service & Privacy Policy","description":"Legal requirement before launch.\n\n## Requirements\n1. Terms of Service page\n2. Privacy Policy page\n3. Cookie consent banner\n4. Data processing agreement (for enterprise)\n5. Acceptable use policy\n6. GDPR compliance basics\n\n## Implementation\n- Legal review of AI-specific terms\n- Cookie consent mechanism\n- Data export/deletion flow","priority":120,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","legal","p00"],"depends_on":[]} {"id":"bd-critical-014","title":"[URGENT] Health Check & Status Page","description":"Users need to know if we're down.\n\n## Requirements\n1. /health endpoint for all services\n2. Public status page (statuspage.io or custom)\n3. Uptime monitoring (every 1 min)\n4. Incident management process\n5. Status page subscription\n\n## Services to Monitor\n- API\n- WebSocket\n- Database\n- Agent compute\n- GitHub integration","priority":115,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} {"id":"bd-critical-015","title":"[URGENT] Backup & Disaster Recovery","description":"Data loss = company death.\n\n## Requirements\n1. Automated daily database backups\n2. Point-in-time recovery (PITR)\n3. Cross-region backup replication\n4. Backup restoration testing (monthly)\n5. Message/file backup strategy\n6. RTO < 4 hours, RPO < 1 hour\n\n## Implementation\n- Postgres WAL archiving\n- S3/R2 for file backups\n- Documented recovery runbook","priority":135,"status":"open","created_at":"2026-01-04T18:00:00Z","tags":["critical","urgent","infrastructure","p00"],"depends_on":[]} -{"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create open, public rooms on Agent Relay where anyone can join and chat with AI agents. This is our viral growth mechanism.\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace\n5. Invites team members","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} -{"id":"bd-channels-001","title":"User Channels (Public & Private)","description":"Add support for user-created channels beyond agent channels.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} +{"id":"bd-viral-001","title":"[VIRAL] Public Community Rooms with AI Agents","description":"Create a PUBLIC WORKSPACE on Agent Relay that any logged-in user can join. Contains always-on AI agents (DocsBot, HelpBot, RoadmapBot). This is our viral growth mechanism.\n\n## Growth Loop\n1. User discovers Agent Relay\n2. Joins public room, chats with agents\n3. Gets hooked on AI-native collaboration\n4. Creates own workspace\n5. Invites team members","priority":100,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","growth","community","p0"],"depends_on":[]} +{"id":"bd-channels-001","title":"User Channels within Workspaces","description":"Add support for user-created channels within workspaces. Access controlled at workspace level (public vs private workspaces).","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","core","p0"],"depends_on":[]} {"id":"bd-search-001","title":"Full-Text Message Search","description":"Add comprehensive search across all messages.","priority":80,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["search","core","p1"],"depends_on":[]} {"id":"bd-files-001","title":"File Sharing & Attachments","description":"Add file upload, preview, and sharing capabilities.","priority":75,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["files","core","p1"],"depends_on":[]} {"id":"bd-reactions-001","title":"Emoji Reactions","description":"Add emoji reactions to messages.","priority":70,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["messaging","engagement","p1"],"depends_on":[]} From a56390c3a84143f660a659c46d07d3fbd76de146 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 19:07:32 +0000 Subject: [PATCH 066/103] Add Settings button to Sidebar for better discoverability MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Settings was only accessible via: - Header gear icon (not obvious) - Command Palette (โŒ˜K โ†’ settings) Now also accessible from: - Sidebar footer (below "Spawn Agent" button) This matches user expectations for settings location. --- src/dashboard/react-components/App.tsx | 1 + .../react-components/layout/Sidebar.tsx | 21 ++++++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 3e82f9b3..29533b73 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -877,6 +877,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { onLogsClick={handleLogsClick} onThreadSelect={setCurrentThread} onClose={() => setIsSidebarOpen(false)} + onSettingsClick={handleSettingsClick} />
diff --git a/src/dashboard/react-components/layout/Sidebar.tsx b/src/dashboard/react-components/layout/Sidebar.tsx index 14d11c0d..072d9c49 100644 --- a/src/dashboard/react-components/layout/Sidebar.tsx +++ b/src/dashboard/react-components/layout/Sidebar.tsx @@ -42,6 +42,8 @@ export interface SidebarProps { onThreadSelect?: (threadId: string) => void; /** Mobile: close sidebar handler */ onClose?: () => void; + /** Settings click handler */ + onSettingsClick?: () => void; } export function Sidebar({ @@ -65,6 +67,7 @@ export function Sidebar({ onLogsClick, onThreadSelect, onClose, + onSettingsClick, }: SidebarProps) { const [searchQuery, setSearchQuery] = useState(''); const [isThreadsCollapsed, setIsThreadsCollapsed] = useState(() => { @@ -203,7 +206,7 @@ export function Sidebar({
{/* Footer Actions */} -
+
+
); @@ -268,3 +278,12 @@ function CloseIcon() { ); } + +function SettingsIcon() { + return ( + + + + + ); +} From 9d7ffa117525a372ad72b4643e9cf57dfa60109d Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 4 Jan 2026 19:17:10 +0000 Subject: [PATCH 067/103] Add Workspace Settings access via WorkspaceSelector dropdown MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove settings button from sidebar footer (wrong location) - Add "Workspace Settings" button to WorkspaceSelector dropdown - Opens settings page directly to Workspace tab - Settings page now supports initialTab prop for deep linking Access points for settings: - WorkspaceSelector dropdown โ†’ "Workspace Settings" (opens to Workspace tab) - Header gear icon โ†’ Settings (opens to Dashboard tab) - Command Palette (โŒ˜K) โ†’ Settings --- src/dashboard/react-components/App.tsx | 10 ++++++++ .../react-components/WorkspaceSelector.tsx | 25 ++++++++++++++++++- .../react-components/layout/Sidebar.tsx | 21 +--------------- 3 files changed, 35 insertions(+), 21 deletions(-) diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 29533b73..81bbc82a 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -192,6 +192,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { // Full settings page state const [isFullSettingsOpen, setIsFullSettingsOpen] = useState(false); + const [settingsInitialTab, setSettingsInitialTab] = useState<'dashboard' | 'workspace' | 'team' | 'billing'>('dashboard'); // Conversation history panel state const [isHistoryOpen, setIsHistoryOpen] = useState(false); @@ -541,6 +542,13 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { // Handle settings click - opens full settings page const handleSettingsClick = useCallback(() => { + setSettingsInitialTab('dashboard'); + setIsFullSettingsOpen(true); + }, []); + + // Handle workspace settings click - opens settings to workspace tab + const handleWorkspaceSettingsClick = useCallback(() => { + setSettingsInitialTab('workspace'); setIsFullSettingsOpen(true); }, []); @@ -851,6 +859,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { activeWorkspaceId={effectiveActiveWorkspaceId ?? undefined} onSelect={handleEffectiveWorkspaceSelect} onAddWorkspace={() => setIsAddWorkspaceOpen(true)} + onWorkspaceSettings={handleWorkspaceSettingsClick} isLoading={effectiveIsLoading} />
@@ -1226,6 +1235,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { {isFullSettingsOpen && ( setIsFullSettingsOpen(false)} /> )} diff --git a/src/dashboard/react-components/WorkspaceSelector.tsx b/src/dashboard/react-components/WorkspaceSelector.tsx index b1cab61e..40d2fd5d 100644 --- a/src/dashboard/react-components/WorkspaceSelector.tsx +++ b/src/dashboard/react-components/WorkspaceSelector.tsx @@ -23,6 +23,7 @@ export interface WorkspaceSelectorProps { activeWorkspaceId?: string; onSelect: (workspace: Workspace) => void; onAddWorkspace: () => void; + onWorkspaceSettings?: () => void; isLoading?: boolean; } @@ -31,6 +32,7 @@ export function WorkspaceSelector({ activeWorkspaceId, onSelect, onAddWorkspace, + onWorkspaceSettings, isLoading = false, }: WorkspaceSelectorProps) { const [isOpen, setIsOpen] = useState(false); @@ -122,7 +124,19 @@ export function WorkspaceSelector({ )}
-
+
+ {onWorkspaceSettings && activeWorkspace && ( + + )}
{/* Footer Actions */} -
+
-
); @@ -278,12 +268,3 @@ function CloseIcon() { ); } - -function SettingsIcon() { - return ( - - - - - ); -} From b377b01c87fc187795dca6fbb3c6bb78db451cbe Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 22:59:08 +0100 Subject: [PATCH 068/103] Remove SSH port forwarding, fix CLI credential paths and formats MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove openssh-server and SSH configuration from Dockerfile - Remove SSH server startup logic from entrypoint.sh - Fix Claude credential path: ~/.claude/.credentials.json (with dot prefix) - Fix Claude credential format: claudeAiOauth.accessToken structure - Fix Codex credential path: ~/.codex/auth.json - Fix Codex credential format: tokens.access_token structure - Add support for refresh token env vars (ANTHROPIC_REFRESH_TOKEN, OPENAI_REFRESH_TOKEN) SSH was originally for Codex OAuth callback tunneling but is no longer needed with device flow support and PTY-based OAuth capture. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- deploy/workspace/Dockerfile | 9 ++++++--- deploy/workspace/entrypoint.sh | 33 +++++++++++++++++++++++---------- 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index 612674fa..948bb394 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -31,6 +31,7 @@ WORKDIR /app # Install system dependencies for AI CLIs and git # Note: tmux not needed - daemon uses node-pty directly +# gosu is used to drop privileges from root to workspace user RUN apt-get update && apt-get install -y \ bash \ ca-certificates \ @@ -38,6 +39,7 @@ RUN apt-get update && apt-get install -y \ git \ python3 \ jq \ + gosu \ && rm -rf /var/lib/apt/lists/* # Install GitHub CLI (gh) @@ -62,11 +64,12 @@ RUN npm install -g @google/gemini-cli RUN npm install -g opencode-ai@latest # Create workspace directory -RUN mkdir -p /workspace/repos /data +RUN mkdir -p /workspace /data # Create non-root user -RUN useradd -m -u 1001 workspace -RUN chown -R workspace:workspace /app /workspace /data +RUN useradd -m -u 1001 -s /bin/bash workspace \ + && chown -R workspace:workspace /app /workspace /data + USER workspace # Install AI CLIs as workspace user (they install to ~/.local/bin) diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index a5ae0ad8..1ba45fa0 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -6,6 +6,12 @@ log() { echo "[workspace] $*" } +# Drop to workspace user if running as root +if [[ "$(id -u)" == "0" ]]; then + log "Dropping privileges to workspace user..." + exec gosu workspace "$0" "$@" +fi + PORT="${AGENT_RELAY_DASHBOARD_PORT:-${PORT:-3888}}" export AGENT_RELAY_DASHBOARD_PORT="${PORT}" export PORT="${PORT}" @@ -127,30 +133,37 @@ fi # Create credential files that CLIs expect from ENV vars passed by provisioner # ============================================================================ -# Claude CLI expects ~/.claude/credentials.json +# Claude CLI expects ~/.claude/.credentials.json (note the dot prefix on filename) +# Format: { claudeAiOauth: { accessToken: "...", refreshToken: "...", expiresAt: ... } } if [[ -n "${ANTHROPIC_TOKEN:-}" ]]; then log "Configuring Claude credentials..." mkdir -p "${HOME}/.claude" - cat > "${HOME}/.claude/credentials.json" < "${HOME}/.claude/.credentials.json" < "${HOME}/.codex/credentials.json" < "${HOME}/.codex/auth.json" < Date: Sun, 4 Jan 2026 23:07:11 +0100 Subject: [PATCH 069/103] Fix OAuth flow to show auth code input immediately when popup opens MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Set status to 'waiting_auth' immediately when authUrl is present - This ensures the auth code input field is visible right away - Add clearer instructions for Claude auth code submission - Previously status stayed at 'starting' even after popup opened ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- .../settings/WorkspaceSettingsPanel.tsx | 184 +++++++++++++++++- 1 file changed, 174 insertions(+), 10 deletions(-) diff --git a/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx index 227c7e8d..5c5605cb 100644 --- a/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx +++ b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx @@ -7,7 +7,7 @@ * Design: Mission Control theme with deep space aesthetic */ -import React, { useState, useEffect, useCallback } from 'react'; +import React, { useState, useEffect, useCallback, useRef } from 'react'; import { cloudApi } from '../../lib/cloudApi'; export interface WorkspaceSettingsPanelProps { @@ -61,6 +61,8 @@ interface AIProvider { apiKeyUrl?: string; apiKeyName?: string; supportsOAuth?: boolean; + supportsDeviceFlow?: boolean; // Provider supports device flow (easier for headless environments) + preferApiKey?: boolean; // Show API key input by default (simpler for mobile/containers) isConnected?: boolean; } @@ -86,6 +88,7 @@ const AI_PROVIDERS: AIProvider[] = [ apiKeyUrl: 'https://platform.openai.com/api-keys', apiKeyName: 'API key', supportsOAuth: true, + supportsDeviceFlow: true, // Codex supports --device-auth for headless environments }, { id: 'google', @@ -141,9 +144,14 @@ export function WorkspaceSettingsPanel({ const [providerStatus, setProviderStatus] = useState>({}); const [connectingProvider, setConnectingProvider] = useState(null); const [apiKeyInput, setApiKeyInput] = useState(''); + const [authCodeInput, setAuthCodeInput] = useState(''); const [providerError, setProviderError] = useState(null); const [oauthSession, setOauthSession] = useState(null); const [showApiKeyFallback, setShowApiKeyFallback] = useState>({}); + // Track whether popup has been opened for current session (avoids stale closure issues) + const popupOpenedRef = useRef(null); + // Device flow preference for providers that support it + const [useDeviceFlow, setUseDeviceFlow] = useState>({}); // Custom domain form const [customDomain, setCustomDomain] = useState(''); @@ -197,6 +205,8 @@ export function WorkspaceSettingsPanel({ setProviderError(null); setConnectingProvider(provider.id); setOauthSession({ providerId: provider.id, sessionId: '', status: 'starting' }); + // Reset popup tracking for new session + popupOpenedRef.current = null; try { const headers: Record = { 'Content-Type': 'application/json' }; @@ -206,6 +216,10 @@ export function WorkspaceSettingsPanel({ method: 'POST', credentials: 'include', headers, + body: JSON.stringify({ + workspaceId, + useDeviceFlow: useDeviceFlow[provider.id] || false, + }), }); const data = await res.json(); @@ -225,11 +239,14 @@ export function WorkspaceSettingsPanel({ providerId: provider.id, sessionId: data.sessionId, authUrl: data.authUrl, - status: data.status || 'starting', + // If we have an authUrl, immediately show waiting_auth status so auth code input appears + status: data.authUrl ? 'waiting_auth' : (data.status || 'starting'), }; setOauthSession(session); if (data.authUrl) { + // Track that popup was opened for this session + popupOpenedRef.current = data.sessionId; openAuthPopup(data.authUrl, provider.displayName); pollAuthStatus(provider.id, data.sessionId); } else if (data.status === 'starting') { @@ -263,6 +280,7 @@ export function WorkspaceSettingsPanel({ setProviderError('Authentication timed out. Please try again.'); setOauthSession(null); setConnectingProvider(null); + popupOpenedRef.current = null; return; } @@ -282,7 +300,9 @@ export function WorkspaceSettingsPanel({ return; } else if (data.status === 'error') { throw new Error(data.error || 'Authentication failed'); - } else if (data.status === 'waiting_auth' && data.authUrl && !oauthSession?.authUrl) { + } else if (data.status === 'waiting_auth' && data.authUrl && popupOpenedRef.current !== sessionId) { + // Use ref to prevent multiple popups (avoids stale closure issue) + popupOpenedRef.current = sessionId; setOauthSession(prev => prev ? { ...prev, authUrl: data.authUrl, status: 'waiting_auth' } : null); openAuthPopup(data.authUrl, AI_PROVIDERS.find(p => p.id === providerId)?.displayName || 'Provider'); } @@ -293,6 +313,7 @@ export function WorkspaceSettingsPanel({ setProviderError(err instanceof Error ? err.message : 'Auth check failed'); setOauthSession(null); setConnectingProvider(null); + popupOpenedRef.current = null; } }; @@ -318,10 +339,12 @@ export function WorkspaceSettingsPanel({ setProviderStatus(prev => ({ ...prev, [providerId]: true })); setOauthSession(null); setConnectingProvider(null); + popupOpenedRef.current = null; } catch (err) { setProviderError(err instanceof Error ? err.message : 'Failed to complete auth'); setOauthSession(null); setConnectingProvider(null); + popupOpenedRef.current = null; } }; @@ -338,6 +361,62 @@ export function WorkspaceSettingsPanel({ } setOauthSession(null); setConnectingProvider(null); + setAuthCodeInput(''); + popupOpenedRef.current = null; + }; + + const submitAuthCodeToSession = async () => { + if (!oauthSession?.sessionId || !authCodeInput.trim()) { + return; + } + + setProviderError(null); + + // Extract code from URL if user pasted the full callback URL + let code = authCodeInput.trim(); + if (code.includes('code=')) { + try { + const url = new URL(code); + const extractedCode = url.searchParams.get('code'); + if (extractedCode) { + code = extractedCode; + } + } catch { + // Not a valid URL, try to extract code parameter manually + const match = code.match(/code=([^&\s]+)/); + if (match) { + code = match[1]; + } + } + } + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${oauthSession.providerId}/code/${oauthSession.sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ code }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to submit auth code'); + } + + // Clear the input and continue polling - the CLI should now complete + setAuthCodeInput(''); + + // If immediate success, complete the flow + if (data.status === 'success') { + await completeAuthFlow(oauthSession.providerId, oauthSession.sessionId); + } + } catch (err) { + setProviderError(err instanceof Error ? err.message : 'Failed to submit auth code'); + } }; const submitApiKey = async (provider: AIProvider) => { @@ -722,6 +801,34 @@ export function WorkspaceSettingsPanel({

)} + {/* Auth code/URL input for completing auth */} +
+

+ {provider.id === 'openai' ? ( + <>After completing login, if you see "site can't be reached", copy the full URL from your browser and paste it here: + ) : provider.id === 'anthropic' ? ( + <>After completing login, copy the auth code shown on the page and paste it here: + ) : ( + <>If {provider.displayName} gives you an auth code, paste it here: + )} +

+
+ setAuthCodeInput(e.target.value)} + className="flex-1 px-3 py-2 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all font-mono" + /> + +
+
)} + {provider.supportsOAuth && ( + + )}
- ) : ( + ) : provider.supportsOAuth ? (
+ {/* Device flow toggle for providers that support it */} + {provider.supportsDeviceFlow && ( + + )} )}
+ ) : ( + /* Provider doesn't support OAuth - show API key input directly */ +
+
+ { + setConnectingProvider(provider.id); + setApiKeyInput(e.target.value); + }} + onFocus={() => setConnectingProvider(provider.id)} + className="flex-1 px-4 py-3 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all" + /> + +
+ {provider.apiKeyUrl && ( +

+ Get your API key from{' '} + + {new URL(provider.apiKeyUrl).hostname} + +

+ )} +

+ OAuth not available for {provider.displayName} in container environments +

+
)}
)} From 5a0d3f7ff488301bd481b39fac5545545688154c Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 23:09:58 +0100 Subject: [PATCH 070/103] Add credential extraction on success and polling after code submission MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Extract credentials immediately when success pattern is detected - Poll for credentials file after auth code submission (10 attempts, 1s interval) - This handles CLIs that don't exit after auth or don't output success messages - Added logging for credential extraction debugging ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/daemon/cli-auth.ts | 357 +++++++++++++++++++++-------------------- 1 file changed, 180 insertions(+), 177 deletions(-) diff --git a/src/daemon/cli-auth.ts b/src/daemon/cli-auth.ts index 14d2dff2..234a89e2 100644 --- a/src/daemon/cli-auth.ts +++ b/src/daemon/cli-auth.ts @@ -10,132 +10,21 @@ import * as crypto from 'crypto'; import * as fs from 'fs/promises'; import * as os from 'os'; import { createLogger } from '../resiliency/logger.js'; +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + getSupportedProviders, + type CLIAuthConfig, + type PromptHandler, +} from '../shared/cli-auth-config.js'; const logger = createLogger('cli-auth'); -/** - * CLI auth configuration for each provider - */ -interface CLIAuthConfig { - command: string; - args: string[]; - urlPattern: RegExp; - credentialPath?: string; - displayName: string; - prompts: PromptHandler[]; - successPatterns: RegExp[]; - waitTimeout: number; -} - -interface PromptHandler { - pattern: RegExp; - response: string; - delay?: number; - description: string; -} - -const CLI_AUTH_CONFIG: Record = { - anthropic: { - command: 'claude', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - credentialPath: '~/.claude/credentials.json', - displayName: 'Claude', - waitTimeout: 30000, - prompts: [ - { - pattern: /dark\s*(mode|theme)/i, - response: '\r', - delay: 100, - description: 'Dark mode prompt', - }, - { - pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, - response: '\r', - delay: 100, - description: 'Auth method prompt', - }, - { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', - delay: 100, - description: 'Trust directory prompt', - }, - ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], - }, - openai: { - command: 'codex', - args: ['login'], - urlPattern: /(https:\/\/[^\s]+)/, - credentialPath: '~/.codex/credentials.json', - displayName: 'Codex', - waitTimeout: 30000, - prompts: [ - { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', - delay: 100, - description: 'Trust directory prompt', - }, - ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], - }, - google: { - command: 'gemini', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'Gemini', - waitTimeout: 30000, - prompts: [ - { - pattern: /login\s*with\s*google|google\s*account|choose.*auth/i, - response: '\r', - delay: 200, - description: 'Auth method selection', - }, - ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], - }, - opencode: { - command: 'opencode', - args: ['auth', 'login'], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'OpenCode', - waitTimeout: 30000, - prompts: [ - { - pattern: /select.*provider|choose.*provider|which.*provider/i, - response: '\r', - delay: 200, - description: 'Provider selection', - }, - { - pattern: /claude\s*pro|anthropic|select.*auth/i, - response: '\r', - delay: 200, - description: 'Auth type selection', - }, - ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], - }, - droid: { - command: 'droid', - args: ['--login'], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'Droid', - waitTimeout: 30000, - prompts: [ - { - pattern: /sign\s*in|log\s*in|authenticate/i, - response: '\r', - delay: 200, - description: 'Login prompt', - }, - ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], - }, -}; +// Re-export for consumers +export { CLI_AUTH_CONFIG, getSupportedProviders }; +export type { CLIAuthConfig, PromptHandler }; /** * Auth session state @@ -146,6 +35,8 @@ interface AuthSession { status: 'starting' | 'waiting_auth' | 'success' | 'error'; authUrl?: string; token?: string; + refreshToken?: string; + tokenExpiresAt?: Date; error?: string; output: string; promptsHandled: string[]; @@ -173,44 +64,18 @@ setInterval(() => { } }, 60000); -/** - * Strip ANSI escape codes from text - */ -function stripAnsiCodes(text: string): string { - // eslint-disable-next-line no-control-regex - return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); -} - -/** - * Check if text matches any success pattern - */ -function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { - const cleanText = stripAnsiCodes(text).toLowerCase(); - return patterns.some((p) => p.test(cleanText)); -} - -/** - * Find matching prompt handler - */ -function findMatchingPrompt( - text: string, - prompts: PromptHandler[], - respondedPrompts: Set -): PromptHandler | null { - const cleanText = stripAnsiCodes(text); - for (const prompt of prompts) { - if (respondedPrompts.has(prompt.description)) continue; - if (prompt.pattern.test(cleanText)) { - return prompt; - } - } - return null; +export interface StartCLIAuthOptions { + /** Use device flow instead of standard OAuth (if provider supports it) */ + useDeviceFlow?: boolean; } /** * Start CLI auth flow */ -export function startCLIAuth(provider: string): AuthSession { +export async function startCLIAuth( + provider: string, + options: StartCLIAuthOptions = {} +): Promise { const config = CLI_AUTH_CONFIG[provider]; if (!config) { throw new Error(`Unknown provider: ${provider}`); @@ -227,10 +92,22 @@ export function startCLIAuth(provider: string): AuthSession { }; sessions.set(sessionId, session); + // Use device flow args if requested and supported + const args = options.useDeviceFlow && config.deviceFlowArgs + ? config.deviceFlowArgs + : config.args; + + logger.info('Starting CLI auth', { + provider, + sessionId, + useDeviceFlow: options.useDeviceFlow, + args, + }); + const respondedPrompts = new Set(); try { - const proc = pty.spawn(config.command, config.args, { + const proc = pty.spawn(config.command, args, { name: 'xterm-256color', cols: 120, rows: 30, @@ -246,14 +123,17 @@ export function startCLIAuth(provider: string): AuthSession { session.process = proc; - // Timeout handler + // Timeout handler - give user plenty of time to complete OAuth flow + // 5 minutes should be enough for even slow OAuth flows + const OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes const timeout = setTimeout(() => { if (session.status === 'starting' || session.status === 'waiting_auth') { + logger.warn('CLI auth timed out', { provider, sessionId, status: session.status }); proc.kill(); session.status = 'error'; - session.error = 'Timeout waiting for auth completion'; + session.error = 'Timeout waiting for auth completion (5 minutes). Please try again.'; } - }, config.waitTimeout + 60000); // Extra time for user to complete OAuth + }, config.waitTimeout + OAUTH_COMPLETION_TIMEOUT); proc.onData((data: string) => { session.output += data; @@ -284,9 +164,26 @@ export function startCLIAuth(provider: string): AuthSession { logger.info('Auth URL captured', { provider, url: session.authUrl }); } - // Check for success + // Check for success and try to extract credentials if (matchesSuccessPattern(data, config.successPatterns)) { session.status = 'success'; + logger.info('Success pattern detected, attempting credential extraction', { provider }); + + // Try to extract credentials immediately (CLI may not exit after success) + // Use a small delay to let the CLI finish writing the file + setTimeout(async () => { + try { + const creds = await extractCredentials(provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + logger.info('Credentials extracted successfully', { provider, hasRefreshToken: !!creds.refreshToken }); + } + } catch (err) { + logger.error('Failed to extract credentials on success', { error: String(err) }); + } + }, 500); } }); @@ -297,9 +194,11 @@ export function startCLIAuth(provider: string): AuthSession { // Try to extract credentials if (session.authUrl || exitCode === 0) { try { - const token = await extractCredentials(provider, config); - if (token) { - session.token = token; + const creds = await extractCredentials(provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; session.status = 'success'; } } catch (err) { @@ -328,6 +227,93 @@ export function getAuthSession(sessionId: string): AuthSession | null { return sessions.get(sessionId) || null; } +/** + * Submit auth code to a waiting session + * This writes the code to the PTY process stdin + * + * @returns Object with success status and optional error message + */ +export function submitAuthCode( + sessionId: string, + code: string +): { success: boolean; error?: string } { + const session = sessions.get(sessionId); + if (!session) { + logger.warn('Auth code submission failed: session not found', { sessionId }); + return { success: false, error: 'Session not found or expired' }; + } + + if (!session.process) { + logger.warn('Auth code submission failed: no PTY process', { + sessionId, + sessionStatus: session.status, + }); + return { + success: false, + error: 'CLI process not running. The auth session may have timed out.', + }; + } + + try { + // Write the auth code followed by enter + session.process.write(code + '\r'); + logger.info('Auth code submitted', { sessionId, codeLength: code.length }); + + // Start polling for credentials after code submission + // The CLI should write credentials shortly after receiving the code + const config = CLI_AUTH_CONFIG[session.provider]; + if (config) { + pollForCredentials(session, config); + } + + return { success: true }; + } catch (err) { + logger.error('Failed to submit auth code', { sessionId, error: String(err) }); + return { success: false, error: 'Failed to write to CLI process' }; + } +} + +/** + * Poll for credentials file after auth code submission + * Some CLIs don't output success patterns, so we check the file directly + */ +async function pollForCredentials(session: AuthSession, config: CLIAuthConfig): Promise { + const maxAttempts = 10; + const pollInterval = 1000; // 1 second + + for (let i = 0; i < maxAttempts; i++) { + await new Promise(resolve => setTimeout(resolve, pollInterval)); + + // Skip if session already has credentials or errored + if (session.token || session.status === 'error') { + return; + } + + try { + const creds = await extractCredentials(session.provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + session.status = 'success'; + logger.info('Credentials found via polling', { + provider: session.provider, + attempt: i + 1, + hasRefreshToken: !!creds.refreshToken, + }); + return; + } + } catch { + // File doesn't exist yet, continue polling + } + } + + logger.warn('Credential polling completed without finding credentials', { + provider: session.provider, + sessionId: session.id, + }); +} + /** * Cancel auth session */ @@ -347,13 +333,19 @@ export function cancelAuthSession(sessionId: string): boolean { return true; } +interface ExtractedCredentials { + token: string; + refreshToken?: string; + expiresAt?: Date; +} + /** * Extract credentials from CLI credential file */ async function extractCredentials( provider: string, config: CLIAuthConfig -): Promise { +): Promise { if (!config.credentialPath) return null; try { @@ -363,23 +355,34 @@ async function extractCredentials( // Extract token based on provider if (provider === 'anthropic') { - return creds.oauth_token || creds.access_token || creds.api_key; + // Claude stores OAuth in: { claudeAiOauth: { accessToken: "...", refreshToken: "...", expiresAt: ... } } + if (creds.claudeAiOauth?.accessToken) { + return { + token: creds.claudeAiOauth.accessToken, + refreshToken: creds.claudeAiOauth.refreshToken, + expiresAt: creds.claudeAiOauth.expiresAt ? new Date(creds.claudeAiOauth.expiresAt) : undefined, + }; + } + // Fallback to legacy formats + const token = creds.oauth_token || creds.access_token || creds.api_key; + return token ? { token } : null; } else if (provider === 'openai') { - return creds.token || creds.access_token || creds.api_key; + // Codex stores OAuth in: { tokens: { access_token: "...", refresh_token: "...", ... } } + if (creds.tokens?.access_token) { + return { + token: creds.tokens.access_token, + refreshToken: creds.tokens.refresh_token, + }; + } + // Fallback: API key or legacy formats + const token = creds.OPENAI_API_KEY || creds.token || creds.access_token || creds.api_key; + return token ? { token } : null; } - return creds.token || creds.access_token || creds.api_key || null; + const token = creds.token || creds.access_token || creds.api_key; + return token ? { token } : null; } catch { return null; } } -/** - * Get supported providers - */ -export function getSupportedProviders(): { id: string; displayName: string }[] { - return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ - id, - displayName: config.displayName, - })); -} From 4e3631baad1e2a39a994a5e616b3bccd4080d9b7 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 23:34:41 +0100 Subject: [PATCH 071/103] clean up --- .beads/issues.jsonl | 3 +- .env.example | 40 + .github/workflows/docker.yml | 4 +- .gitignore | 4 +- README.md | 2 +- deploy/workspace/Dockerfile | 4 + docker-compose.dev.yml | 20 +- docker-compose.test.yml | 4 +- docs/CLAUDE.md | 1 - docs/{ => archive}/CHANGELOG.md | 0 .../CLI-SIMPLIFICATION-COMPLETE.md | 0 docs/{ => archive}/DESIGN_BRIDGE_STAFFING.md | 0 docs/{ => archive}/DESIGN_V2.md | 0 .../archive/EXECUTIVE_SUMMARY.md | 0 docs/{ => archive}/MONETIZATION.md | 0 docs/{ => archive}/PROPOSAL-trajectories.md | 0 ROADMAP.md => docs/archive/ROADMAP.md | 0 docs/{ => archive}/SCALING_ANALYSIS.md | 0 .../TESTING_PRESENCE_FEATURES.md | 0 .../TMUX_IMPLEMENTATION_NOTES.md | 0 docs/{ => archive}/TMUX_IMPROVEMENTS.md | 0 docs/{ => archive}/dashboard-v2-plan.md | 0 docs/{ => archive}/removable-code-analysis.md | 0 dashboard.png => docs/dashboard.png | Bin package-lock.json | 1 + package.json | 6 +- src/cloud/api/cli-pty-runner.ts | 316 +- src/cloud/api/nango-auth.ts | 48 +- src/cloud/api/onboarding.ts | 207 +- src/cloud/api/workspaces.ts | 133 +- src/cloud/db/drizzle.ts | 21 +- .../db/migrations/0006_workspace_ssh.sql | 6 + .../db/migrations/meta/0005_snapshot.json | 2965 +++++++++++++++++ src/cloud/db/migrations/meta/_journal.json | 9 +- src/cloud/db/schema.ts | 5 + src/cloud/provisioner/index.ts | 105 +- src/cloud/server.ts | 272 +- src/daemon/api.ts | 2 +- src/dashboard-server/server.ts | 104 + src/dashboard/app/app/page.tsx | 108 +- src/dashboard/lib/api.ts | 125 +- src/dashboard/lib/cloudApi.ts | 5 + .../react-components/hooks/useMessages.ts | 15 +- .../hooks/useWorkspaceStatus.ts | 38 +- .../react-components/layout/Sidebar.tsx | 21 +- src/shared/cli-auth-config.ts | 288 ++ test_parser.js | 31 - test_parser.mjs | 49 - 48 files changed, 4325 insertions(+), 637 deletions(-) delete mode 120000 docs/CLAUDE.md rename docs/{ => archive}/CHANGELOG.md (100%) rename docs/{ => archive}/CLI-SIMPLIFICATION-COMPLETE.md (100%) rename docs/{ => archive}/DESIGN_BRIDGE_STAFFING.md (100%) rename docs/{ => archive}/DESIGN_V2.md (100%) rename EXECUTIVE_SUMMARY.md => docs/archive/EXECUTIVE_SUMMARY.md (100%) rename docs/{ => archive}/MONETIZATION.md (100%) rename docs/{ => archive}/PROPOSAL-trajectories.md (100%) rename ROADMAP.md => docs/archive/ROADMAP.md (100%) rename docs/{ => archive}/SCALING_ANALYSIS.md (100%) rename docs/{ => archive}/TESTING_PRESENCE_FEATURES.md (100%) rename docs/{ => archive}/TMUX_IMPLEMENTATION_NOTES.md (100%) rename docs/{ => archive}/TMUX_IMPROVEMENTS.md (100%) rename docs/{ => archive}/dashboard-v2-plan.md (100%) rename docs/{ => archive}/removable-code-analysis.md (100%) rename dashboard.png => docs/dashboard.png (100%) create mode 100644 src/cloud/db/migrations/0006_workspace_ssh.sql create mode 100644 src/cloud/db/migrations/meta/0005_snapshot.json create mode 100644 src/shared/cli-auth-config.ts delete mode 100644 test_parser.js delete mode 100644 test_parser.mjs diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index c15d018b..eaaf9714 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -55,6 +55,7 @@ {"id":"agent-relay-328","title":"Document cross-project messaging syntax for agents","description":"The parser supports cross-project messaging but agents don't know about it.\n\n## Current Syntax (supported but undocumented)\n\n```\n-\u003erelay:project-id:AgentName \u003c\u003c\u003c\nMessage to agent in another project\u003e\u003e\u003e\n\n-\u003erelay:project-id:* \u003c\u003c\u003c\nBroadcast to all agents in that project\u003e\u003e\u003e\n\n-\u003erelay:project-id:lead \u003c\u003c\u003c\nMessage to lead agent of that project\u003e\u003e\u003e\n```\n\n## Files to Update\n\n1. **docs/agent-relay-snippet.md** - Add cross-project section\n2. **CLAUDE.md** - Update the snippet (or it auto-updates)\n3. **.claude/skills/using-agent-relay/SKILL.md** - If exists\n\n## Content to Add\n\n```markdown\n## Cross-Project Messaging (Bridge Mode)\n\nWhen running with `agent-relay bridge`, you can message agents in other projects:\n\n```\n-\u003erelay:frontend:Designer \u003c\u003c\u003c\nPlease update the UI for the new auth flow\u003e\u003e\u003e\n\n-\u003erelay:backend:* \u003c\u003c\u003c\nAPI changes deployed, please pull latest\u003e\u003e\u003e\n\n-\u003erelay:shared-lib:lead \u003c\u003c\u003c\nNeed a new utility function for date formatting\u003e\u003e\u003e\n```\n\nFormat: `-\u003erelay:project-id:agent-name`\n```\n\n## Also Consider\n- How agents discover available projects\n- How to query which agents are in which project\n- Cross-project thread syntax: `[thread:project:topic]`","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-01T23:17:09.740345+01:00","updated_at":"2026-01-01T23:19:22.409354+01:00","closed_at":"2026-01-01T23:19:22.409354+01:00"} {"id":"agent-relay-329","title":"Dashboard: Show connected repos/projects indicator","description":"When multiple repos are connected (bridge mode or multi-repo workspace), the dashboard should visually indicate this.\n\n## Current State\n- Dashboard shows agents but no clear indication of which project/repo they belong to\n- No visual cue that multiple projects are bridged\n\n## Desired UX\n\n### Option A: Project badges on agents\nEach agent shows a small badge/tag with their project:\n```\n[frontend] Designer - active\n[backend] API-Dev - idle \n[shared] Utils - active\n```\n\n### Option B: Grouped sidebar\nProjects as collapsible sections (already partially exists in ProjectList):\n```\nโ–ผ frontend (3 agents)\n - Designer\n - Implementer\n - Reviewer\nโ–ผ backend (2 agents)\n - API-Dev\n - DBAdmin\n```\n\n### Option C: Header indicator\nShow connected projects count in header:\n```\n๐Ÿ”— 3 projects connected | Current: frontend\n```\n\n## Implementation Notes\n- Check ProjectList.tsx - already has project grouping logic\n- May need to enhance Header.tsx for connection indicator\n- Consider color-coding projects for quick identification\n\n## Files\n- src/dashboard/react-components/ProjectList.tsx\n- src/dashboard/react-components/layout/Header.tsx\n- src/dashboard/react-components/AgentList.tsx (for badges)","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-01T23:19:14.394353+01:00","updated_at":"2026-01-01T23:22:31.628165+01:00","closed_at":"2026-01-01T23:22:31.628165+01:00"} {"id":"agent-relay-330","title":"Add --architect flag to bridge command for cross-project coordinator","description":"When running bridge mode, optionally spawn an architect agent that coordinates across all projects.\n\n## Usage\n```bash\nagent-relay bridge --architect ~/frontend ~/backend\n# or\nagent-relay bridge --architect claude ~/frontend ~/backend\n```\n\n## Behavior\n1. Bridge connects to all project daemons (existing behavior)\n2. Spawns an Architect agent in a tmux session\n3. Architect agent has access to cross-project messaging:\n - -\u003erelay:project:agent for direct messages\n - -\u003erelay:*:* for broadcast to all\n - -\u003erelay:project:lead for project leads\n4. Architect gets injected with context about connected projects\n\n## Implementation\n- Add --architect flag to bridge command\n- Create temp workspace or use first project as base\n- Spawn tmux wrapper with Architect agent\n- Inject system prompt with project list and cross-project syntax\n\n## Agent Definition\nCould use .claude/agents/architect.md if exists, otherwise default prompt:\n- You are the Architect coordinating: [project list]\n- Use cross-project messaging syntax\n- Assign tasks to project leads\n- Resolve dependencies","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-01T23:29:07.473839+01:00","updated_at":"2026-01-01T23:35:27.952802+01:00","closed_at":"2026-01-01T23:35:27.952802+01:00"} +{"id":"agent-relay-350","title":"Global skills via PRPM","description":"Distribute @agent-relay/* skills via PRPM registry for opt-in workspace capabilities.\n\n## Goals\n- Publish skills to registry.prpm.dev\n- Users install globally (not per-project)\n- Zero context bloat until loaded\n\n## Key Tasks\n1. Research prpm --global support\n2. Define ~/.agent-relay/ user skills directory\n3. Publish @agent-relay/workspace-capabilities\n4. Publish @agent-relay/browser-testing\n5. Publish @agent-relay/container-spawning\n6. Create @agent-relay/workspace-pack collection\n7. Cloud workspace pre-installation\n\n## Skills to Publish\n- workspace-capabilities: Browser + container docs\n- browser-testing: Playwright, screenshots, VNC\n- container-spawning: Docker, presets, resource limits\n- linear-integration: Webhooks, API patterns\n- slack-integration: Bot patterns\n\n## Open Questions\n- Does prpm support --global flag?\n- Can daemon read user + project skills?\n- Conditional activation based on capabilities?\n\nSee: docs/tasks/global-skills-system.tasks.md","status":"open","priority":2,"issue_type":"epic","created_at":"2026-01-04T13:30:00Z","updated_at":"2026-01-04T13:30:00Z"} {"id":"agent-relay-37i","title":"Message deduplication uses in-memory Set without limits","description":"In tmux-wrapper.ts:65, sentMessageHashes is a Set that grows unbounded. For long-running sessions, this could cause memory issues. Add: (1) Max size with LRU eviction, (2) Time-based expiration, (3) Bloom filter alternative for memory efficiency.","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-20T00:18:47.229988+01:00","updated_at":"2025-12-20T00:18:47.229988+01:00"} {"id":"agent-relay-3px","title":"Add playbook system for batch automation","description":"Implement playbook system (like Maestro's Auto Run) for batch-processing task lists through agents. Define workflows in YAML/markdown, execute automatically with context isolation. Enables reproducible multi-step automation.","status":"open","priority":3,"issue_type":"feature","created_at":"2025-12-23T17:04:54.464749+01:00","updated_at":"2025-12-23T17:04:54.464749+01:00"} {"id":"agent-relay-3tx","title":"PR-9 Review: Document configurable timeouts","status":"open","priority":3,"issue_type":"task","created_at":"2025-12-22T21:54:15.789418+01:00","updated_at":"2025-12-22T21:54:15.789418+01:00"} @@ -116,6 +117,7 @@ {"id":"agent-relay-452","title":"Trajectories should populate agents array with agent who started it","description":"When trail start is called, the trajectory's agents array is empty. It should automatically associate the agent who started the trajectory.","status":"completed","priority":2,"issue_type":"bug","created_at":"2026-01-03T14:28:39.57+01:00","updated_at":"2026-01-03T15:56:25.663159+01:00"} {"id":"agent-relay-453","title":"BUG: Spawn command fails silently when CLI not specified","description":"Users can send `-\u003erelay:spawn WorkerName` without a CLI type, but the parser silently ignores it because it requires both name AND cli. \n\nParse code at pty-wrapper.ts:931 checks `parts.length \u003e= 2` which fails for commands like:\n- `-\u003erelay:spawn Investigator`\n\nShould either:\n1. Make CLI optional with sensible default (claude)\n2. Provide error feedback when CLI is missing\n\nThis blocks relay spawn/release functionality entirely.","status":"closed","priority":0,"issue_type":"bug","assignee":"Backend","created_at":"2026-01-03T16:43:37.927258+01:00","updated_at":"2026-01-03T16:50:11.02666+01:00","closed_at":"2026-01-03T16:50:11.02666+01:00"} {"id":"agent-relay-454","title":"OpenCode headless mode integration","description":"Integrate OpenCode's headless mode (opencode run) with Agent Relay. Options: 1) Create MCP server adapter for agent-relay that OpenCode can use, 2) Document OpenCode config to work with relay. See: https://github.com/anomalyco/opencode/issues/953","status":"open","priority":3,"issue_type":"feature","created_at":"2026-01-04T01:01:55.715466+01:00","updated_at":"2026-01-04T01:01:55.715466+01:00"} +{"id":"agent-relay-455","title":"Create shared types package between backend and frontend","status":"open","priority":2,"issue_type":"task","created_at":"2026-01-04T21:03:08.485997+01:00","updated_at":"2026-01-04T21:03:08.485997+01:00"} {"id":"agent-relay-47z","title":"Express 5 may have breaking changes from Express 4 patterns","description":"package.json uses express@5.2.1 which is a major version with breaking changes from Express 4. Verify: (1) Error handling middleware patterns, (2) Router behavior, (3) Body parsing (express.json vs body-parser).","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-20T00:18:49.269841+01:00","updated_at":"2025-12-20T00:18:49.269841+01:00"} {"id":"agent-relay-4e0","title":"Fix message truncation - messages cut off at source","description":"Root cause found: parser.ts:40 inline regex only captures single line. Multi-line messages are split by parsePassThrough() at line 206. Fix options: (1) Allow continuation lines in inline format, (2) Use block format for multi-line, (3) Add heuristic to join lines until next @relay pattern.","status":"closed","priority":2,"issue_type":"bug","assignee":"MistyShelter","created_at":"2025-12-19T23:40:35.082717+01:00","updated_at":"2025-12-20T00:03:54.806087+01:00","closed_at":"2025-12-20T00:03:54.806087+01:00"} {"id":"agent-relay-4ft","title":"Merge project info into status command","status":"closed","priority":2,"issue_type":"task","assignee":"Pruner","created_at":"2025-12-19T21:59:52.685495+01:00","updated_at":"2025-12-19T22:06:44.276187+01:00","closed_at":"2025-12-19T22:06:44.276187+01:00"} @@ -315,4 +317,3 @@ {"id":"agent-relay-yvf","title":"[Memory] Build task-based trajectory layer on Mem0","description":"Implement trajectory grouping on top of Mem0 observations. Map task_id to Mem0 user_id or metadata. Support chapter-based organization within trajectories.","status":"open","priority":1,"issue_type":"task","created_at":"2025-12-21T15:00:00Z","updated_at":"2025-12-21T15:00:00Z"} {"id":"agent-relay-yvg","title":"[Memory] Implement fleet-wide knowledge workspace","description":"Build knowledge workspace layer: decisions log, pattern library, cross-agent context. Query interface for agents to access fleet knowledge. Uses Mem0 for storage.","status":"open","priority":2,"issue_type":"task","created_at":"2025-12-21T15:00:00Z","updated_at":"2025-12-21T15:00:00Z"} {"id":"agent-relay-yvh","title":"[Memory] Abstract MemoryBackend for future flexibility","description":"Create MemoryBackend interface allowing swap between Mem0, Zep, or custom SQLite+Chroma. Default to Mem0. Enable migration path if needs change.","status":"open","priority":3,"issue_type":"task","created_at":"2025-12-21T15:00:00Z","updated_at":"2025-12-21T15:00:00Z"} -{"id":"agent-relay-350","title":"Global skills via PRPM","description":"Distribute @agent-relay/* skills via PRPM registry for opt-in workspace capabilities.\n\n## Goals\n- Publish skills to registry.prpm.dev\n- Users install globally (not per-project)\n- Zero context bloat until loaded\n\n## Key Tasks\n1. Research prpm --global support\n2. Define ~/.agent-relay/ user skills directory\n3. Publish @agent-relay/workspace-capabilities\n4. Publish @agent-relay/browser-testing\n5. Publish @agent-relay/container-spawning\n6. Create @agent-relay/workspace-pack collection\n7. Cloud workspace pre-installation\n\n## Skills to Publish\n- workspace-capabilities: Browser + container docs\n- browser-testing: Playwright, screenshots, VNC\n- container-spawning: Docker, presets, resource limits\n- linear-integration: Webhooks, API patterns\n- slack-integration: Bot patterns\n\n## Open Questions\n- Does prpm support --global flag?\n- Can daemon read user + project skills?\n- Conditional activation based on capabilities?\n\nSee: docs/tasks/global-skills-system.tasks.md","status":"open","priority":2,"issue_type":"epic","created_at":"2026-01-04T13:30:00.000000Z","updated_at":"2026-01-04T13:30:00.000000Z"} diff --git a/.env.example b/.env.example index 99fa631f..bb9c7b74 100644 --- a/.env.example +++ b/.env.example @@ -22,6 +22,40 @@ # Dashboard port (default: 3888) # AGENT_RELAY_DASHBOARD_PORT=3888 +# ============================================================================= +# Cloud Mode Configuration +# ============================================================================= + +# Force cloud mode in dashboard - prevents silent fallback to local mode +# Set to "true" when testing cloud features locally +# NEXT_PUBLIC_FORCE_CLOUD_MODE=true + +# ============================================================================= +# Security / Vault Configuration +# ============================================================================= + +# Vault master key for encrypting stored credentials (REQUIRED for cloud mode) +# Generate with: openssl rand -base64 32 +# VAULT_MASTER_KEY=your-32-byte-base64-encoded-key + +# ============================================================================= +# Compute Provider Configuration (for workspace provisioning) +# ============================================================================= + +# Compute provider: docker (default), fly, railway +# COMPUTE_PROVIDER=docker + +# --- Fly.io Configuration --- +# Get API token: fly tokens create deploy -x 999999h -n "agent-relay-provisioner" +# FLY_API_TOKEN=your-fly-api-token +# FLY_ORG=personal +# FLY_REGION=sjc +# FLY_WORKSPACE_DOMAIN=workspaces.yourdomain.com # optional custom domain + +# --- Railway Configuration --- +# Get API token from Railway dashboard +# RAILWAY_API_TOKEN=your-railway-api-token + # ============================================================================= # Examples # ============================================================================= @@ -36,3 +70,9 @@ # Use PostgreSQL (future): # AGENT_RELAY_STORAGE_TYPE=postgres # AGENT_RELAY_STORAGE_URL=postgres://localhost:5432/agent_relay + +# Production Fly.io setup: +# COMPUTE_PROVIDER=fly +# FLY_API_TOKEN=fo1_xxxxx +# FLY_ORG=your-org +# FLY_REGION=sjc diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c2006537..318ce711 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,9 +30,9 @@ jobs: - image: agent-relay dockerfile: Dockerfile context: . - - image: agent-relay-workspace + - image: relay-workspace dockerfile: deploy/workspace/Dockerfile - context: deploy/workspace + context: . steps: - name: Checkout repository diff --git a/.gitignore b/.gitignore index 795cbb3d..67e17f6f 100644 --- a/.gitignore +++ b/.gitignore @@ -29,12 +29,12 @@ pnpm-debug.log* # Local test artifacts .agent-relay-test-*/ +.tmp/ +.tmp-*/ # Coverage output coverage/ .npm-cache -.tmp-supervisor-tests -.tmp-agent-relay-data .next diff --git a/README.md b/README.md index 4ba816ec..d1b934ce 100644 --- a/README.md +++ b/README.md @@ -190,7 +190,7 @@ prpm install @agent-relay/agent-relay-snippet --location CLAUDE.md `agent-relay up` starts a web dashboard at http://localhost:3888 -![Agent Relay Dashboard](dashboard.png) +![Agent Relay Dashboard](docs/dashboard.png) ## Development diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index 948bb394..4a8080c1 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -54,6 +54,10 @@ RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | d COPY --from=builder /app/dist ./dist COPY --from=builder /app/node_modules ./node_modules COPY --from=builder /app/package*.json ./ + +# Copy docs folder (contains relay snippets for agent spawning) +COPY docs ./docs + COPY deploy/workspace/entrypoint.sh /entrypoint.sh COPY deploy/workspace/git-credential-relay /usr/local/bin/git-credential-relay RUN chmod +x /entrypoint.sh /usr/local/bin/git-credential-relay diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index f593273d..fc1a3ddc 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -73,7 +73,8 @@ services: NANGO_SECRET_KEY: ${NANGO_SECRET_KEY:-} # Vault master key (generate with: openssl rand -base64 32) - VAULT_MASTER_KEY: ${VAULT_MASTER_KEY:-ZGV2LXZhdWx0LWtleS1jaGFuZ2UtaW4tcHJvZHVjdGlvbg==} + # Default is "dev-vault-key-32-bytes-change!!!" - MUST be exactly 32 bytes when decoded + VAULT_MASTER_KEY: ${VAULT_MASTER_KEY:-ZGV2LXZhdWx0LWtleS0zMi1ieXRlcy1jaGFuZ2UhISE=} # Stripe (set in .env.local for billing features) STRIPE_SECRET_KEY: ${STRIPE_SECRET_KEY:-sk_test_placeholder} @@ -83,6 +84,12 @@ services: # Compute provider (docker for local dev) COMPUTE_PROVIDER: docker + # Flag to indicate we're running in Docker (for localhost translation) + RUNNING_IN_DOCKER: "true" + + # Force cloud mode in dashboard (prevents silent fallback to local mode) + NEXT_PUBLIC_FORCE_CLOUD_MODE: "true" + # Provider OAuth (optional) GOOGLE_CLIENT_ID: ${GOOGLE_CLIENT_ID:-} GOOGLE_CLIENT_SECRET: ${GOOGLE_CLIENT_SECRET:-} @@ -102,17 +109,22 @@ services: # Optional: Example workspace for testing workspace: - image: ghcr.io/agentworkforce/agent-relay-workspace:latest + image: ghcr.io/agentworkforce/relay-workspace:latest build: - context: ./deploy/workspace - dockerfile: Dockerfile + context: . + dockerfile: deploy/workspace/Dockerfile + user: root # Required to start SSH server before dropping privileges ports: - "3888:3888" - "3889:3889" + - "2222:2222" # SSH for port forwarding (e.g., Codex OAuth) environment: WORKSPACE_ID: local-dev-workspace SUPERVISOR_ENABLED: "true" MAX_AGENTS: "10" + # SSH for port forwarding (Codex OAuth callback tunneling) + ENABLE_SSH: "true" + SSH_PASSWORD: ${WORKSPACE_SSH_PASSWORD:-devpassword} volumes: - workspace_data:/data - ./:/workspace:ro diff --git a/docker-compose.test.yml b/docker-compose.test.yml index fe49fcc8..a4c990c5 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -71,8 +71,8 @@ services: # Session SESSION_SECRET: test-session-secret - # Vault master key (test only) - VAULT_MASTER_KEY: dGVzdC12YXVsdC1rZXktZm9yLXRlc3Rpbmctb25seQ== + # Vault master key (test only) - "test-vault-key-32-bytes-testing!" = 32 bytes + VAULT_MASTER_KEY: dGVzdC12YXVsdC1rZXktMzItYnl0ZXMtdGVzdGluZyE= # Disable external services in test mode STRIPE_SECRET_KEY: sk_test_placeholder diff --git a/docs/CLAUDE.md b/docs/CLAUDE.md deleted file mode 120000 index 47dc3e3d..00000000 --- a/docs/CLAUDE.md +++ /dev/null @@ -1 +0,0 @@ -AGENTS.md \ No newline at end of file diff --git a/docs/CHANGELOG.md b/docs/archive/CHANGELOG.md similarity index 100% rename from docs/CHANGELOG.md rename to docs/archive/CHANGELOG.md diff --git a/docs/CLI-SIMPLIFICATION-COMPLETE.md b/docs/archive/CLI-SIMPLIFICATION-COMPLETE.md similarity index 100% rename from docs/CLI-SIMPLIFICATION-COMPLETE.md rename to docs/archive/CLI-SIMPLIFICATION-COMPLETE.md diff --git a/docs/DESIGN_BRIDGE_STAFFING.md b/docs/archive/DESIGN_BRIDGE_STAFFING.md similarity index 100% rename from docs/DESIGN_BRIDGE_STAFFING.md rename to docs/archive/DESIGN_BRIDGE_STAFFING.md diff --git a/docs/DESIGN_V2.md b/docs/archive/DESIGN_V2.md similarity index 100% rename from docs/DESIGN_V2.md rename to docs/archive/DESIGN_V2.md diff --git a/EXECUTIVE_SUMMARY.md b/docs/archive/EXECUTIVE_SUMMARY.md similarity index 100% rename from EXECUTIVE_SUMMARY.md rename to docs/archive/EXECUTIVE_SUMMARY.md diff --git a/docs/MONETIZATION.md b/docs/archive/MONETIZATION.md similarity index 100% rename from docs/MONETIZATION.md rename to docs/archive/MONETIZATION.md diff --git a/docs/PROPOSAL-trajectories.md b/docs/archive/PROPOSAL-trajectories.md similarity index 100% rename from docs/PROPOSAL-trajectories.md rename to docs/archive/PROPOSAL-trajectories.md diff --git a/ROADMAP.md b/docs/archive/ROADMAP.md similarity index 100% rename from ROADMAP.md rename to docs/archive/ROADMAP.md diff --git a/docs/SCALING_ANALYSIS.md b/docs/archive/SCALING_ANALYSIS.md similarity index 100% rename from docs/SCALING_ANALYSIS.md rename to docs/archive/SCALING_ANALYSIS.md diff --git a/docs/TESTING_PRESENCE_FEATURES.md b/docs/archive/TESTING_PRESENCE_FEATURES.md similarity index 100% rename from docs/TESTING_PRESENCE_FEATURES.md rename to docs/archive/TESTING_PRESENCE_FEATURES.md diff --git a/docs/TMUX_IMPLEMENTATION_NOTES.md b/docs/archive/TMUX_IMPLEMENTATION_NOTES.md similarity index 100% rename from docs/TMUX_IMPLEMENTATION_NOTES.md rename to docs/archive/TMUX_IMPLEMENTATION_NOTES.md diff --git a/docs/TMUX_IMPROVEMENTS.md b/docs/archive/TMUX_IMPROVEMENTS.md similarity index 100% rename from docs/TMUX_IMPROVEMENTS.md rename to docs/archive/TMUX_IMPROVEMENTS.md diff --git a/docs/dashboard-v2-plan.md b/docs/archive/dashboard-v2-plan.md similarity index 100% rename from docs/dashboard-v2-plan.md rename to docs/archive/dashboard-v2-plan.md diff --git a/docs/removable-code-analysis.md b/docs/archive/removable-code-analysis.md similarity index 100% rename from docs/removable-code-analysis.md rename to docs/archive/removable-code-analysis.md diff --git a/dashboard.png b/docs/dashboard.png similarity index 100% rename from dashboard.png rename to docs/dashboard.png diff --git a/package-lock.json b/package-lock.json index 50a6e030..f16895c4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -170,6 +170,7 @@ }, "node_modules/@clack/prompts/node_modules/is-unicode-supported": { "version": "1.3.0", + "extraneous": true, "inBundle": true, "license": "MIT", "engines": { diff --git a/package.json b/package.json index eca9f49d..ab5c6a53 100644 --- a/package.json +++ b/package.json @@ -74,12 +74,12 @@ "license": "MIT", "repository": { "type": "git", - "url": "git+https://github.com/khaliqgant/agent-relay.git" + "url": "git+https://github.com/AgentWorkforce/relay.git" }, "bugs": { - "url": "https://github.com/khaliqgant/agent-relay/issues" + "url": "https://github.com/AgentWorkforce/relay/issues" }, - "homepage": "https://github.com/khaliqgant/agent-relay#readme", + "homepage": "https://github.com/AgentWorkforce/relay#readme", "dependencies": { "@nangohq/node": "^0.69.20", "@types/jsonwebtoken": "^9.0.10", diff --git a/src/cloud/api/cli-pty-runner.ts b/src/cloud/api/cli-pty-runner.ts index 0c386b01..289c4bd7 100644 --- a/src/cloud/api/cli-pty-runner.ts +++ b/src/cloud/api/cli-pty-runner.ts @@ -10,290 +10,34 @@ import * as pty from 'node-pty'; -/** - * Interactive prompt handler configuration - * Defines patterns to detect prompts and responses to send - */ -export interface PromptHandler { - /** Pattern to detect in CLI output (case-insensitive) */ - pattern: RegExp; - /** Response to send (e.g., '\r' for enter, 'y\r' for yes+enter) */ - response: string; - /** Delay before sending response (ms) */ - delay?: number; - /** Description for logging/debugging */ - description: string; -} - -/** - * CLI auth configuration for each provider - */ -export interface CLIAuthConfig { - /** CLI command to run */ - command: string; - /** Arguments to pass */ - args: string[]; - /** Pattern to extract auth URL from output */ - urlPattern: RegExp; - /** Path to credentials file (for reading after auth) */ - credentialPath?: string; - /** Display name for UI */ - displayName: string; - /** Interactive prompts to auto-respond to */ - prompts: PromptHandler[]; - /** Success indicators in output */ - successPatterns: RegExp[]; - /** How long to wait for URL to appear (ms) */ - waitTimeout: number; -} - -/** - * CLI commands and URL patterns for each provider - * - * Each CLI tool outputs an OAuth URL when run without credentials. - * We capture stdout/stderr and extract the URL using a simple https:// pattern. - * - * IMPORTANT: These CLIs are interactive - they output the auth URL then wait - * for the user to complete OAuth in their browser. We capture the URL and - * display it in a popup for the user. - */ -export const CLI_AUTH_CONFIG: Record = { - anthropic: { - command: 'claude', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - credentialPath: '~/.claude/credentials.json', - displayName: 'Claude', - waitTimeout: 5000, - prompts: [ - { - pattern: /dark\s*(mode|theme)/i, - response: '\r', // Press enter to accept default - delay: 100, - description: 'Dark mode prompt', - }, - { - pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, - response: '\r', // Press enter for first option (subscription) - delay: 100, - description: 'Auth method prompt', - }, - { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', // Yes to trust - delay: 100, - description: 'Trust directory prompt', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, - openai: { - command: 'codex', - args: ['login'], - urlPattern: /(https:\/\/[^\s]+)/, - credentialPath: '~/.codex/credentials.json', - displayName: 'Codex', - waitTimeout: 3000, - prompts: [ - { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', - delay: 100, - description: 'Trust directory prompt', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, - google: { - command: 'gemini', - args: [], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'Gemini', - waitTimeout: 5000, - prompts: [ - { - pattern: /login\s*with\s*google|google\s*account|choose.*auth/i, - response: '\r', // Select first option (Login with Google) - delay: 200, - description: 'Auth method selection', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, - opencode: { - command: 'opencode', - args: ['auth', 'login'], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'OpenCode', - waitTimeout: 5000, - prompts: [ - { - pattern: /select.*provider|choose.*provider|which.*provider/i, - response: '\r', // Select first provider - delay: 200, - description: 'Provider selection', - }, - { - pattern: /claude\s*pro|anthropic|select.*auth/i, - response: '\r', // Select first auth option - delay: 200, - description: 'Auth type selection', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, - droid: { - command: 'droid', - args: ['--login'], - urlPattern: /(https:\/\/[^\s]+)/, - displayName: 'Droid', - waitTimeout: 5000, - prompts: [ - { - pattern: /sign\s*in|log\s*in|authenticate/i, - response: '\r', - delay: 200, - description: 'Login prompt', - }, - ], - successPatterns: [ - /success/i, - /authenticated/i, - /logged\s*in/i, - ], - }, +// Import shared config and utilities +import { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs as validateAllConfigs, + getSupportedProviders, + type CLIAuthConfig, + type PromptHandler, +} from '../../shared/cli-auth-config.js'; + +// Re-export everything from shared config for backward compatibility +export { + CLI_AUTH_CONFIG, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + getSupportedProviders, + type CLIAuthConfig, + type PromptHandler, }; -/** - * Strip ANSI escape codes from text - */ -export function stripAnsiCodes(text: string): string { - // eslint-disable-next-line no-control-regex - return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); -} - -/** - * Check if text matches any success pattern - */ -export function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { - const cleanText = stripAnsiCodes(text).toLowerCase(); - return patterns.some(p => p.test(cleanText)); -} - -/** - * Find matching prompt handler for given text - */ -export function findMatchingPrompt( - text: string, - prompts: PromptHandler[], - respondedPrompts: Set -): PromptHandler | null { - const cleanText = stripAnsiCodes(text); - - for (const prompt of prompts) { - // Skip if already responded to this prompt type - if (respondedPrompts.has(prompt.description)) continue; - - if (prompt.pattern.test(cleanText)) { - return prompt; - } - } - - return null; -} - -/** - * Validate a provider's CLI auth configuration - * Returns null if valid, or an error message if invalid - */ -export function validateProviderConfig(providerId: string, config: CLIAuthConfig): string | null { - if (!config.command || typeof config.command !== 'string') { - return `${providerId}: missing or invalid 'command'`; - } - - if (!Array.isArray(config.args)) { - return `${providerId}: 'args' must be an array`; - } - - if (!(config.urlPattern instanceof RegExp)) { - return `${providerId}: 'urlPattern' must be a RegExp`; - } - - // Check urlPattern has a capture group - const testUrl = 'https://example.com/test'; - const match = testUrl.match(config.urlPattern); - if (!match || !match[1]) { - return `${providerId}: 'urlPattern' must have a capture group - got ${config.urlPattern}`; - } - - if (!config.displayName || typeof config.displayName !== 'string') { - return `${providerId}: missing or invalid 'displayName'`; - } - - if (typeof config.waitTimeout !== 'number' || config.waitTimeout <= 0) { - return `${providerId}: 'waitTimeout' must be a positive number`; - } - - if (!Array.isArray(config.prompts)) { - return `${providerId}: 'prompts' must be an array`; - } - - for (let i = 0; i < config.prompts.length; i++) { - const prompt = config.prompts[i]; - if (!(prompt.pattern instanceof RegExp)) { - return `${providerId}: prompt[${i}].pattern must be a RegExp`; - } - if (typeof prompt.response !== 'string') { - return `${providerId}: prompt[${i}].response must be a string`; - } - if (!prompt.description || typeof prompt.description !== 'string') { - return `${providerId}: prompt[${i}].description must be a non-empty string`; - } - } - - if (!Array.isArray(config.successPatterns)) { - return `${providerId}: 'successPatterns' must be an array`; - } - - for (let i = 0; i < config.successPatterns.length; i++) { - if (!(config.successPatterns[i] instanceof RegExp)) { - return `${providerId}: successPatterns[${i}] must be a RegExp`; - } - } - - return null; -} - -/** - * Validate all provider configurations - * Throws an error if any provider is invalid - */ +// Wrapper that throws instead of returning array (backward compatible) export function validateAllProviderConfigs(): void { - const errors: string[] = []; - - for (const [providerId, config] of Object.entries(CLI_AUTH_CONFIG)) { - const error = validateProviderConfig(providerId, config); - if (error) { - errors.push(error); - } - } - + const errors = validateAllConfigs(); if (errors.length > 0) { throw new Error(`Invalid provider configurations:\n${errors.join('\n')}`); } @@ -438,13 +182,3 @@ export async function runCLIAuthViaPTY( }); } -/** - * Get list of supported providers for CLI auth - */ -export function getSupportedProviders(): { id: string; displayName: string; command: string }[] { - return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ - id, - displayName: config.displayName, - command: config.command, - })); -} diff --git a/src/cloud/api/nango-auth.ts b/src/cloud/api/nango-auth.ts index ca4f32b5..8e245263 100644 --- a/src/cloud/api/nango-auth.ts +++ b/src/cloud/api/nango-auth.ts @@ -11,7 +11,6 @@ import { randomUUID } from 'crypto'; import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; import { nangoService, NANGO_INTEGRATIONS } from '../services/nango.js'; -import { getProvisioner } from '../provisioner/index.js'; export const nangoAuthRouter = Router(); @@ -421,10 +420,11 @@ async function handleRepoAuthWebhook( console.log(`[nango-webhook] Synced ${repos.length} repos for ${user.githubUsername} (installation: ${githubInstallationId || 'unknown'})`); - // Auto-provision a workspace if user doesn't have one - if (repos.length > 0) { - await autoProvisionWorkspaceIfNeeded(user.id, user.githubUsername || 'user', repos.map(r => r.full_name)); - } + // Note: We intentionally do NOT auto-provision workspaces here. + // Users should go through the onboarding flow at /app to: + // 1. Name their workspace + // 2. Choose which repos to include + // 3. Understand what they're creating } catch (error: unknown) { const err = error as { message?: string }; @@ -438,41 +438,3 @@ async function handleRepoAuthWebhook( } } -/** - * Auto-provision a workspace for the user if they don't have one - * This is called after repos are connected to provide immediate workspace access - */ -async function autoProvisionWorkspaceIfNeeded( - userId: string, - username: string, - repositories: string[] -): Promise { - try { - // Check if user already has a workspace - const existingWorkspaces = await db.workspaces.findByUserId(userId); - if (existingWorkspaces.length > 0) { - console.log(`[auto-provision] User ${username} already has ${existingWorkspaces.length} workspace(s), skipping auto-provision`); - return; - } - - console.log(`[auto-provision] Starting workspace provision for ${username} with ${repositories.length} repos`); - - const provisioner = getProvisioner(); - const result = await provisioner.provision({ - userId, - name: `${username}'s Workspace`, - providers: [], // No AI providers yet - user can connect them later - repositories, - }); - - if (result.status === 'error') { - console.error(`[auto-provision] Failed to provision workspace for ${username}:`, result.error); - return; - } - - console.log(`[auto-provision] Workspace ${result.workspaceId} provisioned for ${username} (status: ${result.status})`); - } catch (error) { - console.error(`[auto-provision] Error provisioning workspace for ${username}:`, error); - // Non-fatal - user can still manually create a workspace - } -} diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index a8d87e17..af6ee01a 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -41,6 +41,12 @@ import { export const onboardingRouter = Router(); +// Debug: log all requests to this router +onboardingRouter.use((req, res, next) => { + console.log(`[onboarding] ${req.method} ${req.path} - body:`, JSON.stringify(req.body)); + next(); +}); + // All routes require authentication onboardingRouter.use(requireAuth); @@ -56,9 +62,14 @@ interface CLIAuthSession { callbackUrl?: string; status: 'starting' | 'waiting_auth' | 'success' | 'error' | 'timeout'; token?: string; + refreshToken?: string; + tokenExpiresAt?: Date; error?: string; createdAt: Date; output: string; // Accumulated output for debugging + // Workspace delegation fields (set when auth runs in workspace daemon) + workspaceUrl?: string; + workspaceSessionId?: string; } const activeSessions = new Map(); @@ -89,9 +100,11 @@ setInterval(() => { * For onboarding without a workspace, users should use the API key flow. */ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response) => { + console.log('[onboarding] Route handler entered! provider:', req.params.provider); const { provider } = req.params; const userId = req.session.userId!; - const { workspaceId } = req.body; // Optional: specific workspace to use + const { workspaceId, useDeviceFlow } = req.body; // Optional: specific workspace, device flow option + console.log('[onboarding] userId:', userId, 'workspaceId:', workspaceId, 'useDeviceFlow:', useDeviceFlow); const config = CLI_AUTH_CONFIG[provider]; if (!config) { @@ -106,7 +119,12 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response let workspace; if (workspaceId) { workspace = await db.workspaces.findById(workspaceId); - if (!workspace || workspace.userId !== userId) { + if (!workspace) { + console.log(`[onboarding] Workspace ${workspaceId} not found in database`); + return res.status(404).json({ error: 'Workspace not found' }); + } + if (workspace.userId !== userId) { + console.log(`[onboarding] Workspace ${workspaceId} belongs to ${workspace.userId}, not ${userId}`); return res.status(404).json({ error: 'Workspace not found' }); } } else { @@ -125,14 +143,34 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response } // Forward auth request to workspace daemon - const workspaceUrl = workspace.publicUrl.replace(/\/$/, ''); - const authResponse = await fetch(`${workspaceUrl}/auth/cli/${provider}/start`, { + // When running in Docker, localhost refers to the container, not the host + // Use host.docker.internal on Mac/Windows to reach the host machine + let workspaceUrl = workspace.publicUrl.replace(/\/$/, ''); + + // Detect Docker by checking for /.dockerenv file or RUNNING_IN_DOCKER env var + const isInDocker = process.env.RUNNING_IN_DOCKER === 'true' || + await import('fs').then(fs => fs.existsSync('/.dockerenv')).catch(() => false); + + console.log('[onboarding] isInDocker:', isInDocker, 'RUNNING_IN_DOCKER:', process.env.RUNNING_IN_DOCKER); + + if (isInDocker && workspaceUrl.includes('localhost')) { + workspaceUrl = workspaceUrl.replace('localhost', 'host.docker.internal'); + console.log('[onboarding] Translated localhost to host.docker.internal'); + } + const targetUrl = `${workspaceUrl}/auth/cli/${provider}/start`; + console.log('[onboarding] Forwarding to workspace daemon:', targetUrl); + + const authResponse = await fetch(targetUrl, { method: 'POST', headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ useDeviceFlow }), }); + console.log('[onboarding] Workspace daemon response:', authResponse.status); + if (!authResponse.ok) { const errorData = await authResponse.json().catch(() => ({})) as { error?: string }; + console.log('[onboarding] Workspace daemon error:', errorData); return res.status(authResponse.status).json({ error: errorData.error || 'Failed to start CLI auth in workspace', }); @@ -153,13 +191,13 @@ onboardingRouter.post('/cli/:provider/start', async (req: Request, res: Response authUrl: workspaceSession.authUrl, createdAt: new Date(), output: '', + // Store workspace info for status polling and auth code forwarding + workspaceUrl, + workspaceSessionId: workspaceSession.sessionId, }; - // Store workspace info for status polling - (session as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string }).workspaceUrl = workspaceUrl; - (session as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string }).workspaceSessionId = workspaceSession.sessionId; - activeSessions.set(sessionId, session); + console.log('[onboarding] Session created:', { sessionId, workspaceUrl, workspaceSessionId: workspaceSession.sessionId }); res.json({ sessionId, @@ -182,7 +220,7 @@ onboardingRouter.get('/cli/:provider/status/:sessionId', async (req: Request, re const { provider, sessionId } = req.params; const userId = req.session.userId!; - const session = activeSessions.get(sessionId) as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string } | undefined; + const session = activeSessions.get(sessionId); if (!session) { return res.status(404).json({ error: 'Session not found or expired' }); } @@ -229,7 +267,7 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, const userId = req.session.userId!; const { token } = req.body; // Optional: user can paste token directly - const session = activeSessions.get(sessionId) as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string } | undefined; + const session = activeSessions.get(sessionId); if (!session) { return res.status(404).json({ error: 'Session not found or expired' }); } @@ -241,6 +279,8 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, try { // If token provided directly, use it let accessToken = token || session.token; + let refreshToken = session.refreshToken; + let tokenExpiresAt = session.tokenExpiresAt; // If no token yet, try to get from workspace if (!accessToken && session.workspaceUrl && session.workspaceSessionId) { @@ -249,8 +289,16 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, `${session.workspaceUrl}/auth/cli/${provider}/creds/${session.workspaceSessionId}` ); if (credsResponse.ok) { - const creds = await credsResponse.json() as { token?: string }; + const creds = await credsResponse.json() as { + token?: string; + refreshToken?: string; + expiresAt?: string; + }; accessToken = creds.token; + refreshToken = creds.refreshToken; + if (creds.expiresAt) { + tokenExpiresAt = new Date(creds.expiresAt); + } } } catch (err) { console.error('[onboarding] Failed to get credentials from workspace:', err); @@ -263,11 +311,13 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, }); } - // Store in vault + // Store in vault with refresh token and expiry await vault.storeCredential({ userId, provider, accessToken, + refreshToken, + tokenExpiresAt, scopes: getProviderScopes(provider), }); @@ -284,6 +334,84 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, } }); +/** + * POST /api/onboarding/cli/:provider/code/:sessionId + * Submit auth code to the CLI PTY session + * Used when OAuth returns a code that must be pasted into the CLI + */ +onboardingRouter.post('/cli/:provider/code/:sessionId', async (req: Request, res: Response) => { + const { provider, sessionId } = req.params; + const userId = req.session.userId!; + const { code } = req.body; + + console.log('[onboarding] Auth code submission request:', { provider, sessionId, codeLength: code?.length }); + + if (!code || typeof code !== 'string') { + return res.status(400).json({ error: 'Auth code is required' }); + } + + const session = activeSessions.get(sessionId); + if (!session) { + console.log('[onboarding] Session not found:', { sessionId, activeSessions: Array.from(activeSessions.keys()) }); + return res.status(404).json({ error: 'Session not found or expired. Please try connecting again.' }); + } + + if (session.userId !== userId) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + console.log('[onboarding] Session found:', { + sessionId, + workspaceUrl: session.workspaceUrl, + workspaceSessionId: session.workspaceSessionId, + status: session.status, + }); + + // Forward to workspace daemon + if (session.workspaceUrl && session.workspaceSessionId) { + try { + const targetUrl = `${session.workspaceUrl}/auth/cli/${provider}/code/${session.workspaceSessionId}`; + console.log('[onboarding] Forwarding auth code to workspace:', targetUrl); + + const codeResponse = await fetch(targetUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ code }), + }); + + console.log('[onboarding] Workspace response:', { status: codeResponse.status }); + + if (codeResponse.ok) { + return res.json({ success: true, message: 'Auth code submitted' }); + } + + const errorData = await codeResponse.json().catch(() => ({})) as { error?: string }; + console.log('[onboarding] Workspace error:', errorData); + + // Provide more helpful error message + if (codeResponse.status === 404) { + return res.status(404).json({ + error: 'Auth session expired in workspace. The CLI process may have timed out. Please try connecting again.', + }); + } + + return res.status(codeResponse.status).json({ + error: errorData.error || 'Failed to submit auth code to workspace', + }); + } catch (err) { + console.error('[onboarding] Failed to submit auth code to workspace:', err); + return res.status(500).json({ + error: 'Failed to reach workspace. Please ensure your workspace is running and try again.', + }); + } + } + + console.log('[onboarding] No workspace session info available'); + return res.status(400).json({ + error: 'No workspace session available. This can happen if the workspace was restarted. Please try connecting again.', + }); +}); + /** * POST /api/onboarding/cli/:provider/cancel/:sessionId * Cancel a CLI auth session @@ -292,7 +420,7 @@ onboardingRouter.post('/cli/:provider/cancel/:sessionId', async (req: Request, r const { provider, sessionId } = req.params; const userId = req.session.userId!; - const session = activeSessions.get(sessionId) as CLIAuthSession & { workspaceUrl?: string; workspaceSessionId?: string } | undefined; + const session = activeSessions.get(sessionId); if (session?.userId === userId) { // Cancel on workspace side if applicable if (session.workspaceUrl && session.workspaceSessionId) { @@ -424,11 +552,28 @@ async function extractCredentials( // Extract token based on provider structure if (session.provider === 'anthropic') { - // Claude stores: { "oauth_token": "...", ... } or { "api_key": "..." } - session.token = creds.oauth_token || creds.access_token || creds.api_key; + // Claude stores OAuth in: { claudeAiOauth: { accessToken: "...", refreshToken: "...", expiresAt: ... } } + if (creds.claudeAiOauth?.accessToken) { + session.token = creds.claudeAiOauth.accessToken; + session.refreshToken = creds.claudeAiOauth.refreshToken; + if (creds.claudeAiOauth.expiresAt) { + session.tokenExpiresAt = new Date(creds.claudeAiOauth.expiresAt); + } + } else { + // Fallback to legacy formats + session.token = creds.oauth_token || creds.access_token || creds.api_key; + } } else if (session.provider === 'openai') { - // Codex might store: { "token": "..." } or { "api_key": "..." } - session.token = creds.token || creds.access_token || creds.api_key; + // Codex stores OAuth in: { tokens: { access_token: "...", refresh_token: "...", ... } } + if (creds.tokens?.access_token) { + session.token = creds.tokens.access_token; + session.refreshToken = creds.tokens.refresh_token; + // Codex doesn't store expiry in the file, but JWTs have exp claim + // We could decode it, but for now just skip + } else { + // Fallback: API key or legacy formats + session.token = creds.OPENAI_API_KEY || creds.token || creds.access_token || creds.api_key; + } } } catch (error) { // Credentials file doesn't exist or isn't readable yet @@ -451,9 +596,37 @@ function getProviderScopes(provider: string): string[] { /** * Helper: Validate a provider token by making a test API call + * + * Note: OAuth tokens from CLI flows (like `claude` CLI) are different from API keys. + * - API keys: sk-ant-api03-... (can be validated via API) + * - OAuth tokens: Session tokens from OAuth flow (can't be validated the same way) + * + * For OAuth tokens, we accept them if they look valid (non-empty, reasonable length). + * The CLI already validated the OAuth flow, so we trust those tokens. */ async function validateProviderToken(provider: string, token: string): Promise { + // Basic sanity check + if (!token || token.length < 10) { + return false; + } + try { + // Check if this looks like an API key vs OAuth token + const isAnthropicApiKey = token.startsWith('sk-ant-'); + const isOpenAIApiKey = token.startsWith('sk-'); + + // For OAuth tokens (not API keys), accept them without API validation + // The OAuth flow already authenticated the user + if (provider === 'anthropic' && !isAnthropicApiKey) { + console.log('[onboarding] Accepting OAuth token for anthropic (not an API key)'); + return true; + } + if (provider === 'openai' && !isOpenAIApiKey) { + console.log('[onboarding] Accepting OAuth token for openai (not an API key)'); + return true; + } + + // For API keys, validate via API call const endpoints: Record }> = { anthropic: { url: 'https://api.anthropic.com/v1/messages', diff --git a/src/cloud/api/workspaces.ts b/src/cloud/api/workspaces.ts index 8456bfc2..d8baefaf 100644 --- a/src/cloud/api/workspaces.ts +++ b/src/cloud/api/workspaces.ts @@ -264,6 +264,10 @@ workspacesRouter.get('/:id', async (req: Request, res: Response) => { computeProvider: workspace.computeProvider, config: workspace.config, errorMessage: workspace.errorMessage, + // SSH access for port forwarding (e.g., Codex OAuth) + sshHost: workspace.sshHost, + sshPort: workspace.sshPort, + sshPassword: workspace.sshPassword, repositories: repositories.map((r) => ({ id: r.id, fullName: r.githubFullName, @@ -660,103 +664,6 @@ async function removeDomainFromCompute(workspace: Workspace): Promise { // Railway and Docker: similar cleanup } -/** - * POST /api/workspaces/:id/connect-provider - * Trigger CLI login flow for a provider (claude, codex, opencode, droid) - * Returns the OAuth URL for the user to complete authentication - */ -const PROVIDER_CLI_COMMANDS: Record = { - anthropic: { command: 'claude', displayName: 'Claude' }, - codex: { command: 'codex login', displayName: 'Codex' }, - opencode: { command: 'opencode', displayName: 'OpenCode' }, - droid: { command: 'droid', displayName: 'Droid' }, -}; - -workspacesRouter.post('/:id/connect-provider', async (req: Request, res: Response) => { - const userId = req.session.userId!; - const { id } = req.params; - const { provider } = req.body; - - const providerConfig = PROVIDER_CLI_COMMANDS[provider]; - if (!provider || !providerConfig) { - return res.status(400).json({ - error: 'Valid provider is required', - validProviders: Object.keys(PROVIDER_CLI_COMMANDS), - }); - } - - try { - const workspace = await db.workspaces.findById(id); - - if (!workspace) { - return res.status(404).json({ error: 'Workspace not found' }); - } - - if (workspace.userId !== userId) { - return res.status(403).json({ error: 'Unauthorized' }); - } - - if (workspace.status !== 'running') { - return res.status(400).json({ error: 'Workspace must be running to connect providers' }); - } - - const containerName = workspace.computeId; - - if (!containerName) { - return res.status(400).json({ error: 'Workspace has no compute instance' }); - } - - // Run the CLI login command in the container and capture output - const { execSync } = await import('child_process'); - - try { - // For Docker containers, run the command and capture the OAuth URL - // The CLI typically outputs something like: - // "Please visit https://... to authenticate" - const output = execSync( - `docker exec ${containerName} timeout 10 ${providerConfig.command} 2>&1 || true`, - { encoding: 'utf-8', timeout: 15000 } - ); - - // Parse OAuth URL from output - const urlMatch = output.match(/https:\/\/[^\s]+/); - - if (urlMatch) { - res.json({ - success: true, - provider, - authUrl: urlMatch[0], - message: `Visit the URL to authenticate with ${providerConfig.displayName}`, - instructions: [ - '1. Click the authentication URL below', - '2. Complete the login in your browser', - '3. Return here - your workspace will automatically detect the credentials', - ], - }); - } else { - // CLI might already be authenticated or returned different output - res.json({ - success: false, - provider, - output: output.substring(0, 500), // First 500 chars for debugging - message: 'Could not extract authentication URL. The provider may already be connected.', - }); - } - } catch (execError) { - const errorMsg = execError instanceof Error ? execError.message : 'Unknown error'; - console.error(`[workspace] CLI login error for ${provider}:`, errorMsg); - - res.status(500).json({ - error: 'Failed to start authentication flow', - details: errorMsg, - }); - } - } catch (error) { - console.error('Error connecting provider:', error); - res.status(500).json({ error: 'Failed to connect provider' }); - } -}); - /** * POST /api/workspaces/:id/proxy/* * Proxy API requests to the workspace container @@ -781,8 +688,20 @@ workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Respon return res.status(400).json({ error: 'Workspace is not running' }); } - // Forward the request to the workspace - const targetUrl = `${workspace.publicUrl}/api/${proxyPath}`; + // Determine the internal URL for proxying + // When running inside Docker, localhost URLs won't work - use the container name instead + let targetBaseUrl = workspace.publicUrl; + const runningInDocker = process.env.RUNNING_IN_DOCKER === 'true'; + + if (runningInDocker && workspace.computeId && targetBaseUrl.includes('localhost')) { + // Replace localhost URL with container name for Docker networking + // workspace.computeId is the container name (e.g., "ar-abc12345") + // The workspace port is 3888 inside the container + targetBaseUrl = `http://${workspace.computeId}:3888`; + } + + const targetUrl = `${targetBaseUrl}/api/${proxyPath}`; + console.log(`[workspace-proxy] ${req.method} ${targetUrl}`); const fetchOptions: RequestInit = { method: req.method, @@ -796,12 +715,22 @@ workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Respon } const proxyRes = await fetch(targetUrl, fetchOptions); - const data = await proxyRes.json(); - res.status(proxyRes.status).json(data); + // Handle non-JSON responses gracefully + const contentType = proxyRes.headers.get('content-type'); + if (contentType?.includes('application/json')) { + const data = await proxyRes.json(); + res.status(proxyRes.status).json(data); + } else { + const text = await proxyRes.text(); + res.status(proxyRes.status).send(text); + } } catch (error) { console.error('[workspace-proxy] Error:', error); - res.status(500).json({ error: 'Failed to proxy request to workspace' }); + res.status(500).json({ + error: 'Failed to proxy request to workspace', + details: error instanceof Error ? error.message : 'Unknown error' + }); } }); diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index c82e58ac..254b6315 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -380,7 +380,14 @@ export interface WorkspaceQueries { updateStatus( id: string, status: string, - options?: { computeId?: string; publicUrl?: string; errorMessage?: string } + options?: { + computeId?: string; + publicUrl?: string; + errorMessage?: string; + sshHost?: string; + sshPort?: number; + sshPassword?: string; + } ): Promise; updateConfig(id: string, config: schema.WorkspaceConfig): Promise; setCustomDomain(id: string, customDomain: string, status?: string): Promise; @@ -423,7 +430,14 @@ export const workspaceQueries: WorkspaceQueries = { async updateStatus( id: string, status: string, - options?: { computeId?: string; publicUrl?: string; errorMessage?: string } + options?: { + computeId?: string; + publicUrl?: string; + errorMessage?: string; + sshHost?: string; + sshPort?: number; + sshPassword?: string; + } ): Promise { const db = getDb(); await db @@ -433,6 +447,9 @@ export const workspaceQueries: WorkspaceQueries = { computeId: options?.computeId, publicUrl: options?.publicUrl, errorMessage: options?.errorMessage, + sshHost: options?.sshHost, + sshPort: options?.sshPort, + sshPassword: options?.sshPassword, updatedAt: new Date(), }) .where(eq(schema.workspaces.id, id)); diff --git a/src/cloud/db/migrations/0006_workspace_ssh.sql b/src/cloud/db/migrations/0006_workspace_ssh.sql new file mode 100644 index 00000000..347269ca --- /dev/null +++ b/src/cloud/db/migrations/0006_workspace_ssh.sql @@ -0,0 +1,6 @@ +-- Add SSH access columns to workspaces for port forwarding (e.g., Codex OAuth callback tunneling) +ALTER TABLE "workspaces" ADD COLUMN IF NOT EXISTS "ssh_host" varchar(255); +--> statement-breakpoint +ALTER TABLE "workspaces" ADD COLUMN IF NOT EXISTS "ssh_port" integer; +--> statement-breakpoint +ALTER TABLE "workspaces" ADD COLUMN IF NOT EXISTS "ssh_password" varchar(255); diff --git a/src/cloud/db/migrations/meta/0005_snapshot.json b/src/cloud/db/migrations/meta/0005_snapshot.json new file mode 100644 index 00000000..41e109f0 --- /dev/null +++ b/src/cloud/db/migrations/meta/0005_snapshot.json @@ -0,0 +1,2965 @@ +{ + "id": "3c5a4f23-1159-482b-8920-9147e12ced8f", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.agent_crashes": { + "name": "agent_crashes", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "daemon_id": { + "name": "daemon_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "pid": { + "name": "pid", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "exit_code": { + "name": "exit_code", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "signal": { + "name": "signal", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "reason": { + "name": "reason", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "likely_cause": { + "name": "likely_cause", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "confidence": { + "name": "confidence", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "summary": { + "name": "summary", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "peak_memory": { + "name": "peak_memory", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "last_known_memory": { + "name": "last_known_memory", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "memory_trend": { + "name": "memory_trend", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "insight_data": { + "name": "insight_data", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "last_output": { + "name": "last_output", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "crashed_at": { + "name": "crashed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_agent_crashes_daemon_id": { + "name": "idx_agent_crashes_daemon_id", + "columns": [ + { + "expression": "daemon_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_crashes_agent_name": { + "name": "idx_agent_crashes_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_crashes_crashed_at": { + "name": "idx_agent_crashes_crashed_at", + "columns": [ + { + "expression": "crashed_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_crashes_likely_cause": { + "name": "idx_agent_crashes_likely_cause", + "columns": [ + { + "expression": "likely_cause", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_crashes_daemon_id_linked_daemons_id_fk": { + "name": "agent_crashes_daemon_id_linked_daemons_id_fk", + "tableFrom": "agent_crashes", + "tableTo": "linked_daemons", + "columnsFrom": [ + "daemon_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.agent_metrics": { + "name": "agent_metrics", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "daemon_id": { + "name": "daemon_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "pid": { + "name": "pid", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'unknown'" + }, + "rss_bytes": { + "name": "rss_bytes", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "heap_used_bytes": { + "name": "heap_used_bytes", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "cpu_percent": { + "name": "cpu_percent", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "trend": { + "name": "trend", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "trend_rate_per_minute": { + "name": "trend_rate_per_minute", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "alert_level": { + "name": "alert_level", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false, + "default": "'normal'" + }, + "high_watermark": { + "name": "high_watermark", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "average_rss": { + "name": "average_rss", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "metrics_data": { + "name": "metrics_data", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "uptime_ms": { + "name": "uptime_ms", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "started_at": { + "name": "started_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "recorded_at": { + "name": "recorded_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_agent_metrics_daemon_id": { + "name": "idx_agent_metrics_daemon_id", + "columns": [ + { + "expression": "daemon_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_metrics_agent_name": { + "name": "idx_agent_metrics_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_metrics_recorded_at": { + "name": "idx_agent_metrics_recorded_at", + "columns": [ + { + "expression": "recorded_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_metrics_alert_level": { + "name": "idx_agent_metrics_alert_level", + "columns": [ + { + "expression": "alert_level", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_metrics_daemon_id_linked_daemons_id_fk": { + "name": "agent_metrics_daemon_id_linked_daemons_id_fk", + "tableFrom": "agent_metrics", + "tableTo": "linked_daemons", + "columnsFrom": [ + "daemon_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.agent_sessions": { + "name": "agent_sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "started_at": { + "name": "started_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "ended_at": { + "name": "ended_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "end_marker": { + "name": "end_marker", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + } + }, + "indexes": { + "idx_agent_sessions_workspace_id": { + "name": "idx_agent_sessions_workspace_id", + "columns": [ + { + "expression": "workspace_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_sessions_agent_name": { + "name": "idx_agent_sessions_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_sessions_status": { + "name": "idx_agent_sessions_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_sessions_workspace_id_workspaces_id_fk": { + "name": "agent_sessions_workspace_id_workspaces_id_fk", + "tableFrom": "agent_sessions", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.agent_summaries": { + "name": "agent_summaries", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "session_id": { + "name": "session_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "summary": { + "name": "summary", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_agent_summaries_session_id": { + "name": "idx_agent_summaries_session_id", + "columns": [ + { + "expression": "session_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_agent_summaries_agent_name": { + "name": "idx_agent_summaries_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "agent_summaries_session_id_agent_sessions_id_fk": { + "name": "agent_summaries_session_id_agent_sessions_id_fk", + "tableFrom": "agent_summaries", + "tableTo": "agent_sessions", + "columnsFrom": [ + "session_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.ci_failure_events": { + "name": "ci_failure_events", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "repository_id": { + "name": "repository_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "repository": { + "name": "repository", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "pr_number": { + "name": "pr_number", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "branch": { + "name": "branch", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "commit_sha": { + "name": "commit_sha", + "type": "varchar(40)", + "primaryKey": false, + "notNull": false + }, + "check_name": { + "name": "check_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "check_id": { + "name": "check_id", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "conclusion": { + "name": "conclusion", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "failure_title": { + "name": "failure_title", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "failure_summary": { + "name": "failure_summary", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "failure_details": { + "name": "failure_details", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "annotations": { + "name": "annotations", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'[]'::jsonb" + }, + "workflow_name": { + "name": "workflow_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "workflow_run_id": { + "name": "workflow_run_id", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "processed_at": { + "name": "processed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "agent_spawned": { + "name": "agent_spawned", + "type": "boolean", + "primaryKey": false, + "notNull": false, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_ci_failure_events_repository": { + "name": "idx_ci_failure_events_repository", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_pr_number": { + "name": "idx_ci_failure_events_pr_number", + "columns": [ + { + "expression": "pr_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_check_name": { + "name": "idx_ci_failure_events_check_name", + "columns": [ + { + "expression": "check_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_created_at": { + "name": "idx_ci_failure_events_created_at", + "columns": [ + { + "expression": "created_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_failure_events_repo_pr": { + "name": "idx_ci_failure_events_repo_pr", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "pr_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "ci_failure_events_repository_id_repositories_id_fk": { + "name": "ci_failure_events_repository_id_repositories_id_fk", + "tableFrom": "ci_failure_events", + "tableTo": "repositories", + "columnsFrom": [ + "repository_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.ci_fix_attempts": { + "name": "ci_fix_attempts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "failure_event_id": { + "name": "failure_event_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_id": { + "name": "agent_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "commit_sha": { + "name": "commit_sha", + "type": "varchar(40)", + "primaryKey": false, + "notNull": false + }, + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "started_at": { + "name": "started_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "completed_at": { + "name": "completed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "idx_ci_fix_attempts_failure_event": { + "name": "idx_ci_fix_attempts_failure_event", + "columns": [ + { + "expression": "failure_event_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_fix_attempts_status": { + "name": "idx_ci_fix_attempts_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_ci_fix_attempts_agent_id": { + "name": "idx_ci_fix_attempts_agent_id", + "columns": [ + { + "expression": "agent_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "ci_fix_attempts_failure_event_id_ci_failure_events_id_fk": { + "name": "ci_fix_attempts_failure_event_id_ci_failure_events_id_fk", + "tableFrom": "ci_fix_attempts", + "tableTo": "ci_failure_events", + "columnsFrom": [ + "failure_event_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.comment_mentions": { + "name": "comment_mentions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "repository_id": { + "name": "repository_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "repository": { + "name": "repository", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "source_type": { + "name": "source_type", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "source_id": { + "name": "source_id", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "issue_or_pr_number": { + "name": "issue_or_pr_number", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "comment_body": { + "name": "comment_body", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "comment_url": { + "name": "comment_url", + "type": "varchar(512)", + "primaryKey": false, + "notNull": false + }, + "author_login": { + "name": "author_login", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "author_id": { + "name": "author_id", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "mentioned_agent": { + "name": "mentioned_agent", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "mention_context": { + "name": "mention_context", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "agent_id": { + "name": "agent_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "response_comment_id": { + "name": "response_comment_id", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "response_body": { + "name": "response_body", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "responded_at": { + "name": "responded_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_comment_mentions_repository": { + "name": "idx_comment_mentions_repository", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_comment_mentions_source": { + "name": "idx_comment_mentions_source", + "columns": [ + { + "expression": "source_type", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "source_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_comment_mentions_status": { + "name": "idx_comment_mentions_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_comment_mentions_mentioned_agent": { + "name": "idx_comment_mentions_mentioned_agent", + "columns": [ + { + "expression": "mentioned_agent", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "comment_mentions_repository_id_repositories_id_fk": { + "name": "comment_mentions_repository_id_repositories_id_fk", + "tableFrom": "comment_mentions", + "tableTo": "repositories", + "columnsFrom": [ + "repository_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.credentials": { + "name": "credentials", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "provider": { + "name": "provider", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "access_token": { + "name": "access_token", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "refresh_token": { + "name": "refresh_token", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "token_expires_at": { + "name": "token_expires_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "scopes": { + "name": "scopes", + "type": "text[]", + "primaryKey": false, + "notNull": false + }, + "provider_account_id": { + "name": "provider_account_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "provider_account_email": { + "name": "provider_account_email", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_credentials_user_id": { + "name": "idx_credentials_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "credentials_user_id_users_id_fk": { + "name": "credentials_user_id_users_id_fk", + "tableFrom": "credentials", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "credentials_user_provider_unique": { + "name": "credentials_user_provider_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "provider" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.github_installations": { + "name": "github_installations", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "installation_id": { + "name": "installation_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "account_type": { + "name": "account_type", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "account_login": { + "name": "account_login", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "account_id": { + "name": "account_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "installed_by_id": { + "name": "installed_by_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'{}'::jsonb" + }, + "events": { + "name": "events", + "type": "text[]", + "primaryKey": false, + "notNull": false + }, + "suspended": { + "name": "suspended", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "suspended_at": { + "name": "suspended_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "suspended_by": { + "name": "suspended_by", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_github_installations_account_login": { + "name": "idx_github_installations_account_login", + "columns": [ + { + "expression": "account_login", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_github_installations_installed_by": { + "name": "idx_github_installations_installed_by", + "columns": [ + { + "expression": "installed_by_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "github_installations_installed_by_id_users_id_fk": { + "name": "github_installations_installed_by_id_users_id_fk", + "tableFrom": "github_installations", + "tableTo": "users", + "columnsFrom": [ + "installed_by_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "github_installations_installation_id_unique": { + "name": "github_installations_installation_id_unique", + "nullsNotDistinct": false, + "columns": [ + "installation_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.issue_assignments": { + "name": "issue_assignments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "repository_id": { + "name": "repository_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "repository": { + "name": "repository", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "issue_number": { + "name": "issue_number", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "issue_title": { + "name": "issue_title", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "issue_body": { + "name": "issue_body", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "issue_url": { + "name": "issue_url", + "type": "varchar(512)", + "primaryKey": false, + "notNull": false + }, + "agent_id": { + "name": "agent_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "assigned_at": { + "name": "assigned_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "resolution": { + "name": "resolution", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "linked_pr_number": { + "name": "linked_pr_number", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "labels": { + "name": "labels", + "type": "text[]", + "primaryKey": false, + "notNull": false + }, + "priority": { + "name": "priority", + "type": "varchar(20)", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_issue_assignments_repository": { + "name": "idx_issue_assignments_repository", + "columns": [ + { + "expression": "repository", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_issue_assignments_issue_number": { + "name": "idx_issue_assignments_issue_number", + "columns": [ + { + "expression": "issue_number", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_issue_assignments_status": { + "name": "idx_issue_assignments_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_issue_assignments_agent_id": { + "name": "idx_issue_assignments_agent_id", + "columns": [ + { + "expression": "agent_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "issue_assignments_repository_id_repositories_id_fk": { + "name": "issue_assignments_repository_id_repositories_id_fk", + "tableFrom": "issue_assignments", + "tableTo": "repositories", + "columnsFrom": [ + "repository_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "issue_assignments_repo_issue_unique": { + "name": "issue_assignments_repo_issue_unique", + "nullsNotDistinct": false, + "columns": [ + "repository", + "issue_number" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.linked_daemons": { + "name": "linked_daemons", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "machine_id": { + "name": "machine_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "api_key_hash": { + "name": "api_key_hash", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'offline'" + }, + "last_seen_at": { + "name": "last_seen_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "pending_updates": { + "name": "pending_updates", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + }, + "message_queue": { + "name": "message_queue", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_linked_daemons_user_id": { + "name": "idx_linked_daemons_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_linked_daemons_api_key_hash": { + "name": "idx_linked_daemons_api_key_hash", + "columns": [ + { + "expression": "api_key_hash", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_linked_daemons_status": { + "name": "idx_linked_daemons_status", + "columns": [ + { + "expression": "status", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "linked_daemons_user_id_users_id_fk": { + "name": "linked_daemons_user_id_users_id_fk", + "tableFrom": "linked_daemons", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "linked_daemons_user_machine_unique": { + "name": "linked_daemons_user_machine_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "machine_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.memory_alerts": { + "name": "memory_alerts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "daemon_id": { + "name": "daemon_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "agent_name": { + "name": "agent_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alert_type": { + "name": "alert_type", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "current_rss": { + "name": "current_rss", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "threshold": { + "name": "threshold", + "type": "bigint", + "primaryKey": false, + "notNull": false + }, + "message": { + "name": "message", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "recommendation": { + "name": "recommendation", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "acknowledged": { + "name": "acknowledged", + "type": "boolean", + "primaryKey": false, + "notNull": false, + "default": false + }, + "acknowledged_at": { + "name": "acknowledged_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_memory_alerts_daemon_id": { + "name": "idx_memory_alerts_daemon_id", + "columns": [ + { + "expression": "daemon_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_memory_alerts_agent_name": { + "name": "idx_memory_alerts_agent_name", + "columns": [ + { + "expression": "agent_name", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_memory_alerts_alert_type": { + "name": "idx_memory_alerts_alert_type", + "columns": [ + { + "expression": "alert_type", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_memory_alerts_created_at": { + "name": "idx_memory_alerts_created_at", + "columns": [ + { + "expression": "created_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "memory_alerts_daemon_id_linked_daemons_id_fk": { + "name": "memory_alerts_daemon_id_linked_daemons_id_fk", + "tableFrom": "memory_alerts", + "tableTo": "linked_daemons", + "columnsFrom": [ + "daemon_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.project_groups": { + "name": "project_groups", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "color": { + "name": "color", + "type": "varchar(7)", + "primaryKey": false, + "notNull": false + }, + "icon": { + "name": "icon", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "coordinator_agent": { + "name": "coordinator_agent", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'{\"enabled\":false}'::jsonb" + }, + "sort_order": { + "name": "sort_order", + "type": "bigint", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_project_groups_user_id": { + "name": "idx_project_groups_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "project_groups_user_id_users_id_fk": { + "name": "project_groups_user_id_users_id_fk", + "tableFrom": "project_groups", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "project_groups_user_name_unique": { + "name": "project_groups_user_name_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "name" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.repositories": { + "name": "repositories", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "project_group_id": { + "name": "project_group_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "installation_id": { + "name": "installation_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "nango_connection_id": { + "name": "nango_connection_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "github_full_name": { + "name": "github_full_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "github_id": { + "name": "github_id", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "default_branch": { + "name": "default_branch", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true, + "default": "'main'" + }, + "is_private": { + "name": "is_private", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "sync_status": { + "name": "sync_status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'pending'" + }, + "last_synced_at": { + "name": "last_synced_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "project_agent": { + "name": "project_agent", + "type": "jsonb", + "primaryKey": false, + "notNull": false, + "default": "'{\"enabled\":false}'::jsonb" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_repositories_user_id": { + "name": "idx_repositories_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_workspace_id": { + "name": "idx_repositories_workspace_id", + "columns": [ + { + "expression": "workspace_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_project_group_id": { + "name": "idx_repositories_project_group_id", + "columns": [ + { + "expression": "project_group_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_installation_id": { + "name": "idx_repositories_installation_id", + "columns": [ + { + "expression": "installation_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_repositories_nango_connection": { + "name": "idx_repositories_nango_connection", + "columns": [ + { + "expression": "nango_connection_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "repositories_user_id_users_id_fk": { + "name": "repositories_user_id_users_id_fk", + "tableFrom": "repositories", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "repositories_workspace_id_workspaces_id_fk": { + "name": "repositories_workspace_id_workspaces_id_fk", + "tableFrom": "repositories", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "repositories_project_group_id_project_groups_id_fk": { + "name": "repositories_project_group_id_project_groups_id_fk", + "tableFrom": "repositories", + "tableTo": "project_groups", + "columnsFrom": [ + "project_group_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "repositories_installation_id_github_installations_id_fk": { + "name": "repositories_installation_id_github_installations_id_fk", + "tableFrom": "repositories", + "tableTo": "github_installations", + "columnsFrom": [ + "installation_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "repositories_user_github_unique": { + "name": "repositories_user_github_unique", + "nullsNotDistinct": false, + "columns": [ + "user_id", + "github_full_name" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.subscriptions": { + "name": "subscriptions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "stripe_subscription_id": { + "name": "stripe_subscription_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "stripe_customer_id": { + "name": "stripe_customer_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "plan": { + "name": "plan", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "current_period_start": { + "name": "current_period_start", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "current_period_end": { + "name": "current_period_end", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "subscriptions_user_id_users_id_fk": { + "name": "subscriptions_user_id_users_id_fk", + "tableFrom": "subscriptions", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "subscriptions_stripe_subscription_id_unique": { + "name": "subscriptions_stripe_subscription_id_unique", + "nullsNotDistinct": false, + "columns": [ + "stripe_subscription_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.usage_records": { + "name": "usage_records", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "metric": { + "name": "metric", + "type": "varchar(100)", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "bigint", + "primaryKey": false, + "notNull": true + }, + "recorded_at": { + "name": "recorded_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_usage_records_user_id": { + "name": "idx_usage_records_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_usage_records_recorded_at": { + "name": "idx_usage_records_recorded_at", + "columns": [ + { + "expression": "recorded_at", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "usage_records_user_id_users_id_fk": { + "name": "usage_records_user_id_users_id_fk", + "tableFrom": "usage_records", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "usage_records_workspace_id_workspaces_id_fk": { + "name": "usage_records_workspace_id_workspaces_id_fk", + "tableFrom": "usage_records", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "github_id": { + "name": "github_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "github_username": { + "name": "github_username", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "email": { + "name": "email", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "avatar_url": { + "name": "avatar_url", + "type": "varchar(512)", + "primaryKey": false, + "notNull": false + }, + "plan": { + "name": "plan", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'free'" + }, + "nango_connection_id": { + "name": "nango_connection_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "incoming_connection_id": { + "name": "incoming_connection_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "pending_installation_request": { + "name": "pending_installation_request", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "onboarding_completed_at": { + "name": "onboarding_completed_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_users_nango_connection": { + "name": "idx_users_nango_connection", + "columns": [ + { + "expression": "nango_connection_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_users_incoming_connection": { + "name": "idx_users_incoming_connection", + "columns": [ + { + "expression": "incoming_connection_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_github_id_unique": { + "name": "users_github_id_unique", + "nullsNotDistinct": false, + "columns": [ + "github_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.workspace_members": { + "name": "workspace_members", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "workspace_id": { + "name": "workspace_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "role": { + "name": "role", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'member'" + }, + "invited_by": { + "name": "invited_by", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "invited_at": { + "name": "invited_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "accepted_at": { + "name": "accepted_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": { + "idx_workspace_members_workspace_id": { + "name": "idx_workspace_members_workspace_id", + "columns": [ + { + "expression": "workspace_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_workspace_members_user_id": { + "name": "idx_workspace_members_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "workspace_members_workspace_id_workspaces_id_fk": { + "name": "workspace_members_workspace_id_workspaces_id_fk", + "tableFrom": "workspace_members", + "tableTo": "workspaces", + "columnsFrom": [ + "workspace_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "workspace_members_user_id_users_id_fk": { + "name": "workspace_members_user_id_users_id_fk", + "tableFrom": "workspace_members", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "workspace_members_invited_by_users_id_fk": { + "name": "workspace_members_invited_by_users_id_fk", + "tableFrom": "workspace_members", + "tableTo": "users", + "columnsFrom": [ + "invited_by" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "workspace_members_workspace_user_unique": { + "name": "workspace_members_workspace_user_unique", + "nullsNotDistinct": false, + "columns": [ + "workspace_id", + "user_id" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.workspaces": { + "name": "workspaces", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true, + "default": "'provisioning'" + }, + "compute_provider": { + "name": "compute_provider", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "compute_id": { + "name": "compute_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "public_url": { + "name": "public_url", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "custom_domain": { + "name": "custom_domain", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "custom_domain_status": { + "name": "custom_domain_status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": false + }, + "ssh_host": { + "name": "ssh_host", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "ssh_port": { + "name": "ssh_port", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "ssh_password": { + "name": "ssh_password", + "type": "varchar(255)", + "primaryKey": false, + "notNull": false + }, + "config": { + "name": "config", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "idx_workspaces_user_id": { + "name": "idx_workspaces_user_id", + "columns": [ + { + "expression": "user_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "idx_workspaces_custom_domain": { + "name": "idx_workspaces_custom_domain", + "columns": [ + { + "expression": "custom_domain", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "workspaces_user_id_users_id_fk": { + "name": "workspaces_user_id_users_id_fk", + "tableFrom": "workspaces", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/src/cloud/db/migrations/meta/_journal.json b/src/cloud/db/migrations/meta/_journal.json index bf418cfc..25480972 100644 --- a/src/cloud/db/migrations/meta/_journal.json +++ b/src/cloud/db/migrations/meta/_journal.json @@ -36,6 +36,13 @@ "when": 1736035200000, "tag": "0005_github_installations", "breakpoints": true + }, + { + "idx": 5, + "version": "5", + "when": 1736121600000, + "tag": "0006_workspace_ssh", + "breakpoints": true } ] -} +} \ No newline at end of file diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index 6c8fe0cc..e707ab0c 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -14,6 +14,7 @@ import { timestamp, boolean, bigint, + integer, jsonb, unique, index, @@ -173,6 +174,10 @@ export const workspaces = pgTable('workspaces', { publicUrl: varchar('public_url', { length: 255 }), customDomain: varchar('custom_domain', { length: 255 }), customDomainStatus: varchar('custom_domain_status', { length: 50 }), + // SSH access for port forwarding (e.g., Codex OAuth callback tunneling) + sshHost: varchar('ssh_host', { length: 255 }), + sshPort: integer('ssh_port'), + sshPassword: varchar('ssh_password', { length: 255 }), config: jsonb('config').$type().notNull().default({}), errorMessage: text('error_message'), createdAt: timestamp('created_at').defaultNow().notNull(), diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index ba0e3296..9bbe6138 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -11,9 +11,17 @@ import { vault } from '../vault/index.js'; import { nangoService } from '../services/nango.js'; const WORKSPACE_PORT = 3888; +const SSH_PORT = 2222; const FETCH_TIMEOUT_MS = 10_000; const WORKSPACE_IMAGE = process.env.WORKSPACE_IMAGE || 'ghcr.io/agentworkforce/relay-workspace:latest'; +/** + * Generate a random password for SSH access + */ +function generateSSHPassword(): string { + return crypto.randomBytes(16).toString('base64').replace(/[/+=]/g, '').substring(0, 16); +} + /** * Get a fresh GitHub App installation token from Nango. * Looks up the user's connected repositories to find a valid Nango connection. @@ -145,6 +153,9 @@ interface ComputeProvisioner { provision(workspace: Workspace, credentials: Map): Promise<{ computeId: string; publicUrl: string; + sshHost?: string; + sshPort?: number; + sshPassword?: string; }>; deprovision(workspace: Workspace): Promise; getStatus(workspace: Workspace): Promise; @@ -198,8 +209,9 @@ class FlyProvisioner implements ComputeProvisioner { async provision( workspace: Workspace, credentials: Map - ): Promise<{ computeId: string; publicUrl: string }> { + ): Promise<{ computeId: string; publicUrl: string; sshHost?: string; sshPort?: number; sshPassword?: string }> { const appName = `ar-${workspace.id.substring(0, 8)}`; + const sshPassword = generateSSHPassword(); // Create Fly app await fetchWithRetry('https://api.machines.dev/v1/apps', { @@ -214,8 +226,10 @@ class FlyProvisioner implements ComputeProvisioner { }), }); - // Set secrets (credentials) - const secrets: Record = {}; + // Set secrets (credentials + SSH password) + const secrets: Record = { + SSH_PASSWORD: sshPassword, + }; for (const [provider, token] of credentials) { secrets[`${provider.toUpperCase()}_TOKEN`] = token; } @@ -239,6 +253,7 @@ class FlyProvisioner implements ComputeProvisioner { } // Create machine with auto-stop/start for cost optimization + // SSH enabled for port forwarding (e.g., Codex OAuth) const machineResponse = await fetchWithRetry( `https://api.machines.dev/v1/apps/${appName}/machines`, { @@ -262,6 +277,8 @@ class FlyProvisioner implements ComputeProvisioner { // Git gateway configuration CLOUD_API_URL: this.cloudApiUrl, WORKSPACE_TOKEN: this.generateWorkspaceToken(workspace.id), + // SSH for port forwarding (Codex OAuth, etc.) + ENABLE_SSH: 'true', }, services: [ { @@ -276,6 +293,12 @@ class FlyProvisioner implements ComputeProvisioner { auto_start_machines: true, min_machines_running: 0, }, + { + // SSH for port forwarding + ports: [{ port: SSH_PORT, handlers: [] }], + protocol: 'tcp', + internal_port: SSH_PORT, + }, ], guest: { cpu_kind: 'shared', @@ -299,11 +322,16 @@ class FlyProvisioner implements ComputeProvisioner { ? `https://${customHostname}` : `https://${appName}.fly.dev`; + const sshHost = customHostname || `${appName}.fly.dev`; + await softHealthCheck(publicUrl); return { computeId: machine.id, publicUrl, + sshHost, + sshPort: SSH_PORT, + sshPassword, }; } @@ -775,11 +803,42 @@ class DockerProvisioner implements ComputeProvisioner { .digest('hex'); } + /** + * Wait for container to be healthy by polling the health endpoint + */ + private async waitForHealthy(publicUrl: string, timeoutMs: number = 60_000): Promise { + const startTime = Date.now(); + const pollInterval = 2000; + + console.log(`[docker] Waiting for container to be healthy at ${publicUrl}...`); + + while (Date.now() - startTime < timeoutMs) { + try { + const response = await fetch(`${publicUrl}/health`, { + method: 'GET', + signal: AbortSignal.timeout(5000), + }); + + if (response.ok) { + console.log(`[docker] Container healthy after ${Date.now() - startTime}ms`); + return; + } + } catch { + // Container not ready yet, continue polling + } + + await wait(pollInterval); + } + + throw new Error(`Container did not become healthy within ${timeoutMs}ms`); + } + async provision( workspace: Workspace, credentials: Map - ): Promise<{ computeId: string; publicUrl: string }> { + ): Promise<{ computeId: string; publicUrl: string; sshHost?: string; sshPort?: number; sshPassword?: string }> { const containerName = `ar-${workspace.id.substring(0, 8)}`; + const sshPassword = generateSSHPassword(); // Build environment variables const envArgs: string[] = [ @@ -792,27 +851,54 @@ class DockerProvisioner implements ComputeProvisioner { `-e AGENT_RELAY_DASHBOARD_PORT=${WORKSPACE_PORT}`, `-e CLOUD_API_URL=${this.cloudApiUrl}`, `-e WORKSPACE_TOKEN=${this.generateWorkspaceToken(workspace.id)}`, + // SSH for port forwarding (Codex OAuth, etc.) + `-e ENABLE_SSH=true`, + `-e SSH_PASSWORD=${sshPassword}`, ]; for (const [provider, token] of credentials) { envArgs.push(`-e ${provider.toUpperCase()}_TOKEN=${token}`); } - // Run container + // Run container with SSH port exposed const { execSync } = await import('child_process'); const hostPort = 3000 + Math.floor(Math.random() * 1000); + const sshHostPort = 2200 + Math.floor(Math.random() * 100); + + // When running in Docker, connect to the same network for container-to-container communication + const runningInDocker = process.env.RUNNING_IN_DOCKER === 'true'; + const networkArg = runningInDocker ? '--network agent-relay-dev' : ''; try { execSync( - `docker run -d --name ${containerName} -p ${hostPort}:${WORKSPACE_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, + `docker run -d --user root --name ${containerName} ${networkArg} -p ${hostPort}:${WORKSPACE_PORT} -p ${sshHostPort}:${SSH_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, { stdio: 'pipe' } ); + const publicUrl = `http://localhost:${hostPort}`; + + // Wait for container to be healthy before returning + // When running in Docker, use the internal container name for health check + const healthCheckUrl = runningInDocker + ? `http://${containerName}:${WORKSPACE_PORT}` + : publicUrl; + await this.waitForHealthy(healthCheckUrl); + return { computeId: containerName, - publicUrl: `http://localhost:${hostPort}`, + publicUrl, + sshHost: 'localhost', + sshPort: sshHostPort, + sshPassword, }; } catch (error) { + // Clean up container if it was created but health check failed + try { + const { execSync: execSyncCleanup } = await import('child_process'); + execSyncCleanup(`docker rm -f ${containerName}`, { stdio: 'pipe' }); + } catch { + // Ignore cleanup errors + } throw new Error(`Failed to start Docker container: ${error}`); } } @@ -935,7 +1021,7 @@ export class WorkspaceProvisioner { // Provision compute try { - const { computeId, publicUrl } = await this.provisioner.provision( + const { computeId, publicUrl, sshHost, sshPort, sshPassword } = await this.provisioner.provision( workspace, credentials ); @@ -943,6 +1029,9 @@ export class WorkspaceProvisioner { await db.workspaces.updateStatus(workspace.id, 'running', { computeId, publicUrl, + sshHost, + sshPort, + sshPassword, }); return { diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 904eef77..56b60732 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -8,9 +8,11 @@ import cors from 'cors'; import helmet from 'helmet'; import crypto from 'crypto'; import path from 'node:path'; +import http from 'node:http'; import { fileURLToPath } from 'node:url'; import { createClient, RedisClientType } from 'redis'; import { RedisStore } from 'connect-redis'; +import { WebSocketServer, WebSocket } from 'ws'; import { getConfig } from './config.js'; import { runMigrations } from './db/index.js'; import { getScalingOrchestrator, ScalingOrchestrator } from './services/index.js'; @@ -26,7 +28,7 @@ declare module 'express-session' { } // API routers -import { authRouter } from './api/auth.js'; +import { authRouter, requireAuth } from './api/auth.js'; import { providersRouter } from './api/providers.js'; import { workspacesRouter } from './api/workspaces.js'; import { reposRouter } from './api/repos.js'; @@ -42,6 +44,38 @@ import { webhooksRouter } from './api/webhooks.js'; import { githubAppRouter } from './api/github-app.js'; import { nangoAuthRouter } from './api/nango-auth.js'; import { gitRouter } from './api/git.js'; +import { db } from './db/index.js'; + +/** + * Proxy a request to the user's primary running workspace + */ +async function proxyToUserWorkspace(req: Request, res: Response, path: string): Promise { + const userId = req.session.userId; + if (!userId) { + res.status(401).json({ error: 'Unauthorized' }); + return; + } + + try { + // Find user's running workspace + const workspaces = await db.workspaces.findByUserId(userId); + const runningWorkspace = workspaces.find(w => w.status === 'running' && w.publicUrl); + + if (!runningWorkspace || !runningWorkspace.publicUrl) { + res.status(404).json({ error: 'No running workspace found', success: false }); + return; + } + + // Proxy to workspace + const targetUrl = `${runningWorkspace.publicUrl}${path}`; + const proxyRes = await fetch(targetUrl); + const data = await proxyRes.json(); + res.status(proxyRes.status).json(data); + } catch (error) { + console.error('[trajectory-proxy] Error:', error); + res.status(500).json({ error: 'Failed to proxy request to workspace', success: false }); + } +} export interface CloudServer { app: Express; @@ -126,10 +160,19 @@ export async function createServer(): Promise { // Simple in-memory rate limiting per IP const RATE_LIMIT_WINDOW_MS = 60_000; - const RATE_LIMIT_MAX = 300; + // Higher limit in development mode + const RATE_LIMIT_MAX = process.env.NODE_ENV === 'development' ? 1000 : 300; const rateLimits = new Map(); app.use((req: Request, res: Response, next: NextFunction) => { + // Skip rate limiting for localhost in development + if (process.env.NODE_ENV === 'development') { + const ip = req.ip || ''; + if (ip === '127.0.0.1' || ip === '::1' || ip === '::ffff:127.0.0.1') { + return next(); + } + } + const now = Date.now(); const key = req.ip || 'unknown'; const entry = rateLimits.get(key); @@ -158,18 +201,32 @@ export async function createServer(): Promise { // Lightweight CSRF protection using session token const SAFE_METHODS = new Set(['GET', 'HEAD', 'OPTIONS']); - // Paths exempt from CSRF (webhooks from external services) - const CSRF_EXEMPT_PATHS = ['/api/webhooks/', '/api/auth/nango/webhook']; + // Paths exempt from CSRF (webhooks from external services, workspace proxy) + const CSRF_EXEMPT_PATHS = [ + '/api/webhooks/', + '/api/auth/nango/webhook', + ]; + // Additional pattern for workspace proxy routes (contains /proxy/) + const isWorkspaceProxyRoute = (path: string) => /^\/api\/workspaces\/[^/]+\/proxy\//.test(path); app.use((req: Request, res: Response, next: NextFunction) => { - // Skip CSRF for webhook endpoints - if (CSRF_EXEMPT_PATHS.some(path => req.path.startsWith(path))) { + // Skip CSRF for webhook endpoints and workspace proxy routes + if (CSRF_EXEMPT_PATHS.some(path => req.path.startsWith(path)) || isWorkspaceProxyRoute(req.path)) { return next(); } if (!req.session) return res.status(500).json({ error: 'Session unavailable' }); + // Generate CSRF token if not present + // Use session.save() to ensure the session is persisted even for unauthenticated users + // This is necessary because saveUninitialized: false won't auto-save new sessions if (!req.session.csrfToken) { req.session.csrfToken = crypto.randomBytes(32).toString('hex'); + // Explicitly save session to persist the CSRF token + req.session.save((err) => { + if (err) { + console.error('[csrf] Failed to save session:', err); + } + }); } res.setHeader('X-CSRF-Token', req.session.csrfToken); @@ -190,6 +247,7 @@ export async function createServer(): Promise { const token = req.get('x-csrf-token'); if (!token || token !== req.session.csrfToken) { + console.log(`[csrf] Token mismatch: received=${token?.substring(0, 8)}... expected=${req.session.csrfToken?.substring(0, 8)}...`); return res.status(403).json({ error: 'CSRF token invalid or missing', code: 'CSRF_MISMATCH', @@ -226,6 +284,23 @@ export async function createServer(): Promise { console.log('[cloud] Test helper routes enabled (non-production mode)'); } + // Trajectory proxy routes - auto-detect user's workspace and forward + // These are convenience routes so the dashboard doesn't need to know the workspace ID + app.get('/api/trajectory', requireAuth, async (req, res) => { + await proxyToUserWorkspace(req, res, '/api/trajectory'); + }); + + app.get('/api/trajectory/steps', requireAuth, async (req, res) => { + const queryString = req.query.trajectoryId + ? `?trajectoryId=${encodeURIComponent(req.query.trajectoryId as string)}` + : ''; + await proxyToUserWorkspace(req, res, `/api/trajectory/steps${queryString}`); + }); + + app.get('/api/trajectory/history', requireAuth, async (req, res) => { + await proxyToUserWorkspace(req, res, '/api/trajectory/history'); + }); + // Serve static dashboard files (Next.js static export) // Path: dist/cloud/server.js -> ../../src/dashboard/out const dashboardPath = path.join(__dirname, '../../src/dashboard/out'); @@ -254,9 +329,186 @@ export async function createServer(): Promise { }); // Server lifecycle - let server: ReturnType | null = null; + let server: http.Server | null = null; let scalingOrchestrator: ScalingOrchestrator | null = null; + // Create HTTP server for WebSocket upgrade handling + const httpServer = http.createServer(app); + + // ===== Presence WebSocket ===== + const wssPresence = new WebSocketServer({ + noServer: true, + perMessageDeflate: false, + maxPayload: 1024 * 1024, // 1MB - presence messages are small + }); + + // Track online users for presence with multi-tab support + interface UserPresenceInfo { + username: string; + avatarUrl?: string; + connectedAt: string; + lastSeen: string; + } + interface UserPresenceState { + info: UserPresenceInfo; + connections: Set; + } + const onlineUsers = new Map(); + + // Validation helpers + const isValidUsername = (username: unknown): username is string => { + if (typeof username !== 'string') return false; + return /^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,37}[a-zA-Z0-9])?$/.test(username); + }; + + const isValidAvatarUrl = (url: unknown): url is string | undefined => { + if (url === undefined || url === null) return true; + if (typeof url !== 'string') return false; + try { + const parsed = new URL(url); + return parsed.protocol === 'https:' && + (parsed.hostname === 'avatars.githubusercontent.com' || + parsed.hostname === 'github.com' || + parsed.hostname.endsWith('.githubusercontent.com')); + } catch { + return false; + } + }; + + // Handle HTTP upgrade for WebSocket + httpServer.on('upgrade', (request, socket, head) => { + const pathname = new URL(request.url || '', `http://${request.headers.host}`).pathname; + + if (pathname === '/ws/presence') { + wssPresence.handleUpgrade(request, socket, head, (ws) => { + wssPresence.emit('connection', ws, request); + }); + } else { + // Unknown WebSocket path - destroy socket + socket.destroy(); + } + }); + + // Broadcast to all presence clients + const broadcastPresence = (message: object, exclude?: WebSocket) => { + const payload = JSON.stringify(message); + wssPresence.clients.forEach((client) => { + if (client !== exclude && client.readyState === WebSocket.OPEN) { + client.send(payload); + } + }); + }; + + // Get online users list + const getOnlineUsersList = (): UserPresenceInfo[] => { + return Array.from(onlineUsers.values()).map((state) => state.info); + }; + + // Handle presence connections + wssPresence.on('connection', (ws) => { + console.log('[cloud] Presence WebSocket client connected'); + let clientUsername: string | undefined; + + ws.on('message', (data) => { + try { + const msg = JSON.parse(data.toString()); + + if (msg.type === 'presence') { + if (msg.action === 'join' && msg.user?.username) { + const username = msg.user.username; + const avatarUrl = msg.user.avatarUrl; + + if (!isValidUsername(username)) { + console.warn(`[cloud] Invalid username rejected: ${username}`); + return; + } + if (!isValidAvatarUrl(avatarUrl)) { + console.warn(`[cloud] Invalid avatar URL rejected for user ${username}`); + return; + } + + clientUsername = username; + const now = new Date().toISOString(); + + const existing = onlineUsers.get(username); + if (existing) { + existing.connections.add(ws); + existing.info.lastSeen = now; + console.log(`[cloud] User ${username} opened new tab (${existing.connections.size} connections)`); + } else { + onlineUsers.set(username, { + info: { username, avatarUrl, connectedAt: now, lastSeen: now }, + connections: new Set([ws]), + }); + + console.log(`[cloud] User ${username} came online`); + broadcastPresence({ + type: 'presence_join', + user: { username, avatarUrl, connectedAt: now, lastSeen: now }, + }, ws); + } + + ws.send(JSON.stringify({ + type: 'presence_list', + users: getOnlineUsersList(), + })); + + } else if (msg.action === 'leave') { + if (!clientUsername || msg.username !== clientUsername) return; + + const userState = onlineUsers.get(clientUsername); + if (userState) { + userState.connections.delete(ws); + if (userState.connections.size === 0) { + onlineUsers.delete(clientUsername); + console.log(`[cloud] User ${clientUsername} went offline`); + broadcastPresence({ type: 'presence_leave', username: clientUsername }); + } + } + } + } else if (msg.type === 'typing') { + if (!clientUsername || msg.username !== clientUsername) return; + + const userState = onlineUsers.get(clientUsername); + if (userState) { + userState.info.lastSeen = new Date().toISOString(); + } + + broadcastPresence({ + type: 'typing', + username: clientUsername, + avatarUrl: userState?.info.avatarUrl, + isTyping: msg.isTyping, + }, ws); + } + } catch (err) { + console.error('[cloud] Invalid presence message:', err); + } + }); + + ws.on('close', () => { + if (clientUsername) { + const userState = onlineUsers.get(clientUsername); + if (userState) { + userState.connections.delete(ws); + if (userState.connections.size === 0) { + onlineUsers.delete(clientUsername); + console.log(`[cloud] User ${clientUsername} disconnected`); + broadcastPresence({ type: 'presence_leave', username: clientUsername }); + } + } + } + }); + + ws.on('error', (err) => { + console.error('[cloud] Presence WebSocket error:', err); + }); + }); + + wssPresence.on('error', (err) => { + console.error('[cloud] Presence WebSocket server error:', err); + }); + return { app, @@ -292,9 +544,10 @@ export async function createServer(): Promise { } return new Promise((resolve) => { - server = app.listen(config.port, () => { + server = httpServer.listen(config.port, () => { console.log(`Agent Relay Cloud running on port ${config.port}`); console.log(`Public URL: ${config.publicUrl}`); + console.log(`WebSocket: ws://localhost:${config.port}/ws/presence`); resolve(); }); }); @@ -306,6 +559,9 @@ export async function createServer(): Promise { await scalingOrchestrator.shutdown(); } + // Close WebSocket server + wssPresence.close(); + if (server) { await new Promise((resolve) => server!.close(() => resolve())); } diff --git a/src/daemon/api.ts b/src/daemon/api.ts index 838e7048..174f6998 100644 --- a/src/daemon/api.ts +++ b/src/daemon/api.ts @@ -326,7 +326,7 @@ export class DaemonApi extends EventEmitter { this.routes.set('POST /auth/cli/:provider/start', async (req): Promise => { const { provider } = req.params; try { - const session = startCLIAuth(provider); + const session = await startCLIAuth(provider); return { status: 200, body: { diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index ac607a24..b8089af9 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -18,6 +18,13 @@ import type { ProjectConfig, SpawnRequest } from '../bridge/types.js'; import { listTrajectorySteps, getTrajectoryStatus, getTrajectoryHistory } from '../trajectory/integration.js'; import { loadTeamsConfig } from '../bridge/teams-config.js'; import { getMemoryMonitor } from '../resiliency/memory-monitor.js'; +import { + startCLIAuth, + getAuthSession, + cancelAuthSession, + submitAuthCode, + getSupportedProviders, +} from '../daemon/cli-auth.js'; /** * Initialize cloud persistence for session tracking. @@ -1990,6 +1997,103 @@ export async function startDashboard( }); }); + // ===== CLI Auth API (for workspace-based provider authentication) ===== + + /** + * POST /auth/cli/:provider/start - Start CLI auth flow + * Body: { useDeviceFlow?: boolean } + */ + app.post('/auth/cli/:provider/start', async (req, res) => { + const { provider } = req.params; + const { useDeviceFlow } = req.body || {}; + try { + const session = await startCLIAuth(provider, { useDeviceFlow }); + res.json({ + sessionId: session.id, + status: session.status, + authUrl: session.authUrl, + }); + } catch (err) { + res.status(400).json({ + error: err instanceof Error ? err.message : 'Failed to start CLI auth', + }); + } + }); + + /** + * GET /auth/cli/:provider/status/:sessionId - Get auth session status + */ + app.get('/auth/cli/:provider/status/:sessionId', (req, res) => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return res.status(404).json({ error: 'Session not found' }); + } + res.json({ + status: session.status, + authUrl: session.authUrl, + error: session.error, + }); + }); + + /** + * GET /auth/cli/:provider/creds/:sessionId - Get credentials from completed auth + */ + app.get('/auth/cli/:provider/creds/:sessionId', (req, res) => { + const { sessionId } = req.params; + const session = getAuthSession(sessionId); + if (!session) { + return res.status(404).json({ error: 'Session not found' }); + } + if (session.status !== 'success') { + return res.status(400).json({ error: 'Auth not complete', status: session.status }); + } + res.json({ + token: session.token, + refreshToken: session.refreshToken, + expiresAt: session.tokenExpiresAt?.toISOString(), + }); + }); + + /** + * POST /auth/cli/:provider/cancel/:sessionId - Cancel auth session + */ + app.post('/auth/cli/:provider/cancel/:sessionId', (req, res) => { + const { sessionId } = req.params; + const cancelled = cancelAuthSession(sessionId); + if (!cancelled) { + return res.status(404).json({ error: 'Session not found' }); + } + res.json({ success: true }); + }); + + /** + * POST /auth/cli/:provider/code/:sessionId - Submit auth code to PTY + * Used when OAuth returns a code that must be pasted into the CLI + */ + app.post('/auth/cli/:provider/code/:sessionId', (req, res) => { + const { sessionId } = req.params; + const { code } = req.body; + + if (!code || typeof code !== 'string') { + return res.status(400).json({ error: 'Auth code is required' }); + } + + const result = submitAuthCode(sessionId, code); + if (!result.success) { + return res.status(404).json({ error: result.error || 'Session not found or process not running' }); + } + + res.json({ success: true, message: 'Auth code submitted' }); + }); + + /** + * GET /auth/cli/providers - List supported providers + */ + app.get('/auth/cli/providers', (req, res) => { + res.json({ providers: getSupportedProviders() }); + }); + // ===== Metrics API ===== /** diff --git a/src/dashboard/app/app/page.tsx b/src/dashboard/app/app/page.tsx index f8452d34..174fdf99 100644 --- a/src/dashboard/app/app/page.tsx +++ b/src/dashboard/app/app/page.tsx @@ -9,6 +9,7 @@ import React, { useState, useEffect, useCallback } from 'react'; import { App } from '../../react-components/App'; +import { CloudSessionProvider } from '../../react-components/CloudSessionProvider'; import { LogoIcon } from '../../react-components/Logo'; import { setActiveWorkspaceId } from '../../lib/api'; @@ -56,6 +57,9 @@ const AI_PROVIDERS: ProviderInfo[] = [ { id: 'droid', name: 'Factory', displayName: 'Droid', color: '#6366F1', cliCommand: 'droid' }, ]; +// Force cloud mode via env var - prevents silent fallback to local mode +const FORCE_CLOUD_MODE = process.env.NEXT_PUBLIC_FORCE_CLOUD_MODE === 'true'; + export default function DashboardPage() { const [state, setState] = useState('loading'); const [workspaces, setWorkspaces] = useState([]); @@ -64,7 +68,7 @@ export default function DashboardPage() { const [wsUrl, setWsUrl] = useState(undefined); const [error, setError] = useState(null); // Track cloud mode for potential future use - const [_isCloudMode, setIsCloudMode] = useState(false); + const [_isCloudMode, setIsCloudMode] = useState(FORCE_CLOUD_MODE); const [csrfToken, setCsrfToken] = useState(null); const [providerAuth, setProviderAuth] = useState(null); @@ -77,6 +81,9 @@ export default function DashboardPage() { // If session endpoint doesn't exist (404), we're in local mode if (sessionRes.status === 404) { + if (FORCE_CLOUD_MODE) { + throw new Error('Cloud mode enforced but session endpoint returned 404. Is the cloud server running?'); + } setIsCloudMode(false); setState('local'); return; @@ -139,8 +146,14 @@ export default function DashboardPage() { window.location.href = '/connect-repos'; } } catch (err) { - // If session check fails with 404, assume local mode + // If session check fails with network error, assume local mode (unless forced cloud) if (err instanceof TypeError && err.message.includes('Failed to fetch')) { + if (FORCE_CLOUD_MODE) { + console.error('Cloud mode enforced but network request failed:', err); + setError('Cloud mode enforced but failed to connect to server. Is the cloud server running?'); + setState('error'); + return; + } setIsCloudMode(false); setState('local'); return; @@ -201,8 +214,10 @@ export default function DashboardPage() { } // Poll for workspace to be ready + // Cloud deployments (Fly.io) can take 3-5 minutes for cold starts const pollForReady = async (workspaceId: string) => { - const maxAttempts = 60; // 2 minutes with 2s interval + const maxAttempts = 150; // 5 minutes with 2s interval + const pollIntervalMs = 2000; let attempts = 0; while (attempts < maxAttempts) { @@ -224,14 +239,20 @@ export default function DashboardPage() { return; } } else if (statusData.status === 'error') { - throw new Error('Workspace provisioning failed'); + const errorMsg = statusData.errorMessage || 'Workspace provisioning failed'; + throw new Error(errorMsg); } - await new Promise(resolve => setTimeout(resolve, 2000)); + await new Promise(resolve => setTimeout(resolve, pollIntervalMs)); attempts++; + + // Log progress every 30 seconds + if (attempts % 15 === 0) { + console.log(`[workspace] Still provisioning... (${Math.floor(attempts * pollIntervalMs / 1000)}s elapsed)`); + } } - throw new Error('Workspace provisioning timed out'); + throw new Error('Workspace provisioning timed out after 5 minutes. Please try again or contact support.'); }; await pollForReady(data.workspaceId); @@ -243,6 +264,14 @@ export default function DashboardPage() { }, [connectToWorkspace, csrfToken]); // Handle connecting an AI provider via CLI login + // Maps frontend provider IDs to backend provider IDs + const PROVIDER_ID_MAP: Record = { + anthropic: 'anthropic', + codex: 'openai', // Backend uses 'openai' for Codex + opencode: 'opencode', + droid: 'droid', + }; + const handleConnectProvider = useCallback(async (provider: ProviderInfo) => { if (!selectedWorkspace) return; @@ -254,17 +283,19 @@ export default function DashboardPage() { headers['X-CSRF-Token'] = csrfToken; } - const res = await fetch(`/api/workspaces/${selectedWorkspace.id}/connect-provider`, { + // Use the onboarding CLI auth endpoint which has proper PTY handling + const backendProviderId = PROVIDER_ID_MAP[provider.id] || provider.id; + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/start`, { method: 'POST', credentials: 'include', headers, - body: JSON.stringify({ provider: provider.id }), + body: JSON.stringify({ workspaceId: selectedWorkspace.id }), }); const data = await res.json(); if (!res.ok) { - throw new Error(data.error || 'Failed to start provider auth'); + throw new Error(data.error || data.message || 'Failed to start provider auth'); } if (data.authUrl) { @@ -279,10 +310,58 @@ export default function DashboardPage() { `width=${width},height=${height},left=${left},top=${top},popup=yes` ); setProviderAuth({ provider, authUrl: data.authUrl, status: 'waiting' }); + } else if (data.sessionId) { + // Session started but no URL yet - poll for status + setProviderAuth({ provider, status: 'starting' }); + // Start polling for auth URL + const pollForAuthUrl = async (sessionId: string) => { + const maxAttempts = 30; // 30 seconds + for (let i = 0; i < maxAttempts; i++) { + await new Promise(resolve => setTimeout(resolve, 1000)); + try { + const statusRes = await fetch( + `/api/onboarding/cli/${backendProviderId}/status/${sessionId}`, + { credentials: 'include' } + ); + const statusData = await statusRes.json(); + + if (statusData.authUrl) { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + statusData.authUrl, + `${provider.displayName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + setProviderAuth({ provider, authUrl: statusData.authUrl, status: 'waiting' }); + return; + } else if (statusData.status === 'success') { + setProviderAuth({ provider, status: 'success' }); + setTimeout(() => { + setProviderAuth(null); + connectToWorkspace(selectedWorkspace); + }, 2000); + return; + } else if (statusData.status === 'error') { + throw new Error(statusData.error || 'Authentication failed'); + } + } catch (pollErr) { + console.warn('Error polling auth status:', pollErr); + } + } + // Timeout + setProviderAuth({ + provider, + status: 'error', + error: 'Timed out waiting for authentication URL', + }); + }; + pollForAuthUrl(data.sessionId); } else { - // No auth URL means already authenticated or error + // Already authenticated setProviderAuth({ provider, status: 'success' }); - // Auto-continue after 2 seconds setTimeout(() => { setProviderAuth(null); connectToWorkspace(selectedWorkspace); @@ -380,8 +459,13 @@ export default function DashboardPage() { } // Connected to workspace - render App with workspace's WebSocket + // Wrap in CloudSessionProvider so App has access to cloud session context if (state === 'connected' && wsUrl) { - return ; + return ( + + + + ); } // Connecting state diff --git a/src/dashboard/lib/api.ts b/src/dashboard/lib/api.ts index ac880eab..af4ba52b 100644 --- a/src/dashboard/lib/api.ts +++ b/src/dashboard/lib/api.ts @@ -24,6 +24,33 @@ const API_BASE = ''; // Workspace ID for cloud mode proxying let activeWorkspaceId: string | null = null; +// CSRF token for cloud mode requests +let csrfToken: string | null = null; + +/** + * Set the CSRF token for API requests + */ +export function setCsrfToken(token: string | null): void { + csrfToken = token; +} + +/** + * Get the current CSRF token + */ +export function getCsrfToken(): string | null { + return csrfToken; +} + +/** + * Capture CSRF token from response headers + */ +function captureCsrfToken(response: Response): void { + const token = response.headers.get('X-CSRF-Token'); + if (token) { + csrfToken = token; + } +} + /** * Set the active workspace ID for API proxying in cloud mode */ @@ -45,6 +72,43 @@ function getApiUrl(path: string): string { return `${API_BASE}${path}`; } +/** + * Wrapper for fetch that handles CSRF tokens and credentials + * All requests include credentials and capture CSRF tokens from responses. + * Non-GET requests include the CSRF token in headers. + */ +async function apiFetch( + url: string, + options: RequestInit = {} +): Promise { + const method = options.method?.toUpperCase() || 'GET'; + const headers: Record = { + ...(options.headers as Record), + }; + + // Add CSRF token for state-changing requests + if (method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS') { + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + // Ensure Content-Type is set for requests with body + if (options.body && !headers['Content-Type']) { + headers['Content-Type'] = 'application/json'; + } + } + + const response = await fetch(url, { + ...options, + headers, + credentials: 'include', + }); + + // Always capture CSRF token from response + captureCsrfToken(response); + + return response; +} + /** * Dashboard data received from WebSocket */ @@ -198,9 +262,8 @@ export const api = { */ async sendMessage(request: SendMessageRequest): Promise> { try { - const response = await fetch(getApiUrl('/api/send'), { + const response = await apiFetch(getApiUrl('/api/send'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -244,9 +307,8 @@ export const api = { data = file.data; } - const response = await fetch(getApiUrl('/api/upload'), { + const response = await apiFetch(getApiUrl('/api/upload'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ filename, mimeType, data }), }); @@ -271,9 +333,8 @@ export const api = { */ async spawnAgent(request: SpawnAgentRequest): Promise { try { - const response = await fetch(getApiUrl('/api/spawn'), { + const response = await apiFetch(getApiUrl('/api/spawn'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -288,7 +349,7 @@ export const api = { */ async getSpawnedAgents(): Promise }>> { try { - const response = await fetch(getApiUrl('/api/spawned')); + const response = await apiFetch(getApiUrl('/api/spawned')); const result = await response.json() as { success?: boolean; agents?: Array<{ name: string; cli: string; startedAt: string }>; error?: string }; if (response.ok && result.success) { @@ -306,7 +367,7 @@ export const api = { */ async releaseAgent(name: string): Promise> { try { - const response = await fetch(getApiUrl(`/api/spawned/${encodeURIComponent(name)}`), { + const response = await apiFetch(getApiUrl(`/api/spawned/${encodeURIComponent(name)}`), { method: 'DELETE', }); @@ -327,7 +388,7 @@ export const api = { */ async getData(): Promise> { try { - const response = await fetch(getApiUrl('/api/data')); + const response = await apiFetch(getApiUrl('/api/data')); const data = await response.json() as DashboardData; if (response.ok) { @@ -345,7 +406,7 @@ export const api = { */ async getBridgeData(): Promise> { try { - const response = await fetch(getApiUrl('/api/bridge')); + const response = await apiFetch(getApiUrl('/api/bridge')); const data = await response.json() as FleetData; if (response.ok) { @@ -363,7 +424,7 @@ export const api = { */ async getMetrics(): Promise> { try { - const response = await fetch(getApiUrl('/api/metrics')); + const response = await apiFetch(getApiUrl('/api/metrics')); const data = await response.json(); if (response.ok) { @@ -392,7 +453,7 @@ export const api = { if (params?.since) query.set('since', String(params.since)); if (params?.limit) query.set('limit', String(params.limit)); - const response = await fetch(getApiUrl(`/api/history/sessions?${query}`)); + const response = await apiFetch(getApiUrl(`/api/history/sessions?${query}`)); const data = await response.json(); if (response.ok) { @@ -427,7 +488,7 @@ export const api = { if (params?.order) query.set('order', params.order); if (params?.search) query.set('search', params.search); - const response = await fetch(getApiUrl(`/api/history/messages?${query}`)); + const response = await apiFetch(getApiUrl(`/api/history/messages?${query}`)); const data = await response.json(); if (response.ok) { @@ -445,7 +506,7 @@ export const api = { */ async getHistoryConversations(): Promise> { try { - const response = await fetch(getApiUrl('/api/history/conversations')); + const response = await apiFetch(getApiUrl('/api/history/conversations')); const data = await response.json(); if (response.ok) { @@ -463,7 +524,7 @@ export const api = { */ async getHistoryMessage(id: string): Promise> { try { - const response = await fetch(getApiUrl(`/api/history/message/${encodeURIComponent(id)}`)); + const response = await apiFetch(getApiUrl(`/api/history/message/${encodeURIComponent(id)}`)); const data = await response.json(); if (response.ok) { @@ -481,7 +542,7 @@ export const api = { */ async getHistoryStats(): Promise> { try { - const response = await fetch(getApiUrl('/api/history/stats')); + const response = await apiFetch(getApiUrl('/api/history/stats')); const data = await response.json(); if (response.ok) { @@ -508,7 +569,7 @@ export const api = { if (params?.query) queryParams.set('q', params.query); if (params?.limit) queryParams.set('limit', String(params.limit)); - const response = await fetch(getApiUrl(`/api/files?${queryParams}`)); + const response = await apiFetch(getApiUrl(`/api/files?${queryParams}`)); const data = await response.json(); if (response.ok) { @@ -528,7 +589,7 @@ export const api = { */ async getDecisions(): Promise> { try { - const response = await fetch(getApiUrl('/api/decisions')); + const response = await apiFetch(getApiUrl('/api/decisions')); const data = await response.json(); if (response.ok && data.success) { @@ -546,9 +607,8 @@ export const api = { */ async approveDecision(id: string, optionId?: string, response?: string): Promise> { try { - const res = await fetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/approve`), { + const res = await apiFetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/approve`), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ optionId, response }), }); @@ -569,9 +629,8 @@ export const api = { */ async rejectDecision(id: string, reason?: string): Promise> { try { - const res = await fetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/reject`), { + const res = await apiFetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}/reject`), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ reason }), }); @@ -592,7 +651,7 @@ export const api = { */ async dismissDecision(id: string): Promise> { try { - const res = await fetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}`), { + const res = await apiFetch(getApiUrl(`/api/decisions/${encodeURIComponent(id)}`), { method: 'DELETE', }); @@ -615,7 +674,7 @@ export const api = { */ async getFleetServers(): Promise> { try { - const response = await fetch(getApiUrl('/api/fleet/servers')); + const response = await apiFetch(getApiUrl('/api/fleet/servers')); const data = await response.json(); if (response.ok && data.success) { @@ -633,7 +692,7 @@ export const api = { */ async getFleetStats(): Promise> { try { - const response = await fetch(getApiUrl('/api/fleet/stats')); + const response = await apiFetch(getApiUrl('/api/fleet/stats')); const data = await response.json(); if (response.ok && data.success) { @@ -660,7 +719,7 @@ export const api = { if (params?.status) queryParams.set('status', params.status); if (params?.agent) queryParams.set('agent', params.agent); - const response = await fetch(getApiUrl(`/api/tasks?${queryParams}`)); + const response = await apiFetch(getApiUrl(`/api/tasks?${queryParams}`)); const data = await response.json(); if (response.ok && data.success) { @@ -683,9 +742,8 @@ export const api = { priority: 'low' | 'medium' | 'high' | 'critical'; }): Promise> { try { - const response = await fetch(getApiUrl('/api/tasks'), { + const response = await apiFetch(getApiUrl('/api/tasks'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -709,9 +767,8 @@ export const api = { result?: string; }): Promise> { try { - const response = await fetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { + const response = await apiFetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { method: 'PATCH', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(updates), }); @@ -732,7 +789,7 @@ export const api = { */ async cancelTask(id: string): Promise> { try { - const response = await fetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { + const response = await apiFetch(getApiUrl(`/api/tasks/${encodeURIComponent(id)}`), { method: 'DELETE', }); @@ -761,9 +818,8 @@ export const api = { description?: string; }): Promise> { try { - const response = await fetch(getApiUrl('/api/beads'), { + const response = await apiFetch(getApiUrl('/api/beads'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); @@ -788,9 +844,8 @@ export const api = { thread?: string; }): Promise> { try { - const response = await fetch(getApiUrl('/api/relay/send'), { + const response = await apiFetch(getApiUrl('/api/relay/send'), { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(request), }); diff --git a/src/dashboard/lib/cloudApi.ts b/src/dashboard/lib/cloudApi.ts index 29ffa0ea..7be869e4 100644 --- a/src/dashboard/lib/cloudApi.ts +++ b/src/dashboard/lib/cloudApi.ts @@ -5,6 +5,8 @@ * Includes automatic session expiration detection and handling. */ +import { setCsrfToken as setApiCsrfToken } from './api'; + // Session error codes from the backend export type SessionErrorCode = 'SESSION_EXPIRED' | 'USER_NOT_FOUND' | 'SESSION_ERROR'; @@ -59,11 +61,14 @@ export function getCsrfToken(): string | null { /** * Capture CSRF token from response headers + * Also syncs with the api.ts library for dashboard requests */ function captureCsrfToken(response: Response): void { const token = response.headers.get('X-CSRF-Token'); if (token) { csrfToken = token; + // Sync with api.ts for dashboard-to-workspace requests + setApiCsrfToken(token); } } diff --git a/src/dashboard/react-components/hooks/useMessages.ts b/src/dashboard/react-components/hooks/useMessages.ts index e2f46056..9f775334 100644 --- a/src/dashboard/react-components/hooks/useMessages.ts +++ b/src/dashboard/react-components/hooks/useMessages.ts @@ -7,6 +7,7 @@ import { useState, useMemo, useCallback, useEffect } from 'react'; import type { Message, SendMessageRequest } from '../../types'; +import { api } from '../../lib/api'; export interface UseMessagesOptions { messages: Message[]; @@ -261,15 +262,13 @@ export function useMessages({ request.from = senderName; } - const response = await fetch('/api/send', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(request), - }); + // Use api.sendMessage which handles: + // - Workspace proxy routing (in cloud mode) + // - CSRF token headers + // - Credentials + const result = await api.sendMessage(request); - const result = await response.json() as { success?: boolean; error?: string }; - - if (response.ok && result.success) { + if (result.success) { // Success! The optimistic message will be cleaned up when // the real message arrives via WebSocket return true; diff --git a/src/dashboard/react-components/hooks/useWorkspaceStatus.ts b/src/dashboard/react-components/hooks/useWorkspaceStatus.ts index d4b70315..066df7f5 100644 --- a/src/dashboard/react-components/hooks/useWorkspaceStatus.ts +++ b/src/dashboard/react-components/hooks/useWorkspaceStatus.ts @@ -65,7 +65,15 @@ const DEFAULT_OPTIONS: Required = { export function useWorkspaceStatus( options: UseWorkspaceStatusOptions = {} ): UseWorkspaceStatusReturn { - const opts = { ...DEFAULT_OPTIONS, ...options }; + // Stabilize options to prevent infinite re-renders + // Use refs for callbacks and useMemo for primitive values + const autoRefresh = options.autoRefresh ?? DEFAULT_OPTIONS.autoRefresh; + const refreshInterval = options.refreshInterval ?? DEFAULT_OPTIONS.refreshInterval; + const autoWakeup = options.autoWakeup ?? DEFAULT_OPTIONS.autoWakeup; + + // Store callback in ref to avoid recreating refresh on every render + const onStatusChangeRef = useRef(options.onStatusChange ?? DEFAULT_OPTIONS.onStatusChange); + onStatusChangeRef.current = options.onStatusChange ?? DEFAULT_OPTIONS.onStatusChange; const [workspace, setWorkspace] = useState(null); const [exists, setExists] = useState(false); @@ -100,7 +108,7 @@ export function useWorkspaceStatus( // Check for status change if (previousStatusRef.current && previousStatusRef.current !== ws.status) { - opts.onStatusChange(ws.status, false); + onStatusChangeRef.current(ws.status, false); } previousStatusRef.current = ws.status; } else { @@ -118,7 +126,13 @@ export function useWorkspaceStatus( setIsLoading(false); } } - }, [opts]); + }, []); // No dependencies - uses refs for callbacks + + // Store refresh interval in ref for wakeup callback + const refreshIntervalRef = useRef(refreshInterval); + refreshIntervalRef.current = refreshInterval; + const autoRefreshRef = useRef(autoRefresh); + autoRefreshRef.current = autoRefresh; // Wake up workspace const wakeup = useCallback(async (): Promise<{ success: boolean; message: string }> => { @@ -141,7 +155,7 @@ export function useWorkspaceStatus( if (result.data.wasRestarted) { setStatusMessage(result.data.message); setActionNeeded(null); - opts.onStatusChange('starting', true); + onStatusChangeRef.current('starting', true); // Start more frequent polling to catch when workspace is ready if (intervalRef.current) { @@ -153,8 +167,8 @@ export function useWorkspaceStatus( setTimeout(() => { if (mountedRef.current && intervalRef.current) { clearInterval(intervalRef.current); - if (opts.autoRefresh) { - intervalRef.current = setInterval(refresh, opts.refreshInterval); + if (autoRefreshRef.current) { + intervalRef.current = setInterval(refresh, refreshIntervalRef.current); } } }, 120000); @@ -176,7 +190,7 @@ export function useWorkspaceStatus( setIsWakingUp(false); } } - }, [workspace?.id, refresh, opts]); + }, [workspace?.id, refresh]); // Initial fetch useEffect(() => { @@ -190,9 +204,9 @@ export function useWorkspaceStatus( // Auto-refresh polling useEffect(() => { - if (!opts.autoRefresh) return; + if (!autoRefresh) return; - intervalRef.current = setInterval(refresh, opts.refreshInterval); + intervalRef.current = setInterval(refresh, refreshInterval); return () => { if (intervalRef.current) { @@ -200,14 +214,14 @@ export function useWorkspaceStatus( intervalRef.current = null; } }; - }, [opts.autoRefresh, opts.refreshInterval, refresh]); + }, [autoRefresh, refreshInterval, refresh]); // Auto-wakeup when workspace is stopped useEffect(() => { - if (opts.autoWakeup && workspace?.isStopped && !isWakingUp) { + if (autoWakeup && workspace?.isStopped && !isWakingUp) { wakeup(); } - }, [opts.autoWakeup, workspace?.isStopped, isWakingUp, wakeup]); + }, [autoWakeup, workspace?.isStopped, isWakingUp, wakeup]); return { workspace, diff --git a/src/dashboard/react-components/layout/Sidebar.tsx b/src/dashboard/react-components/layout/Sidebar.tsx index 14d11c0d..c43b2c42 100644 --- a/src/dashboard/react-components/layout/Sidebar.tsx +++ b/src/dashboard/react-components/layout/Sidebar.tsx @@ -42,6 +42,8 @@ export interface SidebarProps { onThreadSelect?: (threadId: string) => void; /** Mobile: close sidebar handler */ onClose?: () => void; + /** Handler for opening settings */ + onSettingsClick?: () => void; } export function Sidebar({ @@ -65,6 +67,7 @@ export function Sidebar({ onLogsClick, onThreadSelect, onClose, + onSettingsClick, }: SidebarProps) { const [searchQuery, setSearchQuery] = useState(''); const [isThreadsCollapsed, setIsThreadsCollapsed] = useState(() => { @@ -203,7 +206,7 @@ export function Sidebar({
{/* Footer Actions */} -
+
+
); @@ -268,3 +278,12 @@ function CloseIcon() { ); } + +function SettingsIcon() { + return ( + + + + + ); +} diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts new file mode 100644 index 00000000..ff5b2d06 --- /dev/null +++ b/src/shared/cli-auth-config.ts @@ -0,0 +1,288 @@ +/** + * Shared CLI Auth Configuration + * + * Provider-specific CLI commands and patterns for OAuth authentication. + * Used by both the cloud API and workspace daemon. + */ + +/** + * Interactive prompt handler configuration + * Defines patterns to detect prompts and responses to send + */ +export interface PromptHandler { + /** Pattern to detect in CLI output (case-insensitive) */ + pattern: RegExp; + /** Response to send (e.g., '\r' for enter, 'y\r' for yes+enter) */ + response: string; + /** Delay before sending response (ms) */ + delay?: number; + /** Description for logging/debugging */ + description: string; +} + +/** + * CLI auth configuration for each provider + */ +export interface CLIAuthConfig { + /** CLI command to run */ + command: string; + /** Arguments to pass */ + args: string[]; + /** Alternative args for device flow (if supported) */ + deviceFlowArgs?: string[]; + /** Pattern to extract auth URL from output */ + urlPattern: RegExp; + /** Path to credentials file (for reading after auth) */ + credentialPath?: string; + /** Display name for UI */ + displayName: string; + /** Interactive prompts to auto-respond to */ + prompts: PromptHandler[]; + /** Success indicators in output */ + successPatterns: RegExp[]; + /** How long to wait for URL to appear (ms) */ + waitTimeout: number; + /** Whether this provider supports device flow */ + supportsDeviceFlow?: boolean; +} + +/** + * CLI commands and URL patterns for each provider + * + * Each CLI tool outputs an OAuth URL when run without credentials. + * We capture stdout/stderr and extract the URL using regex patterns. + * + * IMPORTANT: These CLIs are interactive - they output the auth URL then wait + * for the user to complete OAuth in their browser. We capture the URL and + * display it in a popup for the user. + */ +export const CLI_AUTH_CONFIG: Record = { + anthropic: { + command: 'claude', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.claude/.credentials.json', + displayName: 'Claude', + waitTimeout: 30000, // Claude can take a while to show the auth URL + prompts: [ + { + pattern: /dark\s*(mode|theme)/i, + response: '\r', // Press enter to accept default + delay: 100, + description: 'Dark mode prompt', + }, + { + pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, + response: '\r', // Press enter for first option (subscription) + delay: 100, + description: 'Auth method prompt', + }, + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', // Yes to trust + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + openai: { + command: 'codex', + args: ['login'], // Standard OAuth flow + deviceFlowArgs: ['login', '--device-auth'], // Device auth for headless/container environments + supportsDeviceFlow: true, + urlPattern: /(https:\/\/[^\s]+)/, + credentialPath: '~/.codex/auth.json', + displayName: 'Codex', + waitTimeout: 30000, + prompts: [ + { + pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, + response: 'y\r', + delay: 100, + description: 'Trust directory prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + google: { + command: 'gemini', + args: [], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Gemini', + waitTimeout: 30000, + prompts: [ + { + pattern: /login\s*with\s*google|google\s*account|choose.*auth/i, + response: '\r', // Select first option (Login with Google) + delay: 200, + description: 'Auth method selection', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + opencode: { + command: 'opencode', + args: ['auth', 'login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'OpenCode', + waitTimeout: 30000, + prompts: [ + { + pattern: /select.*provider|choose.*provider|which.*provider/i, + response: '\r', // Select first provider + delay: 200, + description: 'Provider selection', + }, + { + pattern: /claude\s*pro|anthropic|select.*auth/i, + response: '\r', // Select first auth option + delay: 200, + description: 'Auth type selection', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, + droid: { + command: 'droid', + args: ['--login'], + urlPattern: /(https:\/\/[^\s]+)/, + displayName: 'Droid', + waitTimeout: 30000, + prompts: [ + { + pattern: /sign\s*in|log\s*in|authenticate/i, + response: '\r', + delay: 200, + description: 'Login prompt', + }, + ], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + }, +}; + +/** + * Strip ANSI escape codes from text + */ +export function stripAnsiCodes(text: string): string { + // eslint-disable-next-line no-control-regex + return text.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +/** + * Check if text matches any success pattern + */ +export function matchesSuccessPattern(text: string, patterns: RegExp[]): boolean { + const cleanText = stripAnsiCodes(text).toLowerCase(); + return patterns.some((p) => p.test(cleanText)); +} + +/** + * Find matching prompt handler that hasn't been responded to yet + */ +export function findMatchingPrompt( + text: string, + prompts: PromptHandler[], + respondedPrompts: Set +): PromptHandler | null { + const cleanText = stripAnsiCodes(text); + for (const prompt of prompts) { + if (respondedPrompts.has(prompt.description)) continue; + if (prompt.pattern.test(cleanText)) { + return prompt; + } + } + return null; +} + +/** + * Get list of supported provider IDs + */ +export function getSupportedProviderIds(): string[] { + return Object.keys(CLI_AUTH_CONFIG); +} + +/** + * Get list of supported providers with details + */ +export function getSupportedProviders(): { id: string; displayName: string; command: string }[] { + return Object.entries(CLI_AUTH_CONFIG).map(([id, config]) => ({ + id, + displayName: config.displayName, + command: config.command, + })); +} + +/** + * Validate a provider's CLI auth configuration + * Returns null if valid, or an error message if invalid + */ +export function validateProviderConfig( + providerId: string, + config: CLIAuthConfig +): string | null { + if (!config.command || typeof config.command !== 'string') { + return `${providerId}: missing or invalid 'command'`; + } + + if (!Array.isArray(config.args)) { + return `${providerId}: 'args' must be an array`; + } + + if (!(config.urlPattern instanceof RegExp)) { + return `${providerId}: 'urlPattern' must be a RegExp`; + } + + // Check urlPattern has a capture group + const testUrl = 'https://example.com/test'; + const match = testUrl.match(config.urlPattern); + if (!match || !match[1]) { + return `${providerId}: 'urlPattern' must have a capture group - got ${config.urlPattern}`; + } + + if (!config.displayName || typeof config.displayName !== 'string') { + return `${providerId}: missing or invalid 'displayName'`; + } + + if (typeof config.waitTimeout !== 'number' || config.waitTimeout <= 0) { + return `${providerId}: 'waitTimeout' must be a positive number`; + } + + if (!Array.isArray(config.prompts)) { + return `${providerId}: 'prompts' must be an array`; + } + + for (let i = 0; i < config.prompts.length; i++) { + const prompt = config.prompts[i]; + if (!(prompt.pattern instanceof RegExp)) { + return `${providerId}: prompt[${i}].pattern must be a RegExp`; + } + if (typeof prompt.response !== 'string') { + return `${providerId}: prompt[${i}].response must be a string`; + } + if (!prompt.description) { + return `${providerId}: prompt[${i}].description is required`; + } + } + + if (!Array.isArray(config.successPatterns)) { + return `${providerId}: 'successPatterns' must be an array`; + } + + return null; +} + +/** + * Validate all provider configurations + * Returns array of error messages (empty if all valid) + */ +export function validateAllProviderConfigs(): string[] { + const errors: string[] = []; + for (const [id, config] of Object.entries(CLI_AUTH_CONFIG)) { + const error = validateProviderConfig(id, config); + if (error) { + errors.push(error); + } + } + return errors; +} diff --git a/test_parser.js b/test_parser.js deleted file mode 100644 index b67a3f01..00000000 --- a/test_parser.js +++ /dev/null @@ -1,31 +0,0 @@ -const { OutputParser } = require('./dist/wrapper/parser.js'); - -const parser = new OutputParser({ prefix: '->relay:' }); - -// Test case 1: Simple multi-line with blank lines -const test1 = `->relay:Dashboard Line 1 - -Line 3 -Line 4`; - -console.log('Test 1: Message with blank line'); -const result1 = parser.parse(test1 + '\n'); -if (result1.commands.length > 0) { - console.log('Body:', JSON.stringify(result1.commands[0].body)); -} else { - console.log('No command parsed'); -} - -// Test case 2: Without blank lines -const test2 = `->relay:Dashboard Line 1 -Line 2 -Line 3`; - -console.log('\nTest 2: Message without blank lines'); -parser.reset(); -const result2 = parser.parse(test2 + '\n'); -if (result2.commands.length > 0) { - console.log('Body:', JSON.stringify(result2.commands[0].body)); -} else { - console.log('No command parsed'); -} diff --git a/test_parser.mjs b/test_parser.mjs deleted file mode 100644 index 93af1f13..00000000 --- a/test_parser.mjs +++ /dev/null @@ -1,49 +0,0 @@ -import { OutputParser } from './dist/wrapper/parser.js'; - -const parser = new OutputParser({ prefix: '->relay:' }); - -// Test case 1: Simple multi-line with blank lines -const test1 = `->relay:Dashboard Line 1 - -Line 3 -Line 4`; - -console.log('Test 1: Message with blank line'); -const result1 = parser.parse(test1 + '\n'); -if (result1.commands.length > 0) { - console.log('Body:', JSON.stringify(result1.commands[0].body)); - console.log('Lines:', result1.commands[0].body.split('\n').length); -} else { - console.log('No command parsed'); -} - -// Test case 2: Without blank lines -const test2 = `->relay:Dashboard Line 1 -Line 2 -Line 3`; - -console.log('\nTest 2: Message without blank lines'); -parser.reset(); -const result2 = parser.parse(test2 + '\n'); -if (result2.commands.length > 0) { - console.log('Body:', JSON.stringify(result2.commands[0].body)); - console.log('Lines:', result2.commands[0].body.split('\n').length); -} else { - console.log('No command parsed'); -} - -// Test case 3: Multi-line with blank lines in middle -const test3 = `->relay:Dashboard TASK DETAILS: Something - -More details here -Final line`; - -console.log('\nTest 3: Real-world task message'); -parser.reset(); -const result3 = parser.parse(test3 + '\n'); -if (result3.commands.length > 0) { - console.log('Body:', JSON.stringify(result3.commands[0].body)); - console.log('Lines:', result3.commands[0].body.split('\n').length); -} else { - console.log('No command parsed'); -} From 7870b16309638ef5b8bc0dc7072b08957390f9a4 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 23:35:23 +0100 Subject: [PATCH 072/103] Fix OAuth popup not opening by waiting for authUrl capture MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The startCLIAuth function was returning immediately before the auth URL was captured asynchronously in the PTY onData handler. This meant the cloud API received a session without authUrl, and the frontend never opened the OAuth popup. Now startCLIAuth awaits a promise that resolves when: - Auth URL is captured (normal case) - CLI process exits (early exit) - 15 second timeout (fallback if CLI is slow) This ensures the caller receives the authUrl in the response. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/daemon/cli-auth.ts | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/src/daemon/cli-auth.ts b/src/daemon/cli-auth.ts index 234a89e2..e8b283ce 100644 --- a/src/daemon/cli-auth.ts +++ b/src/daemon/cli-auth.ts @@ -71,6 +71,9 @@ export interface StartCLIAuthOptions { /** * Start CLI auth flow + * + * This function waits for the auth URL to be captured before returning, + * ensuring the caller can immediately open the OAuth popup. */ export async function startCLIAuth( provider: string, @@ -106,6 +109,19 @@ export async function startCLIAuth( const respondedPrompts = new Set(); + // Create a promise that resolves when authUrl is captured or timeout + let resolveAuthUrl: () => void; + const authUrlPromise = new Promise((resolve) => { + resolveAuthUrl = resolve; + }); + + // Timeout for waiting for auth URL (shorter than the full OAuth timeout) + const AUTH_URL_WAIT_TIMEOUT = 15000; // 15 seconds to capture auth URL + const authUrlTimeout = setTimeout(() => { + logger.warn('Auth URL wait timeout, returning session without URL', { provider, sessionId }); + resolveAuthUrl(); + }, AUTH_URL_WAIT_TIMEOUT); + try { const proc = pty.spawn(config.command, args, { name: 'xterm-256color', @@ -162,6 +178,9 @@ export async function startCLIAuth( session.authUrl = match[1]; session.status = 'waiting_auth'; logger.info('Auth URL captured', { provider, url: session.authUrl }); + // Signal that we have the auth URL + clearTimeout(authUrlTimeout); + resolveAuthUrl(); } // Check for success and try to extract credentials @@ -189,6 +208,7 @@ export async function startCLIAuth( proc.onExit(async ({ exitCode }) => { clearTimeout(timeout); + clearTimeout(authUrlTimeout); logger.info('CLI process exited', { provider, exitCode }); // Try to extract credentials @@ -210,13 +230,21 @@ export async function startCLIAuth( session.status = 'error'; session.error = 'CLI exited without auth URL or credentials'; } + + // Resolve in case we're still waiting + resolveAuthUrl(); }); } catch (err) { session.status = 'error'; session.error = err instanceof Error ? err.message : 'Failed to spawn CLI'; logger.error('Failed to start CLI auth', { error: session.error }); + clearTimeout(authUrlTimeout); + resolveAuthUrl!(); } + // Wait for auth URL to be captured (or timeout) + await authUrlPromise; + return session; } From 33970b6ad0882609b16b6d8f121ecd13fe3c5ffa Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 23:43:16 +0100 Subject: [PATCH 073/103] Fix CLI OAuth test module import paths in Docker container MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The test was failing because cli-pty-runner.ts imports from '../../shared/cli-auth-config.js'. When copied flat to /app/, this resolved to /shared/cli-auth-config.js which didn't exist. Fixed by: - Copying source files maintaining the relative path structure (/app/src/cloud/api/ and /app/src/shared/) - Updating ci-test-real-clis.ts import to match new structure ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- scripts/test-cli-auth/Dockerfile.real | 7 +++++-- scripts/test-cli-auth/ci-test-real-clis.ts | 5 ++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/scripts/test-cli-auth/Dockerfile.real b/scripts/test-cli-auth/Dockerfile.real index fcab07e0..4372c6a6 100644 --- a/scripts/test-cli-auth/Dockerfile.real +++ b/scripts/test-cli-auth/Dockerfile.real @@ -58,8 +58,11 @@ ENV PATH="/home/testuser/.local/bin:$PATH" COPY --chown=testuser:testuser scripts/test-cli-auth/ci-test-real-clis.ts /app/ COPY --chown=testuser:testuser scripts/test-cli-auth/package.json /app/ -# Copy the cli-pty-runner module to /app/ so it can access node_modules -COPY --chown=testuser:testuser src/cloud/api/cli-pty-runner.ts /app/ +# Copy source modules maintaining the relative path structure +# cli-pty-runner.ts imports from '../../shared/cli-auth-config.js' +# So we need: /app/src/cloud/api/cli-pty-runner.ts -> /app/src/shared/cli-auth-config.ts +COPY --chown=testuser:testuser src/cloud/api/cli-pty-runner.ts /app/src/cloud/api/ +COPY --chown=testuser:testuser src/shared/cli-auth-config.ts /app/src/shared/ # Install test dependencies RUN npm install diff --git a/scripts/test-cli-auth/ci-test-real-clis.ts b/scripts/test-cli-auth/ci-test-real-clis.ts index 8528c804..e4694125 100644 --- a/scripts/test-cli-auth/ci-test-real-clis.ts +++ b/scripts/test-cli-auth/ci-test-real-clis.ts @@ -22,13 +22,12 @@ import { writeFileSync } from 'fs'; // Import the actual config and PTY runner from cli-pty-runner.ts // This ensures tests use the EXACT SAME logic as production -// Note: In Docker container, both files are in /app/, so use relative import -// For local dev, this path also works from scripts/test-cli-auth/ +// In Docker: /app/src/cloud/api/cli-pty-runner.ts (maintains source tree structure) import { CLI_AUTH_CONFIG, runCLIAuthViaPTY, type PTYAuthResult, -} from './cli-pty-runner.js'; +} from './src/cloud/api/cli-pty-runner.js'; interface TestResult { provider: string; From b8697860ce2b646eaf4e8ed4758817e7ba1bbe0e Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 23:49:16 +0100 Subject: [PATCH 074/103] update flow --- src/dashboard/app/app/page.tsx | 210 ++++++++++++++++++++++++++++----- 1 file changed, 178 insertions(+), 32 deletions(-) diff --git a/src/dashboard/app/app/page.tsx b/src/dashboard/app/app/page.tsx index 174fdf99..7290765e 100644 --- a/src/dashboard/app/app/page.tsx +++ b/src/dashboard/app/app/page.tsx @@ -43,7 +43,8 @@ interface ProviderInfo { interface ProviderAuthState { provider: ProviderInfo; authUrl?: string; - status: 'starting' | 'waiting' | 'success' | 'error'; + sessionId?: string; + status: 'starting' | 'waiting' | 'submitting' | 'success' | 'error'; error?: string; } @@ -71,6 +72,7 @@ export default function DashboardPage() { const [_isCloudMode, setIsCloudMode] = useState(FORCE_CLOUD_MODE); const [csrfToken, setCsrfToken] = useState(null); const [providerAuth, setProviderAuth] = useState(null); + const [authCode, setAuthCode] = useState(''); // Check if we're in cloud mode and fetch data useEffect(() => { @@ -309,10 +311,11 @@ export default function DashboardPage() { `${provider.displayName} Login`, `width=${width},height=${height},left=${left},top=${top},popup=yes` ); - setProviderAuth({ provider, authUrl: data.authUrl, status: 'waiting' }); + setAuthCode(''); // Clear any previous code + setProviderAuth({ provider, authUrl: data.authUrl, sessionId: data.sessionId, status: 'waiting' }); } else if (data.sessionId) { // Session started but no URL yet - poll for status - setProviderAuth({ provider, status: 'starting' }); + setProviderAuth({ provider, sessionId: data.sessionId, status: 'starting' }); // Start polling for auth URL const pollForAuthUrl = async (sessionId: string) => { const maxAttempts = 30; // 30 seconds @@ -335,7 +338,8 @@ export default function DashboardPage() { `${provider.displayName} Login`, `width=${width},height=${height},left=${left},top=${top},popup=yes` ); - setProviderAuth({ provider, authUrl: statusData.authUrl, status: 'waiting' }); + setAuthCode(''); // Clear any previous code + setProviderAuth({ provider, authUrl: statusData.authUrl, sessionId, status: 'waiting' }); return; } else if (statusData.status === 'success') { setProviderAuth({ provider, status: 'success' }); @@ -380,10 +384,57 @@ export default function DashboardPage() { const handleSkipProvider = useCallback(() => { if (selectedWorkspace) { setProviderAuth(null); + setAuthCode(''); connectToWorkspace(selectedWorkspace); } }, [selectedWorkspace, connectToWorkspace]); + // Submit auth code from OAuth popup + const handleSubmitAuthCode = useCallback(async () => { + if (!providerAuth?.sessionId || !authCode.trim()) return; + + const backendProviderId = PROVIDER_ID_MAP[providerAuth.provider.id] || providerAuth.provider.id; + + setProviderAuth(prev => prev ? { ...prev, status: 'submitting' } : null); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) { + headers['X-CSRF-Token'] = csrfToken; + } + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/code/${providerAuth.sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ code: authCode.trim() }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to submit auth code'); + } + + // Success - show success state, then offer to connect another or continue + setProviderAuth(prev => prev ? { ...prev, status: 'success' } : null); + setAuthCode(''); + } catch (err) { + setProviderAuth(prev => prev ? { + ...prev, + status: 'error', + error: err instanceof Error ? err.message : 'Failed to submit auth code', + } : null); + } + }, [providerAuth, authCode, csrfToken]); + + // Connect another provider after successful auth + const handleConnectAnother = useCallback(() => { + setProviderAuth(null); + setAuthCode(''); + // Stay on connect-provider screen + }, []); + const handleStartWorkspace = useCallback(async (workspace: Workspace) => { setState('loading'); setError(null); @@ -537,52 +588,147 @@ export default function DashboardPage() { {/* Provider auth modal */} {providerAuth && (
-
-
- {providerAuth.provider.displayName[0]} -
-
-

{providerAuth.provider.displayName}

-

- {providerAuth.status === 'starting' && 'Starting login...'} - {providerAuth.status === 'waiting' && 'Complete login in the popup'} - {providerAuth.status === 'success' && 'Connected!'} - {providerAuth.status === 'error' && providerAuth.error} -

+
+
+
+ {providerAuth.provider.displayName[0]} +
+
+

{providerAuth.provider.displayName}

+

+ {providerAuth.status === 'starting' && 'Starting login...'} + {providerAuth.status === 'waiting' && 'Complete authentication below'} + {providerAuth.status === 'success' && 'Connected!'} + {providerAuth.status === 'error' && providerAuth.error} +

+
+ {/* Close button for starting/waiting states */} + {(providerAuth.status === 'starting' || providerAuth.status === 'waiting') && ( + + )}
+ {/* Starting state - show spinner with cancel option */} + {providerAuth.status === 'starting' && ( +
+ + + + + Preparing authentication... +
+ )} + {providerAuth.status === 'waiting' && providerAuth.authUrl && (
-
- - - - - Complete login in the popup window + {/* Instructions */} +
+

Complete authentication:

+
    +
  1. Click the button below to open the login page
  2. +
  3. Complete authentication and copy the code shown
  4. +
  5. Paste the code below and click Submit
  6. +
-

- A popup window should have opened. If it didn't, click below: -

+ + {/* Auth URL button */} - Open Login Page Manually + Open {providerAuth.provider.displayName} Login Page + + {/* Auth code input */} +
+ + setAuthCode(e.target.value)} + placeholder="Enter the code from the login page" + className="w-full px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors" + onKeyDown={(e) => { + if (e.key === 'Enter' && authCode.trim()) { + handleSubmitAuthCode(); + } + }} + /> +
+ + {/* Submit code button */} + + + {/* Cancel button */} +
+ )} + + {/* Submitting state */} + {providerAuth.status === 'submitting' && ( +
+ + + + + Verifying code... +
+ )} + + {/* Success state - offer to connect another or continue */} + {providerAuth.status === 'success' && ( +
+
+
+ + + +
+ {providerAuth.provider.displayName} connected successfully! +
+ + + +
)} From 20b0c834b6303f76d23abf40f681a42d2a90611d Mon Sep 17 00:00:00 2001 From: Khaliq Date: Sun, 4 Jan 2026 23:55:28 +0100 Subject: [PATCH 075/103] Add provider-specific OAuth instructions for Codex MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit For Codex (OpenAI), the OAuth flow redirects to a local URL that shows "Page not found" - this is expected behavior. Users need to copy the entire redirect URL from the address bar. Changes: - Added requiresUrlCopy and supportsDeviceFlow flags to ProviderInfo - Show Codex-specific instructions explaining the "not found" page - Added input field for pasting the redirect URL - Added "Device Flow" alternative option for supported providers - Updated handleCompleteAuth to pass auth code to backend ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/dashboard/app/app/page.tsx | 154 +++++++++++++++++++++++---------- 1 file changed, 109 insertions(+), 45 deletions(-) diff --git a/src/dashboard/app/app/page.tsx b/src/dashboard/app/app/page.tsx index 7290765e..08b1d40e 100644 --- a/src/dashboard/app/app/page.tsx +++ b/src/dashboard/app/app/page.tsx @@ -38,6 +38,10 @@ interface ProviderInfo { displayName: string; color: string; cliCommand?: string; + /** Whether this provider supports device flow (code displayed on screen) */ + supportsDeviceFlow?: boolean; + /** Whether standard flow redirects to a URL the user must copy (shows "not found" page) */ + requiresUrlCopy?: boolean; } interface ProviderAuthState { @@ -46,6 +50,8 @@ interface ProviderAuthState { sessionId?: string; status: 'starting' | 'waiting' | 'submitting' | 'success' | 'error'; error?: string; + /** Whether using device flow (code displayed on screen vs redirect URL) */ + useDeviceFlow?: boolean; } type PageState = 'loading' | 'local' | 'select-workspace' | 'no-workspaces' | 'connect-provider' | 'connecting' | 'connected' | 'error'; @@ -53,7 +59,7 @@ type PageState = 'loading' | 'local' | 'select-workspace' | 'no-workspaces' | 'c // Available AI providers const AI_PROVIDERS: ProviderInfo[] = [ { id: 'anthropic', name: 'Anthropic', displayName: 'Claude', color: '#D97757', cliCommand: 'claude' }, - { id: 'codex', name: 'OpenAI', displayName: 'Codex', color: '#10A37F', cliCommand: 'codex login' }, + { id: 'codex', name: 'OpenAI', displayName: 'Codex', color: '#10A37F', cliCommand: 'codex login', supportsDeviceFlow: true, requiresUrlCopy: true }, { id: 'opencode', name: 'OpenCode', displayName: 'OpenCode', color: '#00D4AA', cliCommand: 'opencode' }, { id: 'droid', name: 'Factory', displayName: 'Droid', color: '#6366F1', cliCommand: 'droid' }, ]; @@ -389,9 +395,19 @@ export default function DashboardPage() { } }, [selectedWorkspace, connectToWorkspace]); - // Submit auth code from OAuth popup - const handleSubmitAuthCode = useCallback(async () => { - if (!providerAuth?.sessionId || !authCode.trim()) return; + // Complete auth - polls for credentials after user completes auth in browser + const handleCompleteAuth = useCallback(async () => { + if (!providerAuth?.sessionId) return; + + // For providers that require URL copy, validate the auth code/URL is provided + if (providerAuth.provider.requiresUrlCopy && !authCode.trim()) { + setProviderAuth(prev => prev ? { + ...prev, + status: 'error', + error: 'Please paste the redirect URL from your browser', + } : null); + return; + } const backendProviderId = PROVIDER_ID_MAP[providerAuth.provider.id] || providerAuth.provider.id; @@ -403,30 +419,35 @@ export default function DashboardPage() { headers['X-CSRF-Token'] = csrfToken; } - const res = await fetch(`/api/onboarding/cli/${backendProviderId}/code/${providerAuth.sessionId}`, { + // Include auth code/URL in the body for providers that require it + const body = providerAuth.provider.requiresUrlCopy + ? JSON.stringify({ authCode: authCode.trim() }) + : undefined; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/complete/${providerAuth.sessionId}`, { method: 'POST', credentials: 'include', headers, - body: JSON.stringify({ code: authCode.trim() }), + body, }); const data = await res.json(); if (!res.ok) { - throw new Error(data.error || 'Failed to submit auth code'); + throw new Error(data.error || 'Failed to complete authentication'); } // Success - show success state, then offer to connect another or continue setProviderAuth(prev => prev ? { ...prev, status: 'success' } : null); - setAuthCode(''); + setAuthCode(''); // Clear the auth code input } catch (err) { setProviderAuth(prev => prev ? { ...prev, status: 'error', - error: err instanceof Error ? err.message : 'Failed to submit auth code', + error: err instanceof Error ? err.message : 'Failed to complete authentication', } : null); } - }, [providerAuth, authCode, csrfToken]); + }, [providerAuth, csrfToken, authCode]); // Connect another provider after successful auth const handleConnectAnother = useCallback(() => { @@ -633,15 +654,35 @@ export default function DashboardPage() { {providerAuth.status === 'waiting' && providerAuth.authUrl && (
- {/* Instructions */} -
-

Complete authentication:

-
    -
  1. Click the button below to open the login page
  2. -
  3. Complete authentication and copy the code shown
  4. -
  5. Paste the code below and click Submit
  6. -
-
+ {/* Provider-specific instructions */} + {providerAuth.provider.requiresUrlCopy ? ( + /* Codex/OpenAI - requires copying the redirect URL */ +
+

Complete authentication:

+
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your {providerAuth.provider.name} account
  4. +
  5. + Important: After signing in, the page will show "Page not found" or similar - this is expected! +
  6. +
  7. Copy the entire URL from your browser's address bar
  8. +
  9. Paste it below and click Submit
  10. +
+
+ The URL will look like: http://127.0.0.1:...?code=... +
+
+ ) : ( + /* Standard flow - just sign in and return */ +
+

Complete authentication:

+
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your {providerAuth.provider.name} account
  4. +
  5. Return here and click "I've completed login"
  6. +
+
+ )} {/* Auth URL button */} - {/* Auth code input */} -
- - setAuthCode(e.target.value)} - placeholder="Enter the code from the login page" - className="w-full px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors" - onKeyDown={(e) => { - if (e.key === 'Enter' && authCode.trim()) { - handleSubmitAuthCode(); - } - }} - /> -
+ {/* For providers that require URL copy, show input field */} + {providerAuth.provider.requiresUrlCopy && ( +
+ + setAuthCode(e.target.value)} + placeholder="http://127.0.0.1:...?code=..." + className="w-full px-4 py-3 bg-bg-deep border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan" + /> + +
+ )} - {/* Submit code button */} - + {/* Complete button - only show for standard flow */} + {!providerAuth.provider.requiresUrlCopy && ( + + )} + + {/* Device flow option for supported providers */} + {providerAuth.provider.supportsDeviceFlow && !providerAuth.useDeviceFlow && ( +
+ +

+ Device flow shows a code you enter on the provider's website +

+
+ )} {/* Cancel button */}
- {/* Provider auth modal */} - {providerAuth && ( -
-
-
-
- {providerAuth.provider.displayName[0]} -
-
-

{providerAuth.provider.displayName}

-

- {providerAuth.status === 'starting' && 'Starting login...'} - {providerAuth.status === 'waiting' && 'Complete authentication below'} - {providerAuth.status === 'success' && 'Connected!'} - {providerAuth.status === 'error' && providerAuth.error} -

-
-
- {/* Close button for starting/waiting states */} - {(providerAuth.status === 'starting' || providerAuth.status === 'waiting') && ( - - )} -
- - {/* Starting state - show spinner with cancel option */} - {providerAuth.status === 'starting' && ( -
- - - - - Preparing authentication... -
- )} - - {providerAuth.status === 'waiting' && providerAuth.authUrl && ( -
- {/* Provider-specific instructions */} - {providerAuth.provider.requiresUrlCopy ? ( - /* Codex/OpenAI - requires copying the redirect URL */ -
-

Complete authentication:

-
    -
  1. Click the button below to open the login page
  2. -
  3. Sign in with your {providerAuth.provider.name} account
  4. -
  5. - Important: After signing in, the page will show "Page not found" or similar - this is expected! -
  6. -
  7. Copy the entire URL from your browser's address bar
  8. -
  9. Paste it below and click Submit
  10. -
-
- The URL will look like: http://127.0.0.1:...?code=... -
-
- ) : ( - /* Standard flow - just sign in and return */ -
-

Complete authentication:

-
    -
  1. Click the button below to open the login page
  2. -
  3. Sign in with your {providerAuth.provider.name} account
  4. -
  5. Return here and click "I've completed login"
  6. -
-
- )} - - {/* Auth URL button */} -
- Open {providerAuth.provider.displayName} Login Page - - - {/* For providers that require URL copy, show input field */} - {providerAuth.provider.requiresUrlCopy && ( -
- - setAuthCode(e.target.value)} - placeholder="http://127.0.0.1:...?code=..." - className="w-full px-4 py-3 bg-bg-deep border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan" - /> - -
- )} - - {/* Complete button - only show for standard flow */} - {!providerAuth.provider.requiresUrlCopy && ( - - )} - - {/* Device flow option for supported providers */} - {providerAuth.provider.supportsDeviceFlow && !providerAuth.useDeviceFlow && ( -
- -

- Device flow shows a code you enter on the provider's website -

-
- )} - - {/* Cancel button */} - -
- )} - - {/* Submitting state */} - {providerAuth.status === 'submitting' && ( -
- - - - - Verifying code... -
- )} - - {/* Success state - offer to connect another or continue */} - {providerAuth.status === 'success' && ( -
-
-
- - - -
- {providerAuth.provider.displayName} connected successfully! -
- + {/* Provider auth flow - using shared component */} + {connectingProvider && (() => { + const provider = AI_PROVIDERS.find(p => p.id === connectingProvider); + if (!provider) return null; + return ( +
+ { + // Show success state briefly, then offer options + setConnectingProvider(null); + // Stay on connect-provider screen to allow connecting more providers + // User can click "Continue to Dashboard" or connect another + }} + onCancel={() => { + setConnectingProvider(null); + }} + onError={() => { + setConnectingProvider(null); + }} + /> + + {/* After success, show options to connect another or continue */} +
-
- )} - - {providerAuth.status === 'error' && ( - - )} -
- )} +
+ ); + })()} {/* Provider list */} - {!providerAuth && ( + {!connectingProvider && (

Choose an AI Provider

{AI_PROVIDERS.map((provider) => ( -
-
-

{provider.displayName}

-

{provider.name}

-
- - - - +
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.name}

+
+ + + + + {/* Pre-auth warning for providers that require URL copy */} + {provider.requiresUrlCopy && ( +
+

+ โš ๏ธ Important: After signing in, you'll see a "Page not found" error. + This is expected! Copy the entire URL from your browser's address bar + (it will look like http://127.0.0.1:...?code=...) and paste it back here. +

+
+ )} +
))}
From 29a53c2b9f46d4f8070fd77eeef801fe2b8efd6f Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 00:18:54 +0100 Subject: [PATCH 079/103] Fix OpenCode auth: improve URL pattern and detect existing credentials MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changes: - Update OpenCode URL pattern to match actual OAuth provider URLs (Anthropic, OpenAI, Google, OpenCode.ai) - Add credential path for OpenCode (~/.local/share/opencode/auth.json) - Add OpenCode credential extraction (checks opencode, anthropic, openai, google keys) - Check for existing credentials before starting auth flow - If already authenticated, return success immediately instead of showing the login page ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/daemon/cli-auth.ts | 84 ++++++++++++++++++++++++++++++++--- src/shared/cli-auth-config.ts | 29 ++++++++---- 2 files changed, 99 insertions(+), 14 deletions(-) diff --git a/src/daemon/cli-auth.ts b/src/daemon/cli-auth.ts index 3d637fee..b4b6d0bb 100644 --- a/src/daemon/cli-auth.ts +++ b/src/daemon/cli-auth.ts @@ -95,6 +95,21 @@ export async function startCLIAuth( }; sessions.set(sessionId, session); + // Check if already authenticated (credentials exist) + try { + const existingCreds = await extractCredentials(provider, config); + if (existingCreds?.token) { + logger.info('Already authenticated - existing credentials found', { provider, sessionId }); + session.status = 'success'; + session.token = existingCreds.token; + session.refreshToken = existingCreds.refreshToken; + session.tokenExpiresAt = existingCreds.expiresAt; + return session; + } + } catch { + // No existing credentials, proceed with auth flow + } + // Use device flow args if requested and supported const args = options.useDeviceFlow && config.deviceFlowArgs ? config.deviceFlowArgs @@ -209,7 +224,19 @@ export async function startCLIAuth( proc.onExit(async ({ exitCode }) => { clearTimeout(timeout); clearTimeout(authUrlTimeout); - logger.info('CLI process exited', { provider, exitCode }); + + // Log full output for debugging PTY exit issues + const cleanOutput = stripAnsiCodes(session.output); + logger.info('CLI process exited', { + provider, + exitCode, + outputLength: session.output.length, + hasAuthUrl: !!session.authUrl, + sessionStatus: session.status, + promptsHandled: session.promptsHandled, + // Last 500 chars of output for debugging + outputTail: cleanOutput.slice(-500), + }); // Try to extract credentials if (session.authUrl || exitCode === 0) { @@ -261,24 +288,47 @@ export function getAuthSession(sessionId: string): AuthSession | null { * * @returns Object with success status and optional error message */ -export function submitAuthCode( +export async function submitAuthCode( sessionId: string, code: string -): { success: boolean; error?: string } { +): Promise<{ success: boolean; error?: string; needsRestart?: boolean }> { const session = sessions.get(sessionId); if (!session) { logger.warn('Auth code submission failed: session not found', { sessionId }); - return { success: false, error: 'Session not found or expired' }; + return { success: false, error: 'Session not found or expired', needsRestart: true }; } if (!session.process) { logger.warn('Auth code submission failed: no PTY process', { sessionId, sessionStatus: session.status, + provider: session.provider, }); + + // Try to extract credentials as a fallback - maybe auth completed in browser + const config = CLI_AUTH_CONFIG[session.provider]; + if (config) { + try { + const creds = await extractCredentials(session.provider, config); + if (creds) { + session.token = creds.token; + session.refreshToken = creds.refreshToken; + session.tokenExpiresAt = creds.expiresAt; + session.status = 'success'; + logger.info('Credentials found despite PTY exit', { provider: session.provider }); + return { success: true }; + } + } catch { + // No credentials found + } + } + + // For providers like Claude that need the code pasted into CLI, + // if the PTY is gone, user needs to restart the auth flow return { success: false, - error: 'CLI process not running. The auth session may have timed out.', + error: 'The authentication session has ended. The CLI process exited before the code could be entered. Please click "Try Again" to restart.', + needsRestart: true, }; } @@ -459,6 +509,30 @@ async function extractCredentials( // Fallback: API key or legacy formats const token = creds.OPENAI_API_KEY || creds.token || creds.access_token || creds.api_key; return token ? { token } : null; + } else if (provider === 'opencode') { + // OpenCode stores multiple providers: { opencode: {...}, anthropic: {...}, openai: {...}, google: {...} } + // Check for any valid credential - prefer OpenCode Zen, then Anthropic + if (creds.opencode?.key) { + return { token: creds.opencode.key }; + } + if (creds.anthropic?.access) { + return { + token: creds.anthropic.access, + refreshToken: creds.anthropic.refresh, + expiresAt: creds.anthropic.expires ? new Date(creds.anthropic.expires) : undefined, + }; + } + if (creds.openai?.access) { + return { + token: creds.openai.access, + refreshToken: creds.openai.refresh, + expiresAt: creds.openai.expires ? new Date(creds.openai.expires) : undefined, + }; + } + if (creds.google?.key) { + return { token: creds.google.key }; + } + return null; } const token = creds.token || creds.access_token || creds.api_key; diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts index ff5b2d06..0c6b987d 100644 --- a/src/shared/cli-auth-config.ts +++ b/src/shared/cli-auth-config.ts @@ -65,6 +65,13 @@ export const CLI_AUTH_CONFIG: Record = { displayName: 'Claude', waitTimeout: 30000, // Claude can take a while to show the auth URL prompts: [ + { + // Claude Code version selection - accept default (recommended) + pattern: /which\s*version|claude\s*code\s*version|select.*version/i, + response: '\r', + delay: 100, + description: 'Version selection prompt', + }, { pattern: /dark\s*(mode|theme)/i, response: '\r', // Press enter to accept default @@ -72,7 +79,8 @@ export const CLI_AUTH_CONFIG: Record = { description: 'Dark mode prompt', }, { - pattern: /(subscription|api\s*key|how\s*would\s*you\s*like\s*to\s*authenticate)/i, + // Be more specific to avoid matching after URL is shown + pattern: /how\s*would\s*you\s*like\s*to\s*authenticate|choose.*auth.*method|select.*auth/i, response: '\r', // Press enter for first option (subscription) delay: 100, description: 'Auth method prompt', @@ -84,7 +92,7 @@ export const CLI_AUTH_CONFIG: Record = { description: 'Trust directory prompt', }, ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i, /you.*(?:are|now).*logged/i], }, openai: { command: 'codex', @@ -124,24 +132,27 @@ export const CLI_AUTH_CONFIG: Record = { opencode: { command: 'opencode', args: ['auth', 'login'], - urlPattern: /(https:\/\/[^\s]+)/, + // OpenCode redirects to provider OAuth pages (Anthropic, OpenAI, Google) + urlPattern: /(https:\/\/(?:accounts\.anthropic\.com|auth\.openai\.com|accounts\.google\.com|opencode\.ai)[^\s]+)/, + credentialPath: '~/.local/share/opencode/auth.json', displayName: 'OpenCode', waitTimeout: 30000, prompts: [ { pattern: /select.*provider|choose.*provider|which.*provider/i, - response: '\r', // Select first provider - delay: 200, + response: '\r', // Select first provider (OpenCode Zen - recommended) + delay: 300, description: 'Provider selection', }, { - pattern: /claude\s*pro|anthropic|select.*auth/i, - response: '\r', // Select first auth option + pattern: /opencode\s*zen|recommended/i, + response: '\r', // Confirm provider selection delay: 200, - description: 'Auth type selection', + description: 'Confirm provider', }, ], - successPatterns: [/success/i, /authenticated/i, /logged\s*in/i], + // Success patterns include credential added and existing credentials list + successPatterns: [/success/i, /authenticated/i, /logged\s*in/i, /credential\s*added/i, /\d+\s*credentials?/i], }, droid: { command: 'droid', From c8eda7c3d2d3e49b5580298e0ed5ebcee1e701ba Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 06:07:13 +0000 Subject: [PATCH 080/103] Fix missing await on submitAuthCode in CLI auth endpoint Bug: submitAuthCode() is async but was not awaited, causing: - result was a Promise, not the actual result object - result.success was undefined (Promise has no .success) - !undefined = true, so it always returned an error Fix: Add async/await to properly handle the Promise. --- src/dashboard-server/server.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index 873d3e31..d66aeea6 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -2072,7 +2072,7 @@ export async function startDashboard( * POST /auth/cli/:provider/code/:sessionId - Submit auth code to PTY * Used when OAuth returns a code that must be pasted into the CLI */ - app.post('/auth/cli/:provider/code/:sessionId', (req, res) => { + app.post('/auth/cli/:provider/code/:sessionId', async (req, res) => { const { sessionId } = req.params; const { code } = req.body; @@ -2080,7 +2080,7 @@ export async function startDashboard( return res.status(400).json({ error: 'Auth code is required' }); } - const result = submitAuthCode(sessionId, code); + const result = await submitAuthCode(sessionId, code); if (!result.success) { return res.status(404).json({ error: result.error || 'Session not found or process not running' }); } From 16f5ae01bb9ce44a8aa35535517f79a41bc0d9df Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 06:15:37 +0000 Subject: [PATCH 081/103] Add 5 security beads for CLI auth pre-launch fixes Critical security issues identified in CLI OAuth flow: - bd-critical-016: Unauthenticated workspace daemon endpoints - bd-critical-017: PTY output may log sensitive tokens - bd-critical-018: No rate limiting on auth endpoints - bd-critical-019: OAuth session timeout too long (5 min) - bd-critical-020: Must force device flow in cloud mode --- .beads/beads.jsonl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index 5e418d51..b69f3bca 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -34,3 +34,8 @@ {"id":"bd-canvas-001","title":"Canvas/Collaborative Docs","description":"Real-time collaborative documents within channels.","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p3"],"depends_on":[]} {"id":"bd-agent-public-001","title":"Deploy Always-On Community Agents","description":"Deploy dedicated agents for public community rooms (DocsBot, RoadmapBot, HelpBot, ShowcaseBot, ModBot).","priority":95,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","agents","community","p0"],"depends_on":["bd-viral-001"]} {"id":"bd-landing-001","title":"Landing Page with Live Community Embed","description":"Update landing page to embed live community rooms with real-time activity.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","marketing","p0"],"depends_on":["bd-viral-001","bd-agent-public-001"]} +{"id":"bd-critical-016","title":"[SECURITY] Workspace Daemon Auth - Unauthenticated Endpoints","description":"Workspace daemon internal endpoints have NO authentication.\n\n## Vulnerability\nEndpoints in dashboard-server/server.ts are exposed without auth:\n- POST /auth/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/complete/:sessionId\n- POST /auth/cli/:provider/start\n\n## Risk\nIf workspace daemon is exposed via publicUrl, attackers could:\n- Submit malicious codes to active auth sessions\n- Enumerate active sessions\n- DoS the PTY processes\n- Hijack OAuth flows mid-completion\n\n## Fix\n1. Add workspace auth middleware (shared secret or JWT)\n2. Validate session ownership\n3. Add request signing between cloud server and workspace daemon\n\n## Files\n- src/dashboard-server/server.ts:2075-2130\n- src/cloud/api/onboarding.ts (caller)","priority":148,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":[]} +{"id":"bd-critical-017","title":"[SECURITY] PTY Output May Log Sensitive Tokens","description":"CLI auth PTY output is logged and may contain secrets.\n\n## Vulnerability\nIn cli-auth.ts:237-238, the last 500 chars of CLI output are logged:\n```typescript\nlogger.info('CLI process exited', {\n outputTail: cleanOutput.slice(-500), // May contain tokens!\n});\n```\n\n## Risk\n- Access tokens in logs\n- Refresh tokens exposed\n- API keys visible in log aggregators\n- Credentials in error dumps\n\n## Fix\n1. Sanitize PTY output before logging\n2. Redact patterns: token=XXX, Bearer XXX, api_key=XXX\n3. Add log scrubbing middleware\n4. Review all logger.info/error calls for secrets\n\n## Files\n- src/daemon/cli-auth.ts:237-238\n- src/daemon/cli-auth.ts:177 (prompt logging)","priority":142,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":[]} +{"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} +{"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} +{"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} From daacc9c545f814c107e45f2e9e1b3311c8ee9264 Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 06:56:18 +0000 Subject: [PATCH 082/103] Add bead for per-user credentials in shared workspaces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit bd-critical-021: Architectural design for allowing team members to use their own provider credentials instead of sharing the workspace owner's credentials. Includes: - Database schema for workspace_credentials table - Credential resolution order (user override โ†’ workspace default โ†’ owner) - API endpoints for credential management - UI changes for provider settings - Migration path for backwards compatibility --- .beads/beads.jsonl | 1 + 1 file changed, 1 insertion(+) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index b69f3bca..8bb41947 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -39,3 +39,4 @@ {"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} {"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} {"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} +{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning uses OWNER's credentials only\n- All workspace members share same API tokens\n- No way for team members to use their own provider accounts\n\n## Issues This Causes\n1. **Shared quota** - One user's API limits consumed by entire team\n2. **No attribution** - Can't audit which user made which API call\n3. **Single point of failure** - Owner disconnects โ†’ everyone loses access\n4. **Security risk** - Shared credentials violate least privilege principle\n5. **Billing confusion** - Owner pays for all team's usage\n\n## Proposed Architecture: Hybrid Credential Model\n\n### Database Changes\n```sql\n-- New table: workspace-level credential mappings\nCREATE TABLE workspace_credentials (\n id UUID PRIMARY KEY,\n workspace_id UUID REFERENCES workspaces(id),\n user_id UUID REFERENCES users(id),\n provider TEXT NOT NULL,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN DEFAULT false,\n created_at TIMESTAMP,\n UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential for this workspace+provider\n2. Workspace default credential (if set)\n3. Workspace owner's credential (fallback)\n\n### UI Changes\n- Workspace Settings > Providers: Show who provided each credential\n- \"Use my own [Provider]\" button for team members\n- \"Set as workspace default\" for admins\n- Per-user override toggle\n\n### API Changes\n- GET /api/workspaces/:id/credentials - list all credentials with sources\n- POST /api/workspaces/:id/credentials/override - user overrides with own\n- DELETE /api/workspaces/:id/credentials/override - remove user override\n\n### Agent Spawn Changes\n- Pass requesting user's ID with spawn request\n- Resolve credentials using new priority order\n- Log which credential was used for audit trail\n\n## Files to Modify\n- src/cloud/db/schema.ts - add workspace_credentials table\n- src/cloud/db/drizzle.ts - add queries\n- src/cloud/vault/index.ts - add workspace credential resolution\n- src/cloud/provisioner/index.ts - use resolved credentials\n- src/cloud/api/workspaces.ts - credential management endpoints\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx - UI\n\n## Migration Path\n1. Add workspace_credentials table (empty)\n2. On workspace access, check for user override first\n3. Fall back to current owner credential logic\n4. Gradually encourage users to connect own credentials","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} From 5c686c88584c825b1aede4a7cbdc061115160746 Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 07:01:52 +0000 Subject: [PATCH 083/103] Update per-user credentials bead with implementation details Key insight: Container env vars are set at provisioning time, but PTY wrapper already supports per-process env overrides. Solution: 1. Spawn request includes userId 2. Daemon fetches user's credential from cloud API 3. Override ANTHROPIC_API_KEY etc. in PTY spawn env Requires daemon-to-cloud auth (bd-critical-016) first. --- .beads/beads.jsonl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index 8bb41947..0b56e7bf 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -39,4 +39,4 @@ {"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} {"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} {"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} -{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning uses OWNER's credentials only\n- All workspace members share same API tokens\n- No way for team members to use their own provider accounts\n\n## Issues This Causes\n1. **Shared quota** - One user's API limits consumed by entire team\n2. **No attribution** - Can't audit which user made which API call\n3. **Single point of failure** - Owner disconnects โ†’ everyone loses access\n4. **Security risk** - Shared credentials violate least privilege principle\n5. **Billing confusion** - Owner pays for all team's usage\n\n## Proposed Architecture: Hybrid Credential Model\n\n### Database Changes\n```sql\n-- New table: workspace-level credential mappings\nCREATE TABLE workspace_credentials (\n id UUID PRIMARY KEY,\n workspace_id UUID REFERENCES workspaces(id),\n user_id UUID REFERENCES users(id),\n provider TEXT NOT NULL,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN DEFAULT false,\n created_at TIMESTAMP,\n UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential for this workspace+provider\n2. Workspace default credential (if set)\n3. Workspace owner's credential (fallback)\n\n### UI Changes\n- Workspace Settings > Providers: Show who provided each credential\n- \"Use my own [Provider]\" button for team members\n- \"Set as workspace default\" for admins\n- Per-user override toggle\n\n### API Changes\n- GET /api/workspaces/:id/credentials - list all credentials with sources\n- POST /api/workspaces/:id/credentials/override - user overrides with own\n- DELETE /api/workspaces/:id/credentials/override - remove user override\n\n### Agent Spawn Changes\n- Pass requesting user's ID with spawn request\n- Resolve credentials using new priority order\n- Log which credential was used for audit trail\n\n## Files to Modify\n- src/cloud/db/schema.ts - add workspace_credentials table\n- src/cloud/db/drizzle.ts - add queries\n- src/cloud/vault/index.ts - add workspace credential resolution\n- src/cloud/provisioner/index.ts - use resolved credentials\n- src/cloud/api/workspaces.ts - credential management endpoints\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx - UI\n\n## Migration Path\n1. Add workspace_credentials table (empty)\n2. On workspace access, check for user override first\n3. Fall back to current owner credential logic\n4. Gradually encourage users to connect own credentials","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} +{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- All agents inherit same tokens via process.env\n- No way for team members to use their own provider accounts\n\n## Current Credential Flow\n```\nCloud Vault โ†’ Provisioner โ†’ Container Env Vars โ†’ All Agents\n ANTHROPIC_TOKEN=owner_token\n```\n\nCLI tools check (in order):\n1. Environment variable (ANTHROPIC_API_KEY, OPENAI_API_KEY)\n2. Credential file (~/.claude/.credentials.json)\n\n## Why This Is Hard\n- Container env vars set at provisioning time (once)\n- Can't change container env vars per-user at runtime\n- All processes inherit same process.env\n\n## Solution: Per-Process Env Override\n\nThe PTY wrapper already supports env overrides (pty-wrapper.ts:270):\n```typescript\nthis.ptyProcess = pty.spawn(command, args, {\n env: {\n ...process.env, // Container-level (owner's tokens)\n ...this.config.env, // Per-agent overrides โ† USE THIS\n }\n});\n```\n\n### Implementation\n\n1. **Spawn request includes userId**\n```typescript\ninterface SpawnAgentRequest {\n name: string;\n task: string;\n provider?: string;\n userId: string; // NEW: who is spawning this agent\n}\n```\n\n2. **Daemon fetches user's credentials from cloud**\n```typescript\n// Before spawning, call cloud API\nconst userCreds = await fetch(`${CLOUD_API_URL}/api/credentials/${userId}/${provider}`, {\n headers: { Authorization: `Bearer ${WORKSPACE_TOKEN}` }\n});\n```\n\n3. **Override env when spawning**\n```typescript\nconst ptyConfig: PtyWrapperConfig = {\n command: 'claude',\n env: {\n ANTHROPIC_API_KEY: userCreds.accessToken, // Override container default\n }\n};\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n id UUID PRIMARY KEY,\n workspace_id UUID REFERENCES workspaces(id),\n user_id UUID REFERENCES users(id),\n provider TEXT NOT NULL,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN DEFAULT false,\n UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (container env var fallback)\n\n### API Changes\n- GET /api/workspaces/:id/credentials/:userId/:provider - get user's token\n- POST /api/workspaces/:id/credentials/override - user connects own provider\n- Daemon calls cloud API before each agent spawn\n\n### UI Changes\n- Workspace Settings > Providers: Show credential sources\n- \"Use my own [Provider]\" button for team members\n- Indicator showing whose credential is being used\n\n## Files to Modify\n- src/daemon/agent-manager.ts:88-216 - fetch user creds before spawn\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- src/wrapper/pty-wrapper.ts - already supports env override\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx - UI\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} From 149ec34532829aa45c8fc4a424f22b68e3980f55 Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 07:09:11 +0000 Subject: [PATCH 084/103] Update per-user credentials bead: use HOME dir, not env vars Key insight: CLI tools (claude, codex) read their OWN credential files, they IGNORE SDK env vars like ANTHROPIC_API_KEY. Solution: Set HOME=/home/workspace-users/{userId} when spawning so each user's CLI finds credentials in $HOME/.claude/ etc. Added: - Per-provider credential file formats - Cleanup considerations for credential files - New credential-writer.ts module --- .beads/beads.jsonl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index 0b56e7bf..a6aece39 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -39,4 +39,4 @@ {"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} {"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} {"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} -{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- All agents inherit same tokens via process.env\n- No way for team members to use their own provider accounts\n\n## Current Credential Flow\n```\nCloud Vault โ†’ Provisioner โ†’ Container Env Vars โ†’ All Agents\n ANTHROPIC_TOKEN=owner_token\n```\n\nCLI tools check (in order):\n1. Environment variable (ANTHROPIC_API_KEY, OPENAI_API_KEY)\n2. Credential file (~/.claude/.credentials.json)\n\n## Why This Is Hard\n- Container env vars set at provisioning time (once)\n- Can't change container env vars per-user at runtime\n- All processes inherit same process.env\n\n## Solution: Per-Process Env Override\n\nThe PTY wrapper already supports env overrides (pty-wrapper.ts:270):\n```typescript\nthis.ptyProcess = pty.spawn(command, args, {\n env: {\n ...process.env, // Container-level (owner's tokens)\n ...this.config.env, // Per-agent overrides โ† USE THIS\n }\n});\n```\n\n### Implementation\n\n1. **Spawn request includes userId**\n```typescript\ninterface SpawnAgentRequest {\n name: string;\n task: string;\n provider?: string;\n userId: string; // NEW: who is spawning this agent\n}\n```\n\n2. **Daemon fetches user's credentials from cloud**\n```typescript\n// Before spawning, call cloud API\nconst userCreds = await fetch(`${CLOUD_API_URL}/api/credentials/${userId}/${provider}`, {\n headers: { Authorization: `Bearer ${WORKSPACE_TOKEN}` }\n});\n```\n\n3. **Override env when spawning**\n```typescript\nconst ptyConfig: PtyWrapperConfig = {\n command: 'claude',\n env: {\n ANTHROPIC_API_KEY: userCreds.accessToken, // Override container default\n }\n};\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n id UUID PRIMARY KEY,\n workspace_id UUID REFERENCES workspaces(id),\n user_id UUID REFERENCES users(id),\n provider TEXT NOT NULL,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN DEFAULT false,\n UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (container env var fallback)\n\n### API Changes\n- GET /api/workspaces/:id/credentials/:userId/:provider - get user's token\n- POST /api/workspaces/:id/credentials/override - user connects own provider\n- Daemon calls cloud API before each agent spawn\n\n### UI Changes\n- Workspace Settings > Providers: Show credential sources\n- \"Use my own [Provider]\" button for team members\n- Indicator showing whose credential is being used\n\n## Files to Modify\n- src/daemon/agent-manager.ts:88-216 - fetch user creds before spawn\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- src/wrapper/pty-wrapper.ts - already supports env override\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx - UI\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} +{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- CLI tools (claude, codex) use their OWN credential files, NOT env vars\n- Claude: `~/.claude/.credentials.json`\n- Codex: `~/.codex/auth.json`\n\n## Why Env Var Override Won't Work\nCLI tools are NOT SDKs. They have their own auth:\n- `claude` CLI reads `$HOME/.claude/.credentials.json`\n- `codex` CLI reads `$HOME/.codex/auth.json`\n- They IGNORE `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` env vars\n\n## Solution: Per-User HOME Directories\n\nSet `HOME` env var to user-specific directory when spawning:\n\n```typescript\n// 1. Create user's credential directory\nconst userHome = `/home/workspace-users/${userId}`;\nawait fs.mkdir(`${userHome}/.claude`, { recursive: true });\n\n// 2. Write user's credentials in CLI-expected format\nawait fs.writeFile(\n `${userHome}/.claude/.credentials.json`,\n JSON.stringify({\n claudeAiOauth: {\n accessToken: userCreds.accessToken,\n refreshToken: userCreds.refreshToken,\n expiresAt: userCreds.expiresAt\n }\n })\n);\n\n// 3. Spawn with custom HOME\npty.spawn('claude', args, {\n env: {\n ...process.env,\n HOME: userHome // Claude looks in $HOME/.claude/\n }\n});\n```\n\n### Per-Provider Credential Format\n\n**Claude** (`$HOME/.claude/.credentials.json`):\n```json\n{\n \"claudeAiOauth\": {\n \"accessToken\": \"...\",\n \"refreshToken\": \"...\",\n \"expiresAt\": \"2025-01-01T00:00:00Z\"\n }\n}\n```\n\n**Codex** (`$HOME/.codex/auth.json`):\n```json\n{\n \"tokens\": {\n \"access_token\": \"...\",\n \"refresh_token\": \"...\"\n }\n}\n```\n\n### Implementation Steps\n\n1. **Spawn request includes userId**\n```typescript\ninterface SpawnAgentRequest {\n name: string;\n task: string;\n provider?: string;\n userId: string; // Who is spawning\n}\n```\n\n2. **Daemon fetches user's credentials from cloud**\n```typescript\nconst userCreds = await fetch(\n `${CLOUD_API_URL}/api/credentials/${userId}/${provider}`,\n { headers: { Authorization: `Bearer ${WORKSPACE_TOKEN}` } }\n);\n```\n\n3. **Write credentials to user's HOME**\n```typescript\nconst userHome = `/home/workspace-users/${userId}`;\nawait writeProviderCredentials(userHome, provider, userCreds);\n```\n\n4. **Spawn with HOME override**\n```typescript\npty.spawn(cli, args, { env: { ...process.env, HOME: userHome } });\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n id UUID PRIMARY KEY,\n workspace_id UUID REFERENCES workspaces(id),\n user_id UUID REFERENCES users(id),\n provider TEXT NOT NULL,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN DEFAULT false,\n UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (fallback to container default)\n\n### Cleanup Considerations\n- Credential files persist in `/home/workspace-users/`\n- Need cleanup job for expired/revoked credentials\n- Consider tmpfs or encrypted storage\n\n## Files to Modify\n- src/daemon/agent-manager.ts - write creds, set HOME on spawn\n- src/daemon/credential-writer.ts (new) - per-provider credential format\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- src/shared/cli-auth-config.ts - credential file paths (already defined)\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} From 5c49571f2ef9f524295bbd4e0b1f12e313175ef8 Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 07:17:46 +0000 Subject: [PATCH 085/103] Update per-user credentials bead: clarify git impact Git operations are SAFE when changing HOME because: - git-credential-relay uses env vars (CLOUD_API_URL, WORKSPACE_TOKEN) - Does NOT read files from HOME directory Added prepareUserHome() function that: 1. Writes user's CLI credentials to user HOME 2. Copies gh CLI config from container HOME 3. Returns path for spawn env override Reference: deploy/workspace/entrypoint.sh shows credential format --- .beads/beads.jsonl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index a6aece39..5282997f 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -39,4 +39,4 @@ {"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} {"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} {"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} -{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- CLI tools (claude, codex) use their OWN credential files, NOT env vars\n- Claude: `~/.claude/.credentials.json`\n- Codex: `~/.codex/auth.json`\n\n## Why Env Var Override Won't Work\nCLI tools are NOT SDKs. They have their own auth:\n- `claude` CLI reads `$HOME/.claude/.credentials.json`\n- `codex` CLI reads `$HOME/.codex/auth.json`\n- They IGNORE `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` env vars\n\n## Solution: Per-User HOME Directories\n\nSet `HOME` env var to user-specific directory when spawning:\n\n```typescript\n// 1. Create user's credential directory\nconst userHome = `/home/workspace-users/${userId}`;\nawait fs.mkdir(`${userHome}/.claude`, { recursive: true });\n\n// 2. Write user's credentials in CLI-expected format\nawait fs.writeFile(\n `${userHome}/.claude/.credentials.json`,\n JSON.stringify({\n claudeAiOauth: {\n accessToken: userCreds.accessToken,\n refreshToken: userCreds.refreshToken,\n expiresAt: userCreds.expiresAt\n }\n })\n);\n\n// 3. Spawn with custom HOME\npty.spawn('claude', args, {\n env: {\n ...process.env,\n HOME: userHome // Claude looks in $HOME/.claude/\n }\n});\n```\n\n### Per-Provider Credential Format\n\n**Claude** (`$HOME/.claude/.credentials.json`):\n```json\n{\n \"claudeAiOauth\": {\n \"accessToken\": \"...\",\n \"refreshToken\": \"...\",\n \"expiresAt\": \"2025-01-01T00:00:00Z\"\n }\n}\n```\n\n**Codex** (`$HOME/.codex/auth.json`):\n```json\n{\n \"tokens\": {\n \"access_token\": \"...\",\n \"refresh_token\": \"...\"\n }\n}\n```\n\n### Implementation Steps\n\n1. **Spawn request includes userId**\n```typescript\ninterface SpawnAgentRequest {\n name: string;\n task: string;\n provider?: string;\n userId: string; // Who is spawning\n}\n```\n\n2. **Daemon fetches user's credentials from cloud**\n```typescript\nconst userCreds = await fetch(\n `${CLOUD_API_URL}/api/credentials/${userId}/${provider}`,\n { headers: { Authorization: `Bearer ${WORKSPACE_TOKEN}` } }\n);\n```\n\n3. **Write credentials to user's HOME**\n```typescript\nconst userHome = `/home/workspace-users/${userId}`;\nawait writeProviderCredentials(userHome, provider, userCreds);\n```\n\n4. **Spawn with HOME override**\n```typescript\npty.spawn(cli, args, { env: { ...process.env, HOME: userHome } });\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n id UUID PRIMARY KEY,\n workspace_id UUID REFERENCES workspaces(id),\n user_id UUID REFERENCES users(id),\n provider TEXT NOT NULL,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN DEFAULT false,\n UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (fallback to container default)\n\n### Cleanup Considerations\n- Credential files persist in `/home/workspace-users/`\n- Need cleanup job for expired/revoked credentials\n- Consider tmpfs or encrypted storage\n\n## Files to Modify\n- src/daemon/agent-manager.ts - write creds, set HOME on spawn\n- src/daemon/credential-writer.ts (new) - per-provider credential format\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- src/shared/cli-auth-config.ts - credential file paths (already defined)\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} +{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- entrypoint.sh writes owner's creds to `${HOME}/.claude/` at container start\n- CLI tools (claude, codex) read from `$HOME/.{cli}/` - NOT env vars\n\n## Why Env Var Override Won't Work\nCLI tools are NOT SDKs. They have their own auth:\n- `claude` CLI reads `$HOME/.claude/.credentials.json`\n- `codex` CLI reads `$HOME/.codex/auth.json`\n- They IGNORE `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` env vars\n\n## Solution: Per-User HOME Directories\n\nSet `HOME` env var to user-specific directory when spawning.\n\n### Impact on Git Operations\n\n**Git is SAFE** - uses credential helper with env vars:\n- `git-credential-relay` reads `CLOUD_API_URL`, `WORKSPACE_ID`, `WORKSPACE_TOKEN`\n- Does NOT read files from HOME\n- Git clone/push/pull will continue working\n\n**gh CLI needs config copied:**\n- Config at `${HOME}/.config/gh/hosts.yml`\n- Copy from container HOME to user HOME\n- Or rely on `GH_TOKEN` env var (already set)\n\n### prepareUserHome() Function\n\n```typescript\nasync function prepareUserHome(userId: string, provider: string): Promise {\n const userHome = `/home/workspace-users/${userId}`;\n const containerHome = process.env.HOME || '/home/workspace';\n \n // 1. Fetch user's credentials from cloud\n const creds = await fetchUserCredentials(userId, provider);\n \n // 2. Write provider-specific credential file\n if (provider === 'anthropic') {\n await fs.mkdir(`${userHome}/.claude`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.claude/.credentials.json`,\n JSON.stringify({ claudeAiOauth: creds })\n );\n } else if (provider === 'openai') {\n await fs.mkdir(`${userHome}/.codex`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.codex/auth.json`,\n JSON.stringify({ tokens: { access_token: creds.accessToken, refresh_token: creds.refreshToken } })\n );\n }\n \n // 3. Copy gh CLI config (for `gh pr create` etc.)\n await fs.cp(`${containerHome}/.config/gh`, `${userHome}/.config/gh`, { recursive: true });\n \n return userHome;\n}\n```\n\n### Credential Format Reference\n\n**Claude** (`$HOME/.claude/.credentials.json`):\n```json\n{ \"claudeAiOauth\": { \"accessToken\": \"...\", \"refreshToken\": \"...\", \"expiresAt\": \"...\" } }\n```\n\n**Codex** (`$HOME/.codex/auth.json`):\n```json\n{ \"tokens\": { \"access_token\": \"...\", \"refresh_token\": \"...\" } }\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n workspace_id UUID, user_id UUID, provider TEXT,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN, UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (fallback to container HOME)\n\n## Files to Modify\n- src/daemon/agent-manager.ts - call prepareUserHome before spawn\n- src/daemon/credential-writer.ts (new) - per-provider credential format\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- deploy/workspace/entrypoint.sh - already writes owner creds (reference)\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} From 7edcb30a6038cfd41a3a29277dd948b2b065961c Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 08:20:00 +0100 Subject: [PATCH 086/103] fix tests and consolidate --- src/cloud/api/onboarding.ts | 9 +- src/dashboard-server/server.ts | 14 +- .../react-components/ProviderAuthFlow.tsx | 545 ++++++++++++++++++ .../settings/WorkspaceSettingsPanel.tsx | 90 +-- src/shared/cli-auth-config.ts | 4 +- 5 files changed, 592 insertions(+), 70 deletions(-) create mode 100644 src/dashboard/react-components/ProviderAuthFlow.tsx diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 68a475c7..e9bc7c49 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -389,14 +389,17 @@ onboardingRouter.post('/cli/:provider/code/:sessionId', async (req: Request, res console.log('[onboarding] Workspace error:', errorData); // Provide more helpful error message - if (codeResponse.status === 404) { - return res.status(404).json({ - error: 'Auth session expired in workspace. The CLI process may have timed out. Please try connecting again.', + const needsRestart = (errorData as { needsRestart?: boolean }).needsRestart; + if (codeResponse.status === 404 || codeResponse.status === 400) { + return res.status(400).json({ + error: errorData.error || 'Auth session expired in workspace. The CLI process may have timed out. Please try connecting again.', + needsRestart: needsRestart ?? true, }); } return res.status(codeResponse.status).json({ error: errorData.error || 'Failed to submit auth code to workspace', + needsRestart, }); } catch (err) { console.error('[onboarding] Failed to submit auth code to workspace:', err); diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index d66aeea6..d38bcf93 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -2082,7 +2082,12 @@ export async function startDashboard( const result = await submitAuthCode(sessionId, code); if (!result.success) { - return res.status(404).json({ error: result.error || 'Session not found or process not running' }); + // Use 400 for known errors (like PTY exited), 404 for session not found + const status = result.needsRestart ? 400 : 404; + return res.status(status).json({ + error: result.error || 'Session not found or process not running', + needsRestart: result.needsRestart, + }); } res.json({ success: true, message: 'Auth code submitted' }); @@ -2115,9 +2120,12 @@ export async function startDashboard( } // Submit the code to the CLI process - const submitResult = submitAuthCode(sessionId, code); + const submitResult = await submitAuthCode(sessionId, code); if (!submitResult.success) { - return res.status(400).json({ error: submitResult.error }); + return res.status(400).json({ + error: submitResult.error, + needsRestart: submitResult.needsRestart, + }); } // Wait a moment for credentials to be written diff --git a/src/dashboard/react-components/ProviderAuthFlow.tsx b/src/dashboard/react-components/ProviderAuthFlow.tsx new file mode 100644 index 00000000..16dbdafc --- /dev/null +++ b/src/dashboard/react-components/ProviderAuthFlow.tsx @@ -0,0 +1,545 @@ +/** + * Provider Auth Flow Component + * + * Shared component for AI provider OAuth authentication. + * Used by both the onboarding page and workspace settings. + * + * Handles different auth flows: + * - Claude/Anthropic: OAuth popup โ†’ "I've completed login" โ†’ poll for credentials + * - Codex/OpenAI: OAuth popup โ†’ copy localhost URL โ†’ paste code โ†’ submit + */ + +import React, { useState, useCallback, useRef, useEffect } from 'react'; + +export interface ProviderInfo { + id: string; + name: string; + displayName: string; + color: string; + cliCommand?: string; + /** Whether this provider's OAuth redirects to localhost (shows "site can't be reached") */ + requiresUrlCopy?: boolean; + /** Whether this provider supports device flow */ + supportsDeviceFlow?: boolean; +} + +export interface ProviderAuthFlowProps { + provider: ProviderInfo; + workspaceId: string; + csrfToken?: string; + onSuccess: () => void; + onCancel: () => void; + onError: (error: string) => void; + /** Whether to use device flow (for providers that support it) */ + useDeviceFlow?: boolean; +} + +type AuthStatus = 'idle' | 'starting' | 'waiting' | 'submitting' | 'success' | 'error'; + +// Provider ID mapping for backend +const PROVIDER_ID_MAP: Record = { + codex: 'openai', +}; + +export function ProviderAuthFlow({ + provider, + workspaceId, + csrfToken, + onSuccess, + onCancel, + onError, + useDeviceFlow = false, +}: ProviderAuthFlowProps) { + const [status, setStatus] = useState('idle'); + const [authUrl, setAuthUrl] = useState(null); + const [sessionId, setSessionId] = useState(null); + const [codeInput, setCodeInput] = useState(''); + const [errorMessage, setErrorMessage] = useState(null); + const popupOpenedRef = useRef(false); + const pollingRef = useRef(false); + + const backendProviderId = PROVIDER_ID_MAP[provider.id] || provider.id; + + // Start the OAuth flow + const startAuth = useCallback(async () => { + setStatus('starting'); + setErrorMessage(null); + popupOpenedRef.current = false; + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/start`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ workspaceId, useDeviceFlow }), + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to start authentication'); + } + + if (data.status === 'success' || data.alreadyAuthenticated) { + setStatus('success'); + onSuccess(); + return; + } + + setSessionId(data.sessionId); + + if (data.authUrl) { + setAuthUrl(data.authUrl); + setStatus('waiting'); + openAuthPopup(data.authUrl); + startPolling(data.sessionId); + } else if (data.sessionId) { + // No URL yet, poll for it + startPolling(data.sessionId); + } + } catch (err) { + const msg = err instanceof Error ? err.message : 'Failed to start authentication'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }, [backendProviderId, workspaceId, csrfToken, useDeviceFlow, onSuccess, onError]); + + // Open OAuth popup + const openAuthPopup = useCallback((url: string) => { + const width = 600; + const height = 700; + const left = window.screenX + (window.outerWidth - width) / 2; + const top = window.screenY + (window.outerHeight - height) / 2; + window.open( + url, + `${provider.displayName} Login`, + `width=${width},height=${height},left=${left},top=${top},popup=yes` + ); + popupOpenedRef.current = true; + }, [provider.displayName]); + + // Poll for auth status + const startPolling = useCallback((sid: string) => { + if (pollingRef.current) return; + pollingRef.current = true; + + const maxAttempts = 60; + let attempts = 0; + + const poll = async () => { + if (attempts >= maxAttempts) { + pollingRef.current = false; + setErrorMessage('Authentication timed out. Please try again.'); + setStatus('error'); + onError('Authentication timed out'); + return; + } + + try { + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/status/${sid}`, { + credentials: 'include', + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to check status'); + } + + if (data.status === 'success') { + pollingRef.current = false; + await handleComplete(sid); + return; + } else if (data.status === 'error') { + throw new Error(data.error || 'Authentication failed'); + } else if (data.status === 'waiting_auth' && data.authUrl) { + setAuthUrl(data.authUrl); + setStatus('waiting'); + if (!popupOpenedRef.current) { + openAuthPopup(data.authUrl); + } + } + + attempts++; + setTimeout(poll, 5000); + } catch (err) { + pollingRef.current = false; + const msg = err instanceof Error ? err.message : 'Auth check failed'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }; + + poll(); + }, [backendProviderId, openAuthPopup, onError]); + + // Complete auth by polling for credentials + const handleComplete = useCallback(async (sid?: string) => { + const targetSessionId = sid || sessionId; + if (!targetSessionId) return; + + setStatus('submitting'); + setErrorMessage(null); + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/complete/${targetSessionId}`, { + method: 'POST', + credentials: 'include', + headers, + }); + + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.error || 'Failed to complete authentication'); + } + + setStatus('success'); + onSuccess(); + } catch (err) { + const msg = err instanceof Error ? err.message : 'Failed to complete authentication'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }, [sessionId, backendProviderId, csrfToken, onSuccess, onError]); + + // Submit auth code (for providers like Codex that need it) + const handleSubmitCode = useCallback(async () => { + if (!sessionId || !codeInput.trim()) return; + + setStatus('submitting'); + setErrorMessage(null); + + // Extract code from URL if user pasted the full callback URL + let code = codeInput.trim(); + if (code.includes('code=')) { + try { + const url = new URL(code); + const extractedCode = url.searchParams.get('code'); + if (extractedCode) { + code = extractedCode; + } + } catch { + const match = code.match(/code=([^&\s]+)/); + if (match) { + code = match[1]; + } + } + } + + try { + const headers: Record = { 'Content-Type': 'application/json' }; + if (csrfToken) headers['X-CSRF-Token'] = csrfToken; + + const res = await fetch(`/api/onboarding/cli/${backendProviderId}/code/${sessionId}`, { + method: 'POST', + credentials: 'include', + headers, + body: JSON.stringify({ code }), + }); + + const data = await res.json() as { status?: string; error?: string; needsRestart?: boolean }; + + if (!res.ok) { + // If server indicates we need to restart, show helpful message + if (data.needsRestart) { + setErrorMessage('The authentication session timed out. Please click "Try Again" to restart.'); + setStatus('error'); + return; + } + throw new Error(data.error || 'Failed to submit auth code'); + } + + setCodeInput(''); + + if (data.status === 'success') { + await handleComplete(); + } + // Otherwise continue polling + } catch (err) { + const msg = err instanceof Error ? err.message : 'Failed to submit auth code'; + setErrorMessage(msg); + setStatus('error'); + onError(msg); + } + }, [sessionId, codeInput, backendProviderId, csrfToken, handleComplete, onError]); + + // Cancel auth flow + const handleCancel = useCallback(async () => { + pollingRef.current = false; + + if (sessionId) { + try { + await fetch(`/api/onboarding/cli/${backendProviderId}/cancel/${sessionId}`, { + method: 'POST', + credentials: 'include', + }); + } catch { + // Ignore cancel errors + } + } + + setStatus('idle'); + setAuthUrl(null); + setSessionId(null); + setCodeInput(''); + setErrorMessage(null); + onCancel(); + }, [sessionId, backendProviderId, onCancel]); + + // Start auth when component mounts (parent controls when to render this component) + useEffect(() => { + if (status === 'idle') { + startAuth(); + } + // Cleanup on unmount + return () => { + pollingRef.current = false; + }; + }, [startAuth, status]); + + // Determine which flow type to use based on provider + const isCodexFlow = provider.requiresUrlCopy || provider.id === 'codex' || backendProviderId === 'openai'; + const isClaudeFlow = provider.id === 'anthropic' || backendProviderId === 'anthropic'; + + return ( +
+ {/* Header */} +
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

+ {status === 'starting' && 'Starting authentication...'} + {status === 'waiting' && 'Complete authentication below'} + {status === 'submitting' && 'Verifying...'} + {status === 'success' && 'Connected!'} + {status === 'error' && (errorMessage || 'Authentication failed')} +

+
+
+ + {/* Starting state */} + {status === 'starting' && ( +
+ + + + + Preparing authentication... +
+ )} + + {/* Waiting state */} + {status === 'waiting' && authUrl && ( +
+ {/* Instructions - different for each provider */} +
+

Complete authentication:

+ {isCodexFlow ? ( + /* Codex/OpenAI: OAuth redirects to localhost which is unreachable */ +
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your {provider.displayName} account
  4. +
  5. + Important: After signing in, you'll see a "This site can't be reached" error - this is expected! +
  6. +
  7. Copy the entire URL from your browser's address bar (it starts with http://localhost...)
  8. +
  9. Paste it in the input below and click Submit
  10. +
+ ) : isClaudeFlow ? ( + /* Claude/Anthropic: Shows a code after OAuth completion */ +
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your Anthropic account
  4. +
  5. After signing in, Anthropic will display an authentication code
  6. +
  7. Copy that code and paste it in the input below
  8. +
  9. Click Submit to complete authentication
  10. +
+ ) : ( + /* Other providers: Try polling for credentials first */ +
    +
  1. Click the button below to open the login page
  2. +
  3. Sign in with your {provider.displayName} account
  4. +
  5. If you receive a code, paste it below. Otherwise click "I've completed login"
  6. +
+ )} +
+ + {/* Auth URL button */} + + Open {provider.displayName} Login Page + + + {isCodexFlow ? ( + /* Codex: URL paste flow with warning about "site can't be reached" */ +
+
+

+ Expected behavior: After login, you'll see "This site can't be reached" - this is normal! + Copy the full URL from your browser's address bar and paste it below. +

+
+
+ setCodeInput(e.target.value)} + className="flex-1 px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors font-mono text-sm" + onKeyDown={(e) => { + if (e.key === 'Enter' && codeInput.trim()) { + handleSubmitCode(); + } + }} + /> + +
+
+ ) : isClaudeFlow ? ( + /* Claude: Code paste flow */ +
+
+

+ Look for the code: After signing in, Anthropic will show you an authentication code. + Copy it and paste it below. +

+
+
+ setCodeInput(e.target.value)} + className="flex-1 px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors font-mono text-sm" + onKeyDown={(e) => { + if (e.key === 'Enter' && codeInput.trim()) { + handleSubmitCode(); + } + }} + /> + +
+
+ ) : ( + /* Other providers: Code input with fallback button */ +
+
+ setCodeInput(e.target.value)} + className="flex-1 px-4 py-3 bg-bg-tertiary border border-border-subtle rounded-xl text-white placeholder-text-muted focus:outline-none focus:border-accent-cyan transition-colors font-mono text-sm" + onKeyDown={(e) => { + if (e.key === 'Enter' && codeInput.trim()) { + handleSubmitCode(); + } + }} + /> + +
+ +
+ )} + + {/* Cancel button */} + +
+ )} + + {/* Submitting state */} + {status === 'submitting' && ( +
+ + + + + Verifying authentication... +
+ )} + + {/* Success state */} + {status === 'success' && ( +
+
+ + + +
+ {provider.displayName} connected! +
+ )} + + {/* Error state */} + {status === 'error' && ( +
+
+ {errorMessage || 'Authentication failed. Please try again.'} +
+
+ + +
+
+ )} +
+ ); +} diff --git a/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx index 5c5605cb..fee7eaad 100644 --- a/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx +++ b/src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx @@ -9,6 +9,7 @@ import React, { useState, useEffect, useCallback, useRef } from 'react'; import { cloudApi } from '../../lib/cloudApi'; +import { ProviderAuthFlow } from '../ProviderAuthFlow'; export interface WorkspaceSettingsPanelProps { workspaceId: string; @@ -776,68 +777,33 @@ export function WorkspaceSettingsPanel({ {!providerStatus[provider.id] && (
- {oauthSession?.providerId === provider.id ? ( -
- {oauthSession.status === 'starting' && ( -
-
- Starting authentication... -
- )} - {oauthSession.status === 'waiting_auth' && ( - <> -
- - Complete login in the popup window -
- {oauthSession.authUrl && ( -

- Popup didn't open?{' '} - -

- )} - {/* Auth code/URL input for completing auth */} -
-

- {provider.id === 'openai' ? ( - <>After completing login, if you see "site can't be reached", copy the full URL from your browser and paste it here: - ) : provider.id === 'anthropic' ? ( - <>After completing login, copy the auth code shown on the page and paste it here: - ) : ( - <>If {provider.displayName} gives you an auth code, paste it here: - )} -

-
- setAuthCodeInput(e.target.value)} - className="flex-1 px-3 py-2 bg-bg-card border border-border-subtle rounded-lg text-sm text-text-primary placeholder:text-text-muted focus:outline-none focus:border-accent-cyan focus:ring-1 focus:ring-accent-cyan/30 transition-all font-mono" - /> - -
-
- - )} - -
+ {connectingProvider === provider.id ? ( + { + setProviderStatus(prev => ({ ...prev, [provider.id]: true })); + setConnectingProvider(null); + setOauthSession(null); + }} + onCancel={() => { + setConnectingProvider(null); + setOauthSession(null); + }} + onError={(err) => { + setProviderError(err); + setConnectingProvider(null); + setOauthSession(null); + }} + /> ) : showApiKeyFallback[provider.id] ? (
diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts index 0c6b987d..69589a19 100644 --- a/src/shared/cli-auth-config.ts +++ b/src/shared/cli-auth-config.ts @@ -80,7 +80,7 @@ export const CLI_AUTH_CONFIG: Record = { }, { // Be more specific to avoid matching after URL is shown - pattern: /how\s*would\s*you\s*like\s*to\s*authenticate|choose.*auth.*method|select.*auth/i, + pattern: /how\s*would\s*you\s*like\s*to\s*authenticate|choose.*auth.*method|select.*auth|subscription\s*or.*api\s*key/i, response: '\r', // Press enter for first option (subscription) delay: 100, description: 'Auth method prompt', @@ -133,7 +133,7 @@ export const CLI_AUTH_CONFIG: Record = { command: 'opencode', args: ['auth', 'login'], // OpenCode redirects to provider OAuth pages (Anthropic, OpenAI, Google) - urlPattern: /(https:\/\/(?:accounts\.anthropic\.com|auth\.openai\.com|accounts\.google\.com|opencode\.ai)[^\s]+)/, + urlPattern: /(https:\/\/[^\s]+)/, credentialPath: '~/.local/share/opencode/auth.json', displayName: 'OpenCode', waitTimeout: 30000, From 916443bf98bb610de185f41f65af49312bcbfadb Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 5 Jan 2026 07:20:19 +0000 Subject: [PATCH 087/103] Elevate security beads priorities for credential architecture MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit bd-critical-016 (Daemon Auth): priority 148 โ†’ 160 - Now highest priority - blocks per-user credentials - Added implementation example using WORKSPACE_TOKEN - Tagged with blocks-per-user-creds bd-critical-021 (Per-User Creds): priority 120 โ†’ 155 - Elevated to p00 launch-blocker - Still depends on bd-critical-016 Execution order: 1. bd-critical-016: Add daemon auth middleware 2. bd-critical-021: Implement per-user HOME directories --- .beads/beads.jsonl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index 5282997f..28577ca1 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -34,9 +34,9 @@ {"id":"bd-canvas-001","title":"Canvas/Collaborative Docs","description":"Real-time collaborative documents within channels.","priority":45,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["collaboration","p3"],"depends_on":[]} {"id":"bd-agent-public-001","title":"Deploy Always-On Community Agents","description":"Deploy dedicated agents for public community rooms (DocsBot, RoadmapBot, HelpBot, ShowcaseBot, ModBot).","priority":95,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","agents","community","p0"],"depends_on":["bd-viral-001"]} {"id":"bd-landing-001","title":"Landing Page with Live Community Embed","description":"Update landing page to embed live community rooms with real-time activity.","priority":90,"status":"open","created_at":"2026-01-04T17:00:00Z","tags":["viral","marketing","p0"],"depends_on":["bd-viral-001","bd-agent-public-001"]} -{"id":"bd-critical-016","title":"[SECURITY] Workspace Daemon Auth - Unauthenticated Endpoints","description":"Workspace daemon internal endpoints have NO authentication.\n\n## Vulnerability\nEndpoints in dashboard-server/server.ts are exposed without auth:\n- POST /auth/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/complete/:sessionId\n- POST /auth/cli/:provider/start\n\n## Risk\nIf workspace daemon is exposed via publicUrl, attackers could:\n- Submit malicious codes to active auth sessions\n- Enumerate active sessions\n- DoS the PTY processes\n- Hijack OAuth flows mid-completion\n\n## Fix\n1. Add workspace auth middleware (shared secret or JWT)\n2. Validate session ownership\n3. Add request signing between cloud server and workspace daemon\n\n## Files\n- src/dashboard-server/server.ts:2075-2130\n- src/cloud/api/onboarding.ts (caller)","priority":148,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":[]} +{"id":"bd-critical-016","title":"[SECURITY] Workspace Daemon Auth - Unauthenticated Endpoints","description":"Workspace daemon internal endpoints have NO authentication. **BLOCKS bd-critical-021 (per-user credentials)**\n\n## Vulnerability\nEndpoints in dashboard-server/server.ts are exposed without auth:\n- POST /auth/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/complete/:sessionId\n- POST /auth/cli/:provider/start\n- GET /api/credentials/:userId/:provider (proposed for per-user creds)\n\n## Risk\nIf workspace daemon is exposed via publicUrl, attackers could:\n- Submit malicious codes to active auth sessions\n- Enumerate active sessions\n- DoS the PTY processes\n- Hijack OAuth flows mid-completion\n- **Request ANY user's credentials** (if per-user creds implemented without this)\n\n## Fix\n1. Add workspace auth middleware using WORKSPACE_TOKEN (already passed to containers)\n2. Validate HMAC signature on all daemon endpoints\n3. Add request signing between cloud server and workspace daemon\n4. Validate session ownership\n\n## Implementation\n```typescript\n// Middleware for daemon endpoints\nfunction validateWorkspaceAuth(req, res, next) {\n const token = req.headers.authorization?.replace('Bearer ', '');\n const expectedToken = process.env.WORKSPACE_TOKEN;\n \n if (!token || token !== expectedToken) {\n return res.status(401).json({ error: 'Unauthorized' });\n }\n next();\n}\n\n// Apply to all internal endpoints\napp.use('/auth/cli', validateWorkspaceAuth);\napp.use('/api/credentials', validateWorkspaceAuth);\n```\n\n## Files\n- src/dashboard-server/server.ts:2075-2130\n- src/cloud/api/onboarding.ts (caller - must send WORKSPACE_TOKEN)","priority":160,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00","blocks-per-user-creds"],"depends_on":[]} {"id":"bd-critical-017","title":"[SECURITY] PTY Output May Log Sensitive Tokens","description":"CLI auth PTY output is logged and may contain secrets.\n\n## Vulnerability\nIn cli-auth.ts:237-238, the last 500 chars of CLI output are logged:\n```typescript\nlogger.info('CLI process exited', {\n outputTail: cleanOutput.slice(-500), // May contain tokens!\n});\n```\n\n## Risk\n- Access tokens in logs\n- Refresh tokens exposed\n- API keys visible in log aggregators\n- Credentials in error dumps\n\n## Fix\n1. Sanitize PTY output before logging\n2. Redact patterns: token=XXX, Bearer XXX, api_key=XXX\n3. Add log scrubbing middleware\n4. Review all logger.info/error calls for secrets\n\n## Files\n- src/daemon/cli-auth.ts:237-238\n- src/daemon/cli-auth.ts:177 (prompt logging)","priority":142,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":[]} {"id":"bd-critical-018","title":"[SECURITY] CLI Auth Rate Limiting Missing","description":"No rate limiting on CLI auth endpoints allows brute-force and DoS.\n\n## Vulnerability\nThese endpoints have no rate limits:\n- POST /api/onboarding/cli/:provider/start\n- POST /api/onboarding/cli/:provider/code/:sessionId\n- POST /auth/cli/:provider/code/:sessionId\n\n## Risk\n- Attackers can spawn unlimited PTY processes (DoS)\n- Brute-force auth code submission\n- Resource exhaustion on workspace containers\n- Cost explosion from compute usage\n\n## Fix\n1. Add rate limiter middleware (express-rate-limit)\n2. Limit per-user: 5 auth starts per 15 min\n3. Limit per-session: 10 code submissions per minute\n4. Add exponential backoff on failures\n\n## Files\n- src/cloud/api/onboarding.ts\n- src/dashboard-server/server.ts","priority":138,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["critical","security","launch-blocker","p00"],"depends_on":["bd-critical-002"]} {"id":"bd-critical-019","title":"[SECURITY] Auth Session Timeout Too Long","description":"5-minute OAuth session timeout creates large attack window.\n\n## Vulnerability\nIn cli-auth.ts:159:\n```typescript\nconst OAUTH_COMPLETION_TIMEOUT = 5 * 60 * 1000; // 5 minutes\n```\n\n## Risk\n- Long window for session hijacking\n- Active sessions can be enumerated\n- Stale PTY processes consume resources\n- Race conditions in token capture\n\n## Fix\n1. Reduce timeout to 2 minutes (plenty for OAuth)\n2. Add session invalidation on suspicious activity\n3. Implement one-time-use session tokens\n4. Clean up PTY immediately on error\n\n## Files\n- src/daemon/cli-auth.ts:159","priority":128,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","p00"],"depends_on":[]} {"id":"bd-critical-020","title":"[SECURITY] Force Device Flow in Cloud Mode","description":"Standard OAuth redirect to localhost doesn't work in cloud - must force device flow.\n\n## Problem\nCodex OAuth redirects to localhost:1455, which:\n- Doesn't exist on user's machine when using cloud\n- CLI runs in container, not user's computer\n- Callback never reaches the CLI\n\n## Current Behavior\nDevice flow is opt-in via checkbox, defaults to OFF.\n\n## Fix\n1. Auto-detect cloud mode in frontend\n2. Force useDeviceFlow=true for providers that support it\n3. Hide the checkbox in cloud mode (always on)\n4. Show clear instructions for device flow\n\n## Files\n- src/dashboard/react-components/settings/WorkspaceSettingsPanel.tsx\n- src/cloud/api/onboarding.ts","priority":135,"status":"open","created_at":"2026-01-05T12:00:00Z","tags":["security","ux","launch-blocker","p00"],"depends_on":[]} -{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- entrypoint.sh writes owner's creds to `${HOME}/.claude/` at container start\n- CLI tools (claude, codex) read from `$HOME/.{cli}/` - NOT env vars\n\n## Why Env Var Override Won't Work\nCLI tools are NOT SDKs. They have their own auth:\n- `claude` CLI reads `$HOME/.claude/.credentials.json`\n- `codex` CLI reads `$HOME/.codex/auth.json`\n- They IGNORE `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` env vars\n\n## Solution: Per-User HOME Directories\n\nSet `HOME` env var to user-specific directory when spawning.\n\n### Impact on Git Operations\n\n**Git is SAFE** - uses credential helper with env vars:\n- `git-credential-relay` reads `CLOUD_API_URL`, `WORKSPACE_ID`, `WORKSPACE_TOKEN`\n- Does NOT read files from HOME\n- Git clone/push/pull will continue working\n\n**gh CLI needs config copied:**\n- Config at `${HOME}/.config/gh/hosts.yml`\n- Copy from container HOME to user HOME\n- Or rely on `GH_TOKEN` env var (already set)\n\n### prepareUserHome() Function\n\n```typescript\nasync function prepareUserHome(userId: string, provider: string): Promise {\n const userHome = `/home/workspace-users/${userId}`;\n const containerHome = process.env.HOME || '/home/workspace';\n \n // 1. Fetch user's credentials from cloud\n const creds = await fetchUserCredentials(userId, provider);\n \n // 2. Write provider-specific credential file\n if (provider === 'anthropic') {\n await fs.mkdir(`${userHome}/.claude`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.claude/.credentials.json`,\n JSON.stringify({ claudeAiOauth: creds })\n );\n } else if (provider === 'openai') {\n await fs.mkdir(`${userHome}/.codex`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.codex/auth.json`,\n JSON.stringify({ tokens: { access_token: creds.accessToken, refresh_token: creds.refreshToken } })\n );\n }\n \n // 3. Copy gh CLI config (for `gh pr create` etc.)\n await fs.cp(`${containerHome}/.config/gh`, `${userHome}/.config/gh`, { recursive: true });\n \n return userHome;\n}\n```\n\n### Credential Format Reference\n\n**Claude** (`$HOME/.claude/.credentials.json`):\n```json\n{ \"claudeAiOauth\": { \"accessToken\": \"...\", \"refreshToken\": \"...\", \"expiresAt\": \"...\" } }\n```\n\n**Codex** (`$HOME/.codex/auth.json`):\n```json\n{ \"tokens\": { \"access_token\": \"...\", \"refresh_token\": \"...\" } }\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n workspace_id UUID, user_id UUID, provider TEXT,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN, UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (fallback to container HOME)\n\n## Files to Modify\n- src/daemon/agent-manager.ts - call prepareUserHome before spawn\n- src/daemon/credential-writer.ts (new) - per-provider credential format\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- deploy/workspace/entrypoint.sh - already writes owner creds (reference)\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":120,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["architecture","team","credentials","p1"],"depends_on":["bd-critical-016"]} +{"id":"bd-critical-021","title":"[ARCHITECTURE] Per-User Credentials in Shared Workspaces","description":"Multi-user workspaces share single owner's credentials - breaks team usage.\n\n## Current Problem\n- Credentials stored at USER level (credentials table: userId, provider, tokens)\n- Workspace provisioning injects owner's tokens as CONTAINER-LEVEL env vars\n- entrypoint.sh writes owner's creds to `${HOME}/.claude/` at container start\n- CLI tools (claude, codex) read from `$HOME/.{cli}/` - NOT env vars\n\n## Why Env Var Override Won't Work\nCLI tools are NOT SDKs. They have their own auth:\n- `claude` CLI reads `$HOME/.claude/.credentials.json`\n- `codex` CLI reads `$HOME/.codex/auth.json`\n- They IGNORE `ANTHROPIC_API_KEY` / `OPENAI_API_KEY` env vars\n\n## Solution: Per-User HOME Directories\n\nSet `HOME` env var to user-specific directory when spawning.\n\n### Impact on Git Operations\n\n**Git is SAFE** - uses credential helper with env vars:\n- `git-credential-relay` reads `CLOUD_API_URL`, `WORKSPACE_ID`, `WORKSPACE_TOKEN`\n- Does NOT read files from HOME\n- Git clone/push/pull will continue working\n\n**gh CLI needs config copied:**\n- Config at `${HOME}/.config/gh/hosts.yml`\n- Copy from container HOME to user HOME\n- Or rely on `GH_TOKEN` env var (already set)\n\n### prepareUserHome() Function\n\n```typescript\nasync function prepareUserHome(userId: string, provider: string): Promise {\n const userHome = `/home/workspace-users/${userId}`;\n const containerHome = process.env.HOME || '/home/workspace';\n \n // 1. Fetch user's credentials from cloud\n const creds = await fetchUserCredentials(userId, provider);\n \n // 2. Write provider-specific credential file\n if (provider === 'anthropic') {\n await fs.mkdir(`${userHome}/.claude`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.claude/.credentials.json`,\n JSON.stringify({ claudeAiOauth: creds })\n );\n } else if (provider === 'openai') {\n await fs.mkdir(`${userHome}/.codex`, { recursive: true });\n await fs.writeFile(\n `${userHome}/.codex/auth.json`,\n JSON.stringify({ tokens: { access_token: creds.accessToken, refresh_token: creds.refreshToken } })\n );\n }\n \n // 3. Copy gh CLI config (for `gh pr create` etc.)\n await fs.cp(`${containerHome}/.config/gh`, `${userHome}/.config/gh`, { recursive: true });\n \n return userHome;\n}\n```\n\n### Credential Format Reference\n\n**Claude** (`$HOME/.claude/.credentials.json`):\n```json\n{ \"claudeAiOauth\": { \"accessToken\": \"...\", \"refreshToken\": \"...\", \"expiresAt\": \"...\" } }\n```\n\n**Codex** (`$HOME/.codex/auth.json`):\n```json\n{ \"tokens\": { \"access_token\": \"...\", \"refresh_token\": \"...\" } }\n```\n\n### Database Changes\n```sql\nCREATE TABLE workspace_credentials (\n workspace_id UUID, user_id UUID, provider TEXT,\n credential_id UUID REFERENCES credentials(id),\n is_default BOOLEAN, UNIQUE(workspace_id, user_id, provider)\n);\n```\n\n### Credential Resolution Order\n1. User's own credential (if connected)\n2. Workspace default (if set by admin)\n3. Owner's credential (fallback to container HOME)\n\n## Files to Modify\n- src/daemon/agent-manager.ts - call prepareUserHome before spawn\n- src/daemon/credential-writer.ts (new) - per-provider credential format\n- src/cloud/api/workspaces.ts - credential resolution endpoint\n- src/cloud/db/schema.ts - workspace_credentials table\n- deploy/workspace/entrypoint.sh - already writes owner creds (reference)\n\n## Security: Daemon Must Auth to Cloud\nDepends on bd-critical-016 (daemon auth). Without auth, any process could request any user's credentials.","priority":155,"status":"open","created_at":"2026-01-05T13:00:00Z","tags":["critical","architecture","team","credentials","launch-blocker","p00"],"depends_on":["bd-critical-016"]} From 2a72d87cc883ac720ce5ec6b4f37e1a0bf040e69 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 09:57:39 +0100 Subject: [PATCH 088/103] Fix CLI auth: delay Enter key and improve polling MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Root cause: Claude CLI's Ink text input needs time to process typed input before receiving Enter. Sending code + Enter immediately failed. Changes: - cli-auth.ts: Write code first, wait 1 second, then send Enter - cli-auth.ts: Remove BROWSER=echo (caused CLI to wait for callback) - cli-auth.ts: Add keep-alive pings and better logging - dashboard-server: Wait up to 5s for credentials after code submission - dashboard-server: Return session status in code submission response - cli-auth-config: Add prompts for login success, trust directory - onboarding.test.ts: Update test assertions for new patterns - provisioner: Add WORKSPACE_DEV_MOUNT for faster local iteration - package.json: Auto-enable dev mount in cloud:api script Tested: Auth flow completes successfully with credentials saved. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- package.json | 2 +- src/cloud/api/onboarding.test.ts | 10 +-- src/cloud/provisioner/index.ts | 10 ++- src/daemon/cli-auth.ts | 104 +++++++++++++++++++++++++++++-- src/dashboard-server/server.ts | 53 +++++++++++++--- src/shared/cli-auth-config.ts | 30 ++++++--- 6 files changed, 179 insertions(+), 30 deletions(-) diff --git a/package.json b/package.json index ab5c6a53..f0617607 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "services:down": "docker compose -f docker-compose.dev.yml down", "services:logs": "docker compose -f docker-compose.dev.yml logs -f postgres redis", "cloud:setup": "./scripts/cloud-setup.sh", - "cloud:api": "node -r dotenv/config dist/cloud/index.js", + "cloud:api": "WORKSPACE_DEV_MOUNT=true node -r dotenv/config dist/cloud/index.js", "precloud": "./scripts/cloud-setup.sh --skip-data", "cloud": "concurrently -n api,daemon,dashboard -c cyan,blue,magenta \"npm run cloud:api\" \"npm run dev:daemon\" \"npm run dev:next\"" }, diff --git a/src/cloud/api/onboarding.test.ts b/src/cloud/api/onboarding.test.ts index 5df780b6..a0bde16d 100644 --- a/src/cloud/api/onboarding.test.ts +++ b/src/cloud/api/onboarding.test.ts @@ -55,7 +55,7 @@ describe('CLI Auth Config', () => { expect(prompt2!.description).toBe('Dark mode prompt'); }); - it('detects auth method prompt', () => { + it('detects login method prompt', () => { const respondedPrompts = new Set(); const prompt1 = findMatchingPrompt( @@ -64,7 +64,7 @@ describe('CLI Auth Config', () => { respondedPrompts ); expect(prompt1).toBeTruthy(); - expect(prompt1!.description).toBe('Auth method prompt'); + expect(prompt1!.description).toBe('Login method selection'); const prompt2 = findMatchingPrompt( 'How would you like to authenticate?', @@ -72,20 +72,20 @@ describe('CLI Auth Config', () => { respondedPrompts ); expect(prompt2).toBeTruthy(); - expect(prompt2!.description).toBe('Auth method prompt'); + expect(prompt2!.description).toBe('Login method selection'); }); it('detects trust directory prompt', () => { const respondedPrompts = new Set(); const prompt = findMatchingPrompt( - 'Do you trust this directory?', + 'Do you trust the files in this folder?', config.prompts, respondedPrompts ); expect(prompt).toBeTruthy(); expect(prompt!.description).toBe('Trust directory prompt'); - expect(prompt!.response).toBe('y\r'); + expect(prompt!.response).toBe('\r'); // Press enter to select first option (Yes, proceed) }); it('does not respond to same prompt twice', () => { diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index 9bbe6138..49e05015 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -869,9 +869,17 @@ class DockerProvisioner implements ComputeProvisioner { const runningInDocker = process.env.RUNNING_IN_DOCKER === 'true'; const networkArg = runningInDocker ? '--network agent-relay-dev' : ''; + // In development, mount local dist folder for faster iteration + // Set WORKSPACE_DEV_MOUNT=true to enable + const devMount = process.env.WORKSPACE_DEV_MOUNT === 'true'; + const volumeArgs = devMount ? `-v "${process.cwd()}/dist:/app/dist:ro"` : ''; + if (devMount) { + console.log('[provisioner] Dev mode: mounting local dist/ folder into workspace container'); + } + try { execSync( - `docker run -d --user root --name ${containerName} ${networkArg} -p ${hostPort}:${WORKSPACE_PORT} -p ${sshHostPort}:${SSH_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, + `docker run -d --user root --name ${containerName} ${networkArg} ${volumeArgs} -p ${hostPort}:${WORKSPACE_PORT} -p ${sshHostPort}:${SSH_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, { stdio: 'pipe' } ); diff --git a/src/daemon/cli-auth.ts b/src/daemon/cli-auth.ts index b4b6d0bb..908e6d5b 100644 --- a/src/daemon/cli-auth.ts +++ b/src/daemon/cli-auth.ts @@ -95,6 +95,13 @@ export async function startCLIAuth( }; sessions.set(sessionId, session); + logger.info('CLI auth session created', { + sessionId, + provider, + totalActiveSessions: sessions.size, + allSessionIds: Array.from(sessions.keys()), + }); + // Check if already authenticated (credentials exist) try { const existingCreds = await extractCredentials(provider, config); @@ -147,7 +154,8 @@ export async function startCLIAuth( ...process.env, NO_COLOR: '1', TERM: 'xterm-256color', - BROWSER: 'echo', + // Don't set BROWSER - let CLI fail to open browser and fall back to manual paste mode + // Setting BROWSER: 'echo' caused CLI to think browser opened and wait for callback that never came DISPLAY: '', } as Record, }); @@ -166,6 +174,24 @@ export async function startCLIAuth( } }, config.waitTimeout + OAUTH_COMPLETION_TIMEOUT); + // Keep-alive: Some CLIs timeout if they don't receive stdin input + // Send a space+backspace every 20 seconds to simulate user presence + const keepAliveInterval = setInterval(() => { + if (session.status === 'waiting_auth' && session.process) { + try { + // Send space then backspace - appears as user typing but no net effect + session.process.write(' \b'); + logger.debug('Keep-alive ping sent', { + sessionId, + status: session.status, + ageSeconds: Math.round((Date.now() - session.createdAt.getTime()) / 1000), + }); + } catch { + // Process may have exited + } + } + }, 20000); + proc.onData((data: string) => { session.output += data; @@ -198,6 +224,18 @@ export async function startCLIAuth( resolveAuthUrl(); } + // Log all output after auth URL is captured (for debugging) + if (session.authUrl) { + const trimmedData = stripAnsiCodes(data).trim(); + if (trimmedData.length > 0) { + logger.info('PTY output after auth URL', { + provider, + sessionId, + output: trimmedData.substring(0, 500), + }); + } + } + // Check for success and try to extract credentials if (matchesSuccessPattern(data, config.successPatterns)) { session.status = 'success'; @@ -224,6 +262,10 @@ export async function startCLIAuth( proc.onExit(async ({ exitCode }) => { clearTimeout(timeout); clearTimeout(authUrlTimeout); + clearInterval(keepAliveInterval); + + // Clear process reference so submitAuthCode knows PTY is gone + session.process = undefined; // Log full output for debugging PTY exit issues const cleanOutput = stripAnsiCodes(session.output); @@ -292,17 +334,44 @@ export async function submitAuthCode( sessionId: string, code: string ): Promise<{ success: boolean; error?: string; needsRestart?: boolean }> { + // Log all active sessions for debugging + const activeSessionIds = Array.from(sessions.keys()); + logger.info('submitAuthCode called', { + sessionId, + codeLength: code.length, + activeSessionCount: activeSessionIds.length, + activeSessionIds, + }); + const session = sessions.get(sessionId); if (!session) { - logger.warn('Auth code submission failed: session not found', { sessionId }); + logger.warn('Auth code submission failed: session not found', { + sessionId, + activeSessionIds, + hint: 'Session may have been cleaned up or never created', + }); return { success: false, error: 'Session not found or expired', needsRestart: true }; } + logger.info('Session found for code submission', { + sessionId, + provider: session.provider, + status: session.status, + hasProcess: !!session.process, + hasAuthUrl: !!session.authUrl, + hasToken: !!session.token, + promptsHandled: session.promptsHandled, + createdAt: session.createdAt.toISOString(), + ageSeconds: Math.round((Date.now() - session.createdAt.getTime()) / 1000), + }); + if (!session.process) { logger.warn('Auth code submission failed: no PTY process', { sessionId, sessionStatus: session.status, provider: session.provider, + outputLength: session.output?.length || 0, + outputTail: session.output ? stripAnsiCodes(session.output).slice(-500) : 'no output', }); // Try to extract credentials as a fallback - maybe auth completed in browser @@ -333,9 +402,28 @@ export async function submitAuthCode( } try { - // Write the auth code followed by enter - session.process.write(code + '\r'); - logger.info('Auth code submitted', { sessionId, codeLength: code.length }); + // Clean the code - trim whitespace + const cleanCode = code.trim(); + + logger.info('Writing auth code to PTY', { + sessionId, + originalLength: code.length, + cleanLength: cleanCode.length, + codePreview: cleanCode.substring(0, 20) + '...', + }); + + // Write the auth code WITHOUT Enter first + // Claude CLI's Ink text input needs time to process the input + // before receiving Enter (tested: immediate Enter fails, delayed Enter works) + session.process.write(cleanCode); + logger.info('Auth code written, waiting before sending Enter...', { sessionId }); + + // Wait 1 second for CLI to process the typed input + await new Promise(resolve => setTimeout(resolve, 1000)); + + // Now send Enter to submit + session.process.write('\r'); + logger.info('Enter key sent', { sessionId }); // Start polling for credentials after code submission // The CLI should write credentials shortly after receiving the code @@ -347,7 +435,11 @@ export async function submitAuthCode( return { success: true }; } catch (err) { logger.error('Failed to submit auth code', { sessionId, error: String(err) }); - return { success: false, error: 'Failed to write to CLI process' }; + return { + success: false, + error: 'Failed to write to CLI process. The process may have exited. Please try again.', + needsRestart: true, + }; } } diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index d38bcf93..e0499550 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -2073,24 +2073,57 @@ export async function startDashboard( * Used when OAuth returns a code that must be pasted into the CLI */ app.post('/auth/cli/:provider/code/:sessionId', async (req, res) => { - const { sessionId } = req.params; + const { provider, sessionId } = req.params; const { code } = req.body; + console.log('[cli-auth] Auth code submission received', { provider, sessionId, codeLength: code?.length }); + if (!code || typeof code !== 'string') { return res.status(400).json({ error: 'Auth code is required' }); } - const result = await submitAuthCode(sessionId, code); - if (!result.success) { - // Use 400 for known errors (like PTY exited), 404 for session not found - const status = result.needsRestart ? 400 : 404; - return res.status(status).json({ - error: result.error || 'Session not found or process not running', - needsRestart: result.needsRestart, + try { + const result = await submitAuthCode(sessionId, code); + console.log('[cli-auth] Auth code submission result', { provider, sessionId, result }); + + if (!result.success) { + // Use 400 for all errors since they can be retried + return res.status(400).json({ + error: result.error || 'Session not found or process not running', + needsRestart: result.needsRestart ?? true, + }); + } + + // Wait a few seconds for CLI to process and write credentials + // The 1s delay in submitAuthCode + CLI processing time means credentials + // should be available within 3-5 seconds + let sessionStatus = 'waiting_auth'; + for (let i = 0; i < 10; i++) { + await new Promise(resolve => setTimeout(resolve, 500)); + const session = getAuthSession(sessionId); + if (session?.status === 'success') { + sessionStatus = 'success'; + console.log('[cli-auth] Credentials found after code submission', { provider, sessionId, attempt: i + 1 }); + break; + } + if (session?.status === 'error') { + sessionStatus = 'error'; + break; + } + } + + res.json({ + success: true, + message: 'Auth code submitted', + status: sessionStatus, + }); + } catch (err) { + console.error('[cli-auth] Auth code submission error', { provider, sessionId, error: String(err) }); + return res.status(500).json({ + error: 'Internal error submitting auth code. Please try again.', + needsRestart: true, }); } - - res.json({ success: true, message: 'Auth code submitted' }); }); /** diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts index 69589a19..69b1125a 100644 --- a/src/shared/cli-auth-config.ts +++ b/src/shared/cli-auth-config.ts @@ -79,18 +79,34 @@ export const CLI_AUTH_CONFIG: Record = { description: 'Dark mode prompt', }, { - // Be more specific to avoid matching after URL is shown - pattern: /how\s*would\s*you\s*like\s*to\s*authenticate|choose.*auth.*method|select.*auth|subscription\s*or.*api\s*key/i, - response: '\r', // Press enter for first option (subscription) + // Login method selection - "Select login method:" with Claude account or Console options + pattern: /select\s*login\s*method|how\s*would\s*you\s*like\s*to\s*authenticate|choose.*auth.*method|select.*auth|subscription\s*or.*api\s*key/i, + response: '\r', // Press enter for first option (Claude account with subscription) delay: 100, - description: 'Auth method prompt', + description: 'Login method selection', }, { - pattern: /trust\s*(this|the)\s*(directory|folder|workspace)/i, - response: 'y\r', // Yes to trust - delay: 100, + // Login success - press enter to continue + pattern: /login\s*successful|logged\s*in.*press\s*enter|press\s*enter\s*to\s*continue/i, + response: '\r', + delay: 200, + description: 'Login success prompt', + }, + { + // Trust directory - matches "Do you trust the files in this folder?" and similar + pattern: /trust\s*(this|the)?\s*(files|directory|folder|workspace)|do\s*you\s*trust/i, + response: '\r', // Press enter for first option (Yes, proceed) + delay: 200, description: 'Trust directory prompt', }, + { + // Fallback: Any "press enter" or "enter to confirm/continue" prompt + // Keep this LAST so more specific handlers match first + pattern: /press\s*enter|enter\s*to\s*(confirm|continue|proceed)|hit\s*enter/i, + response: '\r', + delay: 300, + description: 'Generic enter prompt', + }, ], successPatterns: [/success/i, /authenticated/i, /logged\s*in/i, /you.*(?:are|now).*logged/i], }, From 0bff61061289ff0a2d68d13f3f970e0009a04309 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 10:02:49 +0100 Subject: [PATCH 089/103] Improve trust directory prompt pattern for CLI auth MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add "safety check" and "yes, i trust" patterns to better match Claude Code's trust prompt. Increase delay to 300ms for menu render. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/shared/cli-auth-config.ts | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts index 69b1125a..9046ad62 100644 --- a/src/shared/cli-auth-config.ts +++ b/src/shared/cli-auth-config.ts @@ -93,10 +93,13 @@ export const CLI_AUTH_CONFIG: Record = { description: 'Login success prompt', }, { - // Trust directory - matches "Do you trust the files in this folder?" and similar - pattern: /trust\s*(this|the)?\s*(files|directory|folder|workspace)|do\s*you\s*trust/i, + // Trust directory - matches various trust prompts including: + // "Quick safety check: Is this a project you created or one you trust?" + // "Yes, I trust this folder" + // "Do you trust the files in this folder?" + pattern: /trust\s*(this|the)?\s*(files|directory|folder|workspace)|do\s*you\s*trust|safety\s*check|yes,?\s*i\s*trust/i, response: '\r', // Press enter for first option (Yes, proceed) - delay: 200, + delay: 300, // Slightly longer delay for menu to render description: 'Trust directory prompt', }, { From a9090054e404a884f758a65066da1daba1814791 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 10:05:06 +0100 Subject: [PATCH 090/103] Add trust directory prompt handling to pty-wrapper MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When spawning agents, the pty-wrapper now auto-accepts: 1. --dangerously-skip-permissions prompt ("Yes, I accept") 2. Trust directory prompt ("Yes, I trust this folder") This ensures agents can start without manual intervention. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/wrapper/pty-wrapper.ts | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index e3cd3d5d..a32bf552 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -574,16 +574,19 @@ export class PtyWrapper extends EventEmitter { } /** - * Auto-accept Claude's first-run prompts for --dangerously-skip-permissions - * Detects the acceptance prompt and sends "2" to select "Yes, I accept" + * Auto-accept Claude's first-run prompts + * Handles: + * 1. --dangerously-skip-permissions acceptance ("Yes, I accept") + * 2. Trust directory prompt ("Yes, I trust this folder") */ private handleAutoAcceptPrompts(data: string): void { if (this.hasAcceptedPrompt) return; if (!this.ptyProcess || !this.running) return; - // Check for the permission acceptance prompt - // Pattern: "2. Yes, I accept" in the output const cleanData = stripAnsi(data); + + // Check for the permission acceptance prompt (--dangerously-skip-permissions) + // Pattern: "2. Yes, I accept" in the output if (cleanData.includes('Yes, I accept') && cleanData.includes('No, exit')) { console.log(`[pty:${this.config.name}] Detected permission prompt, auto-accepting...`); this.hasAcceptedPrompt = true; @@ -593,6 +596,22 @@ export class PtyWrapper extends EventEmitter { this.ptyProcess.write('2'); } }, 100); + return; + } + + // Check for the trust directory prompt + // Pattern: "1. Yes, I trust this folder" with "No, exit" + if ((cleanData.includes('trust this folder') || cleanData.includes('safety check')) + && cleanData.includes('No, exit')) { + console.log(`[pty:${this.config.name}] Detected trust directory prompt, auto-accepting...`); + this.hasAcceptedPrompt = true; + // Send Enter to accept first option (already selected) + setTimeout(() => { + if (this.ptyProcess && this.running) { + this.ptyProcess.write('\r'); + } + }, 300); + return; } } From f5a76d9ee744434aea6b2ce09ded4abeb6ed32cb Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:01:40 +0100 Subject: [PATCH 091/103] Fix proxy wildcard, remove SSH dead code, fix workspace_members, reduce spawner message MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix Express 5 wildcard parameter returning array instead of string in proxy route (path segments joined with comma instead of slash) - Remove SSH dead code (ssh_host, ssh_port, ssh_password) - device flow OAuth is used instead of localhost callback tunneling - Add workspace creator as owner in workspace_members during provisioning - Reduce spawner message size from 400+ lines of docs to 5-line reminder to prevent agents from getting overwhelmed ("Meandering" state) - Fix summary reminder injection to use single-line format with proper Enter delay ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/bridge/spawner.ts | 108 +++------------- src/cloud/api/workspaces.ts | 54 ++++++-- src/cloud/db/drizzle.ts | 9 -- .../db/migrations/0007_drop_workspace_ssh.sql | 6 + src/cloud/db/migrations/meta/_journal.json | 7 + src/cloud/db/schema.ts | 4 - src/cloud/provisioner/index.ts | 89 +++++-------- src/wrapper/pty-wrapper.ts | 121 +++++++++++++----- 8 files changed, 202 insertions(+), 196 deletions(-) create mode 100644 src/cloud/db/migrations/0007_drop_workspace_ssh.sql diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index 26b66bc5..3a15fbd8 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -67,87 +67,16 @@ export type OnAgentDeathCallback = (info: { }) => void; /** - * Load the relay communication snippet. - * This defines how agents communicate via relay patterns. - * Cached after first load. + * Get a minimal relay reminder. + * Agents already have full relay docs via CLAUDE.md - this is just a brief reminder. + * Loading full docs (400+ lines) overwhelms agents and causes "meandering". */ -let relaySnippetCache: string | null = null; -let relayProtocolCache: string | null = null; - -function loadRelaySnippets(): string { - if (relaySnippetCache !== null && relayProtocolCache !== null) { - return `${relaySnippetCache}\n\n${relayProtocolCache}`; - } - - try { - // Resolve path relative to this file's location - const __filename = fileURLToPath(import.meta.url); - const __dirname = path.dirname(__filename); - // From src/bridge/ go up to package root, then into docs/ - const snippetPath = path.resolve(__dirname, '../../docs/agent-relay-snippet.md'); - const protocolPath = path.resolve(__dirname, '../../docs/agent-relay-protocol.md'); - - // Load base communication snippet - if (fs.existsSync(snippetPath)) { - relaySnippetCache = fs.readFileSync(snippetPath, 'utf-8'); - console.log('[spawner] Loaded relay communication snippet'); - } else { - // Fallback: minimal relay instructions - relaySnippetCache = `# Agent Relay Communication - -You are connected to an agent relay system. Use these patterns to communicate: - -## Sending Messages -\`\`\` -->relay:AgentName <<< -Your message here.>>> -\`\`\` - -## Communication Protocol -- **ACK immediately** when receiving a task -- **Report completion** with DONE: summary - -## Common Patterns -- \`->relay:Lead <<>>\` -- \`->relay:Lead <<>>\` -`; - console.log('[spawner] Using fallback relay snippet (docs/agent-relay-snippet.md not found)'); - } - - // Load protocol snippet (session persistence, trajectories, etc.) - if (fs.existsSync(protocolPath)) { - relayProtocolCache = fs.readFileSync(protocolPath, 'utf-8'); - console.log('[spawner] Loaded relay protocol snippet'); - } else { - // Fallback: minimal protocol instructions - relayProtocolCache = `# Agent Relay Protocol - -## Work Trajectories (Required) - -Record your work using trail commands: - -\`\`\`bash -trail start "Task description" -trail decision "Choice made" --reasoning "Why" -trail complete --summary "What was done" --confidence 0.85 -\`\`\` - -## Session End - -When done, output: -\`\`\` -[[SESSION_END]]Work complete.[[/SESSION_END]] -\`\`\` -`; - console.log('[spawner] Using fallback protocol snippet (docs/agent-relay-protocol.md not found)'); - } - } catch (err: any) { - console.error('[spawner] Failed to load relay snippets:', err.message); - relaySnippetCache = relaySnippetCache || ''; - relayProtocolCache = relayProtocolCache || ''; - } - - return `${relaySnippetCache}\n\n${relayProtocolCache}`; +function getMinimalRelayReminder(): string { + return `# Quick Relay Reference +- Send: \`->relay:Name <<>>\` +- ACK tasks, send DONE when complete +- Use \`trail start/decision/complete\` for trajectories +- Output \`[[SESSION_END]]..[[/SESSION_END]]\` when done`; } export class AgentSpawner { @@ -349,12 +278,14 @@ export class AgentSpawner { // Fall back to callbacks only if no dashboardPort is set // Note: Spawned agents CAN spawn sub-workers intentionally - the parser is strict enough // to avoid accidental spawns from documentation text (requires line start, PascalCase, known CLI) + // Use request.cwd if specified, otherwise use projectRoot + const agentCwd = request.cwd || this.projectRoot; const ptyConfig: PtyWrapperConfig = { name, command, args, socketPath: this.socketPath, - cwd: this.projectRoot, + cwd: agentCwd, logsDir: this.logsDir, dashboardPort: this.dashboardPort, // Shadow agent configuration @@ -446,16 +377,15 @@ export class AgentSpawner { }; } - // Build the full message: relay snippet + policy instructions (if any) + task + // Build the full message: minimal relay reminder + policy instructions (if any) + task let fullMessage = task || ''; - // Always prepend relay communication rules so agents know how to communicate - // This is essential because target repos may not have the snippet installed - // Includes both base communication patterns AND protocol rules (trajectories, session persistence) - const relayRules = loadRelaySnippets(); - if (relayRules) { - fullMessage = `${relayRules}\n\n---\n\n${fullMessage}`; - if (debug) console.log(`[spawner:debug] Prepended relay communication rules for ${name}`); + // Prepend a brief relay reminder (agents have full docs via CLAUDE.md) + // Note: Previously loaded full 400+ line docs which overwhelmed agents + const relayReminder = getMinimalRelayReminder(); + if (relayReminder) { + fullMessage = `${relayReminder}\n\n---\n\n${fullMessage}`; + if (debug) console.log(`[spawner:debug] Prepended relay reminder for ${name}`); } // Prepend policy instructions if enforcement is enabled diff --git a/src/cloud/api/workspaces.ts b/src/cloud/api/workspaces.ts index d8baefaf..dd1b5b86 100644 --- a/src/cloud/api/workspaces.ts +++ b/src/cloud/api/workspaces.ts @@ -264,10 +264,6 @@ workspacesRouter.get('/:id', async (req: Request, res: Response) => { computeProvider: workspace.computeProvider, config: workspace.config, errorMessage: workspace.errorMessage, - // SSH access for port forwarding (e.g., Codex OAuth) - sshHost: workspace.sshHost, - sshPort: workspace.sshPort, - sshPassword: workspace.sshPassword, repositories: repositories.map((r) => ({ id: r.id, fullName: r.githubFullName, @@ -671,7 +667,10 @@ async function removeDomainFromCompute(workspace: Workspace): Promise { */ workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Response) => { const userId = req.session.userId!; - const { id, proxyPath } = req.params; + const { id } = req.params; + // Express 5 wildcard params return an array of path segments, not a slash-separated string + const proxyPathParam = req.params.proxyPath; + const proxyPath = Array.isArray(proxyPathParam) ? proxyPathParam.join('/') : proxyPathParam; try { const workspace = await db.workspaces.findById(id); @@ -703,18 +702,32 @@ workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Respon const targetUrl = `${targetBaseUrl}/api/${proxyPath}`; console.log(`[workspace-proxy] ${req.method} ${targetUrl}`); + // Store targetUrl for error handling + (req as any)._proxyTargetUrl = targetUrl; + + // Add timeout to prevent hanging requests + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 15000); // 15s timeout + const fetchOptions: RequestInit = { method: req.method, headers: { 'Content-Type': 'application/json', }, + signal: controller.signal, }; if (req.method !== 'GET' && req.method !== 'HEAD') { fetchOptions.body = JSON.stringify(req.body); } - const proxyRes = await fetch(targetUrl, fetchOptions); + let proxyRes: globalThis.Response; + try { + proxyRes = await fetch(targetUrl, fetchOptions); + } finally { + clearTimeout(timeout); + } + console.log(`[workspace-proxy] Response: ${proxyRes.status} ${proxyRes.statusText}`); // Handle non-JSON responses gracefully const contentType = proxyRes.headers.get('content-type'); @@ -726,10 +739,35 @@ workspacesRouter.all('/:id/proxy/{*proxyPath}', async (req: Request, res: Respon res.status(proxyRes.status).send(text); } } catch (error) { - console.error('[workspace-proxy] Error:', error); + const targetUrl = (req as any)._proxyTargetUrl || 'unknown'; + console.error('[workspace-proxy] Error proxying to:', targetUrl); + console.error('[workspace-proxy] Error details:', error); + + // Check for timeout/abort errors + if (error instanceof Error && error.name === 'AbortError') { + res.status(504).json({ + error: 'Workspace request timed out', + details: 'The workspace did not respond within 15 seconds', + targetUrl: targetUrl, + }); + return; + } + + // Check for connection refused (workspace not running) + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('fetch failed')) { + res.status(503).json({ + error: 'Workspace is not reachable', + details: 'The workspace container may not be running or accepting connections', + targetUrl: targetUrl, + }); + return; + } + res.status(500).json({ error: 'Failed to proxy request to workspace', - details: error instanceof Error ? error.message : 'Unknown error' + details: errorMessage, + targetUrl: targetUrl, // Include target URL for debugging }); } }); diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index 254b6315..026f9b84 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -384,9 +384,6 @@ export interface WorkspaceQueries { computeId?: string; publicUrl?: string; errorMessage?: string; - sshHost?: string; - sshPort?: number; - sshPassword?: string; } ): Promise; updateConfig(id: string, config: schema.WorkspaceConfig): Promise; @@ -434,9 +431,6 @@ export const workspaceQueries: WorkspaceQueries = { computeId?: string; publicUrl?: string; errorMessage?: string; - sshHost?: string; - sshPort?: number; - sshPassword?: string; } ): Promise { const db = getDb(); @@ -447,9 +441,6 @@ export const workspaceQueries: WorkspaceQueries = { computeId: options?.computeId, publicUrl: options?.publicUrl, errorMessage: options?.errorMessage, - sshHost: options?.sshHost, - sshPort: options?.sshPort, - sshPassword: options?.sshPassword, updatedAt: new Date(), }) .where(eq(schema.workspaces.id, id)); diff --git a/src/cloud/db/migrations/0007_drop_workspace_ssh.sql b/src/cloud/db/migrations/0007_drop_workspace_ssh.sql new file mode 100644 index 00000000..36259c35 --- /dev/null +++ b/src/cloud/db/migrations/0007_drop_workspace_ssh.sql @@ -0,0 +1,6 @@ +-- Drop SSH columns from workspaces table (no longer needed - CLI auth uses device flow) +ALTER TABLE workspaces DROP COLUMN IF EXISTS ssh_host; +--> statement-breakpoint +ALTER TABLE workspaces DROP COLUMN IF EXISTS ssh_port; +--> statement-breakpoint +ALTER TABLE workspaces DROP COLUMN IF EXISTS ssh_password; diff --git a/src/cloud/db/migrations/meta/_journal.json b/src/cloud/db/migrations/meta/_journal.json index 25480972..f5e486f5 100644 --- a/src/cloud/db/migrations/meta/_journal.json +++ b/src/cloud/db/migrations/meta/_journal.json @@ -43,6 +43,13 @@ "when": 1736121600000, "tag": "0006_workspace_ssh", "breakpoints": true + }, + { + "idx": 6, + "version": "5", + "when": 1736208000000, + "tag": "0007_drop_workspace_ssh", + "breakpoints": true } ] } \ No newline at end of file diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index e707ab0c..ec4ef741 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -174,10 +174,6 @@ export const workspaces = pgTable('workspaces', { publicUrl: varchar('public_url', { length: 255 }), customDomain: varchar('custom_domain', { length: 255 }), customDomainStatus: varchar('custom_domain_status', { length: 50 }), - // SSH access for port forwarding (e.g., Codex OAuth callback tunneling) - sshHost: varchar('ssh_host', { length: 255 }), - sshPort: integer('ssh_port'), - sshPassword: varchar('ssh_password', { length: 255 }), config: jsonb('config').$type().notNull().default({}), errorMessage: text('error_message'), createdAt: timestamp('created_at').defaultNow().notNull(), diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index 49e05015..09ac4431 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -11,17 +11,9 @@ import { vault } from '../vault/index.js'; import { nangoService } from '../services/nango.js'; const WORKSPACE_PORT = 3888; -const SSH_PORT = 2222; const FETCH_TIMEOUT_MS = 10_000; const WORKSPACE_IMAGE = process.env.WORKSPACE_IMAGE || 'ghcr.io/agentworkforce/relay-workspace:latest'; -/** - * Generate a random password for SSH access - */ -function generateSSHPassword(): string { - return crypto.randomBytes(16).toString('base64').replace(/[/+=]/g, '').substring(0, 16); -} - /** * Get a fresh GitHub App installation token from Nango. * Looks up the user's connected repositories to find a valid Nango connection. @@ -153,9 +145,6 @@ interface ComputeProvisioner { provision(workspace: Workspace, credentials: Map): Promise<{ computeId: string; publicUrl: string; - sshHost?: string; - sshPort?: number; - sshPassword?: string; }>; deprovision(workspace: Workspace): Promise; getStatus(workspace: Workspace): Promise; @@ -209,9 +198,8 @@ class FlyProvisioner implements ComputeProvisioner { async provision( workspace: Workspace, credentials: Map - ): Promise<{ computeId: string; publicUrl: string; sshHost?: string; sshPort?: number; sshPassword?: string }> { + ): Promise<{ computeId: string; publicUrl: string }> { const appName = `ar-${workspace.id.substring(0, 8)}`; - const sshPassword = generateSSHPassword(); // Create Fly app await fetchWithRetry('https://api.machines.dev/v1/apps', { @@ -226,22 +214,22 @@ class FlyProvisioner implements ComputeProvisioner { }), }); - // Set secrets (credentials + SSH password) - const secrets: Record = { - SSH_PASSWORD: sshPassword, - }; + // Set secrets (provider credentials) + const secrets: Record = {}; for (const [provider, token] of credentials) { secrets[`${provider.toUpperCase()}_TOKEN`] = token; } - await fetchWithRetry(`https://api.machines.dev/v1/apps/${appName}/secrets`, { - method: 'POST', - headers: { - Authorization: `Bearer ${this.apiToken}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify(secrets), - }); + if (Object.keys(secrets).length > 0) { + await fetchWithRetry(`https://api.machines.dev/v1/apps/${appName}/secrets`, { + method: 'POST', + headers: { + Authorization: `Bearer ${this.apiToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(secrets), + }); + } // If custom workspace domain is configured, add certificate const customHostname = this.workspaceDomain @@ -253,7 +241,6 @@ class FlyProvisioner implements ComputeProvisioner { } // Create machine with auto-stop/start for cost optimization - // SSH enabled for port forwarding (e.g., Codex OAuth) const machineResponse = await fetchWithRetry( `https://api.machines.dev/v1/apps/${appName}/machines`, { @@ -277,8 +264,6 @@ class FlyProvisioner implements ComputeProvisioner { // Git gateway configuration CLOUD_API_URL: this.cloudApiUrl, WORKSPACE_TOKEN: this.generateWorkspaceToken(workspace.id), - // SSH for port forwarding (Codex OAuth, etc.) - ENABLE_SSH: 'true', }, services: [ { @@ -293,12 +278,6 @@ class FlyProvisioner implements ComputeProvisioner { auto_start_machines: true, min_machines_running: 0, }, - { - // SSH for port forwarding - ports: [{ port: SSH_PORT, handlers: [] }], - protocol: 'tcp', - internal_port: SSH_PORT, - }, ], guest: { cpu_kind: 'shared', @@ -322,16 +301,11 @@ class FlyProvisioner implements ComputeProvisioner { ? `https://${customHostname}` : `https://${appName}.fly.dev`; - const sshHost = customHostname || `${appName}.fly.dev`; - await softHealthCheck(publicUrl); return { computeId: machine.id, publicUrl, - sshHost, - sshPort: SSH_PORT, - sshPassword, }; } @@ -836,9 +810,8 @@ class DockerProvisioner implements ComputeProvisioner { async provision( workspace: Workspace, credentials: Map - ): Promise<{ computeId: string; publicUrl: string; sshHost?: string; sshPort?: number; sshPassword?: string }> { + ): Promise<{ computeId: string; publicUrl: string }> { const containerName = `ar-${workspace.id.substring(0, 8)}`; - const sshPassword = generateSSHPassword(); // Build environment variables const envArgs: string[] = [ @@ -851,35 +824,33 @@ class DockerProvisioner implements ComputeProvisioner { `-e AGENT_RELAY_DASHBOARD_PORT=${WORKSPACE_PORT}`, `-e CLOUD_API_URL=${this.cloudApiUrl}`, `-e WORKSPACE_TOKEN=${this.generateWorkspaceToken(workspace.id)}`, - // SSH for port forwarding (Codex OAuth, etc.) - `-e ENABLE_SSH=true`, - `-e SSH_PASSWORD=${sshPassword}`, ]; for (const [provider, token] of credentials) { envArgs.push(`-e ${provider.toUpperCase()}_TOKEN=${token}`); } - // Run container with SSH port exposed + // Run container const { execSync } = await import('child_process'); const hostPort = 3000 + Math.floor(Math.random() * 1000); - const sshHostPort = 2200 + Math.floor(Math.random() * 100); // When running in Docker, connect to the same network for container-to-container communication const runningInDocker = process.env.RUNNING_IN_DOCKER === 'true'; const networkArg = runningInDocker ? '--network agent-relay-dev' : ''; - // In development, mount local dist folder for faster iteration + // In development, mount local dist and docs folders for faster iteration // Set WORKSPACE_DEV_MOUNT=true to enable const devMount = process.env.WORKSPACE_DEV_MOUNT === 'true'; - const volumeArgs = devMount ? `-v "${process.cwd()}/dist:/app/dist:ro"` : ''; + const volumeArgs = devMount + ? `-v "${process.cwd()}/dist:/app/dist:ro" -v "${process.cwd()}/docs:/app/docs:ro"` + : ''; if (devMount) { - console.log('[provisioner] Dev mode: mounting local dist/ folder into workspace container'); + console.log('[provisioner] Dev mode: mounting local dist/ and docs/ folders into workspace container'); } try { execSync( - `docker run -d --user root --name ${containerName} ${networkArg} ${volumeArgs} -p ${hostPort}:${WORKSPACE_PORT} -p ${sshHostPort}:${SSH_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, + `docker run -d --user root --name ${containerName} ${networkArg} ${volumeArgs} -p ${hostPort}:${WORKSPACE_PORT} ${envArgs.join(' ')} ${WORKSPACE_IMAGE}`, { stdio: 'pipe' } ); @@ -895,9 +866,6 @@ class DockerProvisioner implements ComputeProvisioner { return { computeId: containerName, publicUrl, - sshHost: 'localhost', - sshPort: sshHostPort, - sshPassword, }; } catch (error) { // Clean up container if it was created but health check failed @@ -1000,6 +968,16 @@ export class WorkspaceProvisioner { }, }); + // Add creator as owner in workspace_members for team collaboration support + await db.workspaceMembers.addMember({ + workspaceId: workspace.id, + userId: config.userId, + role: 'owner', + invitedBy: config.userId, // Self-invited as creator + }); + // Auto-accept the creator's membership + await db.workspaceMembers.acceptInvite(workspace.id, config.userId); + // Get credentials const credentials = new Map(); for (const provider of config.providers) { @@ -1029,7 +1007,7 @@ export class WorkspaceProvisioner { // Provision compute try { - const { computeId, publicUrl, sshHost, sshPort, sshPassword } = await this.provisioner.provision( + const { computeId, publicUrl } = await this.provisioner.provision( workspace, credentials ); @@ -1037,9 +1015,6 @@ export class WorkspaceProvisioner { await db.workspaces.updateStatus(workspace.id, 'running', { computeId, publicUrl, - sshHost, - sshPort, - sshPassword, }); return { diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index a32bf552..97fc6925 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -12,7 +12,7 @@ import path from 'node:path'; import { EventEmitter } from 'node:events'; import { RelayClient } from './client.js'; import type { ParsedCommand, ParsedSummary, SessionEndMarker } from './parser.js'; -import { parseSummaryWithDetails, parseSessionEndFromOutput } from './parser.js'; +import { parseSummaryWithDetails, parseSessionEndFromOutput, isPlaceholderTarget } from './parser.js'; import type { SendPayload, SendMeta, SpeakOnTrigger } from '../protocol/types.js'; import { getProjectPaths } from '../utils/project-namespace.js'; import { getTrailEnvVars } from '../trajectory/integration.js'; @@ -125,6 +125,7 @@ export class PtyWrapper extends EventEmitter { private relayPrefix: string; private cliType: CliType; private sentMessageHashes: Set = new Set(); + private receivedMessageIds: Set = new Set(); // Dedup incoming messages private processedSpawnCommands: Set = new Set(); private processedReleaseCommands: Set = new Set(); private pendingFencedSpawn: { name: string; cli: string; taskLines: string[] } | null = null; @@ -135,7 +136,7 @@ export class PtyWrapper extends EventEmitter { private injectionMetrics: InjectionMetrics = createInjectionMetrics(); private logFilePath?: string; private logStream?: fs.WriteStream; - private hasAcceptedPrompt = false; + private acceptedPrompts: Set = new Set(); // Track which prompts have been accepted private hookRegistry: HookRegistry; private sessionStartTime = Date.now(); private continuity?: ContinuityManager; @@ -575,21 +576,25 @@ export class PtyWrapper extends EventEmitter { /** * Auto-accept Claude's first-run prompts - * Handles: + * Handles multiple prompts in sequence: * 1. --dangerously-skip-permissions acceptance ("Yes, I accept") * 2. Trust directory prompt ("Yes, I trust this folder") + * 3. "Ready to code here?" permission prompt ("Yes, continue") + * + * Uses a Set to track which prompts have been accepted, allowing + * multiple different prompts to be handled in sequence. */ private handleAutoAcceptPrompts(data: string): void { - if (this.hasAcceptedPrompt) return; if (!this.ptyProcess || !this.running) return; const cleanData = stripAnsi(data); // Check for the permission acceptance prompt (--dangerously-skip-permissions) // Pattern: "2. Yes, I accept" in the output - if (cleanData.includes('Yes, I accept') && cleanData.includes('No, exit')) { + if (!this.acceptedPrompts.has('permission') && + cleanData.includes('Yes, I accept') && cleanData.includes('No, exit')) { console.log(`[pty:${this.config.name}] Detected permission prompt, auto-accepting...`); - this.hasAcceptedPrompt = true; + this.acceptedPrompts.add('permission'); // Send "2" to select "Yes, I accept" and Enter to confirm setTimeout(() => { if (this.ptyProcess && this.running) { @@ -601,10 +606,11 @@ export class PtyWrapper extends EventEmitter { // Check for the trust directory prompt // Pattern: "1. Yes, I trust this folder" with "No, exit" - if ((cleanData.includes('trust this folder') || cleanData.includes('safety check')) + if (!this.acceptedPrompts.has('trust') && + (cleanData.includes('trust this folder') || cleanData.includes('safety check')) && cleanData.includes('No, exit')) { console.log(`[pty:${this.config.name}] Detected trust directory prompt, auto-accepting...`); - this.hasAcceptedPrompt = true; + this.acceptedPrompts.add('trust'); // Send Enter to accept first option (already selected) setTimeout(() => { if (this.ptyProcess && this.running) { @@ -613,6 +619,23 @@ export class PtyWrapper extends EventEmitter { }, 300); return; } + + // Check for "Ready to code here?" permission prompt + // Pattern: "Yes, continue" with "No, exit" and "Ready to code here?" + // This prompt asks for permission to work with files in the workspace + if (!this.acceptedPrompts.has('ready-to-code') && + cleanData.includes('Yes, continue') && cleanData.includes('No, exit') + && (cleanData.includes('Ready to code here') || cleanData.includes('permission to work with your files'))) { + console.log(`[pty:${this.config.name}] Detected "Ready to code here?" prompt, auto-accepting...`); + this.acceptedPrompts.add('ready-to-code'); + // Send Enter to accept first option (already selected with โฏ) + setTimeout(() => { + if (this.ptyProcess && this.running) { + this.ptyProcess.write('\r'); + } + }, 300); + return; + } } /** @@ -687,6 +710,11 @@ export class PtyWrapper extends EventEmitter { continue; } + // Skip placeholder targets (documentation examples like "AgentName", "Lead", etc.) + if (isPlaceholderTarget(target)) { + continue; + } + // Find the closing >>> const endIdx = content.indexOf('>>>', startIdx); if (endIdx === -1) continue; @@ -704,6 +732,11 @@ export class PtyWrapper extends EventEmitter { to = target.substring(colonIdx + 1); } + // Skip placeholder targets after parsing cross-project syntax + if (isPlaceholderTarget(to)) { + continue; + } + this.sendRelayCommand({ to, kind: 'message', @@ -751,6 +784,9 @@ export class PtyWrapper extends EventEmitter { const [, target, body] = simpleMatch; if (!body) continue; + // Skip placeholder targets (documentation examples) + if (isPlaceholderTarget(target)) continue; + // Parse target for cross-project syntax const colonIdx = target.indexOf(':'); let to = target; @@ -760,6 +796,9 @@ export class PtyWrapper extends EventEmitter { to = target.substring(colonIdx + 1); } + // Skip placeholder targets after parsing cross-project syntax + if (isPlaceholderTarget(to)) continue; + this.sendRelayCommand({ to, kind: 'message', @@ -773,6 +812,9 @@ export class PtyWrapper extends EventEmitter { const [, target, threadProject, threadId, body] = targetMatch; if (!body) continue; + // Skip placeholder targets (documentation examples) + if (isPlaceholderTarget(target)) continue; + // Parse target for cross-project syntax const colonIdx = target.indexOf(':'); let to = target; @@ -782,6 +824,9 @@ export class PtyWrapper extends EventEmitter { to = target.substring(colonIdx + 1); } + // Skip placeholder targets after parsing cross-project syntax + if (isPlaceholderTarget(to)) continue; + this.sendRelayCommand({ to, kind: 'message', @@ -1064,6 +1109,19 @@ export class PtyWrapper extends EventEmitter { * @param originalTo - The original 'to' field from sender. '*' indicates this was a broadcast message. */ private handleIncomingMessage(from: string, payload: SendPayload, messageId: string, meta?: SendMeta, originalTo?: string): void { + // Deduplicate: skip if we've already received this message + if (this.receivedMessageIds.has(messageId)) { + console.log(`[pty:${this.config.name}] Skipping duplicate message: ${messageId.substring(0, 8)}`); + return; + } + this.receivedMessageIds.add(messageId); + + // Limit dedup set size to prevent memory leak + if (this.receivedMessageIds.size > 1000) { + const oldest = this.receivedMessageIds.values().next().value; + if (oldest) this.receivedMessageIds.delete(oldest); + } + this.messageQueue.push({ from, body: payload.body, messageId, thread: payload.thread, importance: meta?.importance, data: payload.data, originalTo }); this.processMessageQueue(); @@ -1191,6 +1249,9 @@ export class PtyWrapper extends EventEmitter { log: (message: string) => console.log(`[pty:${this.config.name}] ${message}`), logError: (message: string) => console.error(`[pty:${this.config.name}] ${message}`), getMetrics: () => this.injectionMetrics, + // Skip verification for PTY-based injection - CLIs don't echo input back + // so verification will always fail. Trust that pty.write() succeeds. + skipVerification: true, }; // Inject with retry and verification using shared logic @@ -1223,27 +1284,24 @@ export class PtyWrapper extends EventEmitter { } /** - * Inject usage instructions including persistence protocol + * Queue minimal agent identity notification as the first message. + * + * Full protocol instructions are in ~/.claude/CLAUDE.md (set up by entrypoint.sh). + * We only inject a brief identity message here to let the agent know its name + * and that it's connected to the relay. */ private injectInstructions(): void { - if (!this.running || !this.ptyProcess) return; - - const escapedPrefix = '\\' + this.relayPrefix; - const instructions = [ - `[Agent Relay] You are "${this.config.name}" - connected for real-time messaging.`, - `SEND: ${escapedPrefix}AgentName message`, - `PROTOCOL: (1) ACK receipt (2) Work (3) Send "DONE: summary"`, - `PERSIST: Output [[SUMMARY]]{"currentTask":"...","context":"..."}[[/SUMMARY]] after major work.`, - `END: Output [[SESSION_END]]{"summary":"..."}[[/SESSION_END]] when session complete.`, - ].join(' | '); + if (!this.running) return; - // Note: Trail instructions are injected via hooks (trajectory-hooks.ts) + // Minimal notification - full protocol is in ~/.claude/CLAUDE.md + const notification = `You are agent "${this.config.name}" connected to Agent Relay. See CLAUDE.md for the messaging protocol. ACK messages, do work, send DONE when complete.`; - try { - this.ptyProcess.write(instructions + '\r'); - } catch { - // Silent fail - } + // Queue as first message from "system" - will be injected when CLI is ready + this.messageQueue.unshift({ + from: 'system', + body: notification, + messageId: `init-${Date.now()}`, + }); } /** @@ -1410,12 +1468,17 @@ export class PtyWrapper extends EventEmitter { this.outputsSinceSummary = 0; // Inject reminder as a relay-style message - const reminder = `\n[Agent Relay] It's been ${Math.round(minutesSinceSummary)} minutes. Please output a [[SUMMARY]] block to checkpoint your progress:\n[[SUMMARY]]\n{"currentTask": "...", "completedTasks": [...], "context": "..."}\n[[/SUMMARY]]\n`; + // IMPORTANT: Must be single-line - embedded newlines cause the message to span + // multiple lines in the CLI input buffer, and the final Enter only submits + // the last (empty) line. Regular relay messages are also single-line (see buildInjectionString). + const reminder = `[Agent Relay] It's been ${Math.round(minutesSinceSummary)} minutes. Please output a [[SUMMARY]] block to checkpoint your progress: [[SUMMARY]]{"currentTask": "...", "completedTasks": [...], "context": "..."}[[/SUMMARY]]`; - // Delay slightly to not interrupt current output - setTimeout(() => { + // Delay slightly to not interrupt current output, then write + Enter + setTimeout(async () => { if (this.ptyProcess && this.running) { - this.ptyProcess.write(reminder + '\r'); + this.ptyProcess.write(reminder); + await sleep(INJECTION_CONSTANTS.ENTER_DELAY_MS); + this.ptyProcess.write('\r'); } }, 1000); } From 4395f609d5fa16a8885684e39a32fc46df8edf6e Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:08:25 +0100 Subject: [PATCH 092/103] Fix metrics page in cloud mode by persisting workspace ID MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Store workspace ID in localStorage when connecting to a workspace - Add initializeWorkspaceId() to load workspace ID on other pages - Update metrics page to initialize workspace context on mount - Redirect to /app if in cloud mode without workspace selected ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/dashboard/app/metrics/page.tsx | 31 ++++++++++++++++++++-- src/dashboard/lib/api.ts | 41 ++++++++++++++++++++++++++++-- 2 files changed, 68 insertions(+), 4 deletions(-) diff --git a/src/dashboard/app/metrics/page.tsx b/src/dashboard/app/metrics/page.tsx index c3016699..d187f741 100644 --- a/src/dashboard/app/metrics/page.tsx +++ b/src/dashboard/app/metrics/page.tsx @@ -9,6 +9,7 @@ import React, { useState, useEffect } from 'react'; import Link from 'next/link'; +import { getApiUrl, initializeWorkspaceId } from '../../lib/api'; interface AgentMetric { name: string; @@ -112,13 +113,39 @@ export default function MetricsPage() { const [memoryMetrics, setMemoryMetrics] = useState(null); const [error, setError] = useState(null); const [loading, setLoading] = useState(true); + const [_isCloudMode, setIsCloudMode] = useState(false); useEffect(() => { + // Initialize workspace ID from localStorage for cloud mode + const workspaceId = initializeWorkspaceId(); + + // Check if we're in cloud mode by checking for session endpoint + const checkCloudMode = async () => { + try { + const res = await fetch('/api/auth/session', { credentials: 'include' }); + if (res.status !== 404) { + setIsCloudMode(true); + // In cloud mode without workspace, redirect to app to select one + if (!workspaceId) { + window.location.href = '/app'; + return false; + } + } + return true; + } catch { + return true; // Network error = local mode + } + }; + const fetchMetrics = async () => { try { + // Check cloud mode first + const shouldContinue = await checkCloudMode(); + if (!shouldContinue) return; + const [metricsRes, memoryRes] = await Promise.all([ - fetch('/api/metrics'), - fetch('/api/metrics/agents'), + fetch(getApiUrl('/api/metrics'), { credentials: 'include' }), + fetch(getApiUrl('/api/metrics/agents'), { credentials: 'include' }), ]); if (!metricsRes.ok) throw new Error('Failed to fetch metrics'); diff --git a/src/dashboard/lib/api.ts b/src/dashboard/lib/api.ts index af4ba52b..6267c4a6 100644 --- a/src/dashboard/lib/api.ts +++ b/src/dashboard/lib/api.ts @@ -21,6 +21,9 @@ import type { // API base URL - relative in browser, can be configured for SSR const API_BASE = ''; +// Storage key for workspace ID persistence +const WORKSPACE_ID_KEY = 'agentrelay_workspace_id'; + // Workspace ID for cloud mode proxying let activeWorkspaceId: string | null = null; @@ -52,17 +55,51 @@ function captureCsrfToken(response: Response): void { } /** - * Set the active workspace ID for API proxying in cloud mode + * Set the active workspace ID for API proxying in cloud mode. + * Also persists to localStorage so other pages can access it. */ export function setActiveWorkspaceId(workspaceId: string | null): void { activeWorkspaceId = workspaceId; + // Persist to localStorage for cross-page access + if (typeof window !== 'undefined') { + if (workspaceId) { + localStorage.setItem(WORKSPACE_ID_KEY, workspaceId); + } else { + localStorage.removeItem(WORKSPACE_ID_KEY); + } + } +} + +/** + * Get the active workspace ID + */ +export function getActiveWorkspaceId(): string | null { + return activeWorkspaceId; +} + +/** + * Initialize workspace ID from localStorage if not already set. + * Call this on pages that need workspace context but aren't in the main app flow. + */ +export function initializeWorkspaceId(): string | null { + if (activeWorkspaceId) { + return activeWorkspaceId; + } + if (typeof window !== 'undefined') { + const stored = localStorage.getItem(WORKSPACE_ID_KEY); + if (stored) { + activeWorkspaceId = stored; + return stored; + } + } + return null; } /** * Get the API URL, accounting for cloud mode proxying * @param path - API path like '/api/spawn' or '/api/send' */ -function getApiUrl(path: string): string { +export function getApiUrl(path: string): string { if (activeWorkspaceId) { // In cloud mode, proxy through the cloud server // Strip /api/ prefix since the proxy endpoint adds it back From e8815e7542d5868e6743bd53c778c7ecb338ce2f Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:19:58 +0100 Subject: [PATCH 093/103] Exclude agents without CLI from dashboard instead of marking as human MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Agents without a proper CLI (or CLI='Unknown') were being marked as human team members and shown in the Team tab. These are typically improperly registered or stale agents. Now they're excluded entirely. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/dashboard-server/server.ts | 84 +++++++++++++++++++++++++++------- 1 file changed, 67 insertions(+), 17 deletions(-) diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index e0499550..f23fff06 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -18,6 +18,7 @@ import type { ProjectConfig, SpawnRequest } from '../bridge/types.js'; import { listTrajectorySteps, getTrajectoryStatus, getTrajectoryHistory } from '../trajectory/integration.js'; import { loadTeamsConfig } from '../bridge/teams-config.js'; import { getMemoryMonitor } from '../resiliency/memory-monitor.js'; +import { detectWorkspacePath } from '../utils/project-namespace.js'; import { startCLIAuth, getAuthSession, @@ -408,8 +409,12 @@ export async function startDashboard( : undefined; // Initialize spawner if enabled + // Use detectWorkspacePath to find the actual repo directory in cloud workspaces + const workspacePath = detectWorkspacePath(projectRoot || dataDir); + console.log(`[dashboard] Workspace path: ${workspacePath}`); + const spawner: AgentSpawner | undefined = enableSpawner - ? new AgentSpawner(projectRoot || dataDir, tmuxSession) + ? new AgentSpawner(workspacePath, tmuxSession) : undefined; // Initialize cloud persistence and memory monitoring if enabled (RELAY_CLOUD_ENABLED=true) @@ -1374,24 +1379,34 @@ export async function startDashboard( // Filter agents: // 1. Exclude "Dashboard" (internal agent, not a real team member) // 2. Exclude offline agents (no lastSeen or lastSeen > threshold) + // 3. Exclude agents without a known CLI (these are improperly registered or stale) const now = Date.now(); // 30 seconds - aligns with heartbeat timeout (5s heartbeat * 6 multiplier = 30s) // This ensures agents disappear quickly after they stop responding to heartbeats const OFFLINE_THRESHOLD_MS = 30 * 1000; - const filteredAgents = Array.from(agentsMap.values()).filter(agent => { - // Exclude Dashboard - if (agent.name === 'Dashboard') return false; - - // Exclude agents starting with __ (internal/system agents) - if (agent.name.startsWith('__')) return false; - - // Exclude offline agents (no lastSeen or too old) - if (!agent.lastSeen) return false; - const lastSeenTime = new Date(agent.lastSeen).getTime(); - if (now - lastSeenTime > OFFLINE_THRESHOLD_MS) return false; - - return true; - }); + const filteredAgents = Array.from(agentsMap.values()) + .filter(agent => { + // Exclude Dashboard + if (agent.name === 'Dashboard') return false; + + // Exclude agents starting with __ (internal/system agents) + if (agent.name.startsWith('__')) return false; + + // Exclude agents without a proper CLI (improperly registered or stale) + if (!agent.cli || agent.cli === 'Unknown') return false; + + // Exclude offline agents (no lastSeen or too old) + if (!agent.lastSeen) return false; + const lastSeenTime = new Date(agent.lastSeen).getTime(); + if (now - lastSeenTime > OFFLINE_THRESHOLD_MS) return false; + + return true; + }) + .map(agent => ({ + ...agent, + // All agents that pass the filter have a known CLI and are AI agents + isHuman: false, + })); return { agents: filteredAgents, @@ -1773,8 +1788,39 @@ export async function startDashboard( return Array.from(onlineUsers.values()).map((state) => state.info); }; + // Heartbeat to detect dead connections (30 seconds) + const PRESENCE_HEARTBEAT_INTERVAL = 30000; + const presenceHealth = new WeakMap(); + + const presenceHeartbeat = setInterval(() => { + wssPresence.clients.forEach((ws) => { + const health = presenceHealth.get(ws); + if (!health) { + presenceHealth.set(ws, { isAlive: true }); + return; + } + if (!health.isAlive) { + ws.terminate(); + return; + } + health.isAlive = false; + ws.ping(); + }); + }, PRESENCE_HEARTBEAT_INTERVAL); + + wssPresence.on('close', () => { + clearInterval(presenceHeartbeat); + }); + wssPresence.on('connection', (ws) => { - console.log('[dashboard] Presence WebSocket client connected'); + // Initialize health tracking (no log - too noisy) + presenceHealth.set(ws, { isAlive: true }); + + ws.on('pong', () => { + const health = presenceHealth.get(ws); + if (health) health.isAlive = true; + }); + let clientUsername: string | undefined; ws.on('message', (data) => { @@ -1805,7 +1851,11 @@ export async function startDashboard( // Add this connection to existing user existing.connections.add(ws); existing.info.lastSeen = now; - console.log(`[dashboard] User ${username} opened new tab (${existing.connections.size} connections)`); + // Only log at milestones to reduce noise + const count = existing.connections.size; + if (count === 2 || count === 5 || count === 10 || count % 50 === 0) { + console.log(`[dashboard] User ${username} has ${count} connections`); + } } else { // New user - create presence state onlineUsers.set(username, { From e7c71045e213d58019ba60f0e4f3a8cd1f1f6e43 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:33:53 +0100 Subject: [PATCH 094/103] Add spawn debug logging and fix tmux-wrapper prefix stripping MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add comprehensive [SPAWN-DEBUG] logging to pty-wrapper to track spawn command detection, prefix stripping, and execution - Fix tmux-wrapper to strip common line prefixes (including โ—) before matching spawn/release commands, matching pty-wrapper behavior - Fix TypeScript error with Set.values().next().value being undefined ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/dashboard-server/server.ts | 45 +++++++++++++++++ src/wrapper/pty-wrapper.ts | 89 ++++++++++++++++++++++++++++++---- src/wrapper/tmux-wrapper.ts | 18 +++++-- 3 files changed, 138 insertions(+), 14 deletions(-) diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index f23fff06..6d0e4157 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -1749,11 +1749,56 @@ export async function startDashboard( }); }); + // Deduplication for log output - prevent same content from being broadcast multiple times + // Key: agentName -> Set of recent content hashes (rolling window) + const recentLogHashes = new Map>(); + const MAX_LOG_HASH_WINDOW = 50; // Keep last 50 hashes per agent + + // Simple hash function for log dedup + const hashLogContent = (content: string): string => { + // Normalize whitespace and create a simple hash + const normalized = content.replace(/\s+/g, ' ').trim().slice(0, 200); + let hash = 0; + for (let i = 0; i < normalized.length; i++) { + const char = normalized.charCodeAt(i); + hash = ((hash << 5) - hash) + char; + hash = hash & hash; + } + return hash.toString(36); + }; + // Function to broadcast log output to subscribed clients const broadcastLogOutput = (agentName: string, output: string) => { const clients = logSubscriptions.get(agentName); if (!clients || clients.size === 0) return; + // Skip empty or whitespace-only output + const trimmed = output.trim(); + if (!trimmed) return; + + // Dedup: Check if we've recently broadcast this content + const hash = hashLogContent(output); + let agentHashes = recentLogHashes.get(agentName); + if (!agentHashes) { + agentHashes = new Set(); + recentLogHashes.set(agentName, agentHashes); + } + + if (agentHashes.has(hash)) { + // Already broadcast this content recently, skip + return; + } + + // Add to rolling window + agentHashes.add(hash); + if (agentHashes.size > MAX_LOG_HASH_WINDOW) { + // Remove oldest entry (first in Set iteration order) + const oldest = agentHashes.values().next().value; + if (oldest !== undefined) { + agentHashes.delete(oldest); + } + } + const payload = JSON.stringify({ type: 'output', agent: agentName, diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index 97fc6925..8c37f983 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -149,6 +149,8 @@ export class PtyWrapper extends EventEmitter { private outputsSinceSummary = 0; // Count outputs since last summary private detectedTask?: string; // Auto-detected task from agent config private sessionEndData?: SessionEndMarker; // Store SESSION_END data for handoff + private instructionsInjected = false; // Track if init instructions have been injected + private continuityInjected = false; // Track if continuity context has been injected constructor(config: PtyWrapperConfig) { super(); @@ -373,8 +375,20 @@ export class PtyWrapper extends EventEmitter { private async injectContinuityContext(): Promise { if (!this.continuity || !this.running) return; + // Guard: Only inject once per session + if (this.continuityInjected) { + console.log(`[pty:${this.config.name}] Continuity context already injected, skipping`); + return; + } + this.continuityInjected = true; + try { const context = await this.continuity.getStartupContext(this.config.name); + // Skip if no meaningful context (empty ledger or just boilerplate) + if (!context?.formatted || context.formatted.length < 50) { + console.log(`[pty:${this.config.name}] Skipping continuity injection (no meaningful context)`); + return; + } if (context?.formatted) { // Build context notification similar to TmuxWrapper const taskInfo = context.ledger?.currentTask @@ -846,14 +860,17 @@ export class PtyWrapper extends EventEmitter { const msgHash = `${cmd.to}:${cmd.body}`; if (this.sentMessageHashes.has(msgHash)) { + console.log(`[pty:${this.config.name}] Skipping duplicate message to ${cmd.to}`); return; } if (this.client.state !== 'READY') { + console.log(`[pty:${this.config.name}] Cannot send to ${cmd.to} - relay not ready (state: ${this.client.state})`); return; } const success = this.client.sendMessage(cmd.to, cmd.body, cmd.kind, cmd.data, cmd.thread); + console.log(`[pty:${this.config.name}] Sent message to ${cmd.to}: ${success ? 'success' : 'failed'}`); if (success) { this.sentMessageHashes.add(msgHash); @@ -902,14 +919,45 @@ export class PtyWrapper extends EventEmitter { const spawnAllowed = this.config.allowSpawn !== false; const canSpawn = spawnAllowed && (this.config.dashboardPort || this.config.onSpawn); const canRelease = this.config.dashboardPort || this.config.onRelease; + + // Debug: always log spawn detection for debugging + if (content.includes('->relay:spawn')) { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Spawn pattern detected in content`); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] canSpawn=${canSpawn} (allowSpawn=${spawnAllowed}, dashboardPort=${this.config.dashboardPort}, hasOnSpawn=${!!this.config.onSpawn})`); + // Log the actual lines containing spawn + const spawnLines = content.split('\n').filter(l => l.includes('->relay:spawn')); + spawnLines.forEach((line, i) => { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Line ${i}: "${line.substring(0, 100)}"`); + }); + } + + // Debug: always log release detection for debugging + if (content.includes('->relay:release')) { + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] Release pattern detected in content`); + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] canRelease=${canRelease} (dashboardPort=${this.config.dashboardPort}, hasOnRelease=${!!this.config.onRelease})`); + } + if (!canSpawn && !canRelease) return; const lines = content.split('\n'); const spawnPrefix = '->relay:spawn'; const releasePrefix = '->relay:release'; + // Pattern to strip common line prefixes (bullets, prompts, etc.) + // Same prefixes allowed in the message parser + const linePrefixPattern = /^(?:[>$%#โ†’โžœโ€บยปโ—โ€ขโ—ฆโ€ฃโƒ\-*โบโ—†โ—‡โ—‹โ–กโ– โ”‚โ”ƒโ”†โ”‡โ”Šโ”‹โ•Žโ•โœฆ]\s*)+/; + for (const line of lines) { - const trimmed = line.trim(); + let trimmed = line.trim(); + + // Strip common line prefixes (bullets, prompts) before checking for commands + const originalTrimmed = trimmed; + trimmed = trimmed.replace(linePrefixPattern, ''); + + // Debug: log prefix stripping when spawn detected + if (originalTrimmed.includes('->relay:spawn') && originalTrimmed !== trimmed) { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Stripped prefix: "${originalTrimmed.substring(0, 50)}" -> "${trimmed.substring(0, 50)}"`); + } // Skip escaped commands: \->relay:spawn should not trigger if (trimmed.includes('\\->relay:')) { @@ -949,9 +997,11 @@ export class PtyWrapper extends EventEmitter { // STRICT: Must be at start of line (after whitespace) if (canSpawn && trimmed.startsWith(spawnPrefix)) { const afterSpawn = trimmed.substring(spawnPrefix.length).trim(); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Detected spawn prefix, afterSpawn: "${afterSpawn.substring(0, 60)}"`); // Check for fenced format: Name [cli] <<< (CLI optional, defaults to 'claude') const fencedMatch = afterSpawn.match(/^(\S+)(?:\s+(\S+))?\s+<<<(.*)$/); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Fenced match result: ${fencedMatch ? 'MATCHED' : 'NO MATCH'}`); if (fencedMatch) { const [, name, cliOrUndefined, inlineContent] = fencedMatch; let cli = cliOrUndefined || 'claude'; @@ -979,7 +1029,12 @@ export class PtyWrapper extends EventEmitter { this.executeSpawn(name, cli, taskStr); } } else { - // Start multi-line fenced mode + // Start multi-line fenced mode - but only if not already processed + const spawnKey = `${name}:${cli}`; + if (this.processedSpawnCommands.has(spawnKey)) { + // Already processed this spawn, skip the fenced capture + continue; + } this.pendingFencedSpawn = { name, cli, @@ -1031,14 +1086,18 @@ export class PtyWrapper extends EventEmitter { // Check for release command // STRICT: Must be at start of line (after whitespace) - if (canRelease && trimmed.startsWith(releasePrefix)) { - const afterRelease = trimmed.substring(releasePrefix.length).trim(); - const name = afterRelease.split(/\s+/)[0]; - - // STRICT: Validate agent name format - if (name && this.isValidAgentName(name) && !this.processedReleaseCommands.has(name)) { - this.processedReleaseCommands.add(name); - this.executeRelease(name); + if (trimmed.startsWith(releasePrefix)) { + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] Release prefix detected, canRelease=${canRelease}`); + if (canRelease) { + const afterRelease = trimmed.substring(releasePrefix.length).trim(); + const name = afterRelease.split(/\s+/)[0]; + console.log(`[pty:${this.config.name}] [RELEASE-DEBUG] Parsed name: ${name}, isValidName=${name ? this.isValidAgentName(name) : false}, alreadyProcessed=${this.processedReleaseCommands.has(name)}`); + + // STRICT: Validate agent name format + if (name && this.isValidAgentName(name) && !this.processedReleaseCommands.has(name)) { + this.processedReleaseCommands.add(name); + this.executeRelease(name); + } } } } @@ -1048,6 +1107,9 @@ export class PtyWrapper extends EventEmitter { * Execute spawn via API or callback */ private async executeSpawn(name: string, cli: string, task: string): Promise { + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] executeSpawn called: name=${name}, cli=${cli}, task="${task.substring(0, 50)}..."`); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] dashboardPort=${this.config.dashboardPort}, hasOnSpawn=${!!this.config.onSpawn}`); + if (this.config.dashboardPort) { // Use dashboard API for spawning (works from spawned agents) try { @@ -1293,6 +1355,13 @@ export class PtyWrapper extends EventEmitter { private injectInstructions(): void { if (!this.running) return; + // Guard: Only inject once per session + if (this.instructionsInjected) { + console.log(`[pty:${this.config.name}] Init instructions already injected, skipping`); + return; + } + this.instructionsInjected = true; + // Minimal notification - full protocol is in ~/.claude/CLAUDE.md const notification = `You are agent "${this.config.name}" connected to Agent Relay. See CLAUDE.md for the messaging protocol. ACK messages, do work, send DONE when complete.`; diff --git a/src/wrapper/tmux-wrapper.ts b/src/wrapper/tmux-wrapper.ts index dc7bcada..ac12e0a7 100644 --- a/src/wrapper/tmux-wrapper.ts +++ b/src/wrapper/tmux-wrapper.ts @@ -1265,8 +1265,15 @@ export class TmuxWrapper { const lines = content.split('\n'); + // Pattern to strip common line prefixes (bullets, prompts, etc.) + // Must include โ— (U+25CF BLACK CIRCLE) used by Claude's TUI + const linePrefixPattern = /^(?:[>$%#โ†’โžœโ€บยปโ—โ€ขโ—ฆโ€ฃโƒ\-*โบโ—†โ—‡โ—‹โ–กโ– โ”‚โ”ƒโ”†โ”‡โ”Šโ”‹โ•Žโ•โœฆ]\s*)+/; + for (const line of lines) { - const trimmed = line.trim(); + let trimmed = line.trim(); + + // Strip common line prefixes (bullets, prompts) before checking for commands + trimmed = trimmed.replace(linePrefixPattern, ''); // If we're in fenced spawn mode, accumulate lines until we see >>> if (this.pendingFencedSpawn) { @@ -1303,7 +1310,8 @@ export class TmuxWrapper { } // Check for fenced spawn start: ->relay:spawn Name [cli] <<< (CLI optional, defaults to 'claude') - const fencedSpawnMatch = trimmed.match(/^(?:[โ€ข\-*]\s*)?->relay:spawn\s+(\S+)(?:\s+(\S+))?\s+<<<(.*)$/); + // Prefixes are stripped above, so we just look for the command at start of line + const fencedSpawnMatch = trimmed.match(/^->relay:spawn\s+(\S+)(?:\s+(\S+))?\s+<<<(.*)$/); if (fencedSpawnMatch && canSpawn) { const [, name, cliOrUndefined, inlineContent] = fencedSpawnMatch; const cli = cliOrUndefined || 'claude'; @@ -1344,7 +1352,8 @@ export class TmuxWrapper { // Match single-line spawn: ->relay:spawn WorkerName [cli] ["task"] // CLI is optional - defaults to 'claude'. Task is also optional. - const spawnMatch = trimmed.match(/^(?:[โ€ข\-*]\s*)?->relay:spawn\s+(\S+)(?:\s+(\S+))?(?:\s+["'](.+?)["'])?\s*$/); + // Prefixes are stripped above, so we just look for the command at start of line + const spawnMatch = trimmed.match(/^->relay:spawn\s+(\S+)(?:\s+(\S+))?(?:\s+["'](.+?)["'])?\s*$/); if (spawnMatch && canSpawn) { const [, name, cliOrUndefined, task] = spawnMatch; const cli = cliOrUndefined || 'claude'; @@ -1375,7 +1384,8 @@ export class TmuxWrapper { } // Match ->relay:release WorkerName - const releaseMatch = trimmed.match(/^(?:[โ€ข\-*]\s*)?->relay:release\s+(\S+)\s*$/); + // Prefixes are stripped above, so we just look for the command at start of line + const releaseMatch = trimmed.match(/^->relay:release\s+(\S+)\s*$/); if (releaseMatch && canRelease) { const [, name] = releaseMatch; From 522adfc6bda77df896cfd68fe186223a6afc0667 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:36:44 +0100 Subject: [PATCH 095/103] Fix spawn/release not working: pass dashboardPort to AgentSpawner MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The dashboard-server was creating AgentSpawner without passing the dashboard port, so spawned agents couldn't call the spawn/release APIs for nested spawning. Now passes the port so canSpawn and canRelease are true in spawned agents. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/dashboard-server/server.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index 6d0e4157..414145b6 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -413,8 +413,9 @@ export async function startDashboard( const workspacePath = detectWorkspacePath(projectRoot || dataDir); console.log(`[dashboard] Workspace path: ${workspacePath}`); + // Pass dashboard port to spawner so spawned agents can call spawn/release APIs for nested spawning const spawner: AgentSpawner | undefined = enableSpawner - ? new AgentSpawner(workspacePath, tmuxSession) + ? new AgentSpawner(workspacePath, tmuxSession, port) : undefined; // Initialize cloud persistence and memory monitoring if enabled (RELAY_CLOUD_ENABLED=true) From f380d615c86b376145afc4a7a7cce11e47ca4e52 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:40:47 +0100 Subject: [PATCH 096/103] Add URL encoding to release API call in pty-wrapper MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The agent name parameter wasn't URL encoded in the DELETE request, which could cause issues with agent names containing special characters. Now matches the tmux-wrapper implementation. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/wrapper/pty-wrapper.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index 8c37f983..df6f34bf 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -1144,7 +1144,7 @@ export class PtyWrapper extends EventEmitter { if (this.config.dashboardPort) { // Use dashboard API for releasing try { - const response = await fetch(`http://localhost:${this.config.dashboardPort}/api/spawned/${name}`, { + const response = await fetch(`http://localhost:${this.config.dashboardPort}/api/spawned/${encodeURIComponent(name)}`, { method: 'DELETE', }); const result = await response.json() as { success: boolean; error?: string }; From 8ff5b49df84c5bf7cff76348456dbae680be28e3 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 13:56:35 +0100 Subject: [PATCH 097/103] more fixes --- deploy/workspace/Dockerfile | 5 + deploy/workspace/entrypoint.sh | 136 ++++++++++- package.json | 2 +- scripts/test-cli-auth/mock-cli.sh | 11 +- scripts/test-pty-input-auto.js | 222 ++++++++++++++++++ scripts/test-pty-input.js | 150 ++++++++++++ src/bridge/spawner.ts | 7 +- src/bridge/types.ts | 2 + src/cloud/api/onboarding.ts | 131 +++++------ src/cloud/server.ts | 54 ++++- src/daemon/agent-registry.ts | 36 +++ src/dashboard/app/app/page.tsx | 125 +++++++--- src/dashboard/react-components/App.tsx | 15 +- .../react-components/ProviderAuthFlow.tsx | 9 +- .../react-components/WorkspaceContext.tsx | 107 +++++++++ .../react-components/XTermLogViewer.tsx | 31 +-- .../react-components/hooks/usePresence.ts | 47 +++- .../react-components/hooks/useTrajectory.ts | 25 +- src/dashboard/react-components/index.ts | 6 + .../react-components/layout/Sidebar.tsx | 133 ++++++++++- src/dashboard/types/index.ts | 2 + src/shared/cli-auth-config.ts | 9 + src/utils/project-namespace.ts | 93 ++++++++ src/wrapper/parser.ts | 11 +- src/wrapper/pty-wrapper.ts | 74 +++++- src/wrapper/shared.ts | 20 ++ 26 files changed, 1284 insertions(+), 179 deletions(-) create mode 100644 scripts/test-pty-input-auto.js create mode 100644 scripts/test-pty-input.js create mode 100644 src/dashboard/react-components/WorkspaceContext.tsx diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index 4a8080c1..bfce24d4 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -67,6 +67,11 @@ RUN npm install -g @openai/codex RUN npm install -g @google/gemini-cli RUN npm install -g opencode-ai@latest +# Create symlink for agent-relay CLI (for debugging inside container) +# The actual CLI is built as part of the app +RUN ln -sf /app/dist/cli/index.js /usr/local/bin/agent-relay && \ + chmod +x /app/dist/cli/index.js 2>/dev/null || true + # Create workspace directory RUN mkdir -p /workspace /data diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index 1ba45fa0..967e37e2 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -55,7 +55,15 @@ GHEOF # gh CLI will use GH_TOKEN if set; we export a function to refresh it # For now, set it once at startup (will be refreshed by the credential helper for git operations) - export GH_TOKEN=$(/tmp/gh-token-helper.sh 2>/dev/null || echo "") + # Retry a few times in case the cloud API isn't ready yet + export GH_TOKEN="" + for attempt in 1 2 3; do + GH_TOKEN=$(/tmp/gh-token-helper.sh 2>/dev/null || echo "") + if [[ -n "${GH_TOKEN}" ]]; then + break + fi + sleep 1 + done if [[ -n "${GH_TOKEN}" ]]; then log "GitHub CLI configured with fresh token" else @@ -150,6 +158,82 @@ EOF chmod 600 "${HOME}/.claude/.credentials.json" fi +# Configure Claude Code for cloud workspaces +# Create both settings and instructions files +log "Configuring Claude Code for cloud workspace..." +mkdir -p "${HOME}/.claude" + +# Create settings.json to auto-accept permissions (required for cloud workspaces) +# This tells Claude Code to skip the "Ready to code here?" permission prompt +# Reference: Claude Code uses this for headless/automated environments +cat > "${HOME}/.claude/settings.json" <<'SETTINGSEOF' +{ + "permissions": { + "allow": [ + "Read", + "Edit", + "Write", + "Bash", + "Glob", + "Grep", + "Task", + "WebFetch", + "WebSearch", + "NotebookEdit", + "TodoWrite" + ], + "deny": [] + }, + "autoApproveApiRequest": true +} +SETTINGSEOF +chmod 600 "${HOME}/.claude/settings.json" +log "Created Claude Code settings (auto-approve enabled)" + +# Create CLAUDE.md with agent relay protocol instructions +# This is loaded automatically by Claude Code and provides the relay protocol +if [[ -f "/app/docs/agent-relay-snippet.md" ]]; then + cp "/app/docs/agent-relay-snippet.md" "${HOME}/.claude/CLAUDE.md" + log "Copied relay protocol from /app/docs/agent-relay-snippet.md" +else + # Fallback: create minimal instructions + log "WARN: /app/docs/agent-relay-snippet.md not found, creating minimal instructions" + cat > "${HOME}/.claude/CLAUDE.md" <<'RELAYEOF' +# Agent Relay + +Real-time agent-to-agent messaging. Output `->relay:` patterns to communicate. + +## Sending Messages + +Use fenced format for reliable delivery: +``` +->relay:AgentName <<< +Your message here.>>> +``` + +Broadcast to all: `->relay:* <<>>` + +## Protocol + +1. ACK immediately when you receive a task +2. Do the work +3. Send DONE: summary when complete + +## Session Persistence + +Output periodically to checkpoint progress: +``` +[[SUMMARY]]{"currentTask":"...","completedTasks":[...],"context":"..."}[[/SUMMARY]] +``` + +When session is complete: +``` +[[SESSION_END]]{"summary":"...","completedTasks":[...]}[[/SESSION_END]] +``` +RELAYEOF +fi +log "Claude Code configuration complete" + # Codex CLI expects ~/.codex/auth.json # Format: { tokens: { access_token: "...", refresh_token: "...", ... } } if [[ -n "${OPENAI_TOKEN:-}" ]]; then @@ -179,7 +263,55 @@ EOF chmod 600 "${HOME}/.config/gcloud/application_default_credentials.json" fi -log "Starting agent-relay daemon on port ${PORT}" +# ============================================================================ +# Detect workspace path and start daemon +# The daemon must start from the same directory that spawned agents will use +# to ensure consistent socket paths +# ============================================================================ + +# Function to detect the actual workspace path (same logic as project-namespace.ts) +detect_workspace_path() { + local base_dir="${1}" + + # 1. Explicit override via env var + if [[ -n "${WORKSPACE_CWD:-}" ]]; then + echo "${WORKSPACE_CWD}" + return + fi + + # 2. Check if base_dir itself is a git repo + if [[ -d "${base_dir}/.git" ]]; then + echo "${base_dir}" + return + fi + + # 3. Scan for cloned repos (directories with .git) + local first_repo="" + for dir in "${base_dir}"/*/; do + if [[ -d "${dir}.git" ]]; then + # Use first repo found (alphabetically sorted by bash glob) + first_repo="${dir%/}" + break + fi + done + + if [[ -n "${first_repo}" ]]; then + echo "${first_repo}" + return + fi + + # 4. Fall back to base_dir + echo "${base_dir}" +} + +# Detect the actual workspace path +ACTUAL_WORKSPACE=$(detect_workspace_path "${WORKSPACE_DIR}") +log "Detected workspace path: ${ACTUAL_WORKSPACE}" + +# Change to the detected workspace before starting daemon +cd "${ACTUAL_WORKSPACE}" + +log "Starting agent-relay daemon on port ${PORT} from ${ACTUAL_WORKSPACE}" args=(/app/dist/cli/index.js up --port "${PORT}") if [[ "${SUPERVISOR_ENABLED:-true}" == "true" ]]; then diff --git a/package.json b/package.json index f0617607..0ccf8897 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "services:down": "docker compose -f docker-compose.dev.yml down", "services:logs": "docker compose -f docker-compose.dev.yml logs -f postgres redis", "cloud:setup": "./scripts/cloud-setup.sh", - "cloud:api": "WORKSPACE_DEV_MOUNT=true node -r dotenv/config dist/cloud/index.js", + "cloud:api": "WORKSPACE_IMAGE=relay-workspace:local WORKSPACE_DEV_MOUNT=true node -r dotenv/config dist/cloud/index.js", "precloud": "./scripts/cloud-setup.sh --skip-data", "cloud": "concurrently -n api,daemon,dashboard -c cyan,blue,magenta \"npm run cloud:api\" \"npm run dev:daemon\" \"npm run dev:next\"" }, diff --git a/scripts/test-cli-auth/mock-cli.sh b/scripts/test-cli-auth/mock-cli.sh index c5c899f6..e8f5b58d 100755 --- a/scripts/test-cli-auth/mock-cli.sh +++ b/scripts/test-cli-auth/mock-cli.sh @@ -49,11 +49,12 @@ case "$PROVIDER" in echo "" sleep "$DELAY" - # Auth method prompt - echo -e "How would you like to authenticate?" - echo " 1. Use Claude ${YELLOW}subscription${NC} (recommended)" - echo " 2. Use ${YELLOW}API key${NC}" - echo -n "Choice (1-2): " + # Login method selection prompt (matches real Claude CLI) + echo -e "Select login method:" + echo "" + echo -e " โฏ 1. Claude account with ${YELLOW}subscription${NC} ยท Pro, Max, Team, or Enterprise" + echo "" + echo -e " 2. Anthropic Console account ยท API usage billing" read -r -n 1 response 2>/dev/null || true echo "" sleep "$DELAY" diff --git a/scripts/test-pty-input-auto.js b/scripts/test-pty-input-auto.js new file mode 100644 index 00000000..25791459 --- /dev/null +++ b/scripts/test-pty-input-auto.js @@ -0,0 +1,222 @@ +#!/usr/bin/env node +/** + * Automated PTY input test for Claude CLI + * Tests different input methods without user interaction + * Run inside workspace container: node /app/dist/scripts/test-pty-input-auto.js + */ + +import * as pty from 'node-pty'; + +const TEST_CODE = 'test-auth-code-12345'; +const INPUT_METHOD = process.argv[2] || '1'; + +// Debug: Log all escape sequences we send +function logHex(label, data) { + const hex = Buffer.from(data).toString('hex').replace(/(.{2})/g, '$1 ').trim(); + console.log(`[HEX] ${label}: ${hex}`); +} + +function stripAnsi(str) { + return str.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +async function main() { + console.log(`\nTesting PTY input method ${INPUT_METHOD} with code: ${TEST_CODE}\n`); + + const proc = pty.spawn('claude', [], { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: '/workspace', + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + DISPLAY: '', + }, + }); + + let output = ''; + let authUrl = null; + let codePromptSeen = false; + let codeSent = false; + const prompts = [ + { pattern: /dark\s*(mode|theme)/i, response: '\r', name: 'dark mode' }, + { pattern: /select\s*login|how\s*would\s*you\s*like|subscription\s*or.*api/i, response: '\r', name: 'login method' }, + ]; + const respondedPrompts = new Set(); + + proc.onData((data) => { + output += data; + const clean = stripAnsi(data); + + // Log meaningful output + if (clean.trim()) { + const lines = clean.trim().split('\n').map(l => l.trim()).filter(l => l); + for (const line of lines) { + if (line.length > 3 && !line.match(/^[ยทโœข*โœถโœปโœฝ]+$/)) { + console.log('[PTY]', line.substring(0, 120)); + } + } + } + + // Auto-respond to prompts + for (const prompt of prompts) { + if (!respondedPrompts.has(prompt.name) && prompt.pattern.test(clean)) { + respondedPrompts.add(prompt.name); + console.log(`\n[AUTO] Responding to: ${prompt.name}`); + setTimeout(() => proc.write(prompt.response), 100); + } + } + + // Capture auth URL + const urlMatch = clean.match(/(https:\/\/[^\s]+)/); + if (urlMatch && !authUrl) { + authUrl = urlMatch[1]; + console.log('\n[CAPTURED] Auth URL detected'); + } + + // Look for code paste prompt - various patterns Claude might use + const codePromptPatterns = [ + /paste.*code/i, + /enter.*code/i, + /authorization.*code/i, + /code.*here/i, + /waiting.*code/i, + /input.*code/i, + ]; + + if (authUrl && !codePromptSeen && !codeSent) { + for (const pattern of codePromptPatterns) { + if (pattern.test(clean)) { + codePromptSeen = true; + console.log('\n[DETECTED] Code prompt pattern:', pattern.toString()); + break; + } + } + } + + // Also look for the text input box indicator from Ink + // After URL is shown and some time passes, try sending the code + if (authUrl && !codeSent) { + // Check if we see any indication we should enter the code + const outputLower = stripAnsi(output).toLowerCase(); + const hasCodePrompt = outputLower.includes('paste') || + outputLower.includes('enter the code') || + outputLower.includes('authorization code') || + outputLower.includes("browser didn't open"); + + if (hasCodePrompt || output.length > 5000) { + codeSent = true; + console.log('\n[SENDING] Sending code after prompt/timeout...'); + setTimeout(() => sendCode(proc), 500); + } + } + }); + + proc.onExit(({ exitCode }) => { + console.log('\n[EXIT] Claude exited with code:', exitCode); + console.log('[TOTAL OUTPUT LENGTH]', output.length); + + // Check for credentials + import('fs').then(fs => { + const credPath = '/home/workspace/.claude/.credentials.json'; + if (fs.existsSync(credPath)) { + console.log('[SUCCESS] Credentials file found!'); + const creds = fs.readFileSync(credPath, 'utf8'); + console.log('[CREDS]', creds.substring(0, 200)); + } else { + console.log('[RESULT] No credentials file created (expected with test code)'); + } + }); + + setTimeout(() => process.exit(exitCode), 1000); + }); + + async function sendCode(ptyProc) { + const PASTE_START = '\x1b[200~'; + const PASTE_END = '\x1b[201~'; + + console.log(`[METHOD ${INPUT_METHOD}] Sending test code...`); + + switch (INPUT_METHOD) { + case '1': + console.log('[1] Plain code + \\r (carriage return)'); + logHex('sending', TEST_CODE + '\r'); + ptyProc.write(TEST_CODE + '\r'); + break; + case '2': + console.log('[2] Plain code + \\n (newline)'); + logHex('sending', TEST_CODE + '\n'); + ptyProc.write(TEST_CODE + '\n'); + break; + case '3': + console.log('[3] Bracketed paste + \\r'); + logHex('paste start', PASTE_START); + logHex('code', TEST_CODE); + logHex('paste end', PASTE_END); + ptyProc.write(PASTE_START + TEST_CODE + PASTE_END); + await new Promise(r => setTimeout(r, 200)); + logHex('enter', '\r'); + ptyProc.write('\r'); + break; + case '4': + console.log('[4] Plain code + \\r\\n (CRLF)'); + logHex('sending', TEST_CODE + '\r\n'); + ptyProc.write(TEST_CODE + '\r\n'); + break; + case '5': + console.log('[5] Character by character + \\r'); + for (const char of TEST_CODE) { + ptyProc.write(char); + await new Promise(r => setTimeout(r, 10)); + } + await new Promise(r => setTimeout(r, 200)); + logHex('enter', '\r'); + ptyProc.write('\r'); + break; + case '6': + console.log('[6] Code then wait, then Enter separately'); + logHex('code only', TEST_CODE); + ptyProc.write(TEST_CODE); + await new Promise(r => setTimeout(r, 1000)); + console.log('[6] Now sending Enter...'); + logHex('enter', '\r'); + ptyProc.write('\r'); + break; + case '7': + console.log('[7] Send Enter first then code then Enter'); + ptyProc.write('\r'); // Clear any existing state + await new Promise(r => setTimeout(r, 200)); + logHex('code + enter', TEST_CODE + '\r'); + ptyProc.write(TEST_CODE + '\r'); + break; + case '8': + console.log('[8] Ctrl+M (same as \\r but explicit)'); + logHex('code + ctrl-m', TEST_CODE + '\x0d'); + ptyProc.write(TEST_CODE + '\x0d'); + break; + default: + console.log('[DEFAULT] Plain code + \\r'); + ptyProc.write(TEST_CODE + '\r'); + } + + console.log('[SENT] Waiting for response...'); + + // Give it more time to process and show error + setTimeout(() => { + console.log('\n[TIMEOUT] Test complete, terminating...'); + console.log('[FINAL OUTPUT CHECK] Last 500 chars of output:'); + console.log(stripAnsi(output).slice(-500)); + ptyProc.kill(); + }, 20000); + } + + // Failsafe timeout + setTimeout(() => { + console.log('\n[FAILSAFE] Max time reached, terminating...'); + proc.kill(); + }, 60000); +} + +main().catch(console.error); diff --git a/scripts/test-pty-input.js b/scripts/test-pty-input.js new file mode 100644 index 00000000..7fc26f2d --- /dev/null +++ b/scripts/test-pty-input.js @@ -0,0 +1,150 @@ +#!/usr/bin/env node +/** + * Test PTY input methods for Claude CLI + * Run inside workspace container: node /app/dist/scripts/test-pty-input.js + */ + +import * as pty from 'node-pty'; +import * as readline from 'readline'; + +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}); + +function ask(question) { + return new Promise(resolve => rl.question(question, resolve)); +} + +function stripAnsi(str) { + return str.replace(/\x1b\[[0-9;]*[a-zA-Z]/g, ''); +} + +async function main() { + console.log('Starting Claude CLI via PTY...\n'); + + const proc = pty.spawn('claude', [], { + name: 'xterm-256color', + cols: 120, + rows: 30, + cwd: '/workspace', + env: { + ...process.env, + NO_COLOR: '1', + TERM: 'xterm-256color', + DISPLAY: '', + }, + }); + + let output = ''; + let authUrl = null; + const prompts = [ + { pattern: /dark\s*(mode|theme)/i, response: '\r', name: 'dark mode' }, + { pattern: /select\s*login|how\s*would\s*you\s*like|subscription\s*or.*api/i, response: '\r', name: 'login method' }, + ]; + const respondedPrompts = new Set(); + + proc.onData((data) => { + output += data; + const clean = stripAnsi(data); + + // Log output + if (clean.trim()) { + console.log('[PTY]', clean.substring(0, 200)); + } + + // Auto-respond to prompts + for (const prompt of prompts) { + if (!respondedPrompts.has(prompt.name) && prompt.pattern.test(clean)) { + respondedPrompts.add(prompt.name); + console.log(`\n[AUTO] Responding to: ${prompt.name}`); + setTimeout(() => proc.write(prompt.response), 100); + } + } + + // Capture auth URL + const urlMatch = clean.match(/(https:\/\/[^\s]+)/); + if (urlMatch && !authUrl) { + authUrl = urlMatch[1]; + console.log('\n[CAPTURED] Auth URL:', authUrl.substring(0, 80) + '...'); + promptForCode(); + } + }); + + proc.onExit(({ exitCode }) => { + console.log('\n[EXIT] Claude exited with code:', exitCode); + console.log('[OUTPUT LENGTH]', output.length); + rl.close(); + process.exit(exitCode); + }); + + async function promptForCode() { + console.log('\n========================================'); + console.log('Complete OAuth in browser, then paste the code here.'); + console.log('========================================\n'); + + const code = await ask('Paste auth code: '); + + console.log('\nSelect input method:'); + console.log('1. Plain code + \\r'); + console.log('2. Plain code + \\n'); + console.log('3. Bracketed paste + \\r'); + console.log('4. Bracketed paste + \\n'); + console.log('5. Character by character + \\r'); + + const method = await ask('Choice (1-5): '); + + const PASTE_START = '\x1b[200~'; + const PASTE_END = '\x1b[201~'; + const cleanCode = code.trim(); + + console.log(`\n[SENDING] Using method ${method}...`); + + switch (method) { + case '1': + proc.write(cleanCode + '\r'); + break; + case '2': + proc.write(cleanCode + '\n'); + break; + case '3': + proc.write(PASTE_START + cleanCode + PASTE_END); + await new Promise(r => setTimeout(r, 200)); + proc.write('\r'); + break; + case '4': + proc.write(PASTE_START + cleanCode + PASTE_END); + await new Promise(r => setTimeout(r, 200)); + proc.write('\n'); + break; + case '5': + for (const char of cleanCode) { + proc.write(char); + await new Promise(r => setTimeout(r, 10)); + } + await new Promise(r => setTimeout(r, 200)); + proc.write('\r'); + break; + default: + proc.write(cleanCode + '\r'); + } + + console.log('[SENT] Waiting for response...\n'); + + // Wait and watch output + setTimeout(() => { + console.log('\n[CHECK] Checking for credentials file...'); + import('fs').then(fs => { + const credPath = '/home/workspace/.claude/.credentials.json'; + if (fs.existsSync(credPath)) { + console.log('[SUCCESS] Credentials file found!'); + console.log(fs.readFileSync(credPath, 'utf8').substring(0, 200)); + } else { + console.log('[FAIL] No credentials file yet'); + } + }); + }, 5000); + } +} + +main().catch(console.error); diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index 3a15fbd8..a101aaf7 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -143,6 +143,7 @@ export class AgentSpawner { * Called after the dashboard server starts and we know the actual port. */ setDashboardPort(port: number): void { + console.log(`[spawner] Dashboard port set to ${port} - nested spawns now enabled`); this.dashboardPort = port; } @@ -275,11 +276,15 @@ export class AgentSpawner { // Create PtyWrapper config // Use dashboardPort for nested spawns (API-based, works in non-TTY contexts) - // Fall back to callbacks only if no dashboardPort is set + // Fall back to callbacks only if no dashboardPort is not set // Note: Spawned agents CAN spawn sub-workers intentionally - the parser is strict enough // to avoid accidental spawns from documentation text (requires line start, PascalCase, known CLI) // Use request.cwd if specified, otherwise use projectRoot const agentCwd = request.cwd || this.projectRoot; + + // Log whether nested spawning will be enabled for this agent + console.log(`[spawner] Spawning ${name}: dashboardPort=${this.dashboardPort || 'none'} (${this.dashboardPort ? 'nested spawns enabled' : 'nested spawns disabled'})`); + const ptyConfig: PtyWrapperConfig = { name, command, diff --git a/src/bridge/types.ts b/src/bridge/types.ts index e604c883..df04f8cc 100644 --- a/src/bridge/types.ts +++ b/src/bridge/types.ts @@ -41,6 +41,8 @@ export interface SpawnRequest { task: string; /** Optional team name to organize agents under */ team?: string; + /** Working directory for the agent (defaults to detected workspace) */ + cwd?: string; /** Name of the agent requesting the spawn (for policy enforcement) */ spawnerName?: string; /** Shadow execution mode (subagent = no extra process) */ diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index e9bc7c49..02b6b7f3 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -261,11 +261,17 @@ onboardingRouter.get('/cli/:provider/status/:sessionId', async (req: Request, re /** * POST /api/onboarding/cli/:provider/complete/:sessionId * Mark CLI auth as complete and store credentials + * + * Handles two modes: + * 1. Workspace delegation: Forwards to workspace daemon to complete auth, then fetches credentials + * 2. Direct: Uses token from body or session */ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, res: Response) => { const { provider, sessionId } = req.params; const userId = req.session.userId!; - const { token } = req.body; // Optional: user can paste token directly + const { token, authCode } = req.body; // token for direct mode, authCode for Codex redirect + + console.log(`[onboarding] POST /cli/${provider}/complete/${sessionId} - token: ${token ? 'provided' : 'none'}, authCode: ${authCode ? 'provided' : 'none'}`); const session = activeSessions.get(sessionId); if (!session) { @@ -277,31 +283,58 @@ onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, } try { - // If token provided directly, use it let accessToken = token || session.token; let refreshToken = session.refreshToken; let tokenExpiresAt = session.tokenExpiresAt; - // If no token yet, try to get from workspace - if (!accessToken && session.workspaceUrl && session.workspaceSessionId) { - try { - const credsResponse = await fetch( - `${session.workspaceUrl}/auth/cli/${provider}/creds/${session.workspaceSessionId}` - ); - if (credsResponse.ok) { - const creds = await credsResponse.json() as { - token?: string; - refreshToken?: string; - expiresAt?: string; - }; - accessToken = creds.token; - refreshToken = creds.refreshToken; - if (creds.expiresAt) { - tokenExpiresAt = new Date(creds.expiresAt); + // If using workspace delegation, forward complete request first + if (session.workspaceUrl && session.workspaceSessionId) { + // Forward authCode to workspace if provided (for Codex-style redirects) + if (authCode) { + const backendProviderId = provider === 'anthropic' ? 'anthropic' : provider; + const targetUrl = `${session.workspaceUrl}/auth/cli/${backendProviderId}/complete/${session.workspaceSessionId}`; + console.log('[onboarding] Forwarding complete request to workspace:', targetUrl); + + const completeResponse = await fetch(targetUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ authCode }), + }); + + if (!completeResponse.ok) { + const errorData = await completeResponse.json().catch(() => ({})) as { error?: string }; + return res.status(completeResponse.status).json({ + error: errorData.error || 'Failed to complete authentication in workspace', + }); + } + session.status = 'success'; + } + + // Fetch credentials from workspace + if (!accessToken) { + try { + const credsResponse = await fetch( + `${session.workspaceUrl}/auth/cli/${provider}/creds/${session.workspaceSessionId}` + ); + if (credsResponse.ok) { + const creds = await credsResponse.json() as { + token?: string; + refreshToken?: string; + expiresAt?: string; + }; + accessToken = creds.token; + refreshToken = creds.refreshToken; + if (creds.expiresAt) { + tokenExpiresAt = new Date(creds.expiresAt); + } + console.log('[onboarding] Fetched credentials from workspace:', { + hasToken: !!accessToken, + hasRefreshToken: !!refreshToken, + }); } + } catch (err) { + console.error('[onboarding] Failed to get credentials from workspace:', err); } - } catch (err) { - console.error('[onboarding] Failed to get credentials from workspace:', err); } } @@ -415,62 +448,8 @@ onboardingRouter.post('/cli/:provider/code/:sessionId', async (req: Request, res }); }); -/** - * POST /api/onboarding/cli/:provider/complete/:sessionId - * Complete auth - for providers like Codex, accepts authCode (redirect URL with code) - * For providers like Claude, just polls for credentials - */ -onboardingRouter.post('/cli/:provider/complete/:sessionId', async (req: Request, res: Response) => { - const { provider, sessionId } = req.params; - const { authCode } = req.body || {}; - const userId = req.session.userId!; - - console.log(`[onboarding] POST /cli/${provider}/complete/${sessionId} - authCode: ${authCode ? 'provided' : 'none'}`); - - const session = activeSessions.get(sessionId); - if (!session) { - return res.status(404).json({ error: 'Session not found or expired' }); - } - - if (session.userId !== userId) { - return res.status(403).json({ error: 'Unauthorized' }); - } - - // Forward to workspace daemon - if (session.workspaceUrl && session.workspaceSessionId) { - try { - const backendProviderId = provider === 'anthropic' ? 'anthropic' : provider; - const targetUrl = `${session.workspaceUrl}/auth/cli/${backendProviderId}/complete/${session.workspaceSessionId}`; - console.log('[onboarding] Forwarding complete request to workspace:', targetUrl); - - // Forward the authCode if provided - const completeResponse = await fetch(targetUrl, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: authCode ? JSON.stringify({ authCode }) : undefined, - }); - - if (completeResponse.ok) { - session.status = 'success'; - return res.json({ success: true, message: 'Authentication complete' }); - } - - const errorData = await completeResponse.json().catch(() => ({})) as { error?: string }; - return res.status(completeResponse.status).json({ - error: errorData.error || 'Failed to complete authentication', - }); - } catch (err) { - console.error('[onboarding] Failed to complete auth via workspace:', err); - return res.status(500).json({ - error: 'Failed to reach workspace. Please ensure your workspace is running.', - }); - } - } - - return res.status(400).json({ - error: 'No workspace session available. Please try connecting again.', - }); -}); +// Note: POST /cli/:provider/complete/:sessionId handler is defined above (lines 269-368) +// It handles both direct token storage and workspace delegation with authCode forwarding /** * POST /api/onboarding/cli/:provider/cancel/:sessionId diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 56b60732..2169a2c1 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -404,9 +404,55 @@ export async function createServer(): Promise { return Array.from(onlineUsers.values()).map((state) => state.info); }; + // Heartbeat interval to detect dead connections (30 seconds) + const PRESENCE_HEARTBEAT_INTERVAL = 30000; + const PRESENCE_HEARTBEAT_TIMEOUT = 35000; // Allow 5s grace period + + // Track connection health for heartbeat + const connectionHealth = new WeakMap(); + + // Heartbeat interval to clean up dead connections + const presenceHeartbeat = setInterval(() => { + const now = Date.now(); + wssPresence.clients.forEach((ws) => { + const health = connectionHealth.get(ws); + if (!health) { + // New connection without health tracking - initialize it + connectionHealth.set(ws, { isAlive: true, lastPing: now }); + return; + } + + if (!health.isAlive) { + // Connection didn't respond to last ping - terminate it + ws.terminate(); + return; + } + + // Mark as not alive until we get a pong + health.isAlive = false; + health.lastPing = now; + ws.ping(); + }); + }, PRESENCE_HEARTBEAT_INTERVAL); + + // Clean up interval on server close + wssPresence.on('close', () => { + clearInterval(presenceHeartbeat); + }); + // Handle presence connections wssPresence.on('connection', (ws) => { - console.log('[cloud] Presence WebSocket client connected'); + // Initialize health tracking (no log - too noisy) + connectionHealth.set(ws, { isAlive: true, lastPing: Date.now() }); + + // Handle pong responses (heartbeat) + ws.on('pong', () => { + const health = connectionHealth.get(ws); + if (health) { + health.isAlive = true; + } + }); + let clientUsername: string | undefined; ws.on('message', (data) => { @@ -434,7 +480,11 @@ export async function createServer(): Promise { if (existing) { existing.connections.add(ws); existing.info.lastSeen = now; - console.log(`[cloud] User ${username} opened new tab (${existing.connections.size} connections)`); + // Only log at milestones to reduce noise + const count = existing.connections.size; + if (count === 2 || count === 5 || count === 10 || count % 50 === 0) { + console.log(`[cloud] User ${username} has ${count} connections`); + } } else { onlineUsers.set(username, { info: { username, avatarUrl, connectedAt: now, lastSeen: now }, diff --git a/src/daemon/agent-registry.ts b/src/daemon/agent-registry.ts index f2b4ad0f..0bbba06e 100644 --- a/src/daemon/agent-registry.ts +++ b/src/daemon/agent-registry.ts @@ -164,6 +164,42 @@ export class AgentRegistry { return Array.from(this.agents.values()); } + /** + * Remove an agent from the registry. + */ + remove(agentName: string): boolean { + const deleted = this.agents.delete(agentName); + if (deleted) { + this.save(); + } + return deleted; + } + + /** + * Remove agents that haven't been seen for longer than the threshold. + * @param thresholdMs - Time in milliseconds (default: 24 hours) + * @returns Number of agents removed + */ + pruneStale(thresholdMs: number = 24 * 60 * 60 * 1000): number { + const cutoff = Date.now() - thresholdMs; + let removed = 0; + + for (const [name, record] of this.agents) { + const lastSeenTime = new Date(record.lastSeen).getTime(); + if (lastSeenTime < cutoff) { + this.agents.delete(name); + removed++; + log.info('Pruned stale agent', { name, lastSeen: record.lastSeen }); + } + } + + if (removed > 0) { + this.save(); + } + + return removed; + } + private ensureRecord(agentName: string): AgentRecord { const existing = this.agents.get(agentName); if (existing) return existing; diff --git a/src/dashboard/app/app/page.tsx b/src/dashboard/app/app/page.tsx index 6edf1c88..e5b80f2b 100644 --- a/src/dashboard/app/app/page.tsx +++ b/src/dashboard/app/app/page.tsx @@ -435,7 +435,10 @@ export default function DashboardPage() { {/* Provider auth flow - using shared component */} {connectingProvider && (() => { - const provider = AI_PROVIDERS.find(p => p.id === connectingProvider); + // Handle codex-device as codex with device flow + const isDeviceFlow = connectingProvider === 'codex-device'; + const providerId = isDeviceFlow ? 'codex' : connectingProvider; + const provider = AI_PROVIDERS.find(p => p.id === providerId); if (!provider) return null; return (
@@ -445,10 +448,13 @@ export default function DashboardPage() { name: provider.name, displayName: provider.displayName, color: provider.color, - requiresUrlCopy: provider.requiresUrlCopy, + // Don't require URL copy for device flow + requiresUrlCopy: isDeviceFlow ? false : provider.requiresUrlCopy, + supportsDeviceFlow: provider.supportsDeviceFlow, }} workspaceId={selectedWorkspace!.id} csrfToken={csrfToken || undefined} + useDeviceFlow={isDeviceFlow} onSuccess={() => { // Show success state briefly, then offer options setConnectingProvider(null); @@ -488,34 +494,95 @@ export default function DashboardPage() {

Choose an AI Provider

{AI_PROVIDERS.map((provider) => ( -
- - {/* Pre-auth warning for providers that require URL copy */} - {provider.requiresUrlCopy && ( -
-

- โš ๏ธ Important: After signing in, you'll see a "Page not found" error. - This is expected! Copy the entire URL from your browser's address bar - (it will look like http://127.0.0.1:...?code=...) and paste it back here. -

+
+ {/* Special expanded section for Codex with device flow option */} + {provider.id === 'codex' ? ( +
+
+
+ {provider.displayName[0]} +
+
+

{provider.displayName}

+

{provider.name}

+
+
+ + {/* Warning about localhost redirect */} +
+

โš ๏ธ Heads up about the login flow

+

+ OpenAI's OAuth redirects to localhost after login, + which will show a "Page not found" or "This site can't be reached" error. + This is expected! You'll need to copy the URL from your browser and paste it back here. +

+
+ + {/* Two auth options */} +
+ + + +
+ ) : ( + /* Standard provider button */ + )}
))} diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 81bbc82a..b7ad5a19 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -38,7 +38,8 @@ import { useTrajectory } from './hooks/useTrajectory'; import { useRecentRepos } from './hooks/useRecentRepos'; import { usePresence, type UserPresence } from './hooks/usePresence'; import { useCloudSessionOptional } from './CloudSessionProvider'; -import { api, convertApiDecision } from '../lib/api'; +import { WorkspaceProvider } from './WorkspaceContext'; +import { api, convertApiDecision, setActiveWorkspaceId as setApiWorkspaceId } from '../lib/api'; import { cloudApi } from '../lib/cloudApi'; import type { CurrentUser } from './MessageList'; @@ -147,6 +148,16 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { const effectiveActiveWorkspaceId = isCloudMode ? activeCloudWorkspaceId : activeWorkspaceId; const effectiveIsLoading = isCloudMode ? isLoadingCloudWorkspaces : isOrchestratorLoading; + // Sync the active workspace ID with the api module for cloud mode proxying + useEffect(() => { + if (isCloudMode && activeCloudWorkspaceId) { + setApiWorkspaceId(activeCloudWorkspaceId); + } else if (!isCloudMode) { + // Clear the workspace ID when not in cloud mode + setApiWorkspaceId(null); + } + }, [isCloudMode, activeCloudWorkspaceId]); + // Handle workspace selection (works for both cloud and orchestrator) const handleEffectiveWorkspaceSelect = useCallback(async (workspace: { id: string; name: string }) => { if (isCloudMode) { @@ -834,6 +845,7 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { }, [handleSpawnClick, handleNewConversationClick]); return ( +
{/* Mobile Sidebar Overlay */}
)}
+ ); } diff --git a/src/dashboard/react-components/ProviderAuthFlow.tsx b/src/dashboard/react-components/ProviderAuthFlow.tsx index 16dbdafc..60b3f259 100644 --- a/src/dashboard/react-components/ProviderAuthFlow.tsx +++ b/src/dashboard/react-components/ProviderAuthFlow.tsx @@ -203,7 +203,8 @@ export function ProviderAuthFlow({ } setStatus('success'); - onSuccess(); + // Brief delay to show success message before parent unmounts component + setTimeout(() => onSuccess(), 1500); } catch (err) { const msg = err instanceof Error ? err.message : 'Failed to complete authentication'; setErrorMessage(msg); @@ -247,7 +248,7 @@ export function ProviderAuthFlow({ body: JSON.stringify({ code }), }); - const data = await res.json() as { status?: string; error?: string; needsRestart?: boolean }; + const data = await res.json() as { success?: boolean; status?: string; error?: string; needsRestart?: boolean }; if (!res.ok) { // If server indicates we need to restart, show helpful message @@ -261,7 +262,9 @@ export function ProviderAuthFlow({ setCodeInput(''); - if (data.status === 'success') { + // Backend returns { success: true } not { status: 'success' } + if (data.success) { + // Code was accepted, now complete the auth flow to store credentials await handleComplete(); } // Otherwise continue polling diff --git a/src/dashboard/react-components/WorkspaceContext.tsx b/src/dashboard/react-components/WorkspaceContext.tsx new file mode 100644 index 00000000..0a10ed0f --- /dev/null +++ b/src/dashboard/react-components/WorkspaceContext.tsx @@ -0,0 +1,107 @@ +/** + * Workspace Context + * + * Provides the current workspace's base URL for WebSocket connections. + * Used by LogViewer and other components that need to connect to workspace-specific endpoints. + */ + +import React, { createContext, useContext, useMemo } from 'react'; + +interface WorkspaceContextValue { + /** Base WebSocket URL for the workspace (e.g., wss://workspace-abc.agentrelay.dev) */ + wsBaseUrl: string | null; + /** Whether we're in cloud mode (workspace URL is different from page host) */ + isCloudMode: boolean; +} + +const WorkspaceContext = createContext({ + wsBaseUrl: null, + isCloudMode: false, +}); + +export interface WorkspaceProviderProps { + children: React.ReactNode; + /** The workspace WebSocket URL (e.g., wss://workspace-abc.agentrelay.dev/ws) */ + wsUrl?: string; +} + +/** + * Extract base URL from a WebSocket URL + * e.g., wss://workspace-abc.agentrelay.dev/ws -> wss://workspace-abc.agentrelay.dev + */ +function getBaseUrl(wsUrl: string): string { + try { + const url = new URL(wsUrl); + return `${url.protocol}//${url.host}`; + } catch { + return wsUrl; + } +} + +export function WorkspaceProvider({ children, wsUrl }: WorkspaceProviderProps) { + const value = useMemo(() => { + if (!wsUrl) { + return { wsBaseUrl: null, isCloudMode: false }; + } + + const wsBaseUrl = getBaseUrl(wsUrl); + + // Check if we're in cloud mode by comparing the workspace URL host with the current page host + let isCloudMode = false; + if (typeof window !== 'undefined') { + try { + const wsHost = new URL(wsUrl).host; + isCloudMode = wsHost !== window.location.host; + } catch { + // Ignore parse errors + } + } + + return { wsBaseUrl, isCloudMode }; + }, [wsUrl]); + + return ( + + {children} + + ); +} + +/** + * Hook to access the workspace context + */ +export function useWorkspace(): WorkspaceContextValue { + return useContext(WorkspaceContext); +} + +/** + * Get the WebSocket URL for a specific path within the workspace + * Falls back to current host if not in a workspace context + */ +export function useWorkspaceWsUrl(path: string): string { + const { wsBaseUrl } = useWorkspace(); + + return useMemo(() => { + if (wsBaseUrl) { + return `${wsBaseUrl}${path}`; + } + + // Fallback to current host + if (typeof window === 'undefined') { + return `ws://localhost:3889${path}`; + } + + const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; + const isDev = process.env.NODE_ENV === 'development'; + const { hostname, port } = window.location; + + // Next.js dev runs on 3888, dashboard server on 3889 + if (isDev && port === '3888') { + return `${protocol}//${hostname || 'localhost'}:3889${path}`; + } + + return `${protocol}//${window.location.host}${path}`; + }, [wsBaseUrl, path]); +} + +export default WorkspaceContext; diff --git a/src/dashboard/react-components/XTermLogViewer.tsx b/src/dashboard/react-components/XTermLogViewer.tsx index af9ea23a..666f8157 100644 --- a/src/dashboard/react-components/XTermLogViewer.tsx +++ b/src/dashboard/react-components/XTermLogViewer.tsx @@ -10,6 +10,7 @@ import { Terminal } from '@xterm/xterm'; import { FitAddon } from '@xterm/addon-fit'; import { SearchAddon } from '@xterm/addon-search'; import { getAgentColor } from '../lib/colors'; +import { useWorkspaceWsUrl } from './WorkspaceContext'; export interface XTermLogViewerProps { /** Agent name to stream logs from */ @@ -50,27 +51,7 @@ const TERMINAL_THEME = { brightWhite: '#ffffff', }; -/** - * Get WebSocket URL for agent log streaming - */ -function getLogStreamUrl(agentName: string): string { - const path = `/ws/logs/${encodeURIComponent(agentName)}`; - const isDev = process.env.NODE_ENV === 'development'; - - if (typeof window === 'undefined') { - return `ws://localhost:3889${path}`; - } - - const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; - const { hostname, port } = window.location; - - if (isDev && port === '3888') { - const host = hostname || 'localhost'; - return `${protocol}//${host}:3889${path}`; - } - - return `${protocol}//${window.location.host}${path}`; -} +// getLogStreamUrl removed - now using useWorkspaceWsUrl hook export function XTermLogViewer({ agentName, @@ -97,6 +78,9 @@ export function XTermLogViewer({ const searchInputRef = useRef(null); const colors = getAgentColor(agentName); + // Get WebSocket URL from workspace context (handles cloud vs local mode) + const logStreamUrl = useWorkspaceWsUrl(`/ws/logs/${encodeURIComponent(agentName)}`); + // Initialize terminal useEffect(() => { if (!containerRef.current) return; @@ -152,8 +136,7 @@ export function XTermLogViewer({ setIsConnecting(true); setError(null); - const url = getLogStreamUrl(agentName); - const ws = new WebSocket(url); + const ws = new WebSocket(logStreamUrl); wsRef.current = ws; ws.onopen = () => { @@ -254,7 +237,7 @@ export function XTermLogViewer({ } } }; - }, [agentName]); + }, [logStreamUrl, agentName]); // Disconnect from WebSocket const disconnect = useCallback(() => { diff --git a/src/dashboard/react-components/hooks/usePresence.ts b/src/dashboard/react-components/hooks/usePresence.ts index 71396e8a..897ebdd9 100644 --- a/src/dashboard/react-components/hooks/usePresence.ts +++ b/src/dashboard/react-components/hooks/usePresence.ts @@ -87,6 +87,7 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn const wsRef = useRef(null); const reconnectTimeoutRef = useRef(null); const typingTimeoutRef = useRef(null); + const isConnectingRef = useRef(false); // Prevent race conditions // Clear stale typing indicators (after 3 seconds of no update) useEffect(() => { @@ -103,13 +104,16 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn const connect = useCallback(() => { if (!currentUser) return; // Don't connect without user info if (wsRef.current?.readyState === WebSocket.OPEN) return; + if (isConnectingRef.current) return; // Prevent concurrent connect attempts + isConnectingRef.current = true; const url = wsUrl || getPresenceUrl(); try { const ws = new WebSocket(url); ws.onopen = () => { + isConnectingRef.current = false; setIsConnected(true); // Announce presence @@ -124,13 +128,16 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn }; ws.onclose = () => { + isConnectingRef.current = false; setIsConnected(false); wsRef.current = null; - // Reconnect after 2 seconds - reconnectTimeoutRef.current = setTimeout(() => { - connect(); - }, 2000); + // Reconnect after 2 seconds (only if not intentionally disconnected) + if (currentUser) { + reconnectTimeoutRef.current = setTimeout(() => { + connect(); + }, 2000); + } }; ws.onerror = (event) => { @@ -200,21 +207,30 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn }, [currentUser, wsUrl]); const disconnect = useCallback(() => { + // Clear reconnect timeout first if (reconnectTimeoutRef.current) { clearTimeout(reconnectTimeoutRef.current); reconnectTimeoutRef.current = null; } + // Reset connecting flag + isConnectingRef.current = false; + if (wsRef.current) { + // Prevent auto-reconnect by removing onclose handler before closing + const ws = wsRef.current; + ws.onclose = null; + ws.onerror = null; + // Send leave message before closing - if (wsRef.current.readyState === WebSocket.OPEN && currentUser) { - wsRef.current.send(JSON.stringify({ + if (ws.readyState === WebSocket.OPEN && currentUser) { + ws.send(JSON.stringify({ type: 'presence', action: 'leave', username: currentUser.username, })); } - wsRef.current.close(); + ws.close(); wsRef.current = null; } @@ -249,15 +265,26 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn }, [currentUser]); // Connect when user is available + // Use refs to avoid effect re-running on function reference changes + const currentUserRef = useRef(currentUser); + currentUserRef.current = currentUser; + useEffect(() => { - if (autoConnect && currentUser) { - connect(); + if (!autoConnect || !currentUserRef.current) return; + + // Prevent connecting if already connected or connecting + if (wsRef.current && wsRef.current.readyState !== WebSocket.CLOSED) { + return; } + connect(); + return () => { disconnect(); }; - }, [autoConnect, currentUser, connect, disconnect]); + // Only re-run when autoConnect or currentUser identity changes + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [autoConnect, currentUser?.username]); // Send leave on page unload useEffect(() => { diff --git a/src/dashboard/react-components/hooks/useTrajectory.ts b/src/dashboard/react-components/hooks/useTrajectory.ts index ca61dd3e..184c5326 100644 --- a/src/dashboard/react-components/hooks/useTrajectory.ts +++ b/src/dashboard/react-components/hooks/useTrajectory.ts @@ -7,6 +7,7 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import type { TrajectoryStep } from '../TrajectoryViewer'; +import { getApiUrl } from '../../lib/api'; interface TrajectoryStatus { active: boolean; @@ -67,7 +68,11 @@ export function useTrajectory(options: UseTrajectoryOptions = {}): UseTrajectory // Fetch trajectory status const fetchStatus = useCallback(async () => { try { - const response = await fetch(`${apiBaseUrl}/api/trajectory`); + // Use apiBaseUrl if provided, otherwise use getApiUrl for cloud mode routing + const url = apiBaseUrl + ? `${apiBaseUrl}/api/trajectory` + : getApiUrl('/api/trajectory'); + const response = await fetch(url, { credentials: 'include' }); const data = await response.json(); if (data.success !== false) { @@ -86,7 +91,10 @@ export function useTrajectory(options: UseTrajectoryOptions = {}): UseTrajectory // Fetch trajectory history const fetchHistory = useCallback(async () => { try { - const response = await fetch(`${apiBaseUrl}/api/trajectory/history`); + const url = apiBaseUrl + ? `${apiBaseUrl}/api/trajectory/history` + : getApiUrl('/api/trajectory/history'); + const response = await fetch(url, { credentials: 'include' }); const data = await response.json(); if (data.success) { @@ -101,11 +109,14 @@ export function useTrajectory(options: UseTrajectoryOptions = {}): UseTrajectory const fetchSteps = useCallback(async () => { try { const trajectoryId = selectedTrajectoryId; - const url = trajectoryId - ? `${apiBaseUrl}/api/trajectory/steps?trajectoryId=${encodeURIComponent(trajectoryId)}` - : `${apiBaseUrl}/api/trajectory/steps`; - - const response = await fetch(url); + const basePath = trajectoryId + ? `/api/trajectory/steps?trajectoryId=${encodeURIComponent(trajectoryId)}` + : '/api/trajectory/steps'; + const url = apiBaseUrl + ? `${apiBaseUrl}${basePath}` + : getApiUrl(basePath); + + const response = await fetch(url, { credentials: 'include' }); const data = await response.json(); if (data.success) { diff --git a/src/dashboard/react-components/index.ts b/src/dashboard/react-components/index.ts index df3dddcb..ebdbd453 100644 --- a/src/dashboard/react-components/index.ts +++ b/src/dashboard/react-components/index.ts @@ -38,6 +38,12 @@ export { useCloudSessionOptional, type CloudSessionProviderProps, } from './CloudSessionProvider'; +export { + WorkspaceProvider, + useWorkspace, + useWorkspaceWsUrl, + type WorkspaceProviderProps, +} from './WorkspaceContext'; // Layout Components export { Sidebar, type SidebarProps } from './layout/Sidebar'; diff --git a/src/dashboard/react-components/layout/Sidebar.tsx b/src/dashboard/react-components/layout/Sidebar.tsx index c43b2c42..138902bd 100644 --- a/src/dashboard/react-components/layout/Sidebar.tsx +++ b/src/dashboard/react-components/layout/Sidebar.tsx @@ -14,6 +14,9 @@ import { ThreadList } from '../ThreadList'; import { LogoIcon } from '../Logo'; const THREADS_COLLAPSED_KEY = 'agent-relay-threads-collapsed'; +const SIDEBAR_TAB_KEY = 'agent-relay-sidebar-tab'; + +export type SidebarTab = 'agents' | 'team'; export interface SidebarProps { agents: Agent[]; @@ -70,6 +73,15 @@ export function Sidebar({ onSettingsClick, }: SidebarProps) { const [searchQuery, setSearchQuery] = useState(''); + const [activeTab, setActiveTab] = useState(() => { + // Initialize from localStorage + try { + const stored = localStorage.getItem(SIDEBAR_TAB_KEY); + return (stored === 'team' ? 'team' : 'agents') as SidebarTab; + } catch { + return 'agents'; + } + }); const [isThreadsCollapsed, setIsThreadsCollapsed] = useState(() => { // Initialize from localStorage try { @@ -80,6 +92,15 @@ export function Sidebar({ } }); + // Persist tab state to localStorage + useEffect(() => { + try { + localStorage.setItem(SIDEBAR_TAB_KEY, activeTab); + } catch { + // localStorage not available + } + }, [activeTab]); + // Persist collapsed state to localStorage useEffect(() => { try { @@ -89,6 +110,10 @@ export function Sidebar({ } }, [isThreadsCollapsed]); + // Separate AI agents from human team members + const aiAgents = agents.filter(a => !a.isHuman); + const humanMembers = agents.filter(a => a.isHuman); + // Determine if we should show unified project view const hasProjects = projects.length > 0; @@ -141,12 +166,48 @@ export function Sidebar({ )}
+ {/* Agents/Team Tabs */} + {humanMembers.length > 0 && ( +
+ + +
+ )} + {/* Search */}
setSearchQuery(e.target.value)} className="flex-1 bg-transparent border-none text-text-primary text-sm outline-none placeholder:text-text-muted" @@ -177,10 +238,51 @@ export function Sidebar({ {/* Agent/Project List */}
- {hasProjects ? ( + {activeTab === 'team' && humanMembers.length > 0 ? ( + /* Team Members List */ +
+ {humanMembers + .filter(m => !searchQuery || m.name.toLowerCase().includes(searchQuery.toLowerCase())) + .map((member) => ( + + ))} + {humanMembers.filter(m => !searchQuery || m.name.toLowerCase().includes(searchQuery.toLowerCase())).length === 0 && ( +
+ +

No team members match "{searchQuery}"

+
+ )} +
+ ) : hasProjects ? ( ) : ( onAgentSelect?.(agent)} @@ -287,3 +389,26 @@ function SettingsIcon() { ); } + +function RobotIcon() { + return ( + + + + + + + + ); +} + +function UsersIcon() { + return ( + + + + + + + ); +} diff --git a/src/dashboard/types/index.ts b/src/dashboard/types/index.ts index 91235df4..7dfaf856 100644 --- a/src/dashboard/types/index.ts +++ b/src/dashboard/types/index.ts @@ -24,6 +24,8 @@ export interface Agent { lastMessageReceivedAt?: number; // Timestamp when agent last received a message lastOutputAt?: number; // Timestamp when agent last produced output isStuck?: boolean; // True when agent received message but hasn't responded within threshold + isHuman?: boolean; // True if this is a human user, not an AI agent + avatarUrl?: string; // Avatar URL for human users // Profile fields for understanding agent behavior profile?: AgentProfile; } diff --git a/src/shared/cli-auth-config.ts b/src/shared/cli-auth-config.ts index 9046ad62..670018fc 100644 --- a/src/shared/cli-auth-config.ts +++ b/src/shared/cli-auth-config.ts @@ -102,6 +102,15 @@ export const CLI_AUTH_CONFIG: Record = { delay: 300, // Slightly longer delay for menu to render description: 'Trust directory prompt', }, + { + // "Ready to code here?" permission prompt - asks for file access permission + // Shows "Yes, continue" / "No, exit" options with "Enter to confirm" + // This is different from trust directory - it's about granting file permissions + pattern: /ready\s*to\s*code\s*here|permission\s*to\s*work\s*with\s*your\s*files|yes,?\s*continue/i, + response: '\r', // Press enter to accept "Yes, continue" (already selected) + delay: 300, + description: 'Ready to code permission prompt', + }, { // Fallback: Any "press enter" or "enter to confirm/continue" prompt // Keep this LAST so more specific handlers match first diff --git a/src/utils/project-namespace.ts b/src/utils/project-namespace.ts index dcc9e5c4..e706af1f 100644 --- a/src/utils/project-namespace.ts +++ b/src/utils/project-namespace.ts @@ -168,3 +168,96 @@ export function listProjects(): Array<{ projectId: string; projectRoot: string; return projects; } + +/** + * Detect the actual workspace directory for cloud deployments. + * + * In cloud workspaces, repos are cloned to /workspace/{repo-name}. + * This function finds the correct working directory: + * + * Priority: + * 1. WORKSPACE_CWD env var (explicit override) + * 2. If baseDir itself is a git repo, use it + * 3. Scan baseDir for cloned repos - use the first one found (alphabetically) + * 4. Fall back to baseDir + * + * @param baseDir - The base workspace directory (e.g., /workspace) + * @returns The actual workspace path to use + */ +export function detectWorkspacePath(baseDir: string): string { + // 1. Explicit override + if (process.env.WORKSPACE_CWD) { + return process.env.WORKSPACE_CWD; + } + + // 2. Check if baseDir itself is a git repo + if (fs.existsSync(path.join(baseDir, '.git'))) { + return baseDir; + } + + // 3. Scan for cloned repos (directories with .git) + try { + const entries = fs.readdirSync(baseDir, { withFileTypes: true }); + const repos: string[] = []; + + for (const entry of entries) { + if (entry.isDirectory()) { + const repoPath = path.join(baseDir, entry.name); + const gitPath = path.join(repoPath, '.git'); + if (fs.existsSync(gitPath)) { + repos.push(repoPath); + } + } + } + + // Sort alphabetically for consistent behavior + repos.sort(); + + // Use the first repo found + if (repos.length > 0) { + if (repos.length > 1) { + console.log(`[workspace] Multiple repos found, using first: ${repos[0]} (others: ${repos.slice(1).join(', ')})`); + } else { + console.log(`[workspace] Detected repo: ${repos[0]}`); + } + return repos[0]; + } + } catch (err) { + // Failed to scan, fall back + console.warn(`[workspace] Failed to scan ${baseDir}:`, err); + } + + // 4. Fall back to baseDir + return baseDir; +} + +/** + * List all git repos in a workspace directory. + * Useful for allowing users to select which repo to work in. + * + * @param baseDir - The base workspace directory + * @returns Array of repo paths + */ +export function listWorkspaceRepos(baseDir: string): string[] { + const repos: string[] = []; + + try { + const entries = fs.readdirSync(baseDir, { withFileTypes: true }); + + for (const entry of entries) { + if (entry.isDirectory()) { + const repoPath = path.join(baseDir, entry.name); + const gitPath = path.join(repoPath, '.git'); + if (fs.existsSync(gitPath)) { + repos.push(repoPath); + } + } + } + + repos.sort(); + } catch { + // Failed to scan + } + + return repos; +} diff --git a/src/wrapper/parser.ts b/src/wrapper/parser.ts index b7c4b646..5e336bea 100644 --- a/src/wrapper/parser.ts +++ b/src/wrapper/parser.ts @@ -125,7 +125,14 @@ const PLACEHOLDER_TARGETS = new Set([ 'targetagent', 'someagent', 'otheragent', - 'worker', // Too generic, often used in examples + 'name', // Generic placeholder + // NOTE: Removed 'lead', 'developer', 'reviewer', 'architect', 'designer' - these are valid agent names! + // Only include truly placeholder names that would never be real agents + 'sender', + 'agent', + 'workername', // ->relay:spawn WorkerName examples + 'myagent', + 'youragent', ]); /** @@ -140,7 +147,7 @@ function isInstructionalText(body: string): boolean { * Check if a target name is a placeholder commonly used in documentation/examples. * These should not be treated as real message targets. */ -function isPlaceholderTarget(target: string): boolean { +export function isPlaceholderTarget(target: string): boolean { return PLACEHOLDER_TARGETS.has(target.toLowerCase()); } diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index df6f34bf..41b495af 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -151,6 +151,9 @@ export class PtyWrapper extends EventEmitter { private sessionEndData?: SessionEndMarker; // Store SESSION_END data for handoff private instructionsInjected = false; // Track if init instructions have been injected private continuityInjected = false; // Track if continuity context has been injected + private recentLogChunks: Map = new Map(); // Dedup log streaming (hash -> timestamp) + private static readonly LOG_DEDUP_WINDOW_MS = 500; // Window for considering logs as duplicates + private static readonly LOG_DEDUP_MAX_SIZE = 100; // Max entries in dedup map constructor(config: PtyWrapperConfig) { super(); @@ -484,9 +487,10 @@ export class PtyWrapper extends EventEmitter { // Stream to daemon for dashboard log viewing (if connected) // Filter out Claude's extended thinking blocks before streaming + // Also deduplicate to prevent terminal redraws from causing duplicate log entries if (this.config.streamLogs !== false && this.client.state === 'READY') { const filteredData = this.filterThinkingBlocks(data); - if (filteredData) { + if (filteredData && !this.isDuplicateLogChunk(filteredData)) { this.client.sendLog(filteredData); } } @@ -588,6 +592,51 @@ export class PtyWrapper extends EventEmitter { return outputLines.join('\n'); } + /** + * Check if a log chunk is a duplicate (recently streamed). + * Prevents terminal redraws from causing duplicate log entries in the dashboard. + * + * Uses content normalization and time-based deduplication: + * - Strips whitespace and normalizes content for comparison + * - Considers chunks with same normalized content within LOG_DEDUP_WINDOW_MS as duplicates + * - Cleans up old entries to prevent memory growth + */ + private isDuplicateLogChunk(data: string): boolean { + // Normalize: strip excessive whitespace, limit to first 200 chars for hash + // This helps catch redraws that might have slight formatting differences + const normalized = stripAnsi(data).replace(/\s+/g, ' ').trim().substring(0, 200); + + // Very short chunks (likely control chars or partial output) - allow through + if (normalized.length < 10) { + return false; + } + + // Simple hash using string as key + const hash = normalized; + const now = Date.now(); + + // Check if this chunk was recently streamed + const lastSeen = this.recentLogChunks.get(hash); + if (lastSeen && (now - lastSeen) < PtyWrapper.LOG_DEDUP_WINDOW_MS) { + return true; // Duplicate + } + + // Record this chunk + this.recentLogChunks.set(hash, now); + + // Cleanup: remove old entries if map is getting large + if (this.recentLogChunks.size > PtyWrapper.LOG_DEDUP_MAX_SIZE) { + const cutoff = now - PtyWrapper.LOG_DEDUP_WINDOW_MS * 2; + for (const [key, timestamp] of this.recentLogChunks) { + if (timestamp < cutoff) { + this.recentLogChunks.delete(key); + } + } + } + + return false; // Not a duplicate + } + /** * Auto-accept Claude's first-run prompts * Handles multiple prompts in sequence: @@ -943,20 +992,21 @@ export class PtyWrapper extends EventEmitter { const spawnPrefix = '->relay:spawn'; const releasePrefix = '->relay:release'; - // Pattern to strip common line prefixes (bullets, prompts, etc.) - // Same prefixes allowed in the message parser - const linePrefixPattern = /^(?:[>$%#โ†’โžœโ€บยปโ—โ€ขโ—ฆโ€ฃโƒ\-*โบโ—†โ—‡โ—‹โ–กโ– โ”‚โ”ƒโ”†โ”‡โ”Šโ”‹โ•Žโ•โœฆ]\s*)+/; - for (const line of lines) { let trimmed = line.trim(); - // Strip common line prefixes (bullets, prompts) before checking for commands - const originalTrimmed = trimmed; - trimmed = trimmed.replace(linePrefixPattern, ''); - - // Debug: log prefix stripping when spawn detected - if (originalTrimmed.includes('->relay:spawn') && originalTrimmed !== trimmed) { - console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Stripped prefix: "${originalTrimmed.substring(0, 50)}" -> "${trimmed.substring(0, 50)}"`); + // Strip bullet/prompt prefixes but PRESERVE the ->relay: pattern + // Look for ->relay: in the line and only strip what comes before it + const relayIdx = trimmed.indexOf('->relay:'); + if (relayIdx > 0) { + // There's content before ->relay: - check if it's just prefix chars + const beforeRelay = trimmed.substring(0, relayIdx); + // Only strip if the prefix is just bullets/prompts/whitespace + if (/^[\sโ—โ€ขโ—ฆโ€ฃโƒโบโ—†โ—‡โ—‹โ–กโ– โ”‚โ”ƒโ”†โ”‡โ”Šโ”‹โ•Žโ•โœฆโ†’โžœโ€บยป$%#*]+$/.test(beforeRelay)) { + const originalTrimmed = trimmed; + trimmed = trimmed.substring(relayIdx); + console.log(`[pty:${this.config.name}] [SPAWN-DEBUG] Stripped prefix: "${originalTrimmed.substring(0, 60)}" -> "${trimmed.substring(0, 60)}"`); + } } // Skip escaped commands: \->relay:spawn should not trigger diff --git a/src/wrapper/shared.ts b/src/wrapper/shared.ts index 59bbaa12..9d49ece7 100644 --- a/src/wrapper/shared.ts +++ b/src/wrapper/shared.ts @@ -241,6 +241,12 @@ export interface InjectionCallbacks { logError: (message: string) => void; /** Get the injection metrics object to update */ getMetrics: () => InjectionMetrics; + /** + * Skip verification and trust that write succeeded. + * Set to true for PTY-based injection where CLIs don't echo input. + * When true, injection succeeds on first attempt without verification. + */ + skipVerification?: boolean; } /** @@ -300,6 +306,20 @@ export async function injectWithRetry( const metrics = callbacks.getMetrics(); metrics.total++; + // Skip verification mode: trust that write() succeeds without checking output + // Used for PTY-based injection where CLIs don't echo input back + if (callbacks.skipVerification) { + try { + await callbacks.performInjection(injection); + metrics.successFirstTry++; + return { success: true, attempts: 1 }; + } catch (err: any) { + callbacks.logError(`Injection error: ${err?.message || err}`); + metrics.failed++; + return { success: false, attempts: 1 }; + } + } + for (let attempt = 0; attempt < INJECTION_CONSTANTS.MAX_RETRIES; attempt++) { try { // On retry attempts, first check if message already exists (race condition fix) From a0007c3c0b32c5df96b672f57c2c1e52475dfc25 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 14:10:08 +0100 Subject: [PATCH 098/103] git config --- deploy/workspace/entrypoint.sh | 13 +++++++++++ src/cloud/provisioner/index.ts | 14 +++++++++++- src/dashboard-server/server.ts | 22 +++++++++++++++---- .../react-components/hooks/useWebSocket.ts | 1 + src/wrapper/parser.ts | 10 ++------- 5 files changed, 47 insertions(+), 13 deletions(-) diff --git a/deploy/workspace/entrypoint.sh b/deploy/workspace/entrypoint.sh index 967e37e2..1d56ce9e 100644 --- a/deploy/workspace/entrypoint.sh +++ b/deploy/workspace/entrypoint.sh @@ -30,6 +30,13 @@ if [[ -n "${CLOUD_API_URL:-}" && -n "${WORKSPACE_ID:-}" && -n "${WORKSPACE_TOKEN git config --global credential.useHttpPath true export GIT_TERMINAL_PROMPT=0 + # Configure git identity for commits + # Use env vars if set, otherwise default to "Agent Relay" / "agent@agent-relay.com" + DEFAULT_GIT_EMAIL="${AGENT_NAME:-agent}@agent-relay.com" + git config --global user.name "${GIT_USER_NAME:-Agent Relay}" + git config --global user.email "${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}" + log "Git identity configured: ${GIT_USER_NAME:-Agent Relay} <${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}>" + # Configure gh CLI to use the same token mechanism # gh auth login expects a token via stdin or GH_TOKEN env var # We'll set up a wrapper that fetches fresh tokens @@ -87,6 +94,12 @@ EOF export GIT_ASKPASS="${GIT_ASKPASS_SCRIPT}" export GIT_TERMINAL_PROMPT=0 export GH_TOKEN="${GITHUB_TOKEN}" + + # Configure git identity for commits + DEFAULT_GIT_EMAIL="${AGENT_NAME:-agent}@agent-relay.com" + git config --global user.name "${GIT_USER_NAME:-Agent Relay}" + git config --global user.email "${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}" + log "Git identity configured: ${GIT_USER_NAME:-Agent Relay} <${GIT_USER_EMAIL:-${DEFAULT_GIT_EMAIL}}>" fi clone_or_update_repo() { diff --git a/src/cloud/provisioner/index.ts b/src/cloud/provisioner/index.ts index 09ac4431..460240e5 100644 --- a/src/cloud/provisioner/index.ts +++ b/src/cloud/provisioner/index.ts @@ -762,12 +762,24 @@ class RailwayProvisioner implements ComputeProvisioner { */ class DockerProvisioner implements ComputeProvisioner { private cloudApiUrl: string; + private cloudApiUrlForContainer: string; private sessionSecret: string; constructor() { const config = getConfig(); this.cloudApiUrl = config.publicUrl; this.sessionSecret = config.sessionSecret; + + // For Docker containers, localhost won't work - they need to reach the host + // Convert localhost URLs to host.docker.internal for container access + if (this.cloudApiUrl.includes('localhost') || this.cloudApiUrl.includes('127.0.0.1')) { + this.cloudApiUrlForContainer = this.cloudApiUrl + .replace('localhost', 'host.docker.internal') + .replace('127.0.0.1', 'host.docker.internal'); + console.log(`[docker] Container API URL: ${this.cloudApiUrlForContainer} (host: ${this.cloudApiUrl})`); + } else { + this.cloudApiUrlForContainer = this.cloudApiUrl; + } } private generateWorkspaceToken(workspaceId: string): string { @@ -822,7 +834,7 @@ class DockerProvisioner implements ComputeProvisioner { `-e PROVIDERS=${(workspace.config.providers ?? []).join(',')}`, `-e PORT=${WORKSPACE_PORT}`, `-e AGENT_RELAY_DASHBOARD_PORT=${WORKSPACE_PORT}`, - `-e CLOUD_API_URL=${this.cloudApiUrl}`, + `-e CLOUD_API_URL=${this.cloudApiUrlForContainer}`, `-e WORKSPACE_TOKEN=${this.generateWorkspaceToken(workspace.id)}`, ]; diff --git a/src/dashboard-server/server.ts b/src/dashboard-server/server.ts index 414145b6..c00a2864 100644 --- a/src/dashboard-server/server.ts +++ b/src/dashboard-server/server.ts @@ -1377,15 +1377,18 @@ export async function startDashboard( getAgentSummaries(), ]); - // Filter agents: + // Filter and separate agents from human users: // 1. Exclude "Dashboard" (internal agent, not a real team member) // 2. Exclude offline agents (no lastSeen or lastSeen > threshold) // 3. Exclude agents without a known CLI (these are improperly registered or stale) + // 4. Separate human users (cli === 'dashboard') from AI agents const now = Date.now(); // 30 seconds - aligns with heartbeat timeout (5s heartbeat * 6 multiplier = 30s) // This ensures agents disappear quickly after they stop responding to heartbeats const OFFLINE_THRESHOLD_MS = 30 * 1000; - const filteredAgents = Array.from(agentsMap.values()) + + // First pass: filter out invalid/offline entries + const validEntries = Array.from(agentsMap.values()) .filter(agent => { // Exclude Dashboard if (agent.name === 'Dashboard') return false; @@ -1402,15 +1405,26 @@ export async function startDashboard( if (now - lastSeenTime > OFFLINE_THRESHOLD_MS) return false; return true; - }) + }); + + // Separate AI agents from human users + const filteredAgents = validEntries + .filter(agent => agent.cli !== 'dashboard') .map(agent => ({ ...agent, - // All agents that pass the filter have a known CLI and are AI agents isHuman: false, })); + const humanUsers = validEntries + .filter(agent => agent.cli === 'dashboard') + .map(agent => ({ + ...agent, + isHuman: true, + })); + return { agents: filteredAgents, + users: humanUsers, messages: allMessages, activity: allMessages, // For now, activity log is just the message log sessions, diff --git a/src/dashboard/react-components/hooks/useWebSocket.ts b/src/dashboard/react-components/hooks/useWebSocket.ts index 664fd3a7..8e076902 100644 --- a/src/dashboard/react-components/hooks/useWebSocket.ts +++ b/src/dashboard/react-components/hooks/useWebSocket.ts @@ -10,6 +10,7 @@ import type { Agent, Message, Session, AgentSummary, FleetData } from '../../typ export interface DashboardData { agents: Agent[]; + users?: Agent[]; // Human users (cli === 'dashboard') messages: Message[]; sessions?: Session[]; summaries?: AgentSummary[]; diff --git a/src/wrapper/parser.ts b/src/wrapper/parser.ts index 5e336bea..cf122638 100644 --- a/src/wrapper/parser.ts +++ b/src/wrapper/parser.ts @@ -125,14 +125,8 @@ const PLACEHOLDER_TARGETS = new Set([ 'targetagent', 'someagent', 'otheragent', - 'name', // Generic placeholder - // NOTE: Removed 'lead', 'developer', 'reviewer', 'architect', 'designer' - these are valid agent names! - // Only include truly placeholder names that would never be real agents - 'sender', - 'agent', - 'workername', // ->relay:spawn WorkerName examples - 'myagent', - 'youragent', + 'worker', // Too generic, often used in examples + // NOTE: Don't add 'agent', 'name', 'lead', 'developer', etc. - these can be valid agent names! ]); /** From 76ed3c59bc5b3e05b6f83eba29a26b16ddc9258a Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 14:17:30 +0100 Subject: [PATCH 099/103] lint fixes --- src/bridge/spawner.ts | 1 - src/cloud/api/onboarding.ts | 9 ++------- src/cloud/api/webhooks.ts | 2 +- src/cloud/db/drizzle.ts | 1 - src/cloud/db/schema.ts | 1 - src/cloud/server.ts | 2 +- src/cloud/services/ci-agent-spawner.ts | 2 +- src/cloud/webhooks/router.ts | 1 - src/daemon/api.ts | 1 - 9 files changed, 5 insertions(+), 15 deletions(-) diff --git a/src/bridge/spawner.ts b/src/bridge/spawner.ts index a101aaf7..5c4b056f 100644 --- a/src/bridge/spawner.ts +++ b/src/bridge/spawner.ts @@ -6,7 +6,6 @@ import fs from 'node:fs'; import path from 'node:path'; -import { fileURLToPath } from 'node:url'; import { sleep } from './utils.js'; import { getProjectPaths } from '../utils/project-namespace.js'; import { resolveCommand } from '../utils/command-resolver.js'; diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 02b6b7f3..892544a3 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -33,12 +33,6 @@ export { type PromptHandler, } from './cli-pty-runner.js'; -import { - CLI_AUTH_CONFIG, - runCLIAuthViaPTY, - matchesSuccessPattern, -} from './cli-pty-runner.js'; - export const onboardingRouter = Router(); // Debug: log all requests to this router @@ -575,8 +569,9 @@ onboardingRouter.post('/complete', async (req: Request, res: Response) => { /** * Helper: Extract credentials from CLI credential file + * @deprecated Currently unused - kept for potential future use */ -async function extractCredentials( +async function _extractCredentials( session: CLIAuthSession, config: typeof CLI_AUTH_CONFIG[string] ): Promise { diff --git a/src/cloud/api/webhooks.ts b/src/cloud/api/webhooks.ts index dddee56f..3ffe2271 100644 --- a/src/cloud/api/webhooks.ts +++ b/src/cloud/api/webhooks.ts @@ -703,7 +703,7 @@ async function handleIssueCommentEvent(payload: IssueCommentPayload): Promise { const db = getDb(); // Find active fix attempts by joining with failure events - const activeStatuses = ['pending', 'in_progress']; return db .select({ id: schema.ciFixAttempts.id, diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index ec4ef741..6c8fe0cc 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -14,7 +14,6 @@ import { timestamp, boolean, bigint, - integer, jsonb, unique, index, diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 2169a2c1..9a233450 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -406,7 +406,7 @@ export async function createServer(): Promise { // Heartbeat interval to detect dead connections (30 seconds) const PRESENCE_HEARTBEAT_INTERVAL = 30000; - const PRESENCE_HEARTBEAT_TIMEOUT = 35000; // Allow 5s grace period + const _PRESENCE_HEARTBEAT_TIMEOUT = 35000; // Allow 5s grace period (reserved for future use) // Track connection health for heartbeat const connectionHealth = new WeakMap(); diff --git a/src/cloud/services/ci-agent-spawner.ts b/src/cloud/services/ci-agent-spawner.ts index d94aaf2f..f9236d93 100644 --- a/src/cloud/services/ci-agent-spawner.ts +++ b/src/cloud/services/ci-agent-spawner.ts @@ -295,7 +295,7 @@ _โ€” ${getAppName()}_` /** * Build the prompt for the CI fix agent */ -function buildAgentPrompt(failureEvent: CIFailureEvent, repository: Repository): string { +function buildAgentPrompt(failureEvent: CIFailureEvent, _repository: Repository): string { const annotations = failureEvent.annotations as CIAnnotation[] | null; const annotationsList = annotations && annotations.length > 0 ? annotations diff --git a/src/cloud/webhooks/router.ts b/src/cloud/webhooks/router.ts index 59340214..3cb7d4df 100644 --- a/src/cloud/webhooks/router.ts +++ b/src/cloud/webhooks/router.ts @@ -16,7 +16,6 @@ import type { NormalizedEvent, WebhookAction, WebhookResult, - WebhookResponse, } from './types.js'; import { getParser } from './parsers/index.js'; import { getResponder } from './responders/index.js'; diff --git a/src/daemon/api.ts b/src/daemon/api.ts index 91856b44..174f6998 100644 --- a/src/daemon/api.ts +++ b/src/daemon/api.ts @@ -23,7 +23,6 @@ import { startCLIAuth, getAuthSession, cancelAuthSession, - completeAuthSession, getSupportedProviders, } from './cli-auth.js'; From 84cee86a06871827967add4612689bb9e0e9d6ea Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 14:21:03 +0100 Subject: [PATCH 100/103] Fix lint errors and import issues MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add proper import for CLI_AUTH_CONFIG in onboarding.ts (was only re-exported) - Prefix unused repository parameters with underscore in mention-handler.ts - Remove unused readFileSync import from browser-testing.ts - Remove unused ExecSyncOptions import from container-spawner.ts - Add missing deps to usePresence hook to satisfy eslint - Add debug logging to git-credential-relay ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- deploy/workspace/git-credential-relay | 39 ++++++++++++++++--- src/cloud/api/onboarding.ts | 18 ++++++++- src/cloud/services/mention-handler.ts | 4 +- src/daemon/services/browser-testing.ts | 2 +- src/daemon/services/container-spawner.ts | 2 +- .../react-components/hooks/usePresence.ts | 3 +- 6 files changed, 56 insertions(+), 12 deletions(-) diff --git a/deploy/workspace/git-credential-relay b/deploy/workspace/git-credential-relay index 874e9dee..5af74d00 100644 --- a/deploy/workspace/git-credential-relay +++ b/deploy/workspace/git-credential-relay @@ -15,8 +15,18 @@ set -euo pipefail +# Debug logging (enable with GIT_CREDENTIAL_DEBUG=1) +debug() { + if [[ "${GIT_CREDENTIAL_DEBUG:-}" == "1" ]]; then + echo "git-credential-relay: $*" >&2 + fi +} + +debug "Called with args: $*" + # Only handle 'get' operation if [[ "${1:-}" != "get" ]]; then + debug "Ignoring non-get operation" exit 0 fi @@ -29,13 +39,16 @@ done # Only provide credentials for github.com host="${input[host]:-}" +debug "Host: $host" if [[ "$host" != "github.com" ]]; then + debug "Not github.com, skipping" exit 0 fi # Check required environment variables if [[ -z "${WORKSPACE_ID:-}" ]]; then echo "git-credential-relay: WORKSPACE_ID not set" >&2 + echo "git-credential-relay: Hint - check if env vars are passed to agent process" >&2 exit 1 fi @@ -49,11 +62,27 @@ if [[ -z "${WORKSPACE_TOKEN:-}" ]]; then exit 1 fi -# Fetch fresh token from gateway -response=$(curl -sf \ - -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ - "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ - 2>/dev/null) +debug "Fetching token from ${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" + +# Fetch fresh token from gateway (capture stderr for debugging) +http_code="" +response="" +if [[ "${GIT_CREDENTIAL_DEBUG:-}" == "1" ]]; then + # With debug, show full curl output + response=$(curl -sf -w "\n%{http_code}" \ + -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ + 2>&1) || true + http_code="${response##*$'\n'}" + response="${response%$'\n'*}" + debug "HTTP response code: $http_code" + debug "Response: ${response:0:200}" +else + response=$(curl -sf \ + -H "Authorization: Bearer ${WORKSPACE_TOKEN}" \ + "${CLOUD_API_URL}/api/git/token?workspaceId=${WORKSPACE_ID}" \ + 2>/dev/null) || true +fi if [[ -z "$response" ]]; then echo "git-credential-relay: Failed to fetch token from gateway" >&2 diff --git a/src/cloud/api/onboarding.ts b/src/cloud/api/onboarding.ts index 892544a3..4788be8a 100644 --- a/src/cloud/api/onboarding.ts +++ b/src/cloud/api/onboarding.ts @@ -17,6 +17,22 @@ import { requireAuth } from './auth.js'; import { db } from '../db/index.js'; import { vault } from '../vault/index.js'; +// Import for local use +import { + CLI_AUTH_CONFIG, + runCLIAuthViaPTY, + stripAnsiCodes, + matchesSuccessPattern, + findMatchingPrompt, + validateProviderConfig, + validateAllProviderConfigs, + getSupportedProviders, + type CLIAuthConfig, + type PTYAuthResult, + type PTYAuthOptions, + type PromptHandler, +} from './cli-pty-runner.js'; + // Re-export from shared module for backward compatibility export { CLI_AUTH_CONFIG, @@ -31,7 +47,7 @@ export { type PTYAuthResult, type PTYAuthOptions, type PromptHandler, -} from './cli-pty-runner.js'; +}; export const onboardingRouter = Router(); diff --git a/src/cloud/services/mention-handler.ts b/src/cloud/services/mention-handler.ts index 1342e7fc..6cecaafb 100644 --- a/src/cloud/services/mention-handler.ts +++ b/src/cloud/services/mention-handler.ts @@ -237,7 +237,7 @@ _โ€” ${getAppName()}_` /** * Build a prompt for handling a mention */ -function buildMentionPrompt(mention: CommentMention, repository: Repository): string { +function buildMentionPrompt(mention: CommentMention, _repository: Repository): string { const agentDescription = isKnownAgent(mention.mentionedAgent) ? KNOWN_AGENTS[mention.mentionedAgent] : 'Custom agent'; @@ -388,7 +388,7 @@ _โ€” ${getAppName()}_` /** * Build a prompt for an issue assignment */ -function buildIssuePrompt(assignment: IssueAssignment, repository: Repository): string { +function buildIssuePrompt(assignment: IssueAssignment, _repository: Repository): string { const priorityNote = assignment.priority ? `\n**Priority:** ${assignment.priority.toUpperCase()}` : ''; diff --git a/src/daemon/services/browser-testing.ts b/src/daemon/services/browser-testing.ts index 31eb5493..e0283f0c 100644 --- a/src/daemon/services/browser-testing.ts +++ b/src/daemon/services/browser-testing.ts @@ -12,7 +12,7 @@ */ import { spawn, execSync } from 'child_process'; -import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs'; +import { existsSync, writeFileSync, mkdirSync } from 'fs'; import { join } from 'path'; export interface ScreenshotOptions { diff --git a/src/daemon/services/container-spawner.ts b/src/daemon/services/container-spawner.ts index 130c7683..8eb4f887 100644 --- a/src/daemon/services/container-spawner.ts +++ b/src/daemon/services/container-spawner.ts @@ -11,7 +11,7 @@ * - Language-specific toolchains */ -import { spawn, execSync, ExecSyncOptions } from 'child_process'; +import { spawn, execSync } from 'child_process'; import { existsSync } from 'fs'; export interface ContainerConfig { diff --git a/src/dashboard/react-components/hooks/usePresence.ts b/src/dashboard/react-components/hooks/usePresence.ts index 897ebdd9..efeac8ee 100644 --- a/src/dashboard/react-components/hooks/usePresence.ts +++ b/src/dashboard/react-components/hooks/usePresence.ts @@ -283,8 +283,7 @@ export function usePresence(options: UsePresenceOptions = {}): UsePresenceReturn disconnect(); }; // Only re-run when autoConnect or currentUser identity changes - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [autoConnect, currentUser?.username]); + }, [autoConnect, currentUser?.username, connect, disconnect]); // Send leave on page unload useEffect(() => { From 90137ed6e227ff33aa40a1ea26db6ddf1f3fbd5b Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 14:24:02 +0100 Subject: [PATCH 101/103] Add build tools to runtime Docker image for native modules MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Agents need make and g++ to run npm install on repos with native dependencies like node-pty. Previously these were only in the builder stage, not available at runtime. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- deploy/workspace/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/deploy/workspace/Dockerfile b/deploy/workspace/Dockerfile index bfce24d4..bdefa4de 100644 --- a/deploy/workspace/Dockerfile +++ b/deploy/workspace/Dockerfile @@ -32,6 +32,7 @@ WORKDIR /app # Install system dependencies for AI CLIs and git # Note: tmux not needed - daemon uses node-pty directly # gosu is used to drop privileges from root to workspace user +# Build tools (make, g++) needed for agents to npm install repos with native modules RUN apt-get update && apt-get install -y \ bash \ ca-certificates \ @@ -40,6 +41,8 @@ RUN apt-get update && apt-get install -y \ python3 \ jq \ gosu \ + make \ + g++ \ && rm -rf /var/lib/apt/lists/* # Install GitHub CLI (gh) From 9939882a548b42032c107fff7435d905a1d68f60 Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 14:26:32 +0100 Subject: [PATCH 102/103] Allow blob: URLs in CSP for pasted images MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Image paste creates blob: URLs which were blocked by the Content-Security-Policy. Added blob: to imgSrc directive. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- src/cloud/server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 9a233450..6529d356 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -107,7 +107,7 @@ export async function createServer(): Promise { scriptSrc: ["'self'", "'unsafe-inline'", "'unsafe-eval'", "https://connect.nango.dev"], styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com", "https://connect.nango.dev"], fontSrc: ["'self'", "https://fonts.gstatic.com", "data:"], - imgSrc: ["'self'", "data:", "https:"], + imgSrc: ["'self'", "data:", "https:", "blob:"], connectSrc: ["'self'", "wss:", "ws:", "https:", "https://api.nango.dev", "https://connect.nango.dev"], frameSrc: ["'self'", "https://connect.nango.dev", "https://github.com"], childSrc: ["'self'", "https://connect.nango.dev", "blob:"], From 8403b93c20d919cccfd28041a7bb4f0c99953d0e Mon Sep 17 00:00:00 2001 From: Khaliq Date: Mon, 5 Jan 2026 14:27:07 +0100 Subject: [PATCH 103/103] better wrap logic --- src/wrapper/pty-wrapper.ts | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/src/wrapper/pty-wrapper.ts b/src/wrapper/pty-wrapper.ts index 41b495af..e292883a 100644 --- a/src/wrapper/pty-wrapper.ts +++ b/src/wrapper/pty-wrapper.ts @@ -154,6 +154,8 @@ export class PtyWrapper extends EventEmitter { private recentLogChunks: Map = new Map(); // Dedup log streaming (hash -> timestamp) private static readonly LOG_DEDUP_WINDOW_MS = 500; // Window for considering logs as duplicates private static readonly LOG_DEDUP_MAX_SIZE = 100; // Max entries in dedup map + private lastParsedLength = 0; // Track last parsed position to avoid re-parsing entire buffer + private lastContinuityParsedLength = 0; // Same for continuity commands constructor(config: PtyWrapperConfig) { super(); @@ -533,9 +535,15 @@ export class PtyWrapper extends EventEmitter { // Parse for continuity commands (->continuity:save, ->continuity:load, etc.) // Use rawBuffer (accumulated content) not immediate chunk, since multi-line // fenced commands like ->continuity:save <<<...>>> span multiple output events - this.parseContinuityCommands(cleanContent).catch(err => { - console.error(`[pty:${this.config.name}] Continuity command parsing error:`, err); - }); + // Optimization: Only parse new content with lookback for incomplete fenced commands + if (cleanContent.length > this.lastContinuityParsedLength) { + const lookbackStart = Math.max(0, this.lastContinuityParsedLength - 500); + const contentToParse = cleanContent.substring(lookbackStart); + this.parseContinuityCommands(contentToParse).catch(err => { + console.error(`[pty:${this.config.name}] Continuity command parsing error:`, err); + }); + this.lastContinuityParsedLength = cleanContent.length; + } // Track outputs and potentially remind about summaries this.trackOutputAndRemind(data); @@ -735,18 +743,32 @@ export class PtyWrapper extends EventEmitter { * Parse relay commands from output. * Handles both single-line and multi-line (fenced) formats. * Deduplication via sentMessageHashes. + * + * Optimization: Only parses new content since last parse to avoid O(nยฒ) behavior. + * Uses lookback buffer for incomplete fenced messages that span output chunks. */ private parseRelayCommands(): void { const cleanContent = stripAnsi(this.rawBuffer); + // Skip if no new content + if (cleanContent.length <= this.lastParsedLength) return; + + // For fenced messages, need some lookback for incomplete fences that span chunks + // 500 chars is enough to capture most relay message headers + const lookbackStart = Math.max(0, this.lastParsedLength - 500); + const contentToParse = cleanContent.substring(lookbackStart); + // First, try to find fenced multi-line messages: ->relay:Target <<<\n...\n>>> - this.parseFencedMessages(cleanContent); + this.parseFencedMessages(contentToParse); // Then parse single-line messages - this.parseSingleLineMessages(cleanContent); + this.parseSingleLineMessages(contentToParse); // Parse spawn/release commands - this.parseSpawnReleaseCommands(cleanContent); + this.parseSpawnReleaseCommands(contentToParse); + + // Update parsed position + this.lastParsedLength = cleanContent.length; } /**