diff --git a/.claude/rules/react-dashboard.md b/.claude/rules/react-dashboard.md
index da7ec5c3..e88851ee 100644
--- a/.claude/rules/react-dashboard.md
+++ b/.claude/rules/react-dashboard.md
@@ -54,6 +54,38 @@ paths:
- Handle loading and error states appropriately
- Use `try/catch` for async operations
+## Next.js App Router
+
+- Pages using `useSearchParams()` MUST be wrapped in a `<Suspense>` boundary for static generation
+- Pattern: Create a `{Page}Content` component that uses the hook, wrap it in `<Suspense>` in the default export
+- Always provide a loading fallback component
+
+```tsx
+// Required pattern for useSearchParams
+import { Suspense } from 'react';
+import { useSearchParams } from 'next/navigation';
+
+function PageLoading() {
+  return <div>Loading...</div>;
+}
+
+function PageContent() {
+ const searchParams = useSearchParams();
+ const param = searchParams.get('param');
+ // ... component logic
+}
+
+export default function Page() {
+ return (
+    <Suspense fallback={<PageLoading />}>
+      <PageContent />
+    </Suspense>
+ );
+}
+```
+
+- See `app/cloud/link/page.tsx` and `app/login/page.tsx` for examples
+
## Common Patterns
```tsx
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 3f103118..2ae82245 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -121,8 +121,14 @@ jobs:
update-workspaces:
runs-on: ubuntu-latest
needs: [build-and-push]
- # Only run on main branch pushes, not releases or manual runs
- if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+ # Only run on main branch pushes when build-and-push succeeded
+ # Note: Can't use success() here because it checks the entire dependency chain,
+ # including build-base which is often skipped. Use explicit result check instead.
+ if: |
+ always() &&
+ needs.build-and-push.result == 'success' &&
+ github.event_name == 'push' &&
+ github.ref == 'refs/heads/main'
steps:
- name: Update workspace images
env:
@@ -139,7 +145,7 @@ jobs:
response=$(curl -s -w "\n%{http_code}" -X POST "${CLOUD_API_URL}/api/admin/workspaces/update-image" \
-H "x-admin-secret: ${ADMIN_API_SECRET}" \
-H "Content-Type: application/json" \
- -d '{"image": "ghcr.io/agentworkforce/relay-workspace:latest", "skipRestart": true}')
+ -d '{"image": "ghcr.io/agentworkforce/relay-workspace:latest", "skipRestart": false}')
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
diff --git a/.github/workflows/migrations.yml b/.github/workflows/migrations.yml
new file mode 100644
index 00000000..b82870b0
--- /dev/null
+++ b/.github/workflows/migrations.yml
@@ -0,0 +1,64 @@
+name: Database Migrations
+
+on:
+ push:
+ branches: [main]
+ paths:
+ - 'src/cloud/db/**'
+ - 'drizzle.config.ts'
+ - '.github/workflows/migrations.yml'
+ pull_request:
+ branches: [main]
+ paths:
+ - 'src/cloud/db/**'
+ - 'drizzle.config.ts'
+ - '.github/workflows/migrations.yml'
+ # Allow manual trigger
+ workflow_dispatch:
+
+jobs:
+ migrations:
+ name: Run Migrations
+ runs-on: ubuntu-latest
+
+ services:
+ postgres:
+ image: postgres:16
+ env:
+ POSTGRES_USER: agent_relay
+ POSTGRES_PASSWORD: test_password
+ POSTGRES_DB: agent_relay_test
+ ports:
+ - 5432:5432
+ # Health check to wait for postgres to be ready
+ options: >-
+ --health-cmd pg_isready
+ --health-interval 10s
+ --health-timeout 5s
+ --health-retries 5
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ cache: 'npm'
+
+ - name: Install dependencies
+ run: npm ci
+
+ - name: Build project
+ run: npm run build
+
+ - name: Run migrations
+ env:
+ DATABASE_URL: postgres://agent_relay:test_password@localhost:5432/agent_relay_test
+ run: node scripts/run-migrations.js
+
+ - name: Verify schema
+ env:
+ DATABASE_URL: postgres://agent_relay:test_password@localhost:5432/agent_relay_test
+ run: node scripts/verify-schema.js
diff --git a/.trajectories/completed/2026-01/traj_oszg9flv74pk.json b/.trajectories/completed/2026-01/traj_oszg9flv74pk.json
new file mode 100644
index 00000000..99aa3c85
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_oszg9flv74pk.json
@@ -0,0 +1,73 @@
+{
+ "id": "traj_oszg9flv74pk",
+ "version": 1,
+ "task": {
+ "title": "Fix cloud link authentication flow"
+ },
+ "status": "completed",
+ "startedAt": "2026-01-08T09:01:35.826Z",
+ "agents": [
+ {
+ "name": "khaliqgant",
+ "role": "lead",
+ "joinedAt": "2026-01-08T09:01:35.827Z"
+ }
+ ],
+ "chapters": [
+ {
+ "id": "chap_fnyyswrra94t",
+ "title": "Work",
+ "agentName": "default",
+ "startedAt": "2026-01-08T09:01:42.677Z",
+ "events": [
+ {
+ "ts": 1767862902677,
+ "type": "decision",
+ "content": "Fixed cloud link page auth check: Fixed cloud link page auth check",
+ "raw": {
+ "question": "Fixed cloud link page auth check",
+ "chosen": "Fixed cloud link page auth check",
+ "alternatives": [],
+ "reasoning": "checkAuth() was looking for data.userId but /api/auth/session returns { authenticated: true, user: { id } }. Changed to check data.authenticated && data.user?.id"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767862908073,
+ "type": "decision",
+ "content": "Added return URL support to login page: Added return URL support to login page",
+ "raw": {
+ "question": "Added return URL support to login page",
+ "chosen": "Added return URL support to login page",
+ "alternatives": [],
+ "reasoning": "Login page ignored ?return= query param, always redirecting to /app after auth. Added useSearchParams to read return URL and redirect back (e.g., to cloud link page)"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767862912381,
+ "type": "decision",
+ "content": "Wrapped login page in Suspense boundary: Wrapped login page in Suspense boundary",
+ "raw": {
+ "question": "Wrapped login page in Suspense boundary",
+ "chosen": "Wrapped login page in Suspense boundary",
+ "alternatives": [],
+ "reasoning": "useSearchParams requires Suspense for Next.js static generation. Created LoginContent component wrapped in Suspense with LoginLoading fallback"
+ },
+ "significance": "high"
+ }
+ ],
+ "endedAt": "2026-01-08T09:01:57.389Z"
+ }
+ ],
+ "commits": [],
+ "filesChanged": [],
+ "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay",
+ "tags": [],
+ "completedAt": "2026-01-08T09:01:57.389Z",
+ "retrospective": {
+ "summary": "Fixed two bugs in cloud link flow: 1) Auth check used wrong response shape 2) Login page ignored return URL param. Also added Suspense boundary for Next.js static gen.",
+ "approach": "Standard approach",
+ "confidence": 0.9
+ }
+}
\ No newline at end of file
diff --git a/.trajectories/completed/2026-01/traj_oszg9flv74pk.md b/.trajectories/completed/2026-01/traj_oszg9flv74pk.md
new file mode 100644
index 00000000..c095ed06
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_oszg9flv74pk.md
@@ -0,0 +1,41 @@
+# Trajectory: Fix cloud link authentication flow
+
+> **Status:** ✅ Completed
+> **Confidence:** 90%
+> **Started:** January 8, 2026 at 10:01 AM
+> **Completed:** January 8, 2026 at 10:01 AM
+
+---
+
+## Summary
+
+Fixed two bugs in cloud link flow: 1) Auth check used wrong response shape 2) Login page ignored return URL param. Also added Suspense boundary for Next.js static gen.
+
+**Approach:** Standard approach
+
+---
+
+## Key Decisions
+
+### Fixed cloud link page auth check
+- **Chose:** Fixed cloud link page auth check
+- **Reasoning:** checkAuth() was looking for data.userId but /api/auth/session returns { authenticated: true, user: { id } }. Changed to check data.authenticated && data.user?.id
+
+### Added return URL support to login page
+- **Chose:** Added return URL support to login page
+- **Reasoning:** Login page ignored ?return= query param, always redirecting to /app after auth. Added useSearchParams to read return URL and redirect back (e.g., to cloud link page)
+
+### Wrapped login page in Suspense boundary
+- **Chose:** Wrapped login page in Suspense boundary
+- **Reasoning:** useSearchParams requires Suspense for Next.js static generation. Created LoginContent component wrapped in Suspense with LoginLoading fallback
+
+---
+
+## Chapters
+
+### 1. Work
+*Agent: default*
+
+- Fixed cloud link page auth check: Fixed cloud link page auth check
+- Added return URL support to login page: Added return URL support to login page
+- Wrapped login page in Suspense boundary: Wrapped login page in Suspense boundary
diff --git a/.trajectories/completed/2026-01/traj_rsavt0jipi3c.json b/.trajectories/completed/2026-01/traj_rsavt0jipi3c.json
new file mode 100644
index 00000000..c9dd8abd
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_rsavt0jipi3c.json
@@ -0,0 +1,109 @@
+{
+ "id": "traj_rsavt0jipi3c",
+ "version": 1,
+ "task": {
+ "title": "Power agent session - ready for tasks"
+ },
+ "status": "completed",
+ "startedAt": "2026-01-08T07:54:35.678Z",
+ "agents": [
+ {
+ "name": "khaliqgant",
+ "role": "lead",
+ "joinedAt": "2026-01-08T07:54:35.679Z"
+ }
+ ],
+ "chapters": [
+ {
+ "id": "chap_cgughl8lm8b5",
+ "title": "Work",
+ "agentName": "default",
+ "startedAt": "2026-01-08T08:04:56.261Z",
+ "events": [
+ {
+ "ts": 1767859496262,
+ "type": "decision",
+ "content": "Fixed cloud link auth flow - two bugs: Fixed cloud link auth flow - two bugs",
+ "raw": {
+ "question": "Fixed cloud link auth flow - two bugs",
+ "chosen": "Fixed cloud link auth flow - two bugs",
+ "alternatives": [],
+ "reasoning": "1) Cloud link page checked for data.userId but API returns data.authenticated + data.user.id. 2) Login page ignored return URL param, so after login it went to /app instead of back to cloud link page"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767859507874,
+ "type": "decision",
+ "content": "Fixed login page return URL support: Fixed login page return URL support",
+ "raw": {
+ "question": "Fixed login page return URL support",
+ "chosen": "Fixed login page return URL support",
+ "alternatives": [],
+ "reasoning": "Added useSearchParams to read return query param and redirect back after login instead of always going to /app"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767860361297,
+ "type": "decision",
+ "content": "Added Suspense boundary to login page: Added Suspense boundary to login page",
+ "raw": {
+ "question": "Added Suspense boundary to login page",
+ "chosen": "Added Suspense boundary to login page",
+ "alternatives": [],
+ "reasoning": "useSearchParams requires Suspense for Next.js static generation - wrapped LoginContent in Suspense with LoginLoading fallback"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767860499290,
+ "type": "decision",
+ "content": "Added useSearchParams/Suspense rule to react-dashboard.md: Added useSearchParams/Suspense rule to react-dashboard.md",
+ "raw": {
+ "question": "Added useSearchParams/Suspense rule to react-dashboard.md",
+ "chosen": "Added useSearchParams/Suspense rule to react-dashboard.md",
+ "alternatives": [],
+ "reasoning": "Prevents future build failures - useSearchParams requires Suspense boundary for Next.js static generation"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767861773992,
+ "type": "decision",
+ "content": "Changed update-workspaces condition to use explicit result check: Changed update-workspaces condition to use explicit result check",
+ "raw": {
+ "question": "Changed update-workspaces condition to use explicit result check",
+ "chosen": "Changed update-workspaces condition to use explicit result check",
+ "alternatives": [],
+ "reasoning": "success() checks entire dependency chain including skipped build-base. Using always() + needs.build-and-push.result == 'success' checks only direct dependency"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767862760607,
+ "type": "decision",
+ "content": "Changed skipRestart to false in update-workspaces: Changed skipRestart to false in update-workspaces",
+ "raw": {
+ "question": "Changed skipRestart to false in update-workspaces",
+ "chosen": "Changed skipRestart to false in update-workspaces",
+ "alternatives": [],
+ "reasoning": "If no active agents, workspace should restart immediately to apply new image since there's no work to disrupt"
+ },
+ "significance": "high"
+ }
+ ],
+ "endedAt": "2026-01-08T09:01:29.981Z"
+ }
+ ],
+ "commits": [],
+ "filesChanged": [],
+ "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay",
+ "tags": [],
+ "completedAt": "2026-01-08T09:01:29.981Z",
+ "retrospective": {
+ "summary": "General session - mixed work on cloud link auth, docker workflow, and React rules",
+ "approach": "Standard approach",
+ "confidence": 0.7
+ }
+}
\ No newline at end of file
diff --git a/.trajectories/completed/2026-01/traj_rsavt0jipi3c.md b/.trajectories/completed/2026-01/traj_rsavt0jipi3c.md
new file mode 100644
index 00000000..37ca1c9d
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_rsavt0jipi3c.md
@@ -0,0 +1,56 @@
+# Trajectory: Power agent session - ready for tasks
+
+> **Status:** ✅ Completed
+> **Confidence:** 70%
+> **Started:** January 8, 2026 at 08:54 AM
+> **Completed:** January 8, 2026 at 10:01 AM
+
+---
+
+## Summary
+
+General session - mixed work on cloud link auth, docker workflow, and React rules
+
+**Approach:** Standard approach
+
+---
+
+## Key Decisions
+
+### Fixed cloud link auth flow - two bugs
+- **Chose:** Fixed cloud link auth flow - two bugs
+- **Reasoning:** 1) Cloud link page checked for data.userId but API returns data.authenticated + data.user.id. 2) Login page ignored return URL param, so after login it went to /app instead of back to cloud link page
+
+### Fixed login page return URL support
+- **Chose:** Fixed login page return URL support
+- **Reasoning:** Added useSearchParams to read return query param and redirect back after login instead of always going to /app
+
+### Added Suspense boundary to login page
+- **Chose:** Added Suspense boundary to login page
+- **Reasoning:** useSearchParams requires Suspense for Next.js static generation - wrapped LoginContent in Suspense with LoginLoading fallback
+
+### Added useSearchParams/Suspense rule to react-dashboard.md
+- **Chose:** Added useSearchParams/Suspense rule to react-dashboard.md
+- **Reasoning:** Prevents future build failures - useSearchParams requires Suspense boundary for Next.js static generation
+
+### Changed update-workspaces condition to use explicit result check
+- **Chose:** Changed update-workspaces condition to use explicit result check
+- **Reasoning:** success() checks entire dependency chain including skipped build-base. Using always() + needs.build-and-push.result == 'success' checks only direct dependency
+
+### Changed skipRestart to false in update-workspaces
+- **Chose:** Changed skipRestart to false in update-workspaces
+- **Reasoning:** If no active agents, workspace should restart immediately to apply new image since there's no work to disrupt
+
+---
+
+## Chapters
+
+### 1. Work
+*Agent: default*
+
+- Fixed cloud link auth flow - two bugs: Fixed cloud link auth flow - two bugs
+- Fixed login page return URL support: Fixed login page return URL support
+- Added Suspense boundary to login page: Added Suspense boundary to login page
+- Added useSearchParams/Suspense rule to react-dashboard.md: Added useSearchParams/Suspense rule to react-dashboard.md
+- Changed update-workspaces condition to use explicit result check: Changed update-workspaces condition to use explicit result check
+- Changed skipRestart to false in update-workspaces: Changed skipRestart to false in update-workspaces
diff --git a/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.json b/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.json
new file mode 100644
index 00000000..9822cd78
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.json
@@ -0,0 +1,61 @@
+{
+ "id": "traj_xjqvmep5ed3h",
+ "version": 1,
+ "task": {
+ "title": "Fix update-workspaces GitHub Action job"
+ },
+ "status": "completed",
+ "startedAt": "2026-01-08T09:02:08.758Z",
+ "agents": [
+ {
+ "name": "khaliqgant",
+ "role": "lead",
+ "joinedAt": "2026-01-08T09:02:08.759Z"
+ }
+ ],
+ "chapters": [
+ {
+ "id": "chap_idiabu3o77zd",
+ "title": "Work",
+ "agentName": "default",
+ "startedAt": "2026-01-08T09:02:14.052Z",
+ "events": [
+ {
+ "ts": 1767862934052,
+ "type": "decision",
+ "content": "Changed job condition from success() to explicit needs check: Changed job condition from success() to explicit needs check",
+ "raw": {
+ "question": "Changed job condition from success() to explicit needs check",
+ "chosen": "Changed job condition from success() to explicit needs check",
+ "alternatives": [],
+ "reasoning": "success() checks entire dependency chain including build-base which is often skipped. Changed to always() + needs.build-and-push.result == 'success' to only check direct dependency"
+ },
+ "significance": "high"
+ },
+ {
+ "ts": 1767862939841,
+ "type": "decision",
+ "content": "Changed skipRestart from true to false: Changed skipRestart from true to false",
+ "raw": {
+ "question": "Changed skipRestart from true to false",
+ "chosen": "Changed skipRestart from true to false",
+ "alternatives": [],
+ "reasoning": "With skipRestart:true, running workspaces without active agents would only update config but not restart. Since no agents = no work to disrupt, should restart immediately to apply new image"
+ },
+ "significance": "high"
+ }
+ ],
+ "endedAt": "2026-01-08T09:02:24.262Z"
+ }
+ ],
+ "commits": [],
+ "filesChanged": [],
+ "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay",
+ "tags": [],
+ "completedAt": "2026-01-08T09:02:24.262Z",
+ "retrospective": {
+ "summary": "Fixed update-workspaces job: 1) Changed condition to check direct dependency result instead of success() which fails on skipped upstream jobs 2) Set skipRestart:false so idle workspaces restart immediately",
+ "approach": "Standard approach",
+ "confidence": 0.85
+ }
+}
\ No newline at end of file
diff --git a/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.md b/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.md
new file mode 100644
index 00000000..85da74e0
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.md
@@ -0,0 +1,36 @@
+# Trajectory: Fix update-workspaces GitHub Action job
+
+> **Status:** ✅ Completed
+> **Confidence:** 85%
+> **Started:** January 8, 2026 at 10:02 AM
+> **Completed:** January 8, 2026 at 10:02 AM
+
+---
+
+## Summary
+
+Fixed update-workspaces job: 1) Changed condition to check direct dependency result instead of success() which fails on skipped upstream jobs 2) Set skipRestart:false so idle workspaces restart immediately
+
+**Approach:** Standard approach
+
+---
+
+## Key Decisions
+
+### Changed job condition from success() to explicit needs check
+- **Chose:** Changed job condition from success() to explicit needs check
+- **Reasoning:** success() checks entire dependency chain including build-base which is often skipped. Changed to always() + needs.build-and-push.result == 'success' to only check direct dependency
+
+### Changed skipRestart from true to false
+- **Chose:** Changed skipRestart from true to false
+- **Reasoning:** With skipRestart:true, running workspaces without active agents would only update config but not restart. Since no agents = no work to disrupt, should restart immediately to apply new image
+
+---
+
+## Chapters
+
+### 1. Work
+*Agent: default*
+
+- Changed job condition from success() to explicit needs check: Changed job condition from success() to explicit needs check
+- Changed skipRestart from true to false: Changed skipRestart from true to false
diff --git a/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.json b/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.json
new file mode 100644
index 00000000..de512e6e
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.json
@@ -0,0 +1,49 @@
+{
+ "id": "traj_y7n6hfbf7dmg",
+ "version": 1,
+ "task": {
+ "title": "Add useSearchParams/Suspense rule to react-dashboard"
+ },
+ "status": "completed",
+ "startedAt": "2026-01-08T09:02:29.285Z",
+ "agents": [
+ {
+ "name": "khaliqgant",
+ "role": "lead",
+ "joinedAt": "2026-01-08T09:02:29.285Z"
+ }
+ ],
+ "chapters": [
+ {
+ "id": "chap_4f8h46e935ub",
+ "title": "Work",
+ "agentName": "default",
+ "startedAt": "2026-01-08T09:02:34.375Z",
+ "events": [
+ {
+ "ts": 1767862954376,
+ "type": "decision",
+ "content": "Added Next.js App Router section to react-dashboard.md rule: Added Next.js App Router section to react-dashboard.md rule",
+ "raw": {
+ "question": "Added Next.js App Router section to react-dashboard.md rule",
+ "chosen": "Added Next.js App Router section to react-dashboard.md rule",
+ "alternatives": [],
+ "reasoning": "Prevents future build failures - documents that useSearchParams requires Suspense boundary with code example and references to existing files (cloud/link/page.tsx, login/page.tsx)"
+ },
+ "significance": "high"
+ }
+ ],
+ "endedAt": "2026-01-08T09:02:38.286Z"
+ }
+ ],
+ "commits": [],
+ "filesChanged": [],
+ "projectId": "/Users/khaliqgant/Projects/agent-workforce/relay",
+ "tags": [],
+ "completedAt": "2026-01-08T09:02:38.286Z",
+ "retrospective": {
+ "summary": "Added Next.js App Router section to .claude/rules/react-dashboard.md documenting the useSearchParams + Suspense requirement with code pattern and file references",
+ "approach": "Standard approach",
+ "confidence": 0.95
+ }
+}
\ No newline at end of file
diff --git a/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.md b/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.md
new file mode 100644
index 00000000..019d4937
--- /dev/null
+++ b/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.md
@@ -0,0 +1,31 @@
+# Trajectory: Add useSearchParams/Suspense rule to react-dashboard
+
+> **Status:** ✅ Completed
+> **Confidence:** 95%
+> **Started:** January 8, 2026 at 10:02 AM
+> **Completed:** January 8, 2026 at 10:02 AM
+
+---
+
+## Summary
+
+Added Next.js App Router section to .claude/rules/react-dashboard.md documenting the useSearchParams + Suspense requirement with code pattern and file references
+
+**Approach:** Standard approach
+
+---
+
+## Key Decisions
+
+### Added Next.js App Router section to react-dashboard.md rule
+- **Chose:** Added Next.js App Router section to react-dashboard.md rule
+- **Reasoning:** Prevents future build failures - documents that useSearchParams requires Suspense boundary with code example and references to existing files (cloud/link/page.tsx, login/page.tsx)
+
+---
+
+## Chapters
+
+### 1. Work
+*Agent: default*
+
+- Added Next.js App Router section to react-dashboard.md rule: Added Next.js App Router section to react-dashboard.md rule
diff --git a/.trajectories/index.json b/.trajectories/index.json
index 3dec3b9c..fc0b0776 100644
--- a/.trajectories/index.json
+++ b/.trajectories/index.json
@@ -1,6 +1,6 @@
{
"version": 1,
- "lastUpdated": "2026-01-07T21:41:49.091Z",
+ "lastUpdated": "2026-01-08T09:02:38.297Z",
"trajectories": {
"traj_ozd98si6a7ns": {
"title": "Fix thinking indicator showing on all messages",
@@ -498,6 +498,34 @@
"startedAt": "2026-01-07T21:41:28.024Z",
"completedAt": "2026-01-07T21:41:49.080Z",
"path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_lgtodco7dp1n.json"
+ },
+ "traj_rsavt0jipi3c": {
+ "title": "Power agent session - ready for tasks",
+ "status": "completed",
+ "startedAt": "2026-01-08T07:54:35.678Z",
+ "completedAt": "2026-01-08T09:01:29.981Z",
+ "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_rsavt0jipi3c.json"
+ },
+ "traj_oszg9flv74pk": {
+ "title": "Fix cloud link authentication flow",
+ "status": "completed",
+ "startedAt": "2026-01-08T09:01:35.826Z",
+ "completedAt": "2026-01-08T09:01:57.389Z",
+ "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_oszg9flv74pk.json"
+ },
+ "traj_xjqvmep5ed3h": {
+ "title": "Fix update-workspaces GitHub Action job",
+ "status": "completed",
+ "startedAt": "2026-01-08T09:02:08.758Z",
+ "completedAt": "2026-01-08T09:02:24.262Z",
+ "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_xjqvmep5ed3h.json"
+ },
+ "traj_y7n6hfbf7dmg": {
+ "title": "Add useSearchParams/Suspense rule to react-dashboard",
+ "status": "completed",
+ "startedAt": "2026-01-08T09:02:29.285Z",
+ "completedAt": "2026-01-08T09:02:38.286Z",
+ "path": "/Users/khaliqgant/Projects/agent-workforce/relay/.trajectories/completed/2026-01/traj_y7n6hfbf7dmg.json"
}
}
}
\ No newline at end of file
diff --git a/docs/proposals/activity-state-detection-addition.md b/docs/proposals/activity-state-detection-addition.md
new file mode 100644
index 00000000..13bee92b
--- /dev/null
+++ b/docs/proposals/activity-state-detection-addition.md
@@ -0,0 +1,304 @@
+# Proposed Addition to Progress Tracker Sidecar Spec
+
+> **Location**: Add as section "3.5 Activity State Detector" between "Pattern Analyzer" and "Reminder System"
+
+---
+
+## 3.5 Activity State Detector
+
+**Purpose:** Provide fast, deterministic activity state detection without LLM overhead
+
+The Pattern Analyzer (section 3) uses LLM for intelligent analysis, but has latency and cost implications. The Activity State Detector provides a complementary **sub-second detection layer** using velocity analysis and pattern matching—inspired by [NTM's detection system](https://github.com/Dicklesworthstone/ntm).
+
+### Activity States
+
+| State | Description | Detection Method |
+|-------|-------------|------------------|
+| `waiting` | At prompt, awaiting input | Prompt pattern match |
+| `thinking` | Processing/planning | Thinking indicator patterns |
+| `generating` | Actively outputting text | Output velocity > 10 chars/sec |
+| `tool_executing` | Running a tool | Tool start patterns |
+| `compacting` | Context window compaction | Compaction patterns |
+| `error` | Error encountered | Error patterns |
+| `stalled` | Stuck in same state too long | Time threshold exceeded |
+| `idle` | No activity for extended period | Velocity = 0 for > 30s |
+
+### Detection Algorithm
+
+The detector combines three signal types:
+
+#### 1. Velocity Analysis
+
+Measure output rate by comparing character counts between captures:
+
+```typescript
+// Velocity thresholds (chars/sec)
+HIGH_VELOCITY = 10.0 // → generating
+MEDIUM_VELOCITY = 2.0 // → generating (slower)
+LOW_VELOCITY = 1.0 // → thinking or transitioning
+IDLE_VELOCITY = 0.0 // → waiting or stalled
+```
+
+**Implementation notes:**
+- Count Unicode runes, not bytes
+- Strip ANSI escape sequences before counting
+- Negative deltas (scroll/clear) treated as zero
+- Use circular buffer of 10 samples for smoothing
+
+#### 2. Pattern Matching
+
+Apply CLI-specific regex patterns in priority order:
+
+```typescript
+// Priority: Error > Compacting > Tool > Thinking > Prompt > Velocity
+
+const PATTERNS = {
+ // Universal - all CLIs
+ thinking: [
+ /⏺\s*Thinking/i,
+ /●\s*Thinking/i,
+ /\.{3,}$/, // Trailing dots
+ /[⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏]/, // Braille spinner
+ ],
+
+ tool_executing: [
+ /●\s*(Read|Write|Edit|Bash|Glob|Grep|Task)/,
+ /⏺\s*(Read|Write|Edit|Bash|Glob|Grep|Task)/,
+ ],
+
+ error: [
+ /rate.?limit/i,
+ /429|too many requests/i,
+ /API.?error|exception/i,
+ /panic:|SIGSEGV|fatal/i,
+ /connection (refused|timeout)/i,
+ ],
+
+ compacting: [
+ /context.*compact/i,
+ /auto-compact triggered/i,
+ /summarizing.*conversation/i,
+ ],
+
+ // CLI-specific prompts
+ claude_prompt: [
+ /claude\s*>?\s*$/i,
+ /╰─>\s*$/,
+ ],
+
+ codex_prompt: [
+ /codex\s*>?\s*$/i,
+ /\$\s*$/,
+ ],
+
+ gemini_prompt: [
+ /gemini\s*>?\s*$/i,
+ />>>\s*$/,
+ ],
+};
+```
+
+#### 3. Temporal Analysis
+
+Track time in current state to detect stalls:
+
+```typescript
+// Stall thresholds per state
+const STALL_THRESHOLDS = {
+ thinking: 60_000, // 1 min thinking is suspicious
+ tool_executing: 300_000, // 5 min tool execution suspicious
+ generating: 30_000, // 30s no new output while "generating"
+ default: 120_000, // 2 min default
+};
+```
+
+### Hysteresis (Anti-Flicker)
+
+Prevent rapid state oscillation:
+- Transitions require **2 seconds of stability**
+- **Exception:** Error states activate immediately (safety-critical)
+- First detection establishes baseline immediately
+
+### Data Structures
+
+```typescript
+interface ActivitySnapshot {
+ state: ActivityState;
+ since: number; // Timestamp when entered this state
+ duration: number; // Time in current state (ms)
+ confidence: number; // 0-1, detection confidence
+ tool?: string; // If tool_executing, which tool
+ error?: string; // If error, the message
+ velocity: number; // Current chars/sec
+ tokenVelocity: number; // Estimated tokens/min (~velocity/4*60)
+}
+
+interface StateTransition {
+ from: ActivityState;
+ to: ActivityState;
+ timestamp: number;
+ trigger: string; // What caused transition
+}
+```
+
+### Integration with Pattern Analyzer
+
+The Activity State Detector and Pattern Analyzer work together:
+
+```
+Log/Output Stream
+ │
+ ▼
+┌──────────────────────┐
+│ Activity State │ ← Fast (< 100ms)
+│ Detector │ ← Deterministic
+│ (velocity + patterns)│ ← Runs every poll
+└──────────┬───────────┘
+ │
+ ▼
+ state = 'stalled' OR
+ state = 'error' OR
+ duration > threshold?
+ │
+ yes │ no
+ │ └─────────────────────────────┐
+ ▼ │
+┌──────────────────────┐ │
+│ Pattern Analyzer │ ← Slower (1-5s) │
+│ (LLM-powered) │ ← Expensive │
+│ │ ← Only when needed │
+└──────────┬───────────┘ │
+ │ │
+ ▼ │
+ recommendation continue
+ (remind/escalate/ monitoring
+ restart/none)
+```
+
+**Benefits:**
+- Fast detection: < 100ms vs 1-5s for LLM
+- Cost reduction: Only invoke LLM when needed
+- Accuracy: Deterministic for known patterns, LLM for ambiguous cases
+- Real-time dashboard: Activity state updates every poll cycle
+
+### Health Score Computation
+
+Aggregate activity signals into a composite health score:
+
+```typescript
+function computeHealth(activity: ActivitySnapshot): HealthState {
+ // Priority order (highest = most severe)
+
+ if (activity.state === 'error') {
+ return { status: 'unhealthy', reason: activity.error };
+ }
+
+ if (activity.state === 'stalled' && activity.duration > 300_000) {
+ return { status: 'unhealthy', reason: 'Stalled > 5 min' };
+ }
+
+ if (activity.state === 'stalled') {
+ return { status: 'degraded', reason: 'Agent stalled' };
+ }
+
+ if (activity.state === 'idle' && activity.duration > 300_000) {
+ return { status: 'degraded', reason: 'Idle > 5 min' };
+ }
+
+ return { status: 'healthy' };
+}
+```
+
+### Dashboard Display
+
+Real-time activity state in agent cards:
+
+```
+┌─────────────────────────────────────────┐
+│ Agent: Frontend │
+│ ┌─────┐ │
+│ │ 🟢 │ generating (2m 34s) │
+│ └─────┘ │
+│ Velocity: 847 tok/min │
+│ Tool: — │
+│ Health: healthy │
+└─────────────────────────────────────────┘
+```
+
+State indicators:
+- 🔵 `waiting` - Ready for input
+- 🟡 `thinking` - Processing (pulse animation)
+- 🟢 `generating` - Active output
+- 🟣 `tool_executing` - Running tool
+- 🟠 `stalled` - Needs attention (pulse animation)
+- 🔴 `error` - Error state
+- ⚪ `idle` - No recent activity
+
+### Configuration
+
+```typescript
+interface ActivityDetectorConfig {
+ // Velocity thresholds
+ highVelocityThreshold: number; // Default: 10.0 chars/sec
+ mediumVelocityThreshold: number; // Default: 2.0 chars/sec
+ idleVelocityThreshold: number; // Default: 1.0 chars/sec
+
+ // Time thresholds
+ stallThresholdMs: number; // Default: 30000 (30s)
+ idleThresholdMs: number; // Default: 30000 (30s)
+ hysteresisMs: number; // Default: 2000 (2s)
+
+ // Buffer sizes
+ velocitySampleCount: number; // Default: 10
+ transitionHistoryCount: number; // Default: 20
+
+ // CLI type (affects pattern selection)
+ cliType: 'claude' | 'codex' | 'gemini' | 'other';
+}
+```
+
+### Implementation Notes
+
+1. **ANSI stripping**: Use a robust ANSI stripper before analysis
+2. **Unicode handling**: Count runes, not bytes
+3. **Buffer management**: Circular buffers with fixed size
+4. **Thread safety**: Detector may be called from poll loop and API
+5. **Metrics**: Emit Prometheus metrics for state durations
+6. **Events**: Publish state transitions to event bus
+
+---
+
+## Beads Task Addition
+
+Add to `docs/PROGRESS_TRACKER_BEADS_TASKS.md`:
+
+```
+## Task: activity-state-detector
+parent: progress-tracker
+effort: 8h
+priority: high
+
+Implement fast, deterministic activity state detection layer.
+
+### Subtasks
+- [ ] ActivityState enum and types (1h)
+- [ ] VelocityTracker with circular buffer (2h)
+- [ ] CLI-specific pattern definitions (1h)
+- [ ] State machine with hysteresis (2h)
+- [ ] Health score computation (1h)
+- [ ] Integration with polling loop (1h)
+
+### Acceptance Criteria
+- [ ] Detects all 8 activity states
+- [ ] < 100ms detection latency
+- [ ] Hysteresis prevents state flicker
+- [ ] Dashboard shows real-time activity
+- [ ] Unit tests for all state transitions
+```
+
+---
+
+## References
+
+- [NTM Activity Detection](https://github.com/Dicklesworthstone/ntm) - Inspiration for velocity + pattern approach
+- Competitor analysis in `docs/competitive/NTM_ANALYSIS.md` (proposed)
diff --git a/package.json b/package.json
index 24d5e6ec..78103215 100644
--- a/package.json
+++ b/package.json
@@ -41,6 +41,8 @@
"clean": "rm -rf dist",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
+ "db:migrate:run": "node scripts/run-migrations.js",
+ "db:migrate:verify": "node scripts/verify-schema.js",
"db:push": "drizzle-kit push",
"db:studio": "drizzle-kit studio",
"services:up": "docker compose -f docker-compose.dev.yml up -d postgres redis && echo '✓ Postgres and Redis running'",
diff --git a/scripts/run-migrations.js b/scripts/run-migrations.js
new file mode 100644
index 00000000..e2a865d7
--- /dev/null
+++ b/scripts/run-migrations.js
@@ -0,0 +1,43 @@
+#!/usr/bin/env node
+/**
+ * Run database migrations (standalone)
+ *
+ * This script is used in CI to verify migrations run successfully.
+ * It connects to the database and runs all pending migrations.
+ *
+ * This is a standalone script that doesn't depend on the cloud config,
+ * so it only requires DATABASE_URL to run.
+ *
+ * Usage: DATABASE_URL=postgres://... node scripts/run-migrations.js
+ */
+
+import pg from 'pg';
+import { drizzle } from 'drizzle-orm/node-postgres';
+import { migrate } from 'drizzle-orm/node-postgres/migrator';
+
+const { Pool } = pg;
+
+async function main() {
+ console.log('Starting database migrations...');
+ console.log(`Database URL: ${process.env.DATABASE_URL?.replace(/:[^:@]+@/, ':***@') || 'not set'}`);
+
+ if (!process.env.DATABASE_URL) {
+ console.error('ERROR: DATABASE_URL environment variable is required');
+ process.exit(1);
+ }
+
+ const pool = new Pool({ connectionString: process.env.DATABASE_URL });
+ const db = drizzle(pool);
+
+ try {
+ await migrate(db, { migrationsFolder: './src/cloud/db/migrations' });
+ console.log('All migrations completed successfully');
+ } catch (error) {
+ console.error('Migration failed:', error);
+ process.exit(1);
+ } finally {
+ await pool.end();
+ }
+}
+
+main();
diff --git a/scripts/verify-schema.js b/scripts/verify-schema.js
new file mode 100644
index 00000000..561959ee
--- /dev/null
+++ b/scripts/verify-schema.js
@@ -0,0 +1,134 @@
+#!/usr/bin/env node
+/**
+ * Verify database schema after migrations
+ *
+ * This script verifies that all expected tables exist after migrations.
+ * It dynamically reads table definitions from the schema to avoid hardcoding.
+ *
+ * Usage: DATABASE_URL=postgres://... node scripts/verify-schema.js
+ */
+
+import pg from 'pg';
+import * as schema from '../dist/cloud/db/schema.js';
+
+const { Pool } = pg;
+
+/**
+ * Extract table names from the schema module.
+ * Drizzle pgTable objects store their name in Symbol.for('drizzle:Name').
+ */
+function getTablesFromSchema() {
+ const tables = [];
+ const drizzleNameSymbol = Symbol.for('drizzle:Name');
+
+ for (const [key, value] of Object.entries(schema)) {
+ // Skip relation definitions (they end with 'Relations')
+ if (key.endsWith('Relations')) continue;
+
+ // Drizzle tables have the table name in a Symbol
+ if (value && typeof value === 'object' && value[drizzleNameSymbol]) {
+ tables.push(value[drizzleNameSymbol]);
+ }
+ }
+ return tables;
+}
+
+// Dynamically get tables from schema
+const SCHEMA_TABLES = getTablesFromSchema();
+const EXPECTED_TABLES = [...SCHEMA_TABLES];
+
+// Key columns to spot-check (subset of critical columns)
+const EXPECTED_COLUMNS = {
+ users: ['id', 'email', 'created_at'],
+ workspaces: ['id', 'user_id', 'name', 'status'],
+ linked_daemons: ['id', 'user_id', 'workspace_id', 'status'],
+};
+
+async function main() {
+ console.log('Verifying database schema...\n');
+
+ if (!process.env.DATABASE_URL) {
+ console.error('ERROR: DATABASE_URL environment variable is required');
+ process.exit(1);
+ }
+
+ console.log(`Found ${SCHEMA_TABLES.length} tables in schema.ts:`);
+ console.log(` ${SCHEMA_TABLES.join(', ')}\n`);
+
+ const pool = new Pool({ connectionString: process.env.DATABASE_URL });
+
+ try {
+ // Get all tables in the public schema
+ const tablesResult = await pool.query(`
+ SELECT table_name
+ FROM information_schema.tables
+ WHERE table_schema = 'public'
+ ORDER BY table_name
+ `);
+
+ const existingTables = tablesResult.rows.map((r) => r.table_name);
+ console.log('Existing tables:', existingTables.join(', '));
+ console.log('');
+
+ // Check for missing tables
+ const missingTables = EXPECTED_TABLES.filter((t) => !existingTables.includes(t));
+ if (missingTables.length > 0) {
+ console.error('MISSING TABLES:', missingTables.join(', '));
+ process.exit(1);
+ }
+ console.log(`All ${EXPECTED_TABLES.length} expected tables exist`);
+
+ // Verify key columns
+ console.log('\nVerifying key columns...');
+ for (const [table, columns] of Object.entries(EXPECTED_COLUMNS)) {
+ const columnsResult = await pool.query(
+ `
+ SELECT column_name
+ FROM information_schema.columns
+ WHERE table_schema = 'public' AND table_name = $1
+ `,
+ [table]
+ );
+
+ const existingColumns = columnsResult.rows.map((r) => r.column_name);
+ const missingColumns = columns.filter((c) => !existingColumns.includes(c));
+
+ if (missingColumns.length > 0) {
+ console.error(`Table '${table}' missing columns: ${missingColumns.join(', ')}`);
+ console.error(`Existing columns: ${existingColumns.join(', ')}`);
+ process.exit(1);
+ }
+ console.log(` ${table}: OK (${columns.length} key columns verified)`);
+ }
+
+ // Check migration history (table may be in public or drizzle schema)
+ try {
+ // Try public schema first, then drizzle schema
+ let migrationsResult;
+ try {
+ migrationsResult = await pool.query(`
+ SELECT id, hash, created_at FROM public.__drizzle_migrations ORDER BY created_at
+ `);
+ } catch {
+ migrationsResult = await pool.query(`
+ SELECT id, hash, created_at FROM drizzle.__drizzle_migrations ORDER BY created_at
+ `);
+ }
+ console.log(`\nMigration history: ${migrationsResult.rows.length} migrations applied`);
+ for (const row of migrationsResult.rows) {
+ console.log(` - ${row.id} (${new Date(Number(row.created_at)).toISOString()})`);
+ }
+ } catch {
+ console.log('\nMigration history: (table not found, but migrations ran successfully)');
+ }
+
+ console.log('\nSchema verification passed!');
+ } catch (error) {
+ console.error('Schema verification failed:', error);
+ process.exit(1);
+ } finally {
+ await pool.end();
+ }
+}
+
+main();
diff --git a/src/cloud/api/daemons.ts b/src/cloud/api/daemons.ts
index dd2388ed..484344c1 100644
--- a/src/cloud/api/daemons.ts
+++ b/src/cloud/api/daemons.ts
@@ -45,13 +45,28 @@ function hashApiKey(apiKey: string): string {
*/
daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => {
const userId = req.session.userId!;
- const { name, machineId, metadata } = req.body;
+ const { name, machineId, metadata, workspaceId } = req.body;
if (!machineId || typeof machineId !== 'string') {
return res.status(400).json({ error: 'machineId is required' });
}
try {
+ // Validate workspace ownership if provided
+ if (workspaceId) {
+ const workspace = await db.workspaces.findById(workspaceId);
+ if (!workspace) {
+ return res.status(404).json({ error: 'Workspace not found' });
+ }
+ if (workspace.userId !== userId) {
+ // Check if user is a member of the workspace
+ const member = await db.workspaceMembers.findMembership(workspaceId, userId);
+ if (!member) {
+ return res.status(403).json({ error: 'Not authorized to link to this workspace' });
+ }
+ }
+ }
+
// Check if this machine is already linked
const existing = await db.linkedDaemons.findByMachineId(userId, machineId);
@@ -63,6 +78,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
await db.linkedDaemons.update(existing.id, {
name: name || existing.name,
apiKeyHash,
+ workspaceId: workspaceId || existing.workspaceId,
metadata: metadata || existing.metadata,
status: 'online',
lastSeenAt: new Date(),
@@ -71,6 +87,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
return res.json({
success: true,
daemonId: existing.id,
+ workspaceId: workspaceId || existing.workspaceId,
apiKey, // Only returned once!
message: 'Daemon re-linked with new API key',
});
@@ -82,6 +99,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
const daemon = await db.linkedDaemons.create({
userId,
+ workspaceId: workspaceId || null,
name: name || `Daemon on ${machineId.substring(0, 8)}`,
machineId,
apiKeyHash,
@@ -92,6 +110,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
res.status(201).json({
success: true,
daemonId: daemon.id,
+ workspaceId: workspaceId || null,
apiKey, // Only returned once - user must save this!
message: 'Daemon linked successfully. Save your API key - it cannot be retrieved later.',
});
@@ -128,6 +147,64 @@ daemonsRouter.get('/', requireAuth, async (req: Request, res: Response) => {
}
});
+/**
+ * GET /api/daemons/workspace/:workspaceId/agents
+ * Get local agents for a specific workspace
+ */
+daemonsRouter.get('/workspace/:workspaceId/agents', requireAuth, async (req: Request, res: Response) => {
+ const userId = req.session.userId!;
+ const { workspaceId } = req.params;
+
+ try {
+ // Verify user has access to this workspace
+ const workspace = await db.workspaces.findById(workspaceId);
+ if (!workspace) {
+ return res.status(404).json({ error: 'Workspace not found' });
+ }
+
+ // Check if user owns the workspace or is a member
+ if (workspace.userId !== userId) {
+ const member = await db.workspaceMembers.findMembership(workspaceId, userId);
+ if (!member) {
+ return res.status(403).json({ error: 'Not authorized to access this workspace' });
+ }
+ }
+
+ // Get all linked daemons for this workspace
+ const daemons = await db.linkedDaemons.findByWorkspaceId(workspaceId);
+
+ // Extract agents from each daemon's metadata
+ const localAgents = daemons.flatMap((daemon) => {
+    const metadata = daemon.metadata as Record<string, unknown> | null;
+ const agents = (metadata?.agents as Array<{ name: string; status: string }>) || [];
+ return agents.map((agent) => ({
+ name: agent.name,
+ status: agent.status,
+ isLocal: true,
+ daemonId: daemon.id,
+ daemonName: daemon.name,
+ daemonStatus: daemon.status,
+ machineId: daemon.machineId,
+ lastSeenAt: daemon.lastSeenAt,
+ }));
+ });
+
+ res.json({
+ agents: localAgents,
+ daemons: daemons.map((d) => ({
+ id: d.id,
+ name: d.name,
+ machineId: d.machineId,
+ status: d.status,
+ lastSeenAt: d.lastSeenAt,
+ })),
+ });
+ } catch (error) {
+ console.error('Error fetching local agents:', error);
+ res.status(500).json({ error: 'Failed to fetch local agents' });
+ }
+});
+
/**
* DELETE /api/daemons/:id
* Unlink a daemon
diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts
index d2731f87..3fc5a5c6 100644
--- a/src/cloud/db/drizzle.ts
+++ b/src/cloud/db/drizzle.ts
@@ -639,6 +639,7 @@ export interface DaemonUpdate {
export interface LinkedDaemonQueries {
  findById(id: string): Promise<schema.LinkedDaemon | undefined>;
  findByUserId(userId: string): Promise<schema.LinkedDaemon[]>;
+ findByWorkspaceId(workspaceId: string): Promise<schema.LinkedDaemon[]>;
  findByMachineId(userId: string, machineId: string): Promise<schema.LinkedDaemon | undefined>;
  findByApiKeyHash(apiKeyHash: string): Promise<schema.LinkedDaemon | undefined>;
  create(data: schema.NewLinkedDaemon): Promise<schema.LinkedDaemon>;
@@ -647,6 +648,7 @@ export interface LinkedDaemonQueries {
  delete(id: string): Promise<void>;
  markStale(): Promise<number>;
  getAllAgentsForUser(userId: string): Promise;
+ getAgentsForWorkspace(workspaceId: string): Promise<Array<{ daemonId: string; daemonName: string; machineId: string; agents: Array<{ name: string; status: string }> }>>;
  getPendingUpdates(id: string): Promise<DaemonUpdate[]>;
  queueUpdate(id: string, update: DaemonUpdate): Promise<void>;
  queueMessage(id: string, message: Record<string, unknown>): Promise<void>;
@@ -670,6 +672,15 @@ export const linkedDaemonQueries: LinkedDaemonQueries = {
.orderBy(desc(schema.linkedDaemons.lastSeenAt));
},
+ async findByWorkspaceId(workspaceId: string): Promise<schema.LinkedDaemon[]> {
+ const db = getDb();
+ return db
+ .select()
+ .from(schema.linkedDaemons)
+ .where(eq(schema.linkedDaemons.workspaceId, workspaceId))
+ .orderBy(desc(schema.linkedDaemons.lastSeenAt));
+ },
+
async findByMachineId(userId: string, machineId: string): Promise {
const db = getDb();
const result = await db
@@ -750,6 +761,21 @@ export const linkedDaemonQueries: LinkedDaemonQueries = {
}));
},
+ async getAgentsForWorkspace(workspaceId: string): Promise<Array<{ daemonId: string; daemonName: string; machineId: string; agents: Array<{ name: string; status: string }> }>> {
+ const db = getDb();
+ const daemons = await db
+ .select()
+ .from(schema.linkedDaemons)
+ .where(eq(schema.linkedDaemons.workspaceId, workspaceId));
+
+ return daemons.map((d) => ({
+ daemonId: d.id,
+ daemonName: d.name,
+ machineId: d.machineId,
+ agents: ((d.metadata as Record<string, unknown>)?.agents as Array<{ name: string; status: string }>) || [],
+ }));
+ },
+
async getPendingUpdates(id: string): Promise {
const db = getDb();
const result = await db.select().from(schema.linkedDaemons).where(eq(schema.linkedDaemons.id, id));
diff --git a/src/cloud/db/migrations/0011_linked_daemon_workspace.sql b/src/cloud/db/migrations/0011_linked_daemon_workspace.sql
new file mode 100644
index 00000000..227b87ff
--- /dev/null
+++ b/src/cloud/db/migrations/0011_linked_daemon_workspace.sql
@@ -0,0 +1,8 @@
+-- Add workspaceId to linked_daemons for associating local agents with workspaces
+ALTER TABLE "linked_daemons" ADD COLUMN IF NOT EXISTS "workspace_id" uuid;--> statement-breakpoint
+DO $$ BEGIN
+ ALTER TABLE "linked_daemons" ADD CONSTRAINT "linked_daemons_workspace_id_workspaces_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspaces"("id") ON DELETE set null ON UPDATE no action;
+EXCEPTION
+ WHEN duplicate_object THEN null;
+END $$;--> statement-breakpoint
+CREATE INDEX IF NOT EXISTS "idx_linked_daemons_workspace_id" ON "linked_daemons" USING btree ("workspace_id");
diff --git a/src/cloud/db/migrations/meta/_journal.json b/src/cloud/db/migrations/meta/_journal.json
index ef64ab56..9c1d063b 100644
--- a/src/cloud/db/migrations/meta/_journal.json
+++ b/src/cloud/db/migrations/meta/_journal.json
@@ -71,6 +71,13 @@
"when": 1736208003000,
"tag": "0010_remove_credential_tokens",
"breakpoints": true
+ },
+ {
+ "idx": 10,
+ "version": "5",
+ "when": 1736294400000,
+ "tag": "0011_linked_daemon_workspace",
+ "breakpoints": true
}
]
}
\ No newline at end of file
diff --git a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts
index e49b758d..a5b919fe 100644
--- a/src/cloud/db/schema.ts
+++ b/src/cloud/db/schema.ts
@@ -190,6 +190,7 @@ export const workspacesRelations = relations(workspaces, ({ one, many }) => ({
}),
members: many(workspaceMembers),
repositories: many(repositories),
+ linkedDaemons: many(linkedDaemons),
}));
// ============================================================================
@@ -323,6 +324,7 @@ export const repositoriesRelations = relations(repositories, ({ one }) => ({
export const linkedDaemons = pgTable('linked_daemons', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }),
+ workspaceId: uuid('workspace_id').references(() => workspaces.id, { onDelete: 'set null' }),
name: varchar('name', { length: 255 }).notNull(),
machineId: varchar('machine_id', { length: 255 }).notNull(),
apiKeyHash: varchar('api_key_hash', { length: 255 }).notNull(),
@@ -336,6 +338,7 @@ export const linkedDaemons = pgTable('linked_daemons', {
}, (table) => ({
userMachineIdx: unique('linked_daemons_user_machine_unique').on(table.userId, table.machineId),
userIdIdx: index('idx_linked_daemons_user_id').on(table.userId),
+ workspaceIdIdx: index('idx_linked_daemons_workspace_id').on(table.workspaceId),
apiKeyHashIdx: index('idx_linked_daemons_api_key_hash').on(table.apiKeyHash),
statusIdx: index('idx_linked_daemons_status').on(table.status),
}));
@@ -345,6 +348,10 @@ export const linkedDaemonsRelations = relations(linkedDaemons, ({ one }) => ({
fields: [linkedDaemons.userId],
references: [users.id],
}),
+ workspace: one(workspaces, {
+ fields: [linkedDaemons.workspaceId],
+ references: [workspaces.id],
+ }),
}));
// ============================================================================
diff --git a/src/cloud/server.ts b/src/cloud/server.ts
index 6057884a..1f1f4d44 100644
--- a/src/cloud/server.ts
+++ b/src/cloud/server.ts
@@ -211,11 +211,12 @@ export async function createServer(): Promise {
// Lightweight CSRF protection using session token
const SAFE_METHODS = new Set(['GET', 'HEAD', 'OPTIONS']);
- // Paths exempt from CSRF (webhooks from external services, workspace proxy, local auth callbacks)
+ // Paths exempt from CSRF (webhooks from external services, workspace proxy, local auth callbacks, admin API)
const CSRF_EXEMPT_PATHS = [
'/api/webhooks/',
'/api/auth/nango/webhook',
'/api/auth/codex-helper/callback',
+ '/api/admin/', // Admin API uses X-Admin-Secret header auth
];
// Additional pattern for workspace proxy routes (contains /proxy/)
const isWorkspaceProxyRoute = (path: string) => /^\/api\/workspaces\/[^/]+\/proxy\//.test(path);
@@ -372,6 +373,7 @@ export async function createServer(): Promise {
let scalingOrchestrator: ScalingOrchestrator | null = null;
let computeEnforcement: ComputeEnforcementService | null = null;
let introExpiration: IntroExpirationService | null = null;
+ let daemonStaleCheckInterval: ReturnType<typeof setInterval> | null = null;
// Create HTTP server for WebSocket upgrade handling
const httpServer = http.createServer(app);
@@ -749,6 +751,20 @@ export async function createServer(): Promise {
}
}
+ // Start daemon stale check (mark daemons offline if no heartbeat for 2+ minutes)
+ // Runs every 60 seconds regardless of RELAY_CLOUD_ENABLED
+ daemonStaleCheckInterval = setInterval(async () => {
+ try {
+ const count = await db.linkedDaemons.markStale();
+ if (count > 0) {
+ console.log(`[cloud] Marked ${count} daemon(s) as offline (stale)`);
+ }
+ } catch (error) {
+ console.error('[cloud] Failed to mark stale daemons:', error);
+ }
+ }, 60_000); // Every 60 seconds
+ console.log('[cloud] Daemon stale check started (60s interval)');
+
return new Promise((resolve) => {
server = httpServer.listen(config.port, () => {
console.log(`Agent Relay Cloud running on port ${config.port}`);
@@ -775,6 +791,12 @@ export async function createServer(): Promise {
introExpiration.stop();
}
+ // Stop daemon stale check
+ if (daemonStaleCheckInterval) {
+ clearInterval(daemonStaleCheckInterval);
+ daemonStaleCheckInterval = null;
+ }
+
// Close WebSocket server
wssPresence.close();
diff --git a/src/dashboard/app/cloud/link/page.tsx b/src/dashboard/app/cloud/link/page.tsx
index b200de97..69e13ba3 100644
--- a/src/dashboard/app/cloud/link/page.tsx
+++ b/src/dashboard/app/cloud/link/page.tsx
@@ -23,9 +23,16 @@ interface MachineInfo {
machineName: string;
}
+interface Workspace {
+ id: string;
+ name: string;
+ status: string;
+}
+
interface LinkResult {
apiKey: string;
daemonId: string;
+ workspaceId: string | null;
}
// Loading fallback for Suspense
@@ -48,6 +55,8 @@ function CloudLinkContent() {
  const [linkResult, setLinkResult] = useState<LinkResult | null>(null);
  const [error, setError] = useState('');
  const [copied, setCopied] = useState(false);
+ const [workspaces, setWorkspaces] = useState<Workspace[]>([]);
+ const [selectedWorkspaceId, setSelectedWorkspaceId] = useState<string | null>(null);
// Extract machine info from URL params
useEffect(() => {
@@ -67,8 +76,15 @@ function CloudLinkContent() {
const checkAuth = async () => {
try {
- const data = await api.get<{ userId?: string }>('/api/auth/session');
- if (data.userId) {
+ const data = await api.get<{ authenticated?: boolean; user?: { id: string } }>('/api/auth/session');
+ if (data.authenticated && data.user?.id) {
+ // Fetch user's workspaces
+ const workspacesData = await api.get<{ workspaces: Workspace[] }>('/api/workspaces');
+ setWorkspaces(workspacesData.workspaces || []);
+ // Auto-select first workspace if only one exists
+ if (workspacesData.workspaces?.length === 1) {
+ setSelectedWorkspaceId(workspacesData.workspaces[0].id);
+ }
setState('ready');
} else {
setState('auth-required');
@@ -82,13 +98,20 @@ function CloudLinkContent() {
const handleLink = async () => {
if (!machineInfo) return;
+ // Require workspace selection if workspaces exist
+ if (workspaces.length > 0 && !selectedWorkspaceId) {
+ setError('Please select a workspace');
+ return;
+ }
+
setState('linking');
setError('');
try {
- const result = await api.post<{ apiKey: string; daemonId: string }>('/api/daemons/link', {
+ const result = await api.post<{ apiKey: string; daemonId: string; workspaceId: string | null }>('/api/daemons/link', {
machineId: machineInfo.machineId,
name: machineInfo.machineName,
+ workspaceId: selectedWorkspaceId,
metadata: {
linkedVia: 'cli',
userAgent: navigator.userAgent,
@@ -98,6 +121,7 @@ function CloudLinkContent() {
setLinkResult({
apiKey: result.apiKey,
daemonId: result.daemonId,
+ workspaceId: result.workspaceId,
});
setState('success');
} catch (err: any) {
@@ -221,6 +245,60 @@ function CloudLinkContent() {
+ {/* Workspace selector */}
+ {workspaces.length > 0 && (
+
+
+
+ Local agents from this machine will appear in the selected workspace's dashboard.
+