diff --git a/.github/workflows/migrations.yml b/.github/workflows/migrations.yml new file mode 100644 index 00000000..b82870b0 --- /dev/null +++ b/.github/workflows/migrations.yml @@ -0,0 +1,64 @@ +name: Database Migrations + +on: + push: + branches: [main] + paths: + - 'src/cloud/db/**' + - 'drizzle.config.ts' + - '.github/workflows/migrations.yml' + pull_request: + branches: [main] + paths: + - 'src/cloud/db/**' + - 'drizzle.config.ts' + - '.github/workflows/migrations.yml' + # Allow manual trigger + workflow_dispatch: + +jobs: + migrations: + name: Run Migrations + runs-on: ubuntu-latest + + services: + postgres: + image: postgres:16 + env: + POSTGRES_USER: agent_relay + POSTGRES_PASSWORD: test_password + POSTGRES_DB: agent_relay_test + ports: + - 5432:5432 + # Health check to wait for postgres to be ready + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'npm' + + - name: Install dependencies + run: npm ci + + - name: Build project + run: npm run build + + - name: Run migrations + env: + DATABASE_URL: postgres://agent_relay:test_password@localhost:5432/agent_relay_test + run: node scripts/run-migrations.js + + - name: Verify schema + env: + DATABASE_URL: postgres://agent_relay:test_password@localhost:5432/agent_relay_test + run: node scripts/verify-schema.js diff --git a/package.json b/package.json index 24d5e6ec..78103215 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,8 @@ "clean": "rm -rf dist", "db:generate": "drizzle-kit generate", "db:migrate": "drizzle-kit migrate", + "db:migrate:run": "node scripts/run-migrations.js", + "db:migrate:verify": "node scripts/verify-schema.js", "db:push": "drizzle-kit push", "db:studio": "drizzle-kit studio", "services:up": "docker compose -f docker-compose.dev.yml up -d postgres redis && echo '✓ 
Postgres and Redis running'", diff --git a/scripts/run-migrations.js b/scripts/run-migrations.js new file mode 100644 index 00000000..e2a865d7 --- /dev/null +++ b/scripts/run-migrations.js @@ -0,0 +1,43 @@ +#!/usr/bin/env node +/** + * Run database migrations (standalone) + * + * This script is used in CI to verify migrations run successfully. + * It connects to the database and runs all pending migrations. + * + * This is a standalone script that doesn't depend on the cloud config, + * so it only requires DATABASE_URL to run. + * + * Usage: DATABASE_URL=postgres://... node scripts/run-migrations.js + */ + +import pg from 'pg'; +import { drizzle } from 'drizzle-orm/node-postgres'; +import { migrate } from 'drizzle-orm/node-postgres/migrator'; + +const { Pool } = pg; + +async function main() { + console.log('Starting database migrations...'); + console.log(`Database URL: ${process.env.DATABASE_URL?.replace(/:[^:@]+@/, ':***@') || 'not set'}`); + + if (!process.env.DATABASE_URL) { + console.error('ERROR: DATABASE_URL environment variable is required'); + process.exit(1); + } + + const pool = new Pool({ connectionString: process.env.DATABASE_URL }); + const db = drizzle(pool); + + try { + await migrate(db, { migrationsFolder: './src/cloud/db/migrations' }); + console.log('All migrations completed successfully'); + } catch (error) { + console.error('Migration failed:', error); + process.exit(1); + } finally { + await pool.end(); + } +} + +main(); diff --git a/scripts/verify-schema.js b/scripts/verify-schema.js new file mode 100644 index 00000000..561959ee --- /dev/null +++ b/scripts/verify-schema.js @@ -0,0 +1,134 @@ +#!/usr/bin/env node +/** + * Verify database schema after migrations + * + * This script verifies that all expected tables exist after migrations. + * It dynamically reads table definitions from the schema to avoid hardcoding. + * + * Usage: DATABASE_URL=postgres://... 
node scripts/verify-schema.js + */ + +import pg from 'pg'; +import * as schema from '../dist/cloud/db/schema.js'; + +const { Pool } = pg; + +/** + * Extract table names from the schema module. + * Drizzle pgTable objects store their name in Symbol.for('drizzle:Name'). + */ +function getTablesFromSchema() { + const tables = []; + const drizzleNameSymbol = Symbol.for('drizzle:Name'); + + for (const [key, value] of Object.entries(schema)) { + // Skip relation definitions (they end with 'Relations') + if (key.endsWith('Relations')) continue; + + // Drizzle tables have the table name in a Symbol + if (value && typeof value === 'object' && value[drizzleNameSymbol]) { + tables.push(value[drizzleNameSymbol]); + } + } + return tables; +} + +// Dynamically get tables from schema +const SCHEMA_TABLES = getTablesFromSchema(); +const EXPECTED_TABLES = [...SCHEMA_TABLES]; + +// Key columns to spot-check (subset of critical columns) +const EXPECTED_COLUMNS = { + users: ['id', 'email', 'created_at'], + workspaces: ['id', 'user_id', 'name', 'status'], + linked_daemons: ['id', 'user_id', 'workspace_id', 'status'], +}; + +async function main() { + console.log('Verifying database schema...\n'); + + if (!process.env.DATABASE_URL) { + console.error('ERROR: DATABASE_URL environment variable is required'); + process.exit(1); + } + + console.log(`Found ${SCHEMA_TABLES.length} tables in schema.ts:`); + console.log(` ${SCHEMA_TABLES.join(', ')}\n`); + + const pool = new Pool({ connectionString: process.env.DATABASE_URL }); + + try { + // Get all tables in the public schema + const tablesResult = await pool.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + ORDER BY table_name + `); + + const existingTables = tablesResult.rows.map((r) => r.table_name); + console.log('Existing tables:', existingTables.join(', ')); + console.log(''); + + // Check for missing tables + const missingTables = EXPECTED_TABLES.filter((t) => !existingTables.includes(t)); + 
if (missingTables.length > 0) { + console.error('MISSING TABLES:', missingTables.join(', ')); + process.exit(1); + } + console.log(`All ${EXPECTED_TABLES.length} expected tables exist`); + + // Verify key columns + console.log('\nVerifying key columns...'); + for (const [table, columns] of Object.entries(EXPECTED_COLUMNS)) { + const columnsResult = await pool.query( + ` + SELECT column_name + FROM information_schema.columns + WHERE table_schema = 'public' AND table_name = $1 + `, + [table] + ); + + const existingColumns = columnsResult.rows.map((r) => r.column_name); + const missingColumns = columns.filter((c) => !existingColumns.includes(c)); + + if (missingColumns.length > 0) { + console.error(`Table '${table}' missing columns: ${missingColumns.join(', ')}`); + console.error(`Existing columns: ${existingColumns.join(', ')}`); + process.exit(1); + } + console.log(` ${table}: OK (${columns.length} key columns verified)`); + } + + // Check migration history (table may be in public or drizzle schema) + try { + // Try public schema first, then drizzle schema + let migrationsResult; + try { + migrationsResult = await pool.query(` + SELECT id, hash, created_at FROM public.__drizzle_migrations ORDER BY created_at + `); + } catch { + migrationsResult = await pool.query(` + SELECT id, hash, created_at FROM drizzle.__drizzle_migrations ORDER BY created_at + `); + } + console.log(`\nMigration history: ${migrationsResult.rows.length} migrations applied`); + for (const row of migrationsResult.rows) { + console.log(` - ${row.id} (${new Date(Number(row.created_at)).toISOString()})`); + } + } catch { + console.log('\nMigration history: (table not found, but migrations ran successfully)'); + } + + console.log('\nSchema verification passed!'); + } catch (error) { + console.error('Schema verification failed:', error); + process.exit(1); + } finally { + await pool.end(); + } +} + +main(); diff --git a/src/cloud/api/daemons.ts b/src/cloud/api/daemons.ts index dd2388ed..484344c1 100644 
--- a/src/cloud/api/daemons.ts +++ b/src/cloud/api/daemons.ts @@ -45,13 +45,28 @@ function hashApiKey(apiKey: string): string { */ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => { const userId = req.session.userId!; - const { name, machineId, metadata } = req.body; + const { name, machineId, metadata, workspaceId } = req.body; if (!machineId || typeof machineId !== 'string') { return res.status(400).json({ error: 'machineId is required' }); } try { + // Validate workspace ownership if provided + if (workspaceId) { + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + if (workspace.userId !== userId) { + // Check if user is a member of the workspace + const member = await db.workspaceMembers.findMembership(workspaceId, userId); + if (!member) { + return res.status(403).json({ error: 'Not authorized to link to this workspace' }); + } + } + } + // Check if this machine is already linked const existing = await db.linkedDaemons.findByMachineId(userId, machineId); @@ -63,6 +78,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => await db.linkedDaemons.update(existing.id, { name: name || existing.name, apiKeyHash, + workspaceId: workspaceId || existing.workspaceId, metadata: metadata || existing.metadata, status: 'online', lastSeenAt: new Date(), @@ -71,6 +87,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => return res.json({ success: true, daemonId: existing.id, + workspaceId: workspaceId || existing.workspaceId, apiKey, // Only returned once! 
message: 'Daemon re-linked with new API key', }); @@ -82,6 +99,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => const daemon = await db.linkedDaemons.create({ userId, + workspaceId: workspaceId || null, name: name || `Daemon on ${machineId.substring(0, 8)}`, machineId, apiKeyHash, @@ -92,6 +110,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => res.status(201).json({ success: true, daemonId: daemon.id, + workspaceId: workspaceId || null, apiKey, // Only returned once - user must save this! message: 'Daemon linked successfully. Save your API key - it cannot be retrieved later.', }); @@ -128,6 +147,64 @@ daemonsRouter.get('/', requireAuth, async (req: Request, res: Response) => { } }); +/** + * GET /api/daemons/workspace/:workspaceId/agents + * Get local agents for a specific workspace + */ +daemonsRouter.get('/workspace/:workspaceId/agents', requireAuth, async (req: Request, res: Response) => { + const userId = req.session.userId!; + const { workspaceId } = req.params; + + try { + // Verify user has access to this workspace + const workspace = await db.workspaces.findById(workspaceId); + if (!workspace) { + return res.status(404).json({ error: 'Workspace not found' }); + } + + // Check if user owns the workspace or is a member + if (workspace.userId !== userId) { + const member = await db.workspaceMembers.findMembership(workspaceId, userId); + if (!member) { + return res.status(403).json({ error: 'Not authorized to access this workspace' }); + } + } + + // Get all linked daemons for this workspace + const daemons = await db.linkedDaemons.findByWorkspaceId(workspaceId); + + // Extract agents from each daemon's metadata + const localAgents = daemons.flatMap((daemon) => { + const metadata = daemon.metadata as Record | null; + const agents = (metadata?.agents as Array<{ name: string; status: string }>) || []; + return agents.map((agent) => ({ + name: agent.name, + status: agent.status, + isLocal: 
true, + daemonId: daemon.id, + daemonName: daemon.name, + daemonStatus: daemon.status, + machineId: daemon.machineId, + lastSeenAt: daemon.lastSeenAt, + })); + }); + + res.json({ + agents: localAgents, + daemons: daemons.map((d) => ({ + id: d.id, + name: d.name, + machineId: d.machineId, + status: d.status, + lastSeenAt: d.lastSeenAt, + })), + }); + } catch (error) { + console.error('Error fetching local agents:', error); + res.status(500).json({ error: 'Failed to fetch local agents' }); + } +}); + /** * DELETE /api/daemons/:id * Unlink a daemon diff --git a/src/cloud/db/drizzle.ts b/src/cloud/db/drizzle.ts index d2731f87..3fc5a5c6 100644 --- a/src/cloud/db/drizzle.ts +++ b/src/cloud/db/drizzle.ts @@ -639,6 +639,7 @@ export interface DaemonUpdate { export interface LinkedDaemonQueries { findById(id: string): Promise; findByUserId(userId: string): Promise; + findByWorkspaceId(workspaceId: string): Promise; findByMachineId(userId: string, machineId: string): Promise; findByApiKeyHash(apiKeyHash: string): Promise; create(data: schema.NewLinkedDaemon): Promise; @@ -647,6 +648,7 @@ export interface LinkedDaemonQueries { delete(id: string): Promise; markStale(): Promise; getAllAgentsForUser(userId: string): Promise; + getAgentsForWorkspace(workspaceId: string): Promise; getPendingUpdates(id: string): Promise; queueUpdate(id: string, update: DaemonUpdate): Promise; queueMessage(id: string, message: Record): Promise; @@ -670,6 +672,15 @@ export const linkedDaemonQueries: LinkedDaemonQueries = { .orderBy(desc(schema.linkedDaemons.lastSeenAt)); }, + async findByWorkspaceId(workspaceId: string): Promise { + const db = getDb(); + return db + .select() + .from(schema.linkedDaemons) + .where(eq(schema.linkedDaemons.workspaceId, workspaceId)) + .orderBy(desc(schema.linkedDaemons.lastSeenAt)); + }, + async findByMachineId(userId: string, machineId: string): Promise { const db = getDb(); const result = await db @@ -750,6 +761,21 @@ export const linkedDaemonQueries: 
LinkedDaemonQueries = { })); }, + async getAgentsForWorkspace(workspaceId: string): Promise { + const db = getDb(); + const daemons = await db + .select() + .from(schema.linkedDaemons) + .where(eq(schema.linkedDaemons.workspaceId, workspaceId)); + + return daemons.map((d) => ({ + daemonId: d.id, + daemonName: d.name, + machineId: d.machineId, + agents: ((d.metadata as Record)?.agents as Array<{ name: string; status: string }>) || [], + })); + }, + async getPendingUpdates(id: string): Promise { const db = getDb(); const result = await db.select().from(schema.linkedDaemons).where(eq(schema.linkedDaemons.id, id)); diff --git a/src/cloud/db/migrations/0011_linked_daemon_workspace.sql b/src/cloud/db/migrations/0011_linked_daemon_workspace.sql new file mode 100644 index 00000000..227b87ff --- /dev/null +++ b/src/cloud/db/migrations/0011_linked_daemon_workspace.sql @@ -0,0 +1,8 @@ +-- Add workspaceId to linked_daemons for associating local agents with workspaces +ALTER TABLE "linked_daemons" ADD COLUMN IF NOT EXISTS "workspace_id" uuid;--> statement-breakpoint +DO $$ BEGIN + ALTER TABLE "linked_daemons" ADD CONSTRAINT "linked_daemons_workspace_id_workspaces_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspaces"("id") ON DELETE set null ON UPDATE no action; +EXCEPTION + WHEN duplicate_object THEN null; +END $$;--> statement-breakpoint +CREATE INDEX IF NOT EXISTS "idx_linked_daemons_workspace_id" ON "linked_daemons" USING btree ("workspace_id"); diff --git a/src/cloud/db/migrations/meta/_journal.json b/src/cloud/db/migrations/meta/_journal.json index ef64ab56..9c1d063b 100644 --- a/src/cloud/db/migrations/meta/_journal.json +++ b/src/cloud/db/migrations/meta/_journal.json @@ -71,6 +71,13 @@ "when": 1736208003000, "tag": "0010_remove_credential_tokens", "breakpoints": true + }, + { + "idx": 11, + "version": "5", + "when": 1736294400000, + "tag": "0011_linked_daemon_workspace", + "breakpoints": true + } ] } \ No newline at end of file diff --git
a/src/cloud/db/schema.ts b/src/cloud/db/schema.ts index e49b758d..a5b919fe 100644 --- a/src/cloud/db/schema.ts +++ b/src/cloud/db/schema.ts @@ -190,6 +190,7 @@ export const workspacesRelations = relations(workspaces, ({ one, many }) => ({ }), members: many(workspaceMembers), repositories: many(repositories), + linkedDaemons: many(linkedDaemons), })); // ============================================================================ @@ -323,6 +324,7 @@ export const repositoriesRelations = relations(repositories, ({ one }) => ({ export const linkedDaemons = pgTable('linked_daemons', { id: uuid('id').primaryKey().defaultRandom(), userId: uuid('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), + workspaceId: uuid('workspace_id').references(() => workspaces.id, { onDelete: 'set null' }), name: varchar('name', { length: 255 }).notNull(), machineId: varchar('machine_id', { length: 255 }).notNull(), apiKeyHash: varchar('api_key_hash', { length: 255 }).notNull(), @@ -336,6 +338,7 @@ export const linkedDaemons = pgTable('linked_daemons', { }, (table) => ({ userMachineIdx: unique('linked_daemons_user_machine_unique').on(table.userId, table.machineId), userIdIdx: index('idx_linked_daemons_user_id').on(table.userId), + workspaceIdIdx: index('idx_linked_daemons_workspace_id').on(table.workspaceId), apiKeyHashIdx: index('idx_linked_daemons_api_key_hash').on(table.apiKeyHash), statusIdx: index('idx_linked_daemons_status').on(table.status), })); @@ -345,6 +348,10 @@ export const linkedDaemonsRelations = relations(linkedDaemons, ({ one }) => ({ fields: [linkedDaemons.userId], references: [users.id], }), + workspace: one(workspaces, { + fields: [linkedDaemons.workspaceId], + references: [workspaces.id], + }), })); // ============================================================================ diff --git a/src/cloud/server.ts b/src/cloud/server.ts index 69e6c6df..1f1f4d44 100644 --- a/src/cloud/server.ts +++ b/src/cloud/server.ts @@ -373,6 +373,7 @@ export async 
function createServer(): Promise { let scalingOrchestrator: ScalingOrchestrator | null = null; let computeEnforcement: ComputeEnforcementService | null = null; let introExpiration: IntroExpirationService | null = null; + let daemonStaleCheckInterval: ReturnType | null = null; // Create HTTP server for WebSocket upgrade handling const httpServer = http.createServer(app); @@ -750,6 +751,20 @@ export async function createServer(): Promise { } } + // Start daemon stale check (mark daemons offline if no heartbeat for 2+ minutes) + // Runs every 60 seconds regardless of RELAY_CLOUD_ENABLED + daemonStaleCheckInterval = setInterval(async () => { + try { + const count = await db.linkedDaemons.markStale(); + if (count > 0) { + console.log(`[cloud] Marked ${count} daemon(s) as offline (stale)`); + } + } catch (error) { + console.error('[cloud] Failed to mark stale daemons:', error); + } + }, 60_000); // Every 60 seconds + console.log('[cloud] Daemon stale check started (60s interval)'); + return new Promise((resolve) => { server = httpServer.listen(config.port, () => { console.log(`Agent Relay Cloud running on port ${config.port}`); @@ -776,6 +791,12 @@ export async function createServer(): Promise { introExpiration.stop(); } + // Stop daemon stale check + if (daemonStaleCheckInterval) { + clearInterval(daemonStaleCheckInterval); + daemonStaleCheckInterval = null; + } + // Close WebSocket server wssPresence.close(); diff --git a/src/dashboard/app/cloud/link/page.tsx b/src/dashboard/app/cloud/link/page.tsx index cd3a548b..69e13ba3 100644 --- a/src/dashboard/app/cloud/link/page.tsx +++ b/src/dashboard/app/cloud/link/page.tsx @@ -23,9 +23,16 @@ interface MachineInfo { machineName: string; } +interface Workspace { + id: string; + name: string; + status: string; +} + interface LinkResult { apiKey: string; daemonId: string; + workspaceId: string | null; } // Loading fallback for Suspense @@ -48,6 +55,8 @@ function CloudLinkContent() { const [linkResult, setLinkResult] = 
useState(null); const [error, setError] = useState(''); const [copied, setCopied] = useState(false); + const [workspaces, setWorkspaces] = useState([]); + const [selectedWorkspaceId, setSelectedWorkspaceId] = useState(null); // Extract machine info from URL params useEffect(() => { @@ -69,6 +78,13 @@ function CloudLinkContent() { try { const data = await api.get<{ authenticated?: boolean; user?: { id: string } }>('/api/auth/session'); if (data.authenticated && data.user?.id) { + // Fetch user's workspaces + const workspacesData = await api.get<{ workspaces: Workspace[] }>('/api/workspaces'); + setWorkspaces(workspacesData.workspaces || []); + // Auto-select first workspace if only one exists + if (workspacesData.workspaces?.length === 1) { + setSelectedWorkspaceId(workspacesData.workspaces[0].id); + } setState('ready'); } else { setState('auth-required'); @@ -82,13 +98,20 @@ function CloudLinkContent() { const handleLink = async () => { if (!machineInfo) return; + // Require workspace selection if workspaces exist + if (workspaces.length > 0 && !selectedWorkspaceId) { + setError('Please select a workspace'); + return; + } + setState('linking'); setError(''); try { - const result = await api.post<{ apiKey: string; daemonId: string }>('/api/daemons/link', { + const result = await api.post<{ apiKey: string; daemonId: string; workspaceId: string | null }>('/api/daemons/link', { machineId: machineInfo.machineId, name: machineInfo.machineName, + workspaceId: selectedWorkspaceId, metadata: { linkedVia: 'cli', userAgent: navigator.userAgent, @@ -98,6 +121,7 @@ function CloudLinkContent() { setLinkResult({ apiKey: result.apiKey, daemonId: result.daemonId, + workspaceId: result.workspaceId, }); setState('success'); } catch (err: any) { @@ -221,6 +245,60 @@ function CloudLinkContent() { + {/* Workspace selector */} + {workspaces.length > 0 && ( +
+ +

+ Local agents from this machine will appear in the selected workspace's dashboard. +

+
+ {workspaces.map((workspace) => ( + + ))} +
+ {error && !error.includes('Failed') && ( +

{error}

+ )} +
+ )} + + {/* No workspaces warning */} + {workspaces.length === 0 && ( +
+
📁
+
+

+ No workspaces found. Create a workspace first to link this machine. +

+ + Go to Dashboard → + +
+
+ )} + {/* Warning */}
⚠️
@@ -233,7 +311,12 @@ function CloudLinkContent() { {/* Link button */}
{/* API key box */} diff --git a/src/dashboard/react-components/AgentCard.tsx b/src/dashboard/react-components/AgentCard.tsx index e5544e12..215a41b1 100644 --- a/src/dashboard/react-components/AgentCard.tsx +++ b/src/dashboard/react-components/AgentCard.tsx @@ -160,17 +160,28 @@ export function AgentCard({ {/* Agent Info */}
- - {displayName} - +
+ + {displayName} + + {agent.isLocal && ( + + Local + + )} +
{!displayNameOverride && ( - {getAgentBreadcrumb(agent.name)} + {agent.isLocal ? agent.daemonName || agent.machineId : getAgentBreadcrumb(agent.name)} )}
@@ -281,6 +292,15 @@ export function AgentCard({
{displayName} + {agent.isLocal && ( + + Local + + )} {agent.needsAttention && ( ! )} @@ -291,9 +311,9 @@ export function AgentCard({ )}
{showBreadcrumb ? ( - {getAgentBreadcrumb(agent.name)} + {agent.isLocal ? agent.daemonName || agent.machineId : getAgentBreadcrumb(agent.name)} ) : ( - {agent.name} + {agent.isLocal ? agent.daemonName || agent.machineId : agent.name} )} {agent.agentId && ( diff --git a/src/dashboard/react-components/App.tsx b/src/dashboard/react-components/App.tsx index 0f7ca0a7..874ec76e 100644 --- a/src/dashboard/react-components/App.tsx +++ b/src/dashboard/react-components/App.tsx @@ -103,6 +103,9 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { const [activeCloudWorkspaceId, setActiveCloudWorkspaceId] = useState(null); const [isLoadingCloudWorkspaces, setIsLoadingCloudWorkspaces] = useState(false); + // Local agents from linked daemons + const [localAgents, setLocalAgents] = useState([]); + // Fetch cloud workspaces when in cloud mode useEffect(() => { if (!cloudSession?.user) return; @@ -131,6 +134,53 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { return () => clearInterval(interval); }, [cloudSession?.user, activeCloudWorkspaceId]); + // Fetch local agents for the active workspace + useEffect(() => { + if (!cloudSession?.user || !activeCloudWorkspaceId) { + setLocalAgents([]); + return; + } + + const fetchLocalAgents = async () => { + try { + const result = await api.get<{ + agents: Array<{ + name: string; + status: string; + isLocal: boolean; + daemonId: string; + daemonName: string; + daemonStatus: string; + machineId: string; + lastSeenAt: string | null; + }>; + }>(`/api/daemons/workspace/${activeCloudWorkspaceId}/agents`); + + if (result.agents) { + // Convert API response to Agent format + // Agent status is 'online' when daemon is online (agent is connected to daemon) + const agents: Agent[] = result.agents.map((a) => ({ + name: a.name, + status: a.daemonStatus === 'online' ? 
'online' : 'offline', + isLocal: true, + daemonName: a.daemonName, + machineId: a.machineId, + lastSeen: a.lastSeenAt || undefined, + })); + setLocalAgents(agents); + } + } catch (err) { + console.error('Failed to fetch local agents:', err); + setLocalAgents([]); + } + }; + + fetchLocalAgents(); + // Poll for updates every 15 seconds + const interval = setInterval(fetchLocalAgents, 15000); + return () => clearInterval(interval); + }, [cloudSession?.user, activeCloudWorkspaceId]); + // Determine which workspaces to use (cloud mode or orchestrator) const isCloudMode = Boolean(cloudSession?.user); const effectiveWorkspaces = useMemo(() => { @@ -280,19 +330,28 @@ export function App({ wsUrl, orchestratorUrl }: AppProps) { } }, []); - // Merge AI agents with human users from the daemon so the sidebar and notifications include both + // Merge AI agents, human users, and local agents from linked daemons const combinedAgents = useMemo(() => { - const merged = [...(data?.agents ?? []), ...(data?.users ?? [])]; + const merged = [...(data?.agents ?? []), ...(data?.users ?? []), ...localAgents]; const byName = new Map(); for (const agent of merged) { const key = agent.name.toLowerCase(); const existing = byName.get(key); - byName.set(key, existing ? 
{ ...existing, ...agent } : agent); + // Local agents should preserve their isLocal flag when merging + if (existing) { + byName.set(key, { + ...existing, + ...agent, + isLocal: existing.isLocal || agent.isLocal, + }); + } else { + byName.set(key, agent); + } } return Array.from(byName.values()); - }, [data?.agents, data?.users]); + }, [data?.agents, data?.users, localAgents]); // Mark a DM conversation as seen (used for unread badges) const markDmSeen = useCallback((username: string) => { diff --git a/src/dashboard/types/index.ts b/src/dashboard/types/index.ts index 7dfaf856..1d88fd6e 100644 --- a/src/dashboard/types/index.ts +++ b/src/dashboard/types/index.ts @@ -26,6 +26,10 @@ export interface Agent { isStuck?: boolean; // True when agent received message but hasn't responded within threshold isHuman?: boolean; // True if this is a human user, not an AI agent avatarUrl?: string; // Avatar URL for human users + // Local daemon agent fields + isLocal?: boolean; // True if agent is from a linked local daemon + daemonName?: string; // Name of the linked daemon + machineId?: string; // Machine ID of the linked daemon // Profile fields for understanding agent behavior profile?: AgentProfile; }