64 changes: 64 additions & 0 deletions .github/workflows/migrations.yml
@@ -0,0 +1,64 @@
name: Database Migrations

on:
push:
branches: [main]
paths:
- 'src/cloud/db/**'
- 'drizzle.config.ts'
- '.github/workflows/migrations.yml'
pull_request:
branches: [main]
paths:
- 'src/cloud/db/**'
- 'drizzle.config.ts'
- '.github/workflows/migrations.yml'
# Allow manual trigger
workflow_dispatch:

jobs:
migrations:
name: Run Migrations
runs-on: ubuntu-latest

services:
postgres:
image: postgres:16
env:
POSTGRES_USER: agent_relay
POSTGRES_PASSWORD: test_password
POSTGRES_DB: agent_relay_test
ports:
- 5432:5432
# Health check to wait for postgres to be ready
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5

steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
cache: 'npm'

- name: Install dependencies
run: npm ci

- name: Build project
run: npm run build

- name: Run migrations
env:
DATABASE_URL: postgres://agent_relay:test_password@localhost:5432/agent_relay_test
run: node scripts/run-migrations.js

- name: Verify schema
env:
DATABASE_URL: postgres://agent_relay:test_password@localhost:5432/agent_relay_test
run: node scripts/verify-schema.js
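Note: because the job steps run directly on the runner rather than in a container, they reach the Postgres service container through the mapped port, which is why DATABASE_URL in the migration steps points at localhost:5432.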
2 changes: 2 additions & 0 deletions package.json
@@ -41,6 +41,8 @@
"clean": "rm -rf dist",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:migrate:run": "node scripts/run-migrations.js",
"db:migrate:verify": "node scripts/verify-schema.js",
"db:push": "drizzle-kit push",
"db:studio": "drizzle-kit studio",
"services:up": "docker compose -f docker-compose.dev.yml up -d postgres redis && echo '✓ Postgres and Redis running'",
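The two new scripts mirror the CI steps and can be exercised locally, e.g. DATABASE_URL=postgres://agent_relay:test_password@localhost:5432/agent_relay_test npm run db:migrate:run, then npm run db:migrate:verify (assuming a local Postgres with the same credentials as the CI service container).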
43 changes: 43 additions & 0 deletions scripts/run-migrations.js
@@ -0,0 +1,43 @@
#!/usr/bin/env node
/**
* Run database migrations (standalone)
*
* This script is used in CI to verify migrations run successfully.
* It connects to the database and runs all pending migrations.
*
* This is a standalone script that doesn't depend on the cloud config,
* so it only requires DATABASE_URL to run.
*
* Usage: DATABASE_URL=postgres://... node scripts/run-migrations.js
*/

import pg from 'pg';
import { drizzle } from 'drizzle-orm/node-postgres';
import { migrate } from 'drizzle-orm/node-postgres/migrator';

const { Pool } = pg;

async function main() {
console.log('Starting database migrations...');
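  // Log the connection target with the password portion masked as ':***@'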
console.log(`Database URL: ${process.env.DATABASE_URL?.replace(/:[^:@]+@/, ':***@') || 'not set'}`);

if (!process.env.DATABASE_URL) {
console.error('ERROR: DATABASE_URL environment variable is required');
process.exit(1);
}

const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(pool);

try {
await migrate(db, { migrationsFolder: './src/cloud/db/migrations' });
console.log('All migrations completed successfully');
} catch (error) {
console.error('Migration failed:', error);
process.exit(1);
} finally {
await pool.end();
}
}

main();
134 changes: 134 additions & 0 deletions scripts/verify-schema.js
@@ -0,0 +1,134 @@
#!/usr/bin/env node
/**
* Verify database schema after migrations
*
* This script verifies that all expected tables exist after migrations.
* It dynamically reads table definitions from the schema to avoid hardcoding.
*
* Usage: DATABASE_URL=postgres://... node scripts/verify-schema.js
*/

import pg from 'pg';
import * as schema from '../dist/cloud/db/schema.js';

const { Pool } = pg;

/**
* Extract table names from the schema module.
* Drizzle pgTable objects store their name in Symbol.for('drizzle:Name').
*/
function getTablesFromSchema() {
const tables = [];
const drizzleNameSymbol = Symbol.for('drizzle:Name');

for (const [key, value] of Object.entries(schema)) {
// Skip relation definitions (they end with 'Relations')
if (key.endsWith('Relations')) continue;

// Drizzle tables have the table name in a Symbol
if (value && typeof value === 'object' && value[drizzleNameSymbol]) {
tables.push(value[drizzleNameSymbol]);
}
}
return tables;
}
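// For example, a (hypothetical) schema entry like
//   export const users = pgTable('users', { id: uuid('id').primaryKey() });
// exposes its SQL name via users[Symbol.for('drizzle:Name')] === 'users'.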

// Dynamically get tables from schema
const SCHEMA_TABLES = getTablesFromSchema();
const EXPECTED_TABLES = [...SCHEMA_TABLES];

// Key columns to spot-check (subset of critical columns)
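// Note: these are SQL column names (snake_case), not the camelCase property names in schema.ts.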
const EXPECTED_COLUMNS = {
users: ['id', 'email', 'created_at'],
workspaces: ['id', 'user_id', 'name', 'status'],
linked_daemons: ['id', 'user_id', 'workspace_id', 'status'],
};

async function main() {
console.log('Verifying database schema...\n');

if (!process.env.DATABASE_URL) {
console.error('ERROR: DATABASE_URL environment variable is required');
process.exit(1);
}

console.log(`Found ${SCHEMA_TABLES.length} tables in schema.ts:`);
console.log(` ${SCHEMA_TABLES.join(', ')}\n`);

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

try {
// Get all tables in the public schema
const tablesResult = await pool.query(`
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'public'
ORDER BY table_name
`);

const existingTables = tablesResult.rows.map((r) => r.table_name);
console.log('Existing tables:', existingTables.join(', '));
console.log('');

// Check for missing tables
const missingTables = EXPECTED_TABLES.filter((t) => !existingTables.includes(t));
if (missingTables.length > 0) {
console.error('MISSING TABLES:', missingTables.join(', '));
process.exit(1);
}
console.log(`All ${EXPECTED_TABLES.length} expected tables exist`);

// Verify key columns
console.log('\nVerifying key columns...');
for (const [table, columns] of Object.entries(EXPECTED_COLUMNS)) {
const columnsResult = await pool.query(
`
SELECT column_name
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = $1
`,
[table]
);

const existingColumns = columnsResult.rows.map((r) => r.column_name);
const missingColumns = columns.filter((c) => !existingColumns.includes(c));

if (missingColumns.length > 0) {
console.error(`Table '${table}' missing columns: ${missingColumns.join(', ')}`);
console.error(`Existing columns: ${existingColumns.join(', ')}`);
process.exit(1);
}
console.log(` ${table}: OK (${columns.length} key columns verified)`);
}

// Check migration history (table may be in public or drizzle schema)
try {
// Try public schema first, then drizzle schema
let migrationsResult;
try {
migrationsResult = await pool.query(`
SELECT id, hash, created_at FROM public.__drizzle_migrations ORDER BY created_at
`);
} catch {
migrationsResult = await pool.query(`
SELECT id, hash, created_at FROM drizzle.__drizzle_migrations ORDER BY created_at
`);
}
console.log(`\nMigration history: ${migrationsResult.rows.length} migrations applied`);
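      // drizzle-kit stores created_at as a bigint of epoch milliseconds, hence Number(...) below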
for (const row of migrationsResult.rows) {
console.log(` - ${row.id} (${new Date(Number(row.created_at)).toISOString()})`);
}
} catch {
      console.log('\nMigration history: journal table not found in public or drizzle schema; skipping check');
}

console.log('\nSchema verification passed!');
} catch (error) {
console.error('Schema verification failed:', error);
process.exit(1);
} finally {
await pool.end();
}
}

main();
79 changes: 78 additions & 1 deletion src/cloud/api/daemons.ts
@@ -45,13 +45,28 @@ function hashApiKey(apiKey: string): string {
*/
daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) => {
const userId = req.session.userId!;
const { name, machineId, metadata } = req.body;
const { name, machineId, metadata, workspaceId } = req.body;

if (!machineId || typeof machineId !== 'string') {
return res.status(400).json({ error: 'machineId is required' });
}

try {
// Validate workspace ownership if provided
if (workspaceId) {
const workspace = await db.workspaces.findById(workspaceId);
if (!workspace) {
return res.status(404).json({ error: 'Workspace not found' });
}
if (workspace.userId !== userId) {
// Check if user is a member of the workspace
const member = await db.workspaceMembers.findMembership(workspaceId, userId);
if (!member) {
return res.status(403).json({ error: 'Not authorized to link to this workspace' });
}
}
}

// Check if this machine is already linked
const existing = await db.linkedDaemons.findByMachineId(userId, machineId);

@@ -63,6 +78,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
await db.linkedDaemons.update(existing.id, {
name: name || existing.name,
apiKeyHash,
workspaceId: workspaceId || existing.workspaceId,
metadata: metadata || existing.metadata,
status: 'online',
lastSeenAt: new Date(),
@@ -71,6 +87,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
return res.json({
success: true,
daemonId: existing.id,
workspaceId: workspaceId || existing.workspaceId,
apiKey, // Only returned once!
message: 'Daemon re-linked with new API key',
});
@@ -82,6 +99,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>

const daemon = await db.linkedDaemons.create({
userId,
workspaceId: workspaceId || null,
name: name || `Daemon on ${machineId.substring(0, 8)}`,
machineId,
apiKeyHash,
@@ -92,6 +110,7 @@ daemonsRouter.post('/link', requireAuth, async (req: Request, res: Response) =>
res.status(201).json({
success: true,
daemonId: daemon.id,
workspaceId: workspaceId || null,
apiKey, // Only returned once - user must save this!
message: 'Daemon linked successfully. Save your API key - it cannot be retrieved later.',
});
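For illustration, a minimal sketch of a client call to the updated endpoint; the endpoint, request fields, and response fields come from this diff, while the variable values and the use of cookie-based credentials are assumptions:

// Hypothetical values, for illustration only
const machineId = '3f9c2a81d4e5';        // machine fingerprint
const workspaceId = 'some-workspace-uuid';

const res = await fetch('/api/daemons/link', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  credentials: 'include', // session auth, since the route uses requireAuth
  body: JSON.stringify({ machineId, name: 'dev-laptop', workspaceId }),
});
const body = await res.json();
// body.apiKey is returned only once; persist it immediately
console.log(body.daemonId, body.workspaceId);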
@@ -128,6 +147,64 @@ daemonsRouter.get('/', requireAuth, async (req: Request, res: Response) => {
}
});

/**
* GET /api/daemons/workspace/:workspaceId/agents
* Get local agents for a specific workspace
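 * Response: { agents: [{ name, status, isLocal, daemonId, daemonName, daemonStatus,
 *   machineId, lastSeenAt }], daemons: [{ id, name, machineId, status, lastSeenAt }] }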
*/
daemonsRouter.get('/workspace/:workspaceId/agents', requireAuth, async (req: Request, res: Response) => {
const userId = req.session.userId!;
const { workspaceId } = req.params;

try {
// Verify user has access to this workspace
const workspace = await db.workspaces.findById(workspaceId);
if (!workspace) {
return res.status(404).json({ error: 'Workspace not found' });
}

// Check if user owns the workspace or is a member
if (workspace.userId !== userId) {
const member = await db.workspaceMembers.findMembership(workspaceId, userId);
if (!member) {
return res.status(403).json({ error: 'Not authorized to access this workspace' });
}
}

// Get all linked daemons for this workspace
const daemons = await db.linkedDaemons.findByWorkspaceId(workspaceId);

// Extract agents from each daemon's metadata
const localAgents = daemons.flatMap((daemon) => {
const metadata = daemon.metadata as Record<string, unknown> | null;
const agents = (metadata?.agents as Array<{ name: string; status: string }>) || [];
return agents.map((agent) => ({
name: agent.name,
status: agent.status,
isLocal: true,
daemonId: daemon.id,
daemonName: daemon.name,
daemonStatus: daemon.status,
machineId: daemon.machineId,
lastSeenAt: daemon.lastSeenAt,
}));
});

res.json({
agents: localAgents,
daemons: daemons.map((d) => ({
id: d.id,
name: d.name,
machineId: d.machineId,
status: d.status,
lastSeenAt: d.lastSeenAt,
})),
});
} catch (error) {
console.error('Error fetching local agents:', error);
res.status(500).json({ error: 'Failed to fetch local agents' });
}
});

/**
* DELETE /api/daemons/:id
* Unlink a daemon