diff --git a/.github/workflows/build-and-push-worker-image.yml b/.github/workflows/build-and-push-worker-image.yml new file mode 100644 index 0000000..d5cbba0 --- /dev/null +++ b/.github/workflows/build-and-push-worker-image.yml @@ -0,0 +1,149 @@ +name: Build and Push Docker Image + +on: + push: + branches: + - main + paths: + - 'apps/web/package.json' + +env: + REGISTRY: ghcr.io + IMAGE_NAME: seastackapp/seastack-worker + +jobs: + check-version-change: + runs-on: ubuntu-latest + permissions: + contents: read + outputs: + version_changed: ${{ steps.check.outputs.changed }} + new_version: ${{ steps.get_version.outputs.version }} + steps: + - name: Checkout code + uses: actions/checkout@v4.2.2 + with: + fetch-depth: 2 + + - name: Get current version + id: get_version + run: | + VERSION=$(jq -r '.version' apps/web/package.json) + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Current version: $VERSION" + + - name: Get previous version + id: get_prev_version + run: | + if git show HEAD^1:apps/web/package.json > /dev/null 2>&1; then + PREV_VERSION=$(git show HEAD^1:apps/web/package.json | jq -r '.version') + else + PREV_VERSION="none" + fi + echo "prev_version=$PREV_VERSION" >> $GITHUB_OUTPUT + echo "Previous version: $PREV_VERSION" + + - name: Check if version changed + id: check + run: | + CURRENT="${{ steps.get_version.outputs.version }}" + PREVIOUS="${{ steps.get_prev_version.outputs.prev_version }}" + if [ "$CURRENT" != "$PREVIOUS" ]; then + echo "changed=true" >> $GITHUB_OUTPUT + echo "Version changed from $PREVIOUS to $CURRENT" + else + echo "changed=false" >> $GITHUB_OUTPUT + echo "Version did not change" + fi + + build-and-push: + needs: check-version-change + if: needs.check-version-change.outputs.version_changed == 'true' + strategy: + matrix: + include: + - runner: ubuntu-latest + platform: linux/amd64 + - runner: ubuntu-24.04-arm + platform: linux/arm64 + runs-on: ${{ matrix.runner }} + permissions: + contents: read + packages: write + steps: + - name: 
Checkout code + uses: actions/checkout@v4.2.2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.7.1 + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3.3.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push by digest + id: build + uses: docker/build-push-action@v6.10.0 + with: + context: . + file: ./apps/workers/Dockerfile + platforms: ${{ matrix.platform }} + outputs: type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ matrix.platform == 'linux/amd64' && 'amd64' || 'arm64' }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + merge: + needs: [check-version-change, build-and-push] + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: digests-* + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.7.1 + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3.3.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata + id: meta + uses: docker/metadata-action@v5.6.1 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=raw,value=latest + type=raw,value=${{ needs.check-version-change.outputs.new_version }} + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *) diff --git a/apps/web/package.json b/apps/web/package.json index 00286ac..bcec6f9 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -1,6 +1,6 @@ { "name": "web", - "version": "0.11.3", + "version": "0.11.4", "type": "module", "private": true, "scripts": { diff --git a/apps/workers/Dockerfile b/apps/workers/Dockerfile new file mode 100644 index 0000000..4f4bffa --- /dev/null +++ b/apps/workers/Dockerfile @@ -0,0 +1,78 @@ +# Multi-stage Dockerfile for the workers app in a pnpm + turbo monorepo +# Uses Alpine for the final worker image + +# ----------------------- +# 1) Base image with pnpm +# ----------------------- +FROM node:24-alpine AS base + +# Runtime dependencies commonly needed by Node native modules & Prisma +RUN apk add --no-cache libc6-compat openssl + +# Enable corepack and pin pnpm (must match repo tooling) +RUN corepack enable && corepack prepare pnpm@9.0.0 --activate + +WORKDIR /app + +# ----------------------- +# 2) Dependencies layer +# - install workspace deps with caching +# ----------------------- +FROM base AS deps + +# Build toolchain for native modules (not in final image) +RUN apk add --no-cache python3 make g++ + +# Copy only files needed to resolve the dependency graph to leverage Docker cache +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json turbo.json ./ + +# Copy workspace manifests used by the workers app and its internal deps +COPY apps/workers/package.json ./apps/workers/package.json +COPY packages ./packages + +# Install dependencies (workspace-aware) using lockfile +RUN pnpm install --frozen-lockfile + +# ----------------------- +# 3) Build layer +# ----------------------- +FROM deps AS build + +# Bring in full source to build the workers package +COPY . . 
+ +# Provide safe defaults for codegen steps that don't require a live DB +ENV DATABASE_URL=postgres://postgres:password@postgres:5432/postgres + +# Generate Prisma client for @repo/db (no live DB needed) +RUN pnpm --filter @repo/db... generate || pnpm --filter @repo/db generate || true + +# Build the workers app (outputs to apps/workers/dist) +RUN pnpm --filter workers build + +# Produce a pruned, production-ready output for just the workers package +# This includes the package files, its dist, and a pruned node_modules +RUN pnpm deploy --filter workers --prod /out + +# ----------------------- +# 4) Runtime layer (Alpine) +# ----------------------- +FROM node:24-alpine AS runner + +ENV NODE_ENV=production + +# Runtime libs needed by Prisma and other native deps +RUN apk add --no-cache libc6-compat openssl + +# Non-root user for security +RUN adduser -D worker + +WORKDIR /app + +# Copy the pruned deployment from build stage +COPY --from=build /out/ . + +USER worker + +# Start the worker process +CMD ["node", "dist/index.mjs"] diff --git a/apps/workers/package.json b/apps/workers/package.json index 2515be7..8fd3f47 100644 --- a/apps/workers/package.json +++ b/apps/workers/package.json @@ -14,12 +14,16 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.1", "@prisma/adapter-pg": "^7.1.0", + "@prisma/client": "^7.1.0", "@repo/db": "workspace:*", "@repo/queues": "workspace:*", "@repo/utils": "workspace:*", "@types/ssh2": "^1.15.5", "bullmq": "^5.65.1", "ioredis": "^5.8.2", + "pino": "^10.1.0", + "pino-pretty": "^13.1.3", + "pg": "^8.16.3", "prisma": "^7.1.0", "ssh2": "^1.17.0" }, diff --git a/apps/workers/src/backups.ts b/apps/workers/src/backups.ts index dccbe50..964460f 100644 --- a/apps/workers/src/backups.ts +++ b/apps/workers/src/backups.ts @@ -2,9 +2,13 @@ import { setupWorker } from './setupWorker'; import { BACKUPS_QUEUE_NAME, VolumeBackupJob } from '@repo/queues'; import { prisma } from '@repo/db'; import { - decrypt, + encrypt, + generateRcloneFlags, 
generateVolumeName, + getLogger, + getS3Storage, getSSHClient, + parseRetentionString, remoteExec, sh, } from '@repo/utils'; @@ -12,7 +16,8 @@ import { Client } from 'ssh2'; export const setUpVolumeBackups = () => { return setupWorker(BACKUPS_QUEUE_NAME, async (job) => { - console.log(`Processing job ${job.id}`); + const { logger, logs } = getLogger(); + console.info(`Processing job ${job.id}`); const schedule = await prisma.volumeBackupSchedule.findUnique({ where: { id: job.data.schedule }, include: { @@ -34,13 +39,22 @@ export const setUpVolumeBackups = () => { console.error(`Could not find schedule ${job.data.schedule}`); return; } - console.log( + console.info( `Starting backup for schedule ${schedule.id} (${schedule.volume.name})` ); const service = schedule.volume.service; const serverId = service.server.id; const volumeName = generateVolumeName(schedule.volume.name, service.id); - const backupFilename = `backup-${volumeName}-${job.id}.tar.zst`; + const baseFileName = `backup-${volumeName}`; + const backupFilename = `${baseFileName}.tar.zst`; + const run = await prisma.backupRun.create({ + data: { + status: 'RUNNING', + volumeBackupSchedule: { + connect: { id: schedule.id }, + }, + }, + }); let connection: Client | undefined = undefined; try { @@ -49,46 +63,59 @@ export const setUpVolumeBackups = () => { serverId, schedule.volume.service.server.organizations[0]!.id ); - console.log('Connected to server via SSH'); + logger.debug('Connected to server via SSH'); - const command = sh`docker run --rm -v ${volumeName}:/data alpine sh -c "tar -C /data -cf - ." | zstd -z -19 -o ${backupFilename}`; + const command = sh`docker run --rm -v ${volumeName}:/data alpine sh -c "tar -C /data -cf - ." 
| zstd -z -19 -o ${backupFilename} -f`; - console.log(`Running command: ${command}`); + logger.debug(`Running command: ${command}`); await remoteExec(connection, command); - console.log(`Backup created: ${backupFilename}`); - console.log('Uploading backup file to S3 using rclone'); + logger.debug('Uploading backup file to S3 using rclone'); - const s3 = await prisma.s3Storage.findFirst({ - where: { id: schedule.storageDestinationId }, - }); + const s3 = await getS3Storage( + prisma, + schedule.storageDestinationId + ); - if (!s3) { - throw new Error( - `Could not find S3 storage destination ${schedule.storageDestinationId}` - ); - } + const flags = generateRcloneFlags(s3); + const target = sh`:s3:${s3.bucket}/seastack/backups/${schedule.id}/${new Date().getTime()}-${backupFilename}`; + const secureName = sh`./${backupFilename}`; + const rcloneCommand = `rclone copyto ${secureName} ${target} ${flags} --progress`; - const flags = [ - '--s3-provider=Other', - sh`--s3-access-key-id=${decrypt(s3.accessKeyId)}`, - sh`--s3-secret-access-key=${decrypt(s3.secretAccessKey)}`, - sh`--s3-endpoint=${s3.endpoint}`, - s3.region ? `--s3-region=${s3.region}` : '', - '--s3-acl=private', - sh`--s3-force-path-style=${s3.usePathStyle ? 
'true' : 'false'}`, - ].join(' '); - const target = sh`:s3:${s3.bucket}`; - const secureName = sh`${backupFilename}`; - const rcloneCommand = `rclone copy ${secureName} ${target} ${flags} --progress`; + logger.info(`Running command: ${rcloneCommand}`); + logger.info(await remoteExec(connection, rcloneCommand)); - console.log(`Running command: ${rcloneCommand}`); - console.log(await remoteExec(connection, rcloneCommand)); + logger.info('Creating copies for data retention'); + const { rules } = parseRetentionString(schedule.retention); + for (const { unit } of rules) { + if (unit === 'latest') continue; + const newTarget = sh`:s3:${s3.bucket}/seastack/backups/${schedule.id}/${baseFileName}.${unit}`; - console.log('Deleting local backup file'); - await remoteExec(connection, sh`rm ${backupFilename}`); + const copyCommand = `rclone copyto ${target} ${newTarget} ${flags} --progress`; + logger.info(`Running command: ${copyCommand}`); + logger.info(await remoteExec(connection, copyCommand)); + } + + logger.debug('Deleting local backup file'); + logger.debug( + await remoteExec(connection, sh`rm ${backupFilename}`) + ); + + logger.info(`Backup created: ${backupFilename}`); + await prisma.backupRun.update({ + where: { id: run.id }, + data: { + status: 'SUCCESS', + artifactLocation: backupFilename, + logs: encrypt(logs.join('')), + }, + }); } catch (error) { - console.error(error); + logger.error(error); + await prisma.backupRun.update({ + where: { id: run.id }, + data: { status: 'FAILED', logs: encrypt(logs.join('')) }, + }); throw error; } finally { if (connection) connection.end(); diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 18d1006..6ee6ce8 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -21,6 +21,33 @@ services: BETTER_AUTH_SECRET: CeOx3AmjqVWNIlmeMLOgwtq87J49YpQX BETTER_AUTH_URL: http://localhost:3000 ENCRYPTION_SECRET: CeOx3AmjqVWNIlmeMLOgwtq87J49YpQX + REDIS_HOST: redis + REDIS_PORT: 6379 + REDIS_PASSWORD: devpassword + 
+ worker: + image: ghcr.io/seastackapp/seastack-worker:latest + depends_on: + - postgres + - redis + environment: + DATABASE_URL: "postgresql://postgres:password@postgres:5432/public?schema=public" + BETTER_AUTH_SECRET: CeOx3AmjqVWNIlmeMLOgwtq87J49YpQX + BETTER_AUTH_URL: http://localhost:3000 + ENCRYPTION_SECRET: CeOx3AmjqVWNIlmeMLOgwtq87J49YpQX + REDIS_HOST: redis + REDIS_PORT: 6379 + REDIS_PASSWORD: devpassword + redis: + image: redis:7.4-alpine + command: redis-server --requirepass devpassword + environment: + REDIS_PASSWORD: devpassword + ports: + - "6379:6379" + volumes: + - redis_data:/data volumes: postgres_data: + redis_data: diff --git a/packages/api/src/routers/services/backups/createVolumeBackup.ts b/packages/api/src/routers/services/backups/createVolumeBackup.ts index f09fad6..9effc6a 100644 --- a/packages/api/src/routers/services/backups/createVolumeBackup.ts +++ b/packages/api/src/routers/services/backups/createVolumeBackup.ts @@ -4,6 +4,7 @@ import { checkDestinationExistsInOrg, checkVolumeExistsInOrganization, } from '@repo/utils'; +import { volumeBackupsQueue } from '@repo/queues'; export const createVolumeBackup = protectedProcedure .input(volumeBackupScheduleCreateSchema) @@ -23,26 +24,42 @@ export const createVolumeBackup = protectedProcedure organizationId ); - return prisma.volumeBackupSchedule.create({ - data: { - volumeId, - cron, - retention: retention ?? '@latest:7 @days:30 @months:12', - storageDestinationId, - isActive: true, - }, - include: { - runs: { - take: 10, - orderBy: { id: 'desc' }, + return prisma.$transaction(async (tx) => { + const schedule = await tx.volumeBackupSchedule.create({ + data: { + volumeId, + cron, + retention: retention ?? 
'@latest:7 @days:30 @months:12', + storageDestinationId, + isActive: true, }, - destination: { - select: { name: true }, + include: { + runs: { + take: 10, + orderBy: { id: 'desc' }, + }, + destination: { + select: { name: true }, + }, + volume: { + select: { name: true, mountPath: true }, + }, }, - volume: { - select: { name: true, mountPath: true }, + }); + + await volumeBackupsQueue.upsertJobScheduler( + 'backup-cron-job-' + schedule.id, + { + pattern: schedule.cron, }, - }, + { + data: { + schedule: schedule.id, + }, + } + ); + + return schedule; }); } ); diff --git a/packages/api/src/routers/services/backups/deleteVolumeBackupSchedule.ts b/packages/api/src/routers/services/backups/deleteVolumeBackupSchedule.ts index e4bc11a..ff09db0 100644 --- a/packages/api/src/routers/services/backups/deleteVolumeBackupSchedule.ts +++ b/packages/api/src/routers/services/backups/deleteVolumeBackupSchedule.ts @@ -2,6 +2,7 @@ import { protectedProcedure } from '../../../trpc'; import { volumeBackupScheduleIdSchema } from '@repo/schemas'; import { TRPCError } from '@trpc/server'; import { checkServiceExistsInOrganization } from '@repo/utils'; +import { volumeBackupsQueue } from '@repo/queues'; export const deleteVolumeBackupSchedule = protectedProcedure .input(volumeBackupScheduleIdSchema) @@ -30,5 +31,9 @@ export const deleteVolumeBackupSchedule = protectedProcedure where: { id: input.volumeBackupScheduleId }, }); + await volumeBackupsQueue.removeJobScheduler( + 'backup-cron-job-' + schedule.id + ); + return { success: true } as const; }); diff --git a/packages/db/prisma/migrations/20251208121055_add_logs_on_backup_run/migration.sql b/packages/db/prisma/migrations/20251208121055_add_logs_on_backup_run/migration.sql new file mode 100644 index 0000000..77943a9 --- /dev/null +++ b/packages/db/prisma/migrations/20251208121055_add_logs_on_backup_run/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "BackupRun" ADD COLUMN "logs" TEXT NOT NULL DEFAULT ''; diff --git 
a/packages/db/prisma/schema.prisma b/packages/db/prisma/schema.prisma index 745ffe9..d3bc8f4 100644 --- a/packages/db/prisma/schema.prisma +++ b/packages/db/prisma/schema.prisma @@ -414,6 +414,7 @@ model BackupRun { status BackupRunStatus @default(RUNNING) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt() + logs String @default("") artifactLocation String? diff --git a/packages/utils/src/backups/index.ts b/packages/utils/src/backups/index.ts index 9b5335a..945d58e 100644 --- a/packages/utils/src/backups/index.ts +++ b/packages/utils/src/backups/index.ts @@ -1 +1,2 @@ export * from './retention'; +export * from './s3'; diff --git a/packages/utils/src/backups/s3/generateRcloneFlags.ts b/packages/utils/src/backups/s3/generateRcloneFlags.ts new file mode 100644 index 0000000..508a207 --- /dev/null +++ b/packages/utils/src/backups/s3/generateRcloneFlags.ts @@ -0,0 +1,15 @@ +import { S3Storage } from './getS3Storage'; +import { sh } from '../../sh'; +import { decrypt } from '../../crypto'; + +export const generateRcloneFlags = (s3: S3Storage) => { + return [ + '--s3-provider=Other', + sh`--s3-access-key-id=${decrypt(s3.accessKeyId)}`, + sh`--s3-secret-access-key=${decrypt(s3.secretAccessKey)}`, + sh`--s3-endpoint=${s3.endpoint}`, + s3.region ? `--s3-region=${s3.region}` : '', + '--s3-acl=private', + sh`--s3-force-path-style=${s3.usePathStyle ? 
'true' : 'false'}`, + ].join(' '); +}; diff --git a/packages/utils/src/backups/s3/getS3Storage.ts b/packages/utils/src/backups/s3/getS3Storage.ts new file mode 100644 index 0000000..bb3eb54 --- /dev/null +++ b/packages/utils/src/backups/s3/getS3Storage.ts @@ -0,0 +1,20 @@ +import { PrismaClient } from '@repo/db'; + +export const getS3Storage = async ( + prisma: PrismaClient, + destinationId: string +) => { + const s3 = await prisma.s3Storage.findFirst({ + where: { id: destinationId }, + }); + + if (!s3) { + throw new Error( + `Could not find S3 storage destination ${destinationId}` + ); + } + + return s3; +}; + +export type S3Storage = NonNullable<Awaited<ReturnType<typeof getS3Storage>>>; diff --git a/packages/utils/src/backups/s3/index.ts b/packages/utils/src/backups/s3/index.ts new file mode 100644 index 0000000..3d781ad --- /dev/null +++ b/packages/utils/src/backups/s3/index.ts @@ -0,0 +1,2 @@ +export * from './getS3Storage'; +export * from './generateRcloneFlags'; diff --git a/packages/utils/src/getLogger.ts b/packages/utils/src/getLogger.ts new file mode 100644 index 0000000..f67358d --- /dev/null +++ b/packages/utils/src/getLogger.ts @@ -0,0 +1,36 @@ +import pino from 'pino'; +import pretty from 'pino-pretty'; +import { LOG_LEVEL } from './configs'; + +export const getLogger = () => { + const logs: string[] = []; + + const prettyStream = + process.env.NODE_ENV === 'production' + ?
null + : pretty({ + colorize: true, + translateTime: 'HH:MM:ss', + }); + const captureStream = { + write(msg: string) { + logs.push(msg); // store parsed logs + }, + }; + const streams = [ + { stream: captureStream }, // capture logs + ]; + + if (prettyStream) { + streams.push({ stream: prettyStream }); // pretty console output + } + + const logger = pino( + { + level: LOG_LEVEL, + }, + pino.multistream(streams) + ); + + return { logs, logger }; +}; diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index ab82e9f..7b52fd7 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -12,3 +12,4 @@ export * from './docker'; export * from './remote-server'; export * from './backups'; export * from './sh'; +export * from './getLogger'; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c55644a..ea004e5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -304,6 +304,9 @@ importers: '@prisma/adapter-pg': specifier: ^7.1.0 version: 7.1.0 + '@prisma/client': + specifier: ^7.1.0 + version: 7.1.0(prisma@7.1.0(@types/react@19.2.0)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(typescript@5.9.3))(typescript@5.9.3) '@repo/db': specifier: workspace:* version: link:../../packages/db @@ -322,6 +325,15 @@ importers: ioredis: specifier: ^5.8.2 version: 5.8.2 + pg: + specifier: ^8.16.3 + version: 8.16.3 + pino: + specifier: ^10.1.0 + version: 10.1.0 + pino-pretty: + specifier: ^13.1.3 + version: 13.1.3 prisma: specifier: ^7.1.0 version: 7.1.0(@types/react@19.2.0)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(typescript@5.9.3)