From ba7c8a17ebd65199eb6f3b77b4799c20f309e94a Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 25 Apr 2025 21:26:01 +0100 Subject: [PATCH 01/33] WIP clickhouse package with test containers setup --- docker/docker-compose.yml | 139 ++++-------- internal-packages/clickhouse/Dockerfile | 12 ++ internal-packages/clickhouse/package.json | 27 +++ .../schema/001_create_databases.sql | 6 + .../schema/002_create_smoke_test.sql | 11 + .../clickhouse/src/client.test.ts | 17 ++ internal-packages/clickhouse/src/client.ts | 9 + internal-packages/clickhouse/src/index.ts | 1 + internal-packages/clickhouse/tsconfig.json | 8 + .../clickhouse/tsconfig.src.json | 20 ++ .../clickhouse/tsconfig.test.json | 21 ++ internal-packages/clickhouse/vitest.config.ts | 19 ++ internal-packages/testcontainers/package.json | 1 + .../testcontainers/src/clickhouse.ts | 197 ++++++++++++++++++ internal-packages/testcontainers/src/index.ts | 36 ++++ internal-packages/testcontainers/src/utils.ts | 24 +++ pnpm-lock.yaml | 63 ++++-- 17 files changed, 497 insertions(+), 114 deletions(-) create mode 100644 internal-packages/clickhouse/Dockerfile create mode 100644 internal-packages/clickhouse/package.json create mode 100644 internal-packages/clickhouse/schema/001_create_databases.sql create mode 100644 internal-packages/clickhouse/schema/002_create_smoke_test.sql create mode 100644 internal-packages/clickhouse/src/client.test.ts create mode 100644 internal-packages/clickhouse/src/client.ts create mode 100644 internal-packages/clickhouse/src/index.ts create mode 100644 internal-packages/clickhouse/tsconfig.json create mode 100644 internal-packages/clickhouse/tsconfig.src.json create mode 100644 internal-packages/clickhouse/tsconfig.test.json create mode 100644 internal-packages/clickhouse/vitest.config.ts create mode 100644 internal-packages/testcontainers/src/clickhouse.ts diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 4ddceeee56..5d7ab5926e 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -5,12 +5,7 @@ volumes: database-data-alt: pgadmin-data: redis-data: - redis-cluster_data-0: - redis-cluster_data-1: - redis-cluster_data-2: - redis-cluster_data-3: - redis-cluster_data-4: - redis-cluster_data-5: + clickhouse: networks: app_network: @@ -52,93 +47,6 @@ services: ports: - 6379:6379 - # redis-node-0: - # image: docker.io/bitnami/redis-cluster:7.0 - # container_name: redis-node-0 - # networks: - # - app_network - # ports: - # - "6378:6379" - # volumes: - # - redis-cluster_data-0:/bitnami/redis/data - # environment: - # - "REDIS_PASSWORD=bitnami" - # - "REDIS_NODES=redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5" - - # redis-node-1: - # image: docker.io/bitnami/redis-cluster:7.0 - # container_name: redis-node-1 - # networks: - # - app_network - # ports: - # - "6380:6379" - # volumes: - # - redis-cluster_data-1:/bitnami/redis/data - # environment: - # - "REDIS_PASSWORD=bitnami" - # - "REDIS_NODES=redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5" - - # redis-node-2: - # image: docker.io/bitnami/redis-cluster:7.0 - # container_name: redis-node-2 - # networks: - # - app_network - # ports: - # - "6381:6379" - # volumes: - # - redis-cluster_data-2:/bitnami/redis/data - # environment: - # - "REDIS_PASSWORD=bitnami" - # - "REDIS_NODES=redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5" - - # redis-node-3: - # image: docker.io/bitnami/redis-cluster:7.0 - # container_name: redis-node-3 - # networks: - # - 
app_network - # ports: - # - "6382:6379" - # volumes: - # - redis-cluster_data-3:/bitnami/redis/data - # environment: - # - "REDIS_PASSWORD=bitnami" - # - "REDIS_NODES=redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5" - - # redis-node-4: - # image: docker.io/bitnami/redis-cluster:7.0 - # container_name: redis-node-4 - # networks: - # - app_network - # ports: - # - "6383:6379" - # volumes: - # - redis-cluster_data-4:/bitnami/redis/data - # environment: - # - "REDIS_PASSWORD=bitnami" - # - "REDIS_NODES=redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5" - - # redis-node-5: - # image: docker.io/bitnami/redis-cluster:7.0 - # container_name: redis-node-5 - # networks: - # - app_network - # ports: - # - "6384:6379" - # volumes: - # - redis-cluster_data-5:/bitnami/redis/data - # depends_on: - # - redis-node-0 - # - redis-node-1 - # - redis-node-2 - # - redis-node-3 - # - redis-node-4 - # environment: - # - "REDIS_PASSWORD=bitnami" - # - "REDISCLI_AUTH=bitnami" - # - "REDIS_CLUSTER_REPLICAS=1" - # - "REDIS_NODES=redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5" - # - "REDIS_CLUSTER_CREATOR=yes" - electric: container_name: electric image: electricsql/electric:1.0.0-beta.15@sha256:4ae0f895753b82684aa31ea1c708e9e86d0a9bca355acb7270dcb24062520810 @@ -152,6 +60,51 @@ services: depends_on: - database + clickhouse: + image: bitnami/clickhouse:latest + container_name: clickhouse + environment: + CLICKHOUSE_ADMIN_USER: default + CLICKHOUSE_ADMIN_PASSWORD: password + ports: + - "8123:8123" + - "9000:9000" + volumes: + - clickhouse:/bitnami/clickhouse + networks: + - app_network + healthcheck: + test: + [ + "CMD", + "clickhouse-client", + "--host", + "localhost", + "--port", + "9000", + "--user", + "default", + "--password", + "password", + "--query", + "SELECT 1", + ] + interval: 3s + timeout: 5s + retries: 5 + start_period: 10s + + clickhouse_migrator: + build: + context: ../internal-packages/clickhouse + dockerfile: ./Dockerfile + depends_on: + clickhouse: + condition: service_healthy + networks: + - app_network + command: ["goose", "${GOOSE_COMMAND:-up}"] + # otel-collector: # container_name: otel-collector # image: otel/opentelemetry-collector-contrib:latest diff --git a/internal-packages/clickhouse/Dockerfile b/internal-packages/clickhouse/Dockerfile new file mode 100644 index 0000000000..ceb5092021 --- /dev/null +++ b/internal-packages/clickhouse/Dockerfile @@ -0,0 +1,12 @@ +FROM golang + + +RUN go install github.com/pressly/goose/v3/cmd/goose@latest + + +COPY ./schema ./schema + +ENV GOOSE_DRIVER=clickhouse +ENV GOOSE_DBSTRING="tcp://default:password@clickhouse:9000" +ENV GOOSE_MIGRATION_DIR=./schema +CMD ["goose", "up"] diff --git a/internal-packages/clickhouse/package.json b/internal-packages/clickhouse/package.json new file mode 100644 index 0000000000..7471021722 --- /dev/null +++ b/internal-packages/clickhouse/package.json @@ -0,0 +1,27 @@ +{ + "name": "@trigger.dev/clickhouse", + "private": true, + "version": "0.0.2", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "type": "module", + "dependencies": { + "@clickhouse/client": "^1.11.1" + }, + "devDependencies": { + "rimraf": "6.0.1", + "@internal/testcontainers": "workspace:*", + "@vitest/coverage-v8": "^3.0.8", + "vitest": "^3.0.8" + }, + "scripts": { + "clean": "rimraf dist", + "typecheck": "tsc --noEmit", + "build": "pnpm run clean && tsc --noEmit false --outDir dist --declaration", + "dev": "tsc --noEmit false --outDir dist --declaration 
--watch", + "db:migrate": "docker compose -p triggerdotdev-docker -f ../../docker/docker-compose.yml up clickhouse_migrator --build", + "db:migrate:down": "GOOSE_COMMAND=down pnpm run db:migrate", + "test": "vitest --sequence.concurrent=false --no-file-parallelism", + "test:coverage": "vitest --sequence.concurrent=false --no-file-parallelism --coverage.enabled" + } +} \ No newline at end of file diff --git a/internal-packages/clickhouse/schema/001_create_databases.sql b/internal-packages/clickhouse/schema/001_create_databases.sql new file mode 100644 index 0000000000..71b1b9f0fe --- /dev/null +++ b/internal-packages/clickhouse/schema/001_create_databases.sql @@ -0,0 +1,6 @@ +-- +goose up + +CREATE DATABASE trigger_dev; + +-- +goose down +DROP DATABASE trigger_dev; diff --git a/internal-packages/clickhouse/schema/002_create_smoke_test.sql b/internal-packages/clickhouse/schema/002_create_smoke_test.sql new file mode 100644 index 0000000000..8026951a3c --- /dev/null +++ b/internal-packages/clickhouse/schema/002_create_smoke_test.sql @@ -0,0 +1,11 @@ +-- +goose Up +CREATE TABLE IF NOT EXISTS trigger_dev.smoke_test ( + id UUID DEFAULT generateUUIDv4(), + timestamp DateTime64(3) DEFAULT now64(3), + message String, + number UInt32 +) ENGINE = MergeTree() +ORDER BY (timestamp, id); + +-- +goose Down +DROP TABLE IF EXISTS trigger_dev.smoke_test; diff --git a/internal-packages/clickhouse/src/client.test.ts b/internal-packages/clickhouse/src/client.test.ts new file mode 100644 index 0000000000..a52c98be6e --- /dev/null +++ b/internal-packages/clickhouse/src/client.test.ts @@ -0,0 +1,17 @@ +import { clickhouseTest } from "@internal/testcontainers"; + +describe("ClickHouse Client", () => { + clickhouseTest("should create a client", async ({ clickhouseClient }) => { + const client = clickhouseClient; + + const result = await client.query({ + query: "SELECT 1", + }); + + const json = await result.json(); + + console.log(json); + + expect(json.data).toEqual([{ "1": 1 }]); + }); +}); diff --git a/internal-packages/clickhouse/src/client.ts b/internal-packages/clickhouse/src/client.ts new file mode 100644 index 0000000000..f998c098f6 --- /dev/null +++ b/internal-packages/clickhouse/src/client.ts @@ -0,0 +1,9 @@ +import { createClient as createClickhouseClient } from "@clickhouse/client"; + +export function createClient(url: string) { + const client = createClickhouseClient({ + url, + }); + + return client; +} diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts new file mode 100644 index 0000000000..cb0ff5c3b5 --- /dev/null +++ b/internal-packages/clickhouse/src/index.ts @@ -0,0 +1 @@ +export {}; diff --git a/internal-packages/clickhouse/tsconfig.json b/internal-packages/clickhouse/tsconfig.json new file mode 100644 index 0000000000..af630abe1f --- /dev/null +++ b/internal-packages/clickhouse/tsconfig.json @@ -0,0 +1,8 @@ +{ + "references": [{ "path": "./tsconfig.src.json" }, { "path": "./tsconfig.test.json" }], + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16", + "customConditions": ["@triggerdotdev/source"] + } +} diff --git a/internal-packages/clickhouse/tsconfig.src.json b/internal-packages/clickhouse/tsconfig.src.json new file mode 100644 index 0000000000..6043e02ad2 --- /dev/null +++ b/internal-packages/clickhouse/tsconfig.src.json @@ -0,0 +1,20 @@ +{ + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "src/**/*.test.ts"], + "compilerOptions": { + "composite": true, + "target": "ES2019", + "lib": ["ES2019", "DOM", 
"DOM.Iterable", "DOM.AsyncIterable"], + "module": "Node16", + "moduleResolution": "Node16", + "moduleDetection": "force", + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "preserveWatchOutput": true, + "skipLibCheck": true, + "strict": true, + "customConditions": ["@triggerdotdev/source"] + } +} diff --git a/internal-packages/clickhouse/tsconfig.test.json b/internal-packages/clickhouse/tsconfig.test.json new file mode 100644 index 0000000000..99db8eb7c9 --- /dev/null +++ b/internal-packages/clickhouse/tsconfig.test.json @@ -0,0 +1,21 @@ +{ + "include": ["src/**/*.test.ts", "vitest.config.ts"], + "references": [{ "path": "./tsconfig.src.json" }], + "compilerOptions": { + "composite": true, + "target": "ES2019", + "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "module": "Node16", + "moduleResolution": "Node16", + "moduleDetection": "force", + "verbatimModuleSyntax": false, + "types": ["vitest/globals"], + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "preserveWatchOutput": true, + "skipLibCheck": true, + "strict": true, + "customConditions": ["@triggerdotdev/source"] + } +} diff --git a/internal-packages/clickhouse/vitest.config.ts b/internal-packages/clickhouse/vitest.config.ts new file mode 100644 index 0000000000..1d779c0957 --- /dev/null +++ b/internal-packages/clickhouse/vitest.config.ts @@ -0,0 +1,19 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + include: ["**/*.test.ts"], + globals: true, + isolate: true, + fileParallelism: false, + poolOptions: { + threads: { + singleThread: true, + }, + }, + testTimeout: 60_000, + coverage: { + provider: "v8", + }, + }, +}); diff --git a/internal-packages/testcontainers/package.json b/internal-packages/testcontainers/package.json index ab41c7c4a3..56c8b84202 100644 --- a/internal-packages/testcontainers/package.json +++ b/internal-packages/testcontainers/package.json @@ -5,6 +5,7 @@ "main": "./src/index.ts", "types": "./src/index.ts", "dependencies": { + "@clickhouse/client": "^1.11.1", "@opentelemetry/api": "^1.9.0", "@trigger.dev/database": "workspace:*", "ioredis": "^5.3.2" diff --git a/internal-packages/testcontainers/src/clickhouse.ts b/internal-packages/testcontainers/src/clickhouse.ts new file mode 100644 index 0000000000..e8259e73ef --- /dev/null +++ b/internal-packages/testcontainers/src/clickhouse.ts @@ -0,0 +1,197 @@ +import { ClickHouseClient } from "@clickhouse/client"; +import { readdir, readFile } from "node:fs/promises"; +import { resolve } from "node:path"; +import { + AbstractStartedContainer, + GenericContainer, + StartedTestContainer, + Wait, +} from "testcontainers"; + +const CLICKHOUSE_PORT = 9000; +const CLICKHOUSE_HTTP_PORT = 8123; + +export class ClickHouseContainer extends GenericContainer { + private username = "test"; + private password = "test"; + private database = "test"; + + constructor(image = "clickhouse/clickhouse-server:25.4-alpine") { + super(image); + this.withExposedPorts(CLICKHOUSE_PORT, CLICKHOUSE_HTTP_PORT); + this.withWaitStrategy( + Wait.forHttp("/", CLICKHOUSE_HTTP_PORT).forResponsePredicate( + (response) => response === "Ok.\n" + ) + ); + this.withStartupTimeout(120_000); + + // Setting this high ulimits value proactively prevents the "Too many open files" error, + // especially under potentially heavy load during testing. 
+ this.withUlimits({ + nofile: { + hard: 262144, + soft: 262144, + }, + }); + } + + public withDatabase(database: string): this { + this.database = database; + return this; + } + + public withUsername(username: string): this { + this.username = username; + return this; + } + + public withPassword(password: string): this { + this.password = password; + return this; + } + + public override async start(): Promise { + this.withEnvironment({ + CLICKHOUSE_USER: this.username, + CLICKHOUSE_PASSWORD: this.password, + CLICKHOUSE_DB: this.database, + }); + + return new StartedClickHouseContainer( + await super.start(), + this.database, + this.username, + this.password + ); + } +} + +export class StartedClickHouseContainer extends AbstractStartedContainer { + constructor( + startedTestContainer: StartedTestContainer, + private readonly database: string, + private readonly username: string, + private readonly password: string + ) { + super(startedTestContainer); + } + + public getPort(): number { + return super.getMappedPort(CLICKHOUSE_PORT); + } + + public getHttpPort(): number { + return super.getMappedPort(CLICKHOUSE_HTTP_PORT); + } + + public getUsername(): string { + return this.username; + } + + public getPassword(): string { + return this.password; + } + + public getDatabase(): string { + return this.database; + } + + /** + * Gets the base HTTP URL (protocol, host and mapped port) for the ClickHouse container's HTTP interface. + * Example: `http://localhost:32768` + */ + public getHttpUrl(): string { + const protocol = "http"; + const host = this.getHost(); + const port = this.getHttpPort(); + return `${protocol}://${host}:${port}`; + } + + /** + * Gets configuration options suitable for passing directly to `createClient({...})` + * from `@clickhouse/client`. Uses the HTTP interface. + */ + public getClientOptions(): { + url?: string; + username: string; + password: string; + database: string; + } { + return { + url: this.getHttpUrl(), + username: this.getUsername(), + password: this.getPassword(), + database: this.getDatabase(), + }; + } + + /** + * Gets a ClickHouse connection URL for the HTTP interface with format: + * http://username:password@hostname:port/database + * @returns The ClickHouse HTTP URL string. + */ + public getConnectionUrl(): string { + const url = new URL(this.getHttpUrl()); + + url.username = this.getUsername(); + url.password = this.getPassword(); + + const dbName = this.getDatabase(); + url.pathname = dbName.startsWith("/") ? 
dbName : `/${dbName}`; + + return url.toString(); + } +} + +export async function runClickhouseMigrations(client: ClickHouseClient, migrationsPath: string) { + // Get all the *.sql files in the migrations path + const queries = await getAllClickhouseMigrationQueries(migrationsPath); + + for (const query of queries) { + console.log(`Running migration: ${query}`); + + await client.command({ + query, + }); + } +} + +async function getAllClickhouseMigrationQueries(migrationsPath: string) { + const queries: string[] = []; + // Get all the *.sql files in the migrations path + const migrations = await readdir(migrationsPath); + + for (const migration of migrations) { + const migrationPath = resolve(migrationsPath, migration); + + console.log(`Reading migration: ${migrationPath}`, { + migrationPath, + migration, + migrationsPath, + }); + + const migrationContent = await readFile(migrationPath, "utf-8"); + + // Split content by goose markers + const parts = migrationContent.split(/--\s*\+goose\s+(Up|Down)/i); + + // The array will be: ["", "Up", "up queries", "Down", "down queries"] + // We want the "up queries" part which is at index 2 + if (parts.length >= 3) { + const upQueries = parts[2].trim(); + queries.push( + ...upQueries + .split(";") + .filter((q) => q.trim()) + .map((q) => q.trim()) + ); + } + } + + console.log(`Found queries`, { + queries, + }); + + return queries; +} diff --git a/internal-packages/testcontainers/src/index.ts b/internal-packages/testcontainers/src/index.ts index ef36de754c..913b6d70dc 100644 --- a/internal-packages/testcontainers/src/index.ts +++ b/internal-packages/testcontainers/src/index.ts @@ -5,6 +5,7 @@ import { RedisOptions } from "ioredis"; import { Network, type StartedNetwork } from "testcontainers"; import { TaskContext, test } from "vitest"; import { + createClickHouseContainer, createElectricContainer, createPostgresContainer, createRedisContainer, @@ -12,6 +13,8 @@ import { withContainerSetup, } from "./utils"; import { getTaskMetadata, logCleanup, logSetup } from "./logs"; +import { StartedClickHouseContainer } from "./clickhouse"; +import { ClickHouseClient, createClient } from "@clickhouse/client"; export { assertNonNullable } from "./utils"; export { StartedRedisContainer }; @@ -170,6 +173,39 @@ const electricOrigin = async ( await useContainer("electricContainer", { container, task, use: () => use(origin) }); }; +const clickhouseContainer = async ( + { network }: { network: StartedNetwork }, + use: Use +) => { + const { container } = await createClickHouseContainer(network); + + try { + await use(container); + } finally { + await container.stop(); + } +}; + +const clickhouseClient = async ( + { clickhouseContainer }: { clickhouseContainer: StartedClickHouseContainer }, + use: Use +) => { + const client = createClient({ url: clickhouseContainer.getConnectionUrl() }); + await use(client); +}; + +type ClickhouseContext = { + network: StartedNetwork; + clickhouseContainer: StartedClickHouseContainer; + clickhouseClient: ClickHouseClient; +}; + +export const clickhouseTest = test.extend({ + network, + clickhouseContainer, + clickhouseClient, +}); + export const containerTest = test.extend({ network, postgresContainer, diff --git a/internal-packages/testcontainers/src/utils.ts b/internal-packages/testcontainers/src/utils.ts index dec2093539..587cb954c5 100644 --- a/internal-packages/testcontainers/src/utils.ts +++ b/internal-packages/testcontainers/src/utils.ts @@ -9,6 +9,9 @@ import { x } from "tinyexec"; import { expect, TaskContext } from "vitest"; 
import { getContainerMetadata, getTaskMetadata, logCleanup } from "./logs"; import { logSetup } from "./logs"; +import { ClickHouseContainer, runClickhouseMigrations } from "./clickhouse"; +import { createClient } from "@clickhouse/client"; +import { readdir, readFile } from "node:fs/promises"; export async function createPostgresContainer(network: StartedNetwork) { const container = await new PostgreSqlContainer("docker.io/postgres:14") @@ -45,6 +48,27 @@ export async function createPostgresContainer(network: StartedNetwork) { return { url: container.getConnectionUri(), container, network }; } +export async function createClickHouseContainer(network: StartedNetwork) { + const container = await new ClickHouseContainer().withNetwork(network).start(); + + const client = createClient({ + url: container.getConnectionUrl(), + }); + + await client.ping(); + + // Now we run the migrations + const migrationsPath = path.resolve(__dirname, "../../clickhouse/schema"); + + await runClickhouseMigrations(client, migrationsPath); + + return { + url: container.getConnectionUrl(), + container, + network, + }; +} + export async function createRedisContainer({ port, network, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8b91607b7e..fb16267bba 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -859,6 +859,25 @@ importers: docs: {} + internal-packages/clickhouse: + dependencies: + '@clickhouse/client': + specifier: ^1.11.1 + version: 1.11.1 + devDependencies: + '@internal/testcontainers': + specifier: workspace:* + version: link:../testcontainers + '@vitest/coverage-v8': + specifier: ^3.0.8 + version: 3.0.8(vitest@3.0.8) + rimraf: + specifier: 6.0.1 + version: 6.0.1 + vitest: + specifier: ^3.0.8 + version: 3.0.8(@types/node@20.14.14) + internal-packages/database: dependencies: '@prisma/client': @@ -995,6 +1014,9 @@ importers: internal-packages/testcontainers: dependencies: + '@clickhouse/client': + specifier: ^1.11.1 + version: 1.11.1 '@opentelemetry/api': specifier: ^1.9.0 version: 1.9.0 @@ -5790,6 +5812,16 @@ packages: '@clack/core': 0.4.1 picocolors: 1.1.1 sisteransi: 1.0.5 + + /@clickhouse/client-common@1.11.1: + resolution: {integrity: sha512-bme0le2yhDSAh13d2fxhSW5ZrNoVqZ3LTyac8jK6hNH0qkksXnjYkLS6KQalPU6NMpffxHmpI4+/Gi2MnX0NCA==} + dev: false + + /@clickhouse/client@1.11.1: + resolution: {integrity: sha512-u9h++h72SmWystijNqfNvMkfA+5+Y1LNfmLL/odCL3VgI3oyAPP9ubSw/Yrt2zRZkLKehMMD1kuOej0QHbSoBA==} + engines: {node: '>=16'} + dependencies: + '@clickhouse/client-common': 1.11.1 dev: false /@cloudflare/kv-asset-handler@0.3.4: @@ -9330,7 +9362,7 @@ packages: engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dependencies: '@npmcli/git': 4.1.0 - glob: 10.3.10 + glob: 10.4.5 hosted-git-info: 6.1.1 json-parse-even-better-errors: 3.0.0 normalize-package-data: 5.0.0 @@ -19079,7 +19111,7 @@ packages: /@vitest/snapshot@1.4.0: resolution: {integrity: sha512-saAFnt5pPIA5qDGxOHxJ/XxhMFKkUSBJmVt5VgDsAqPTX6JP326r5C/c9UuCMPoXNzuudTPsYDZCoJ5ilpqG2A==} dependencies: - magic-string: 0.30.11 + magic-string: 0.30.17 pathe: 1.1.2 pretty-format: 29.7.0 dev: true @@ -19096,7 +19128,7 @@ packages: resolution: {integrity: sha512-SgCPUeDFLaM0mIUHfaArq8fD2WbaXG/zVXjRupthYfYGzc8ztbFbu6dUNOblBG7XLMR1kEhS/DNnfCZ2IhdDew==} dependencies: '@vitest/pretty-format': 2.0.5 - magic-string: 0.30.11 + magic-string: 0.30.17 pathe: 1.1.2 dev: true @@ -20742,7 +20774,7 @@ packages: dependencies: '@npmcli/fs': 3.1.0 fs-minipass: 3.0.3 - glob: 10.3.10 + glob: 10.4.5 lru-cache: 7.18.3 minipass: 7.0.3 minipass-collect: 1.0.2 @@ -20916,7 +20948,7 @@ 
packages: assertion-error: 2.0.1 check-error: 2.1.1 deep-eql: 5.0.2 - loupe: 3.1.1 + loupe: 3.1.3 pathval: 2.0.0 dev: true @@ -22708,12 +22740,8 @@ packages: resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} engines: {node: '>= 0.4'} - /es-module-lexer@1.3.1: - resolution: {integrity: sha512-JUFAyicQV9mXc3YRxPnDlrfBKpqt6hUYzz9/boprUJHs4e4KVr3XwOF70doO6gwXUor6EWZJAyWAfKki84t20Q==} - /es-module-lexer@1.6.0: resolution: {integrity: sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==} - dev: true /es-object-atoms@1.0.0: resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} @@ -24789,7 +24817,7 @@ packages: jackspeak: 2.3.6 minimatch: 9.0.5 minipass: 7.1.2 - path-scurry: 1.10.1 + path-scurry: 1.11.1 /glob@10.3.4: resolution: {integrity: sha512-6LFElP3A+i/Q8XQKEvZjkEWEOTgAIALR9AO2rwT8bgPhDd1anmqDJDZ6lLddI4ehxxxR1S5RIqKe1uapMQfYaQ==} @@ -24800,7 +24828,7 @@ packages: jackspeak: 2.3.6 minimatch: 9.0.5 minipass: 7.1.2 - path-scurry: 1.10.1 + path-scurry: 1.11.1 dev: false /glob@10.4.5: @@ -26067,7 +26095,7 @@ packages: dependencies: config-chain: 1.1.13 editorconfig: 1.0.4 - glob: 10.3.10 + glob: 10.4.5 js-cookie: 3.0.5 nopt: 7.2.0 dev: false @@ -29329,13 +29357,6 @@ packages: /path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - /path-scurry@1.10.1: - resolution: {integrity: sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==} - engines: {node: '>=16 || 14 >=14.17'} - dependencies: - lru-cache: 10.4.3 - minipass: 7.1.2 - /path-scurry@1.11.1: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} @@ -35663,7 +35684,7 @@ packages: browserslist: 4.24.4 chrome-trace-event: 1.0.3 enhanced-resolve: 5.18.1 - es-module-lexer: 1.3.1 + es-module-lexer: 1.6.0 eslint-scope: 5.1.1 events: 3.3.0 glob-to-regexp: 0.4.1 @@ -35703,7 +35724,7 @@ packages: browserslist: 4.24.4 chrome-trace-event: 1.0.3 enhanced-resolve: 5.18.1 - es-module-lexer: 1.3.1 + es-module-lexer: 1.6.0 eslint-scope: 5.1.1 events: 3.3.0 glob-to-regexp: 0.4.1 From a49930a1887ecb359e369d4e4b3391817e4e44f4 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 28 Apr 2025 14:27:25 +0100 Subject: [PATCH 02/33] More clickhouse client setup now with otel and real tests, and the v1 of raw run events --- docker/docker-compose.yml | 12 + internal-packages/clickhouse/README.md | 64 + internal-packages/clickhouse/package.json | 8 +- .../schema/003_create_raw_run_events_v1.sql | 84 ++ .../clickhouse/src/client.test.ts | 17 - internal-packages/clickhouse/src/client.ts | 9 - .../clickhouse/src/client/client.test.ts | 149 +++ .../clickhouse/src/client/client.ts | 240 ++++ .../clickhouse/src/client/errors.ts | 42 + .../clickhouse/src/client/noop.ts | 62 + .../clickhouse/src/client/types.ts | 61 + internal-packages/clickhouse/src/index.ts | 80 +- .../clickhouse/src/runEvents.test.ts | 80 ++ internal-packages/clickhouse/src/runEvents.ts | 70 + .../testcontainers/src/clickhouse.ts | 12 - internal-packages/tracing/src/index.ts | 8 + packages/core/src/v3/tryCatch.ts | 6 +- pnpm-lock.yaml | 1131 +++++++++++------ 18 files changed, 1690 insertions(+), 445 deletions(-) create mode 100644 internal-packages/clickhouse/README.md create mode 100644 
internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql delete mode 100644 internal-packages/clickhouse/src/client.test.ts delete mode 100644 internal-packages/clickhouse/src/client.ts create mode 100644 internal-packages/clickhouse/src/client/client.test.ts create mode 100644 internal-packages/clickhouse/src/client/client.ts create mode 100644 internal-packages/clickhouse/src/client/errors.ts create mode 100644 internal-packages/clickhouse/src/client/noop.ts create mode 100644 internal-packages/clickhouse/src/client/types.ts create mode 100644 internal-packages/clickhouse/src/runEvents.test.ts create mode 100644 internal-packages/clickhouse/src/runEvents.ts diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 5d7ab5926e..7294e450c5 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -105,6 +105,18 @@ services: - app_network command: ["goose", "${GOOSE_COMMAND:-up}"] + ch-ui: + image: ghcr.io/caioricciuti/ch-ui:latest + restart: always + ports: + - "5521:5521" + environment: + VITE_CLICKHOUSE_URL: "http://clickhouse:8123" + VITE_CLICKHOUSE_USER: "default" + VITE_CLICKHOUSE_PASS: "password" + networks: + - app_network + # otel-collector: # container_name: otel-collector # image: otel/opentelemetry-collector-contrib:latest diff --git a/internal-packages/clickhouse/README.md b/internal-packages/clickhouse/README.md new file mode 100644 index 0000000000..df3aaad33a --- /dev/null +++ b/internal-packages/clickhouse/README.md @@ -0,0 +1,64 @@ +# ClickHouse Table Naming Conventions + +The following document is heavily inspired by the [Unkey](https://unkey.dev) ClickHouse naming conventions. + +This document outlines the naming conventions for tables and materialized views in our ClickHouse setup. Adhering to these conventions ensures consistency, clarity, and ease of management across our data infrastructure. + +## General Rules + +1. Use lowercase letters and separate words with underscores. +2. Avoid ClickHouse reserved words and special characters in names. +3. Be descriptive but concise. + +## Table Naming Convention + +Format: `[prefix]_[domain]_[description]_[version]` + +### Prefixes + +- `raw_`: Input data tables +- `tmp_{yourname}_`: Temporary tables for experiments, add your name, so it's easy to identify ownership. + +### Versioning + +- Version numbers: `_v1`, `_v2`, etc. + +### Aggregation Suffixes + +For aggregated or summary tables, use suffixes like: + +- `_per_day` +- `_per_month` +- `_summary` + +## Materialized View Naming Convention + +Format: `[description]_[aggregation]_mv_[version]` + +- Always suffix with `mv_[version]` +- Include a description of the view's purpose +- Add aggregation level if applicable + +## Examples + +1. Raw Data Table: + `raw_sales_transactions_v1` + +2. Materialized View: + `active_users_per_day_mv_v2` + +3. Temporary Table: + `tmp_eric_user_analysis_v1` + +4. Aggregated Table: + `sales_summary_per_hour_mv_v1` + +## Consistency Across Related Objects + +Maintain consistent naming across related tables, views, and other objects: + +- `raw_user_activity_v1` +- `user_activity_per_day_v1` +- `user_activity_per_day_mv_v1` + +By following these conventions, we ensure a clear, consistent, and scalable naming structure for our ClickHouse setup. 
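As a minimal sketch of these conventions in DDL form (the column definitions and engines here are hypothetical; only the object names come from the examples above), the three related objects from the last list might be declared like this:

```sql
-- Raw input table: `raw_` prefix + domain + description + version suffix.
CREATE TABLE trigger_dev.raw_user_activity_v1
(
    user_id    String,
    event_time DateTime64(3),
    action     LowCardinality(String)
)
ENGINE = MergeTree
ORDER BY (user_id, event_time);

-- Aggregated table: same base name + aggregation suffix + version.
CREATE TABLE trigger_dev.user_activity_per_day_v1
(
    user_id String,
    day     Date,
    actions UInt64
)
ENGINE = SummingMergeTree
ORDER BY (user_id, day);

-- Materialized view: description + aggregation level + `mv` + version.
-- The TO clause writes rows into the explicitly named aggregate table above.
CREATE MATERIALIZED VIEW trigger_dev.user_activity_per_day_mv_v1
TO trigger_dev.user_activity_per_day_v1
AS
SELECT
    user_id,
    toDate(event_time) AS day,
    count() AS actions
FROM trigger_dev.raw_user_activity_v1
GROUP BY user_id, day;
```

Keeping the materialized view's storage in a separately named `_v1` target table lets the view and its backing table be versioned independently when the aggregation logic changes.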
diff --git a/internal-packages/clickhouse/package.json b/internal-packages/clickhouse/package.json index 7471021722..5deb8e32f5 100644 --- a/internal-packages/clickhouse/package.json +++ b/internal-packages/clickhouse/package.json @@ -6,12 +6,16 @@ "types": "./dist/index.d.ts", "type": "module", "dependencies": { - "@clickhouse/client": "^1.11.1" + "@clickhouse/client": "^1.11.1", + "@internal/tracing": "workspace:*", + "@trigger.dev/core": "workspace:*", + "zod": "3.23.8", + "zod-error": "1.5.0" }, "devDependencies": { - "rimraf": "6.0.1", "@internal/testcontainers": "workspace:*", "@vitest/coverage-v8": "^3.0.8", + "rimraf": "6.0.1", "vitest": "^3.0.8" }, "scripts": { diff --git a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql new file mode 100644 index 0000000000..aa85ad2982 --- /dev/null +++ b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql @@ -0,0 +1,84 @@ +-- +goose Up +SET enable_json_type = 1; + +/* ───────────────────────────────────────────────────────────── + RAW EVENT STREAM trigger_dev.raw_run_events_v1 + ───────────────────────────────────────────────────────────── + • One row for every status change / retry / metric emission + • All TaskRun scalar columns duplicated in each row + – they compress brilliantly and remove JOINs later + • Heavy blobs → ZSTD + • High-cardinality enums / strings → LowCardinality + LZ4 + • Array / JSON fields → ZSTD + late-materialised + • Bloom-filter index on tags for instant “has(tag)” + ----------------------------------------------------------------- */ + +CREATE TABLE trigger_dev.raw_run_events_v1 +( + /* ─── ids & hierarchy ─────────────────────────────────────── */ + environment_id String, + run_id String, + attempt UInt8 DEFAULT 1, + + /* ─── enums / status ──────────────────────────────────────── */ + engine Enum8('V1'=1,'V2'=2) + CODEC(T64, LZ4), + status Enum8( -- TaskRunStatus + 'DELAYED'=1,'PENDING'=2,'PENDING_VERSION'=3, + 'WAITING_FOR_DEPLOY'=4,'WAITING_FOR_EVENT'=5, + 'RUNNING'=6,'WAITING'=7,'PAUSED'=8, + /* final */ 'COMPLETED_SUCCESSFULLY'=20,'FAILED'=21, + 'CANCELED'=22,'INTERRUPTED'=23,'CRASHED'=24, + 'EXPIRED'=25,'TIMED_OUT'=26), + + /* ─── queue / concurrency / schedule ─────────────────────── */ + task_identifier String, + queue String, + + schedule_id Nullable(String), + batch_id Nullable(String), + + /* ─── timing ─────────────────────────────────────────────── */ + event_time DateTime64(3), -- when this row created + created_at DateTime64(3), + updated_at DateTime64(3), + started_at Nullable(DateTime64(3)), + executed_at Nullable(DateTime64(3)), + completed_at Nullable(DateTime64(3)), + finished_at Nullable(DateTime64(3)), -- end of *this* status + delay_until Nullable(DateTime64(3)), + queued_at Nullable(DateTime64(3)), + expired_at Nullable(DateTime64(3)), + duration_ms Nullable(UInt32), + + /* ─── cost / usage ───────────────────────────────────────── */ + usage_duration_ms UInt32 DEFAULT 0, + cost_in_cents Float64 DEFAULT 0, + + /* ─── payload & context ──────────────────────────────────── */ + payload Nullable(JSON(max_dynamic_paths = 2048)), + output Nullable(JSON(max_dynamic_paths = 2048)), + error Nullable(JSON(max_dynamic_paths = 64)), + + /* ─── tagging / versions ─────────────────────────────────── */ + tags Array(String) CODEC(ZSTD(1)), + task_version Nullable(String) CODEC(LZ4), + sdk_version Nullable(String) CODEC(LZ4), + cli_version Nullable(String) CODEC(LZ4), + machine_preset 
LowCardinality(Nullable(String)) CODEC(LZ4), +) +ENGINE = MergeTree +PARTITION BY toYYYYMM(event_time) +ORDER BY (environment_id, event_time, run_id) +SETTINGS + index_granularity = 8192, + vertical_merge_algorithm_min_rows_to_activate = 100000; + +/* Fast tag filtering */ +ALTER TABLE trigger_dev.raw_run_events_v1 + ADD INDEX idx_tags tags TYPE tokenbf_v1(32768, 3, 0) GRANULARITY 4; + + +-- +goose Down +SET enable_json_type = 0; +DROP TABLE IF EXISTS trigger_dev.raw_run_events_v1; diff --git a/internal-packages/clickhouse/src/client.test.ts b/internal-packages/clickhouse/src/client.test.ts deleted file mode 100644 index a52c98be6e..0000000000 --- a/internal-packages/clickhouse/src/client.test.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { clickhouseTest } from "@internal/testcontainers"; - -describe("ClickHouse Client", () => { - clickhouseTest("should create a client", async ({ clickhouseClient }) => { - const client = clickhouseClient; - - const result = await client.query({ - query: "SELECT 1", - }); - - const json = await result.json(); - - console.log(json); - - expect(json.data).toEqual([{ "1": 1 }]); - }); -}); diff --git a/internal-packages/clickhouse/src/client.ts b/internal-packages/clickhouse/src/client.ts deleted file mode 100644 index f998c098f6..0000000000 --- a/internal-packages/clickhouse/src/client.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { createClient as createClickhouseClient } from "@clickhouse/client"; - -export function createClient(url: string) { - const client = createClickhouseClient({ - url, - }); - - return client; -} diff --git a/internal-packages/clickhouse/src/client/client.test.ts b/internal-packages/clickhouse/src/client/client.test.ts new file mode 100644 index 0000000000..55ede90197 --- /dev/null +++ b/internal-packages/clickhouse/src/client/client.test.ts @@ -0,0 +1,149 @@ +import { clickhouseTest } from "@internal/testcontainers"; +import { ClickhouseClient } from "./client.js"; +import { z } from "zod"; +import { setTimeout } from "timers/promises"; + +describe("ClickHouse Client", () => { + clickhouseTest("should be able to insert and query data", async ({ clickhouseContainer }) => { + const client = new ClickhouseClient({ + name: "test", + url: clickhouseContainer.getConnectionUrl(), + }); + + const insertSmokeTest = client.insert({ + name: "insert-smoke-test", + table: "trigger_dev.smoke_test", + schema: z.object({ + message: z.string(), + number: z.number(), + }), + }); + + const querySmokeTest = client.query({ + name: "query-smoke-test", + query: "SELECT * FROM trigger_dev.smoke_test", + schema: z.object({ + message: z.string(), + number: z.number(), + timestamp: z.string(), + id: z.string(), + }), + }); + + const [insertError, insertResult] = await insertSmokeTest([ + { message: "hello", number: 42 }, + { message: "world", number: 100 }, + ]); + + expect(insertError).toBeNull(); + expect(insertResult).toEqual( + expect.objectContaining({ + executed: true, + query_id: expect.any(String), + summary: expect.objectContaining({ read_rows: "2", elapsed_ns: expect.any(String) }), + }) + ); + + const [queryError, result] = await querySmokeTest({}); + + expect(queryError).toBeNull(); + + expect(result).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + message: "hello", + number: 42, + timestamp: expect.any(String), + id: expect.any(String), + }), + expect.objectContaining({ + message: "world", + number: 100, + timestamp: expect.any(String), + id: expect.any(String), + }), + ]) + ); + + const insertSmokeTestAsyncWaiting = client.insert({ + 
name: "insert-smoke-test-async-waiting", + table: "trigger_dev.smoke_test", + schema: z.object({ + message: z.string(), + number: z.number(), + }), + settings: { + async_insert: 1, + wait_for_async_insert: 1, + async_insert_busy_timeout_ms: 1000, + }, + }); + + const [insertErrorAsyncWaiting, insertResultAsyncWaiting] = await insertSmokeTestAsyncWaiting([ + { message: "async-waiting-hello", number: 42 }, + { message: "async-waiting-world", number: 100 }, + ]); + + expect(insertErrorAsyncWaiting).toBeNull(); + expect(insertResultAsyncWaiting).toEqual(expect.objectContaining({ executed: true })); + + // Should be able to query for the data right away + const [queryErrorAsyncWaiting, resultAsyncWaiting] = await querySmokeTest({}); + + expect(queryErrorAsyncWaiting).toBeNull(); + expect(resultAsyncWaiting).toEqual( + expect.arrayContaining([ + expect.objectContaining({ message: "async-waiting-hello", number: 42 }), + expect.objectContaining({ message: "async-waiting-world", number: 100 }), + ]) + ); + + const insertSmokeTestAsyncDontWait = client.insert({ + name: "insert-smoke-test-async-dont-wait", + table: "trigger_dev.smoke_test", + schema: z.object({ + message: z.string(), + number: z.number(), + }), + settings: { + async_insert: 1, + wait_for_async_insert: 0, + async_insert_busy_timeout_ms: 1000, + }, + }); + + const [insertErrorAsyncDontWait, insertResultAsyncDontWait] = + await insertSmokeTestAsyncDontWait([ + { message: "async-dont-wait-hello", number: 42 }, + { message: "async-dont-wait-world", number: 100 }, + ]); + + expect(insertErrorAsyncDontWait).toBeNull(); + expect(insertResultAsyncDontWait).toEqual(expect.objectContaining({ executed: true })); + + // Querying now should return an array without the data + const [queryErrorAsyncDontWait, resultAsyncDontWait] = await querySmokeTest({}); + + expect(queryErrorAsyncDontWait).toBeNull(); + expect(resultAsyncDontWait).toEqual( + expect.not.arrayContaining([ + expect.objectContaining({ message: "async-dont-wait-hello", number: 42 }), + expect.objectContaining({ message: "async-dont-wait-world", number: 100 }), + ]) + ); + + // Now we wait for the data to be flushed + await setTimeout(2000); + + // Querying now should return the data + const [queryErrorAsyncDontWait2, resultAsyncDontWait2] = await querySmokeTest({}); + + expect(queryErrorAsyncDontWait2).toBeNull(); + expect(resultAsyncDontWait2).toEqual( + expect.arrayContaining([ + expect.objectContaining({ message: "async-dont-wait-hello", number: 42 }), + expect.objectContaining({ message: "async-dont-wait-world", number: 100 }), + ]) + ); + }); +}); diff --git a/internal-packages/clickhouse/src/client/client.ts b/internal-packages/clickhouse/src/client/client.ts new file mode 100644 index 0000000000..f2cd5f9151 --- /dev/null +++ b/internal-packages/clickhouse/src/client/client.ts @@ -0,0 +1,240 @@ +import { + type ClickHouseClient, + ClickHouseError, + type ClickHouseSettings, + createClient, +} from "@clickhouse/client"; +import { recordSpanError, Span, startSpan, trace, Tracer } from "@internal/tracing"; +import { flattenAttributes, tryCatch } from "@trigger.dev/core/v3"; +import { z } from "zod"; +import { InsertError, QueryError } from "./errors.js"; +import type { + ClickhouseInsertFunction, + ClickhouseQueryFunction, + ClickhouseReader, + ClickhouseWriter, +} from "./types.js"; +import { generateErrorMessage } from "zod-error"; + +export type ClickhouseConfig = { + name: string; + url: string; + tracer?: Tracer; + clickhouseSettings?: ClickHouseSettings; +}; + +export class 
ClickhouseClient implements ClickhouseReader, ClickhouseWriter { + private readonly client: ClickHouseClient; + private readonly tracer: Tracer; + private readonly name: string; + + constructor(config: ClickhouseConfig) { + this.name = config.name; + + this.client = createClient({ + url: config.url, + + clickhouse_settings: { + ...config.clickhouseSettings, + output_format_json_quote_64bit_integers: 0, + output_format_json_quote_64bit_floats: 0, + }, + }); + + this.tracer = config.tracer ?? trace.getTracer("@internal/clickhouse"); + } + + public query, TOut extends z.ZodSchema>(req: { + /** + * The name of the operation. + * This will be used to identify the operation in the span. + */ + name: string; + /** + * The SQL query to run. + * Use {paramName: Type} to define parameters + * Example: `SELECT * FROM table WHERE id = {id: String}` + */ + query: string; + /** + * The schema of the parameters + * Example: z.object({ id: z.string() }) + */ + params?: TIn; + /** + * The schema of the output of each row + * Example: z.object({ id: z.string() }) + */ + schema: TOut; + /** + * The settings to use for the query. + * These will be merged with the default settings. + */ + settings?: ClickHouseSettings; + }): ClickhouseQueryFunction, z.output> { + return async (params, options) => { + return await startSpan(this.tracer, "query", async (span) => { + span.setAttributes({ + "clickhouse.clientName": this.name, + "clickhouse.operationName": req.name, + ...flattenAttributes(req.settings, "clickhouse.settings"), + ...flattenAttributes(options?.attributes), + }); + + const validParams = req.params?.safeParse(params); + + if (validParams?.error) { + recordSpanError(span, validParams.error); + + return [ + new QueryError(`Bad params: ${generateErrorMessage(validParams.error.issues)}`, { + query: req.query, + }), + null, + ]; + } + + let unparsedRows: Array = []; + + const [clickhouseError, res] = await tryCatch( + this.client.query({ + query: req.query, + query_params: validParams?.data, + format: "JSONEachRow", + ...options?.params, + clickhouse_settings: { + ...req.settings, + ...options?.params?.clickhouse_settings, + }, + }) + ); + + if (clickhouseError) { + recordClickhouseError(span, clickhouseError); + + return [ + new QueryError(`Unable to query clickhouse: ${clickhouseError.message}`, { + query: req.query, + }), + null, + ]; + } + + unparsedRows = await res.json(); + + span.setAttributes({ + "clickhouse.query_id": res.query_id, + ...flattenAttributes(res.response_headers, "clickhouse.response_headers"), + }); + + const summaryHeader = res.response_headers["x-clickhouse-summary"]; + + if (typeof summaryHeader === "string") { + span.setAttributes({ + ...flattenAttributes(JSON.parse(summaryHeader), "clickhouse.summary"), + }); + } + + const parsed = z.array(req.schema).safeParse(unparsedRows); + + if (parsed.error) { + const queryError = new QueryError(generateErrorMessage(parsed.error.issues), { + query: req.query, + }); + + recordSpanError(span, queryError); + + return [queryError, null]; + } + + span.setAttributes({ + "clickhouse.rows": unparsedRows.length, + }); + + return [null, parsed.data]; + }); + }; + } + + public insert>(req: { + name: string; + table: string; + schema: TSchema; + settings?: ClickHouseSettings; + }): ClickhouseInsertFunction> { + return async (events, options) => { + return await startSpan(this.tracer, "insert", async (span) => { + span.setAttributes({ + "clickhouse.clientName": this.name, + "clickhouse.tableName": req.table, + "clickhouse.operationName": req.name, + 
...flattenAttributes(req.settings, "clickhouse.settings"), + ...flattenAttributes(options?.attributes), + }); + + let validatedEvents: z.output | z.output[] | undefined = undefined; + + const v = Array.isArray(events) + ? req.schema.array().safeParse(events) + : req.schema.safeParse(events); + + if (!v.success) { + const error = new InsertError(generateErrorMessage(v.error.issues)); + + recordSpanError(span, error); + + return [error, null]; + } + + validatedEvents = v.data; + + const [clickhouseError, result] = await tryCatch( + this.client.insert({ + table: req.table, + format: "JSONEachRow", + values: Array.isArray(validatedEvents) ? validatedEvents : [validatedEvents], + ...options?.params, + clickhouse_settings: { + ...req.settings, + ...options?.params?.clickhouse_settings, + }, + }) + ); + + if (clickhouseError) { + recordClickhouseError(span, clickhouseError); + + return [new InsertError(clickhouseError.message), null]; + } + + span.setAttributes({ + "clickhouse.query_id": result.query_id, + "clickhouse.executed": result.executed, + "clickhouse.summary.read_rows": result.summary?.read_rows, + "clickhouse.summary.read_bytes": result.summary?.read_bytes, + "clickhouse.summary.written_rows": result.summary?.written_rows, + "clickhouse.summary.written_bytes": result.summary?.written_bytes, + "clickhouse.summary.total_rows_to_read": result.summary?.total_rows_to_read, + "clickhouse.summary.result_rows": result.summary?.result_rows, + "clickhouse.summary.result_bytes": result.summary?.result_bytes, + "clickhouse.summary.elapsed_ns": result.summary?.elapsed_ns, + }); + + return [null, result]; + }); + }; + } +} + +function recordClickhouseError(span: Span, error: Error) { + if (error instanceof ClickHouseError) { + span.setAttributes({ + "clickhouse.error.code": error.code, + "clickhouse.error.message": error.message, + "clickhouse.error.type": error.type, + }); + recordSpanError(span, error); + } else { + recordSpanError(span, error); + } +} diff --git a/internal-packages/clickhouse/src/client/errors.ts b/internal-packages/clickhouse/src/client/errors.ts new file mode 100644 index 0000000000..a620fb5464 --- /dev/null +++ b/internal-packages/clickhouse/src/client/errors.ts @@ -0,0 +1,42 @@ +export type ErrorContext = Record; + +export abstract class BaseError extends Error { + public abstract readonly retry: boolean; + public readonly cause: BaseError | undefined; + public readonly context: TContext | undefined; + public readonly message: string; + public abstract readonly name: string; + + constructor(opts: { message: string; cause?: BaseError; context?: TContext }) { + super(opts.message); + this.message = opts.message; + this.cause = opts.cause; + this.context = opts.context; + } + + public toString(): string { + return `${this.name}: ${this.message} - ${JSON.stringify( + this.context + )} - caused by ${this.cause?.toString()}`; + } +} + +export class InsertError extends BaseError { + public readonly retry = true; + public readonly name = InsertError.name; + constructor(message: string) { + super({ + message, + }); + } +} +export class QueryError extends BaseError<{ query: string }> { + public readonly retry = true; + public readonly name = QueryError.name; + constructor(message: string, context: { query: string }) { + super({ + message, + context, + }); + } +} diff --git a/internal-packages/clickhouse/src/client/noop.ts b/internal-packages/clickhouse/src/client/noop.ts new file mode 100644 index 0000000000..ca8f647b47 --- /dev/null +++ b/internal-packages/clickhouse/src/client/noop.ts 
@@ -0,0 +1,62 @@ +import { Result } from "@trigger.dev/core/v3"; +import { InsertError, QueryError } from "./errors.js"; +import { ClickhouseWriter } from "./types.js"; +import { ClickhouseReader } from "./types.js"; +import { z } from "zod"; +import { ClickHouseSettings, InsertResult } from "@clickhouse/client"; + +export class NoopClient implements ClickhouseReader, ClickhouseWriter { + public query, TOut extends z.ZodSchema>(req: { + query: string; + params?: TIn; + schema: TOut; + }): (params: z.input) => Promise[], QueryError>> { + return async (params: z.input) => { + const validParams = req.params?.safeParse(params); + + if (validParams?.error) { + return [new QueryError(`Bad params: ${validParams.error.message}`, { query: "" }), null]; + } + + return [null, []]; + }; + } + + public insert>(req: { + name: string; + table: string; + schema: TSchema; + settings?: ClickHouseSettings; + }): ( + events: z.input | z.input[] + ) => Promise> { + return async (events: z.input | z.input[]) => { + const v = Array.isArray(events) + ? req.schema.array().safeParse(events) + : req.schema.safeParse(events); + + if (!v.success) { + return [new InsertError(v.error.message), null]; + } + + return [ + null, + { + executed: true, + query_id: "noop", + summary: { + read_rows: "0", + read_bytes: "0", + written_rows: "0", + written_bytes: "0", + total_rows_to_read: "0", + result_rows: "0", + result_bytes: "0", + elapsed_ns: "0", + }, + response_headers: {}, + }, + ]; + }; + } +} diff --git a/internal-packages/clickhouse/src/client/types.ts b/internal-packages/clickhouse/src/client/types.ts new file mode 100644 index 0000000000..ea2df95a78 --- /dev/null +++ b/internal-packages/clickhouse/src/client/types.ts @@ -0,0 +1,61 @@ +import type { Result } from "@trigger.dev/core/v3"; +import type { z } from "zod"; +import type { InsertError, QueryError } from "./errors.js"; +import { ClickHouseSettings } from "@clickhouse/client"; +import type { BaseQueryParams, InsertResult } from "@clickhouse/client"; + +export type ClickhouseQueryFunction = ( + params: TInput, + options?: { + attributes?: Record; + params?: BaseQueryParams; + } +) => Promise>; + +export interface ClickhouseReader { + query, TOut extends z.ZodSchema>(req: { + /** + * The name of the operation. + * This will be used to identify the operation in the span. + */ + name: string; + /** + * The SQL query to run. + * Use {paramName: Type} to define parameters + * Example: `SELECT * FROM table WHERE id = {id: String}` + */ + query: string; + /** + * The schema of the parameters + * Example: z.object({ id: z.string() }) + */ + params?: TIn; + /** + * The schema of the output of each row + * Example: z.object({ id: z.string() }) + */ + schema: TOut; + /** + * The settings to use for the query. + * These will be merged with the default settings. 
+ */ + settings?: ClickHouseSettings; + }): ClickhouseQueryFunction, z.output>; +} + +export type ClickhouseInsertFunction = ( + events: TInput | TInput[], + options?: { + attributes?: Record; + params?: BaseQueryParams; + } +) => Promise>; + +export interface ClickhouseWriter { + insert>(req: { + name: string; + table: string; + schema: TSchema; + settings?: ClickHouseSettings; + }): ClickhouseInsertFunction>; +} diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index cb0ff5c3b5..b281f4bebe 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -1 +1,79 @@ -export {}; +import { ClickHouseSettings } from "@clickhouse/client"; +import { ClickhouseClient } from "./client/client.js"; +import { ClickhouseReader, ClickhouseWriter } from "./client/types.js"; +import { NoopClient } from "./client/noop.js"; +import { insertRunEvents } from "./runEvents.js"; + +export type ClickHouseConfig = + | { + name?: string; + url?: string; + writerUrl?: never; + readerUrl?: never; + clickhouseSettings?: ClickHouseSettings; + } + | { + name?: never; + url?: never; + writerName?: string; + writerUrl: string; + readerName?: string; + readerUrl: string; + clickhouseSettings?: ClickHouseSettings; + }; + +export class ClickHouse { + public readonly reader: ClickhouseReader; + public readonly writer: ClickhouseWriter; + + constructor(config: ClickHouseConfig) { + if (config.url) { + const client = new ClickhouseClient({ + name: config.name ?? "clickhouse", + url: config.url, + clickhouseSettings: config.clickhouseSettings, + }); + this.reader = client; + this.writer = client; + } else if (config.writerUrl && config.readerUrl) { + this.reader = new ClickhouseClient({ + name: config.readerName ?? "clickhouse-reader", + url: config.readerUrl, + clickhouseSettings: config.clickhouseSettings, + }); + this.writer = new ClickhouseClient({ + name: config.writerName ?? 
"clickhouse-writer", + url: config.writerUrl, + clickhouseSettings: config.clickhouseSettings, + }); + } else { + this.reader = new NoopClient(); + this.writer = new NoopClient(); + } + } + + static fromEnv(): ClickHouse { + if ( + typeof process.env.CLICKHOUSE_WRITER_URL === "string" && + typeof process.env.CLICKHOUSE_READER_URL === "string" + ) { + return new ClickHouse({ + writerUrl: process.env.CLICKHOUSE_WRITER_URL, + readerUrl: process.env.CLICKHOUSE_READER_URL, + writerName: process.env.CLICKHOUSE_WRITER_NAME, + readerName: process.env.CLICKHOUSE_READER_NAME, + }); + } + + return new ClickHouse({ + url: process.env.CLICKHOUSE_URL, + name: process.env.CLICKHOUSE_NAME, + }); + } + + get runEvents() { + return { + insert: insertRunEvents(this.writer), + }; + } +} diff --git a/internal-packages/clickhouse/src/runEvents.test.ts b/internal-packages/clickhouse/src/runEvents.test.ts new file mode 100644 index 0000000000..e48a19fe5c --- /dev/null +++ b/internal-packages/clickhouse/src/runEvents.test.ts @@ -0,0 +1,80 @@ +import { clickhouseTest } from "@internal/testcontainers"; +import { z } from "zod"; +import { ClickhouseClient } from "./client/client.js"; +import { insertRunEvents } from "./runEvents.js"; + +describe("Run Events", () => { + clickhouseTest("should be able to insert run events", async ({ clickhouseContainer }) => { + const client = new ClickhouseClient({ + name: "test", + url: clickhouseContainer.getConnectionUrl(), + }); + + const insert = insertRunEvents(client, { + async_insert: 0, // turn off async insert for this test + }); + + const [insertError, insertResult] = await insert([ + { + environment_id: "env_1234", + run_id: "run_1234", + attempt: 1, + engine: "V2", + status: "PENDING", + task_identifier: "my-task", + queue: "my-queue", + schedule_id: "schedule_1234", + batch_id: "batch_1234", + event_time: Date.now(), + created_at: Date.now(), + updated_at: Date.now(), + completed_at: Date.now(), + tags: ["tag1", "tag2"], + payload: { + key: "value", + }, + output: { + key: "value", + }, + error: { + type: "BUILT_IN_ERROR", + name: "Error", + message: "error", + stackTrace: "stack trace", + }, + duration_ms: 1000, + usage_duration_ms: 1000, + cost_in_cents: 100, + task_version: "1.0.0", + sdk_version: "1.0.0", + cli_version: "1.0.0", + machine_preset: "small-1x", + }, + ]); + + expect(insertError).toBeNull(); + expect(insertResult).toEqual(expect.objectContaining({ executed: true })); + expect(insertResult?.summary?.written_rows).toEqual("1"); + + const query = client.query({ + name: "query-run-events", + query: "SELECT * FROM trigger_dev.raw_run_events_v1", + schema: z.object({ + environment_id: z.string(), + run_id: z.string(), + }), + }); + + const [queryError, result] = await query({}); + + expect(queryError).toBeNull(); + expect(result).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + environment_id: "env_1234", + run_id: "run_1234", + }), + ]) + ); + }); +}); diff --git a/internal-packages/clickhouse/src/runEvents.ts b/internal-packages/clickhouse/src/runEvents.ts new file mode 100644 index 0000000000..11fd221c16 --- /dev/null +++ b/internal-packages/clickhouse/src/runEvents.ts @@ -0,0 +1,70 @@ +import { z } from "zod"; +import { ClickhouseWriter } from "./client/types.js"; +import { ClickHouseSettings } from "@clickhouse/client"; +import { TaskRunError } from "@trigger.dev/core/v3/schemas"; + +export const RawRunEventV1 = z.object({ + environment_id: z.string(), + run_id: z.string(), + attempt: z.number().int().default(1), + engine: z.enum(["V1", 
"V2"]), + status: z.enum([ + "DELAYED", + "PENDING", + "PENDING_VERSION", + "WAITING_FOR_DEPLOY", + "WAITING_FOR_EVENT", + "RUNNING", + "WAITING", + "PAUSED", + "COMPLETED_SUCCESSFULLY", + "FAILED", + "CANCELED", + "INTERRUPTED", + "CRASHED", + "EXPIRED", + "TIMED_OUT", + ]), + task_identifier: z.string(), + queue: z.string(), + schedule_id: z.string().optional(), + batch_id: z.string().optional(), + event_time: z.coerce.number().int(), + created_at: z.coerce.number().int(), + updated_at: z.coerce.number().int(), + completed_at: z.coerce.number().int().optional(), + started_at: z.coerce.number().int().optional(), + executed_at: z.coerce.number().int().optional(), + finished_at: z.coerce.number().int().optional(), + delay_until: z.coerce.number().int().optional(), + queued_at: z.coerce.number().int().optional(), + expired_at: z.coerce.number().int().optional(), + duration_ms: z.coerce.number().int().optional(), + usage_duration_ms: z.coerce.number().int().optional(), + cost_in_cents: z.coerce.number().int().optional(), + payload: z.unknown().optional(), + output: z.unknown().optional(), + error: TaskRunError.optional(), + tags: z.array(z.string()).transform((arr) => arr.sort()), + task_version: z.string().optional(), + sdk_version: z.string().optional(), + cli_version: z.string().optional(), + machine_preset: z.string().optional(), +}); + +export type RawRunEventV1 = z.infer; + +export function insertRunEvents(ch: ClickhouseWriter, settings?: ClickHouseSettings) { + return ch.insert({ + name: "insertRunEvents", + table: "trigger_dev.raw_run_events_v1", + schema: RawRunEventV1, + settings: { + async_insert: 1, + wait_for_async_insert: 0, + async_insert_max_data_size: "1000000", + async_insert_busy_timeout_ms: 1000, + ...settings, + }, + }); +} diff --git a/internal-packages/testcontainers/src/clickhouse.ts b/internal-packages/testcontainers/src/clickhouse.ts index e8259e73ef..ee59521156 100644 --- a/internal-packages/testcontainers/src/clickhouse.ts +++ b/internal-packages/testcontainers/src/clickhouse.ts @@ -149,8 +149,6 @@ export async function runClickhouseMigrations(client: ClickHouseClient, migratio const queries = await getAllClickhouseMigrationQueries(migrationsPath); for (const query of queries) { - console.log(`Running migration: ${query}`); - await client.command({ query, }); @@ -165,12 +163,6 @@ async function getAllClickhouseMigrationQueries(migrationsPath: string) { for (const migration of migrations) { const migrationPath = resolve(migrationsPath, migration); - console.log(`Reading migration: ${migrationPath}`, { - migrationPath, - migration, - migrationsPath, - }); - const migrationContent = await readFile(migrationPath, "utf-8"); // Split content by goose markers @@ -189,9 +181,5 @@ async function getAllClickhouseMigrationQueries(migrationsPath: string) { } } - console.log(`Found queries`, { - queries, - }); - return queries; } diff --git a/internal-packages/tracing/src/index.ts b/internal-packages/tracing/src/index.ts index a2fbb0e2b6..04d11f159a 100644 --- a/internal-packages/tracing/src/index.ts +++ b/internal-packages/tracing/src/index.ts @@ -45,6 +45,14 @@ export async function startSpan( }); } +export function recordSpanError(span: Span, error: Error) { + span.recordException(error); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error.message, + }); +} + export async function emitDebugLog( logger: Logger, message: string, diff --git a/packages/core/src/v3/tryCatch.ts b/packages/core/src/v3/tryCatch.ts index 664c6251f2..cc890a0061 100644 --- 
diff --git a/packages/core/src/v3/tryCatch.ts b/packages/core/src/v3/tryCatch.ts
index 664c6251f2..cc890a0061 100644
--- a/packages/core/src/v3/tryCatch.ts
+++ b/packages/core/src/v3/tryCatch.ts
@@ -1,8 +1,8 @@
 // Types for the result object with discriminated union
-type Success<T> = [null, T];
-type Failure<E> = [E, null];
+export type Success<T> = [null, T];
+export type Failure<E> = [E, null];
 
-type Result<T, E = Error> = Success<T> | Failure<E>;
+export type Result<T, E = Error> = Success<T> | Failure<E>;
 
 // Main wrapper function
 export async function tryCatch<T, E = Error>(promise: Promise<T>): Promise<Result<T, E>> {
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fb16267bba..6182d85131 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -52,7 +52,7 @@ importers:
         version: 10.4.13(postcss@8.5.3)
       eslint-plugin-turbo:
         specifier: ^2.0.4
-        version: 2.0.5(eslint@8.49.0)
+        version: 2.0.5(eslint@8.31.0)
       lefthook:
         specifier: ^1.11.3
         version: 1.11.3
@@ -203,7 +203,7 @@ importers:
         version: 3.454.0
       '@codemirror/autocomplete':
         specifier: ^6.3.1
-        version: 6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.0.2)
+        version: 6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.2.3)
       '@codemirror/commands':
         specifier: ^6.1.2
         version: 6.1.3
@@ -380,7 +380,7 @@ importers:
         version: 8.3.0(socket.io-adapter@2.5.4)
      '@splinetool/react-spline':
         specifier: ^2.2.6
-        version: 2.2.6(@splinetool/runtime@1.9.42)(react-dom@18.2.0)(react@18.2.0)
+        version: 2.2.6(@splinetool/runtime@1.9.87)(react-dom@18.2.0)(react@18.2.0)
       '@tabler/icons-react':
         specifier: ^2.39.0
         version: 2.47.0(react@18.2.0)
@@ -419,7 +419,7 @@ importers:
         version: 8.6.6
       '@uiw/react-codemirror':
         specifier: ^4.19.5
-        version: 4.19.5(@babel/runtime@7.26.7)(@codemirror/autocomplete@6.4.0)(@codemirror/language@6.3.2)(@codemirror/lint@6.4.2)(@codemirror/search@6.2.3)(@codemirror/state@6.2.0)(@codemirror/theme-one-dark@6.1.0)(@codemirror/view@6.7.2)(codemirror@6.0.1)(react-dom@18.2.0)(react@18.2.0)
+        version: 4.19.5(@babel/runtime@7.27.0)(@codemirror/autocomplete@6.4.0)(@codemirror/language@6.3.2)(@codemirror/lint@6.4.2)(@codemirror/search@6.2.3)(@codemirror/state@6.2.0)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.7.2)(codemirror@6.0.1)(react-dom@18.2.0)(react@18.2.0)
       '@unkey/cache':
         specifier: ^1.5.0
         version: 1.5.0
@@ -780,13 +780,13 @@ importers:
         version: 10.4.13(postcss@8.5.3)
       babel-loader:
         specifier: ^9.1.3
-        version: 9.1.3(@babel/core@7.26.8)(webpack@5.88.2)
+        version: 9.1.3(@babel/core@7.26.10)(webpack@5.99.7)
       babel-preset-react-app:
         specifier: ^10.0.1
         version: 10.0.1
       css-loader:
         specifier: ^6.10.0
-        version: 6.10.0(webpack@5.88.2)
+        version: 6.10.0(webpack@5.99.7)
       datepicker:
         specifier: link:@types/@react-aria/datepicker
         version: link:@types/@react-aria/datepicker
@@ -819,7 +819,7 @@ importers:
         version: 16.0.1(postcss@8.5.3)
       postcss-loader:
         specifier: ^8.1.1
-        version: 8.1.1(postcss@8.5.3)(typescript@5.5.4)(webpack@5.88.2)
+        version: 8.1.1(postcss@8.5.3)(typescript@5.5.4)(webpack@5.99.7)
       prettier:
         specifier: ^2.8.8
         version: 2.8.8
@@ -834,7 +834,7 @@ importers:
         version: 3.0.2
       style-loader:
         specifier: ^3.3.4
-        version: 3.3.4(webpack@5.88.2)
+        version: 3.3.4(webpack@5.99.7)
       supertest:
         specifier: ^7.0.0
         version: 7.0.0
@@ -864,6 +864,18 @@ importers:
       '@clickhouse/client':
         specifier: ^1.11.1
         version: 1.11.1
+      '@internal/tracing':
+        specifier: workspace:*
+        version: link:../tracing
+      '@trigger.dev/core':
+        specifier: workspace:*
+        version: link:../../packages/core
+      zod:
+        specifier: 3.23.8
+        version: 3.23.8
+      zod-error:
+        specifier: 1.5.0
+        version: 1.5.0
     devDependencies:
       '@internal/testcontainers':
         specifier: workspace:*
@@ -910,7 +922,7 @@
importers: version: 18.3.1 react-email: specifier: ^2.1.1 - version: 2.1.2(eslint@8.49.0) + version: 2.1.2(eslint@8.31.0) resend: specifier: ^3.2.0 version: 3.2.0 @@ -1487,7 +1499,7 @@ importers: version: 4.0.14 ai: specifier: ^3.4.33 - version: 3.4.33(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.23.8) + version: 3.4.33(react@18.3.1)(svelte@5.28.2)(vue@3.5.13)(zod@3.23.8) defu: specifier: ^6.1.4 version: 6.1.4 @@ -2129,7 +2141,7 @@ importers: dependencies: '@effect/schema': specifier: ^0.75.5 - version: 0.75.5(effect@3.11.7) + version: 0.75.5(effect@3.14.14) '@infisical/sdk': specifier: ^2.1.9 version: 2.3.5 @@ -2141,10 +2153,10 @@ importers: version: 5.19.0(prisma@5.19.0) '@react-email/components': specifier: 0.0.24 - version: 0.0.24(react-dom@18.2.0)(react@19.0.0-rc.0) + version: 0.0.24(react-dom@18.3.1)(react@19.0.0-rc.0) '@react-email/render': specifier: 1.0.1 - version: 1.0.1(react-dom@18.2.0)(react@19.0.0-rc.0) + version: 1.0.1(react-dom@18.3.1)(react@19.0.0-rc.0) '@sentry/esbuild-plugin': specifier: ^2.22.2 version: 2.22.2 @@ -2171,7 +2183,7 @@ importers: version: 0.14.0(@sinclair/typebox@0.33.17) ai: specifier: ^3.3.24 - version: 3.3.24(openai@4.56.0)(react@19.0.0-rc.0)(svelte@4.2.19)(vue@3.4.38)(zod@3.23.8) + version: 3.3.24(openai@4.56.0)(react@19.0.0-rc.0)(svelte@5.28.2)(vue@3.5.13)(zod@3.23.8) arktype: specifier: 2.0.0-rc.17 version: 2.0.0-rc.17 @@ -2210,7 +2222,7 @@ importers: version: 19.0.0-rc.0 react-email: specifier: ^3.0.1 - version: 3.0.1(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.2.0)(react@19.0.0-rc.0) + version: 3.0.1(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.3.1)(react@19.0.0-rc.0) reflect-metadata: specifier: ^0.1.13 version: 0.1.14 @@ -2323,10 +2335,6 @@ importers: packages: - /@aashutoshrathi/word-wrap@1.2.6: - resolution: {integrity: sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==} - engines: {node: '>=0.10.0'} - /@adobe/css-tools@4.4.0: resolution: {integrity: sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ==} dev: false @@ -2587,7 +2595,7 @@ packages: - zod dev: true - /@ai-sdk/svelte@0.0.45(svelte@4.2.19)(zod@3.23.8): + /@ai-sdk/svelte@0.0.45(svelte@5.28.2)(zod@3.23.8): resolution: {integrity: sha512-w5Sdl0ArFIM3Fp8BbH4TUvlrS84WP/jN/wC1+fghMOXd7ceVO3Yhs9r71wTqndhgkLC7LAEX9Ll7ZEPfW9WBDA==} engines: {node: '>=18'} peerDependencies: @@ -2598,13 +2606,13 @@ packages: dependencies: '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) '@ai-sdk/ui-utils': 0.0.40(zod@3.23.8) - sswr: 2.1.0(svelte@4.2.19) - svelte: 4.2.19 + sswr: 2.1.0(svelte@5.28.2) + svelte: 5.28.2 transitivePeerDependencies: - zod dev: false - /@ai-sdk/svelte@0.0.57(svelte@4.2.19)(zod@3.23.8): + /@ai-sdk/svelte@0.0.57(svelte@5.28.2)(zod@3.23.8): resolution: {integrity: sha512-SyF9ItIR9ALP9yDNAD+2/5Vl1IT6kchgyDH8xkmhysfJI6WrvJbtO1wdQ0nylvPLcsPoYu+cAlz1krU4lFHcYw==} engines: {node: '>=18'} peerDependencies: @@ -2615,8 +2623,8 @@ packages: dependencies: '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - sswr: 2.1.0(svelte@4.2.19) - svelte: 4.2.19 + sswr: 2.1.0(svelte@5.28.2) + svelte: 5.28.2 transitivePeerDependencies: - zod dev: true @@ -2681,7 +2689,7 @@ packages: zod: 3.23.8 zod-to-json-schema: 3.24.5(zod@3.23.8) - /@ai-sdk/vue@0.0.45(vue@3.4.38)(zod@3.23.8): + /@ai-sdk/vue@0.0.45(vue@3.5.13)(zod@3.23.8): resolution: {integrity: sha512-bqeoWZqk88TQmfoPgnFUKkrvhOIcOcSH5LMPgzZ8XwDqz5tHHrMHzpPfHCj7XyYn4ROTFK/2kKdC/ta6Ko0fMw==} engines: 
{node: '>=18'} peerDependencies: @@ -2692,13 +2700,13 @@ packages: dependencies: '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) '@ai-sdk/ui-utils': 0.0.40(zod@3.23.8) - swrv: 1.0.4(vue@3.4.38) - vue: 3.4.38(typescript@5.5.4) + swrv: 1.0.4(vue@3.5.13) + vue: 3.5.13(typescript@5.5.4) transitivePeerDependencies: - zod dev: false - /@ai-sdk/vue@0.0.59(vue@3.4.38)(zod@3.23.8): + /@ai-sdk/vue@0.0.59(vue@3.5.13)(zod@3.23.8): resolution: {integrity: sha512-+ofYlnqdc8c4F6tM0IKF0+7NagZRAiqBJpGDJ+6EYhDW8FHLUP/JFBgu32SjxSxC6IKFZxEnl68ZoP/Z38EMlw==} engines: {node: '>=18'} peerDependencies: @@ -2709,8 +2717,8 @@ packages: dependencies: '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - swrv: 1.0.4(vue@3.4.38) - vue: 3.4.38(typescript@5.5.4) + swrv: 1.0.4(vue@3.5.13) + vue: 3.5.13(typescript@5.5.4) transitivePeerDependencies: - zod dev: true @@ -3831,21 +3839,20 @@ packages: - supports-color dev: false - /@babel/core@7.26.8: - resolution: {integrity: sha512-l+lkXCHS6tQEc5oUpK28xBOZ6+HwaH7YwoYQbLFiYb4nS2/l1tKnZEtEWkD0GuiYdvArf9qBS0XlQGXzPMsNqQ==} + /@babel/core@7.26.10: + resolution: {integrity: sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==} engines: {node: '>=6.9.0'} dependencies: '@ampproject/remapping': 2.3.0 '@babel/code-frame': 7.26.2 - '@babel/generator': 7.26.8 - '@babel/helper-compilation-targets': 7.26.5 - '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.8) - '@babel/helpers': 7.26.7 - '@babel/parser': 7.26.8 - '@babel/template': 7.26.8 - '@babel/traverse': 7.26.8 - '@babel/types': 7.26.8 - '@types/gensync': 1.0.4 + '@babel/generator': 7.27.0 + '@babel/helper-compilation-targets': 7.27.0 + '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.10) + '@babel/helpers': 7.27.0 + '@babel/parser': 7.27.0 + '@babel/template': 7.27.0 + '@babel/traverse': 7.27.0 + '@babel/types': 7.27.0 convert-source-map: 2.0.0 debug: 4.4.0(supports-color@10.0.0) gensync: 1.0.0-beta.2 @@ -3897,15 +3904,15 @@ packages: jsesc: 2.5.2 dev: false - /@babel/generator@7.26.8: - resolution: {integrity: sha512-ef383X5++iZHWAXX0SXQR6ZyQhw/0KtTkrTz61WXRhFM6dhpHulO/RJz79L8S6ugZHJkOOkUrUdxgdF2YiPFnA==} + /@babel/generator@7.27.0: + resolution: {integrity: sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==} engines: {node: '>=6.9.0'} dependencies: - '@babel/parser': 7.26.8 - '@babel/types': 7.26.8 - '@jridgewell/gen-mapping': 0.3.5 + '@babel/parser': 7.27.0 + '@babel/types': 7.27.0 + '@jridgewell/gen-mapping': 0.3.8 '@jridgewell/trace-mapping': 0.3.25 - jsesc: 3.0.2 + jsesc: 3.1.0 dev: true /@babel/helper-annotate-as-pure@7.22.5: @@ -3944,8 +3951,8 @@ packages: semver: 6.3.1 dev: false - /@babel/helper-compilation-targets@7.26.5: - resolution: {integrity: sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==} + /@babel/helper-compilation-targets@7.27.0: + resolution: {integrity: sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA==} engines: {node: '>=6.9.0'} dependencies: '@babel/compat-data': 7.26.8 @@ -4127,8 +4134,8 @@ packages: resolution: {integrity: sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==} engines: {node: '>=6.9.0'} dependencies: - '@babel/traverse': 7.26.8 - '@babel/types': 7.26.8 + '@babel/traverse': 7.27.0 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color dev: true @@ -4161,16 +4168,16 @@ packages: - supports-color dev: false 
- /@babel/helper-module-transforms@7.26.0(@babel/core@7.26.8): + /@babel/helper-module-transforms@7.26.0(@babel/core@7.26.10): resolution: {integrity: sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 dependencies: - '@babel/core': 7.26.8 + '@babel/core': 7.26.10 '@babel/helper-module-imports': 7.25.9 '@babel/helper-validator-identifier': 7.25.9 - '@babel/traverse': 7.26.8 + '@babel/traverse': 7.27.0 transitivePeerDependencies: - supports-color dev: true @@ -4342,12 +4349,12 @@ packages: '@babel/types': 7.26.8 dev: false - /@babel/helpers@7.26.7: - resolution: {integrity: sha512-8NHiL98vsi0mbPQmYAGWwfcFaOy4j2HY49fXJCfuDcdE7fMIsH9a7GdaeXpIBsbT7307WU8KCMp5pUVDNL4f9A==} + /@babel/helpers@7.27.0: + resolution: {integrity: sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg==} engines: {node: '>=6.9.0'} dependencies: - '@babel/template': 7.26.8 - '@babel/types': 7.26.8 + '@babel/template': 7.27.0 + '@babel/types': 7.27.0 dev: true /@babel/highlight@7.22.13: @@ -4405,6 +4412,13 @@ packages: dependencies: '@babel/types': 7.26.8 + /@babel/parser@7.27.0: + resolution: {integrity: sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.27.0 + /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.18.6(@babel/core@7.22.17): resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==} engines: {node: '>=6.9.0'} @@ -5422,6 +5436,13 @@ packages: dependencies: regenerator-runtime: 0.14.1 + /@babel/runtime@7.27.0: + resolution: {integrity: sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.14.1 + dev: false + /@babel/template@7.22.15: resolution: {integrity: sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==} engines: {node: '>=6.9.0'} @@ -5447,13 +5468,13 @@ packages: '@babel/types': 7.26.8 dev: false - /@babel/template@7.26.8: - resolution: {integrity: sha512-iNKaX3ZebKIsCvJ+0jd6embf+Aulaa3vNBqZ41kM7iTWjx5qzWKXGHiJUW3+nTpQ18SG11hdF8OAzKrpXkb96Q==} + /@babel/template@7.27.0: + resolution: {integrity: sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==} engines: {node: '>=6.9.0'} dependencies: '@babel/code-frame': 7.26.2 - '@babel/parser': 7.26.8 - '@babel/types': 7.26.8 + '@babel/parser': 7.27.0 + '@babel/types': 7.27.0 dev: true /@babel/traverse@7.22.17: @@ -5506,16 +5527,16 @@ packages: - supports-color dev: false - /@babel/traverse@7.26.8: - resolution: {integrity: sha512-nic9tRkjYH0oB2dzr/JoGIm+4Q6SuYeLEiIiZDwBscRMYFJ+tMAz98fuel9ZnbXViA2I0HVSSRRK8DW5fjXStA==} + /@babel/traverse@7.27.0: + resolution: {integrity: sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==} engines: {node: '>=6.9.0'} dependencies: '@babel/code-frame': 7.26.2 - '@babel/generator': 7.26.8 - '@babel/parser': 7.26.8 - '@babel/template': 7.26.8 - '@babel/types': 7.26.8 - debug: 4.4.0(supports-color@10.0.0) + '@babel/generator': 7.27.0 + '@babel/parser': 7.27.0 + '@babel/template': 7.27.0 + '@babel/types': 7.27.0 + debug: 4.4.0 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5544,6 +5565,13 @@ packages: 
'@babel/helper-string-parser': 7.25.9 '@babel/helper-validator-identifier': 7.25.9 + /@babel/types@7.27.0: + resolution: {integrity: sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.25.9 + '@babel/helper-validator-identifier': 7.25.9 + /@balena/dockerignore@1.0.2: resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} @@ -5880,6 +5908,15 @@ packages: resolution: {integrity: sha512-SyD4iw6jM4anZaG+ujgVETV4fulF2KHBOW31eavbVN7TNpk2l4aJgwY1YSPK00IKSWsoQuH2TigR446KuT5lqQ==} dev: false + /@codemirror/autocomplete@6.18.6: + resolution: {integrity: sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==} + dependencies: + '@codemirror/language': 6.11.0 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 + '@lezer/common': 1.2.3 + dev: false + /@codemirror/autocomplete@6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.0.2): resolution: {integrity: sha512-HLF2PnZAm1s4kGs30EiqKMgD7XsYaQ0XJnMR0rofEWQ5t5D60SfqpDIkIh1ze5tiEbyUWm8+VJ6W1/erVvBMIA==} peerDependencies: @@ -5894,6 +5931,20 @@ packages: '@lezer/common': 1.0.2 dev: false + /@codemirror/autocomplete@6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.2.3): + resolution: {integrity: sha512-HLF2PnZAm1s4kGs30EiqKMgD7XsYaQ0XJnMR0rofEWQ5t5D60SfqpDIkIh1ze5tiEbyUWm8+VJ6W1/erVvBMIA==} + peerDependencies: + '@codemirror/language': ^6.0.0 + '@codemirror/state': ^6.0.0 + '@codemirror/view': ^6.0.0 + '@lezer/common': ^1.0.0 + dependencies: + '@codemirror/language': 6.3.2 + '@codemirror/state': 6.2.0 + '@codemirror/view': 6.7.2 + '@lezer/common': 1.2.3 + dev: false + /@codemirror/commands@6.1.3: resolution: {integrity: sha512-wUw1+vb34Ultv0Q9m/OVB7yizGXgtoDbkI5f5ErM8bebwLyUYjicdhJTKhTvPTpgkv8dq/BK0lQ3K5pRf2DAJw==} dependencies: @@ -5903,6 +5954,15 @@ packages: '@lezer/common': 1.0.2 dev: false + /@codemirror/commands@6.8.1: + resolution: {integrity: sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==} + dependencies: + '@codemirror/language': 6.11.0 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 + '@lezer/common': 1.2.3 + dev: false + /@codemirror/lang-javascript@6.1.2: resolution: {integrity: sha512-OcwLfZXdQ1OHrLiIcKCn7MqZ7nx205CMKlhe+vL88pe2ymhT9+2P+QhwkYGxMICj8TDHyp8HFKVwpiisUT7iEQ==} dependencies: @@ -5922,6 +5982,17 @@ packages: '@lezer/json': 1.0.0 dev: false + /@codemirror/language@6.11.0: + resolution: {integrity: sha512-A7+f++LodNNc1wGgoRDTt78cOwWm9KVezApgjOMp1W4hM0898nsqBXwF+sbePE7ZRcjN7Sa1Z5m2oN27XkmEjQ==} + dependencies: + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 + '@lezer/common': 1.2.3 + '@lezer/highlight': 1.2.1 + '@lezer/lr': 1.4.2 + style-mod: 4.1.2 + dev: false + /@codemirror/language@6.3.2: resolution: {integrity: sha512-g42uHhOcEMAXjmozGG+rdom5UsbyfMxQFh7AbkeoaNImddL6Xt4cQDL0+JxmG7+as18rUAvZaqzP/TjsciVIrA==} dependencies: @@ -5941,6 +6012,14 @@ packages: crelt: 1.0.5 dev: false + /@codemirror/lint@6.8.5: + resolution: {integrity: sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==} + dependencies: + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 + crelt: 1.0.6 + dev: false + /@codemirror/search@6.2.3: resolution: {integrity: 
sha512-V9n9233lopQhB1dyjsBK2Wc1i+8hcCqxl1wQ46c5HWWLePoe4FluV3TGHoZ04rBRlGjNyz9DTmpJErig8UE4jw==} dependencies: @@ -5949,17 +6028,39 @@ packages: crelt: 1.0.5 dev: false + /@codemirror/search@6.5.10: + resolution: {integrity: sha512-RMdPdmsrUf53pb2VwflKGHEe1XVM07hI7vV2ntgw1dmqhimpatSJKva4VA9h4TLUDOD4EIF02201oZurpnEFsg==} + dependencies: + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 + crelt: 1.0.6 + dev: false + /@codemirror/state@6.2.0: resolution: {integrity: sha512-69QXtcrsc3RYtOtd+GsvczJ319udtBf1PTrr2KbLWM/e2CXUPnh0Nz9AUo8WfhSQ7GeL8dPVNUmhQVgpmuaNGA==} dev: false - /@codemirror/theme-one-dark@6.1.0: - resolution: {integrity: sha512-AiTHtFRu8+vWT9wWUWDM+cog6ZwgivJogB1Tm/g40NIpLwph7AnmxrSzWfvJN5fBVufsuwBxecQCNmdcR5D7Aw==} + /@codemirror/state@6.5.2: + resolution: {integrity: sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA==} dependencies: - '@codemirror/language': 6.3.2 - '@codemirror/state': 6.2.0 - '@codemirror/view': 6.7.2 - '@lezer/highlight': 1.1.6 + '@marijn/find-cluster-break': 1.0.2 + dev: false + + /@codemirror/theme-one-dark@6.1.2: + resolution: {integrity: sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA==} + dependencies: + '@codemirror/language': 6.11.0 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 + '@lezer/highlight': 1.2.1 + dev: false + + /@codemirror/view@6.36.6: + resolution: {integrity: sha512-uxugGLet+Nzp0Jcit8Hn3LypM8ioMLKTsdf8FRoT3HWvZtb9GhaWMe0Cc15rz90Ljab4YFJiAulmIVB74OY0IQ==} + dependencies: + '@codemirror/state': 6.5.2 + style-mod: 4.1.2 + w3c-keyname: 2.2.8 dev: false /@codemirror/view@6.7.2: @@ -6182,26 +6283,25 @@ packages: fast-check: 3.22.0 dev: false - /@effect/schema@0.75.5(effect@3.11.7): + /@effect/schema@0.75.5(effect@3.14.14): resolution: {integrity: sha512-TQInulTVCuF+9EIbJpyLP6dvxbQJMphrnRqgexm/Ze39rSjfhJuufF7XvU3SxTgg3HnL7B/kpORTJbHhlE6thw==} peerDependencies: effect: ^3.9.2 dependencies: - effect: 3.11.7 + effect: 3.14.14 fast-check: 3.22.0 dev: false /@electric-sql/client@0.4.0: resolution: {integrity: sha512-YVYSqHitqVIDC1RBTfmHMfAfqDNAKMK9/AFVTDFQQxN3Q85dIQS49zThAuJVecYiuYRJvTiqf40c4n39jZSNrQ==} optionalDependencies: - '@rollup/rollup-darwin-arm64': 4.36.0 + '@rollup/rollup-darwin-arm64': 4.40.1 dev: false /@electric-sql/client@1.0.0-beta.1: resolution: {integrity: sha512-Ei9jN3pDoGzc+a/bGqnB5ajb52IvSv7/n2btuyzUlcOHIR2kM9fqtYTJXPwZYKLkGZlHWlpHgWyRtrinkP2nHg==} optionalDependencies: - '@rollup/rollup-darwin-arm64': 4.36.0 - dev: false + '@rollup/rollup-darwin-arm64': 4.40.1 /@electric-sql/react@0.3.5(react@18.2.0): resolution: {integrity: sha512-qPrlF3BsRg5L8zAn1sLGzc3pkswfEHyQI3lNOu7Xllv1DBx85RvHR1zgGGPAUfC8iwyWupQu9pFPE63GdbeuhA==} @@ -6216,8 +6316,8 @@ packages: use-sync-external-store: 1.2.2(react@18.2.0) dev: false - /@emnapi/runtime@1.3.1: - resolution: {integrity: sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==} + /@emnapi/runtime@1.4.3: + resolution: {integrity: sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ==} requiresBuild: true dependencies: tslib: 2.8.1 @@ -7924,19 +8024,6 @@ packages: eslint-visitor-keys: 3.4.2 dev: true - /@eslint-community/eslint-utils@4.4.0(eslint@8.49.0): - resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - dependencies: - 
eslint: 8.49.0 - eslint-visitor-keys: 3.4.2 - - /@eslint-community/regexpp@4.12.1: - resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} - engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - /@eslint-community/regexpp@4.5.1: resolution: {integrity: sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} @@ -8246,7 +8333,6 @@ packages: minimatch: 3.1.2 transitivePeerDependencies: - supports-color - dev: true /@humanwhocodes/module-importer@1.0.1: resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} @@ -8254,11 +8340,6 @@ packages: /@humanwhocodes/object-schema@1.2.1: resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==} - dev: true - - /@humanwhocodes/object-schema@2.0.3: - resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} - deprecated: Use @eslint/object-schema instead /@img/sharp-darwin-arm64@0.33.5: resolution: {integrity: sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==} @@ -8418,7 +8499,7 @@ packages: cpu: [wasm32] requiresBuild: true dependencies: - '@emnapi/runtime': 1.3.1 + '@emnapi/runtime': 1.4.3 dev: false optional: true @@ -8699,6 +8780,15 @@ packages: '@jridgewell/sourcemap-codec': 1.5.0 '@jridgewell/trace-mapping': 0.3.25 + /@jridgewell/gen-mapping@0.3.8: + resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.2.1 + '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/trace-mapping': 0.3.25 + dev: true + /@jridgewell/resolve-uri@3.1.0: resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} engines: {node: '>=6.0.0'} @@ -8712,6 +8802,14 @@ packages: dependencies: '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 + dev: false + + /@jridgewell/source-map@0.3.6: + resolution: {integrity: sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==} + dependencies: + '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/trace-mapping': 0.3.25 + dev: true /@jridgewell/sourcemap-codec@1.4.15: resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} @@ -8784,7 +8882,7 @@ packages: tslib: 2.6.2 ws: 8.16.0 optionalDependencies: - openid-client: 5.6.4 + openid-client: 5.7.1 transitivePeerDependencies: - bufferutil - utf-8-validate @@ -8822,12 +8920,22 @@ packages: resolution: {integrity: sha512-SVgiGtMnMnW3ActR8SXgsDhw7a0w0ChHSYAyAUxxrOiJ1OqYWEKk/xJd84tTSPo1mo6DXLObAJALNnd0Hrv7Ng==} dev: false + /@lezer/common@1.2.3: + resolution: {integrity: sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==} + dev: false + /@lezer/highlight@1.1.6: resolution: {integrity: sha512-cmSJYa2us+r3SePpRCjN5ymCqCPv+zyXmDl0ciWtVaNiORT/MxM7ZgOMQZADD0o51qOaOg24qc/zBViOIwAjJg==} dependencies: '@lezer/common': 1.0.2 dev: false + /@lezer/highlight@1.2.1: + resolution: {integrity: sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==} + dependencies: + '@lezer/common': 1.2.3 + dev: false + /@lezer/javascript@1.4.1: 
resolution: {integrity: sha512-Hqx36DJeYhKtdpc7wBYPR0XF56ZzIp0IkMO/zNNj80xcaFOV4Oj/P7TQc/8k2TxNhzl7tV5tXS8ZOCPbT4L3nA==} dependencies: @@ -8848,6 +8956,12 @@ packages: '@lezer/common': 1.0.2 dev: false + /@lezer/lr@1.4.2: + resolution: {integrity: sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==} + dependencies: + '@lezer/common': 1.2.3 + dev: false + /@manypkg/cli@0.19.2: resolution: {integrity: sha512-DXx/P1lyunNoFWwOj1MWBucUhaIJljoiAGOpO2fE0GKMBCI6EZBZD0Up1+fQZoXBecKXRgV9mGgLvIB2fOQ0KQ==} hasBin: true @@ -8886,6 +9000,10 @@ packages: globby: 11.1.0 read-yaml-file: 1.1.0 + /@marijn/find-cluster-break@1.0.2: + resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==} + dev: false + /@mdx-js/mdx@2.3.0: resolution: {integrity: sha512-jLuwRlz8DQfQNiUCJR50Y09CGPq3fLtmtUQfVrj79E0JWu3dvsVcxVIcfhR5h0iXu+/z++zDrYeiJqifRynJkA==} dependencies: @@ -14380,7 +14498,7 @@ packages: - '@types/react' dev: false - /@react-email/components@0.0.24(react-dom@18.2.0)(react@19.0.0-rc.0): + /@react-email/components@0.0.24(react-dom@18.3.1)(react@19.0.0-rc.0): resolution: {integrity: sha512-/DNmfTREaT59UFdkHoIK3BewJ214LfRxmduiil3m7POj+gougkItANu1+BMmgbUATxjf7jH1WoBxo9x/rhFEFw==} engines: {node: '>=18.0.0'} peerDependencies: @@ -14401,7 +14519,7 @@ packages: '@react-email/link': 0.0.10(react@19.0.0-rc.0) '@react-email/markdown': 0.0.12(react@19.0.0-rc.0) '@react-email/preview': 0.0.11(react@19.0.0-rc.0) - '@react-email/render': 1.0.1(react-dom@18.2.0)(react@19.0.0-rc.0) + '@react-email/render': 1.0.1(react-dom@18.3.1)(react@19.0.0-rc.0) '@react-email/row': 0.0.10(react@19.0.0-rc.0) '@react-email/section': 0.0.14(react@19.0.0-rc.0) '@react-email/tailwind': 0.1.0(react@19.0.0-rc.0) @@ -14707,7 +14825,7 @@ packages: react-dom: 18.2.0(react@18.3.1) dev: false - /@react-email/render@1.0.1(react-dom@18.2.0)(react@19.0.0-rc.0): + /@react-email/render@1.0.1(react-dom@18.3.1)(react@19.0.0-rc.0): resolution: {integrity: sha512-W3gTrcmLOVYnG80QuUp22ReIT/xfLsVJ+n7ghSlG2BITB8evNABn1AO2rGQoXuK84zKtDAlxCdm3hRyIpZdGSA==} engines: {node: '>=18.0.0'} peerDependencies: @@ -14717,7 +14835,7 @@ packages: html-to-text: 9.0.5 js-beautify: 1.15.1 react: 19.0.0-rc.0 - react-dom: 18.2.0(react@19.0.0-rc.0) + react-dom: 18.3.1(react@19.0.0-rc.0) react-promise-suspense: 0.3.4 dev: false @@ -16213,6 +16331,14 @@ packages: cpu: [arm64] os: [darwin] requiresBuild: true + dev: true + optional: true + + /@rollup/rollup-darwin-arm64@4.40.1: + resolution: {integrity: sha512-VWXGISWFY18v/0JyNUy4A46KCFCb9NVsH+1100XP31lud+TzlezBbz24CYzbnA4x6w4hx+NYCXDfnvDVO6lcAA==} + cpu: [arm64] + os: [darwin] + requiresBuild: true optional: true /@rollup/rollup-darwin-x64@4.36.0: @@ -17307,22 +17433,22 @@ packages: - supports-color dev: false - /@splinetool/react-spline@2.2.6(@splinetool/runtime@1.9.42)(react-dom@18.2.0)(react@18.2.0): + /@splinetool/react-spline@2.2.6(@splinetool/runtime@1.9.87)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-y9L2VEbnC6FNZZu8XMmWM9YTTTWal6kJVfP05Amf0QqDNzCSumKsJxZyGUODvuCmiAvy0PfIfEsiVKnSxvhsDw==} peerDependencies: '@splinetool/runtime': '*' react: '>=17.0.0' react-dom: '>=17.0.0' dependencies: - '@splinetool/runtime': 1.9.42 + '@splinetool/runtime': 1.9.87 lodash.debounce: 4.0.8 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) react-merge-refs: 2.1.1 dev: false - /@splinetool/runtime@1.9.42: - resolution: {integrity: 
sha512-g6D3E5SDIANZS41SnRSTAI+/ALhdHrrn72N2D7IoEUEmR4NRy9BMAYH4SWPjbl/nR3AqIWN4FHwD2FwJEboC6Q==} + /@splinetool/runtime@1.9.87: + resolution: {integrity: sha512-qEwhQSuN/J3Hw+c5vbprj/SzbMOHNxfjMX+L/dLLLVsjxnnofOfIwT89dMoLu9PCMeGxrc2yqbZ/pw/L3wYoGw==} dependencies: on-change: 4.0.2 semver-compare: 1.0.0 @@ -17332,6 +17458,17 @@ packages: resolution: {integrity: sha512-Uy0+khmZqUrUGm5dmMqVlnvufZRSK0FbYzVgp0UMstm+F5+W2/jnEEQyc9vo1ZR/E5ZI/B1WjjoTqBqwJL6Krw==} dev: false + /@standard-schema/spec@1.0.0: + resolution: {integrity: sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==} + dev: false + + /@sveltejs/acorn-typescript@1.0.5(acorn@8.14.1): + resolution: {integrity: sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ==} + peerDependencies: + acorn: ^8.9.0 + dependencies: + acorn: 8.14.1 + /@swc/core-darwin-arm64@1.3.101: resolution: {integrity: sha512-mNFK+uHNPRXSnfTOG34zJOeMl2waM4hF4a2NY7dkMXrPqw9CoJn4MwTXJcyMiSz1/BnNjjTCHF3Yhj0jPxmkzQ==} engines: {node: '>=10'} @@ -18104,6 +18241,14 @@ packages: dependencies: '@types/eslint': 8.4.10 '@types/estree': 1.0.6 + dev: false + + /@types/eslint-scope@3.7.7: + resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==} + dependencies: + '@types/eslint': 8.56.12 + '@types/estree': 1.0.7 + dev: true /@types/eslint@8.4.10: resolution: {integrity: sha512-Sl/HOqN8NKPmhWo2VBEPm0nvHnu2LL3v9vKo8MEq0EtbJ4eVzGPl41VNPvn5E1i5poMk4/XD8UriLHpJvEP/Nw==} @@ -18111,6 +18256,13 @@ packages: '@types/estree': 1.0.0 '@types/json-schema': 7.0.11 + /@types/eslint@8.56.12: + resolution: {integrity: sha512-03ruubjWyOHlmljCVoxSuNDdmfZDzsrrz0P2LeJsOXr+ZwFQ+0yQIwNCwt/GYhV7Z31fgtXJTAEs+FYlEL851g==} + dependencies: + '@types/estree': 1.0.7 + '@types/json-schema': 7.0.15 + dev: true + /@types/estree-jsx@1.0.0: resolution: {integrity: sha512-3qvGd0z8F2ENTGr/GG1yViqfiKmRfrXVx5sJyHGFu3z7m5g5utCQtGp/g29JnjflhtQJBv1WDQukHiT58xPcYQ==} dependencies: @@ -18122,6 +18274,9 @@ packages: /@types/estree@1.0.6: resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} + /@types/estree@1.0.7: + resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + /@types/eventsource@1.1.15: resolution: {integrity: sha512-XQmGcbnxUNa06HR3VBVkc9+A2Vpi9ZyLJcdS5dwaQQ/4ZMWFO+5c90FnMUpbtMZwB/FChoYHwuVg8TvkECacTA==} dev: true @@ -18149,10 +18304,6 @@ packages: '@types/node': 20.14.14 dev: true - /@types/gensync@1.0.4: - resolution: {integrity: sha512-C3YYeRQWp2fmq9OryX+FoDy8nXS6scQ7dPptD8LnFDAUNcKWJjXQKDNJD3HVm+kOUsXhTOkpi69vI4EuAr95bA==} - dev: true - /@types/gradient-string@1.1.2: resolution: {integrity: sha512-zIet2KvHr2dkOCPI5ggQQ+WJVyfBSFaqK9sNelhgDjlE2K3Fu2muuPJwu5aKM3xoWuc3WXudVEMUwI1QWhykEQ==} dependencies: @@ -18215,6 +18366,10 @@ packages: /@types/json-schema@7.0.13: resolution: {integrity: sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==} + /@types/json-schema@7.0.15: + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + dev: true + /@types/json5@0.0.29: resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} dev: true @@ -18805,7 +18960,7 @@ packages: '@codemirror/state': '>=6.0.0' '@codemirror/view': '>=6.0.0' dependencies: - 
'@codemirror/autocomplete': 6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.0.2) + '@codemirror/autocomplete': 6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.2.3) '@codemirror/commands': 6.1.3 '@codemirror/language': 6.3.2 '@codemirror/lint': 6.4.2 @@ -18814,7 +18969,7 @@ packages: '@codemirror/view': 6.7.2 dev: false - /@uiw/react-codemirror@4.19.5(@babel/runtime@7.26.7)(@codemirror/autocomplete@6.4.0)(@codemirror/language@6.3.2)(@codemirror/lint@6.4.2)(@codemirror/search@6.2.3)(@codemirror/state@6.2.0)(@codemirror/theme-one-dark@6.1.0)(@codemirror/view@6.7.2)(codemirror@6.0.1)(react-dom@18.2.0)(react@18.2.0): + /@uiw/react-codemirror@4.19.5(@babel/runtime@7.27.0)(@codemirror/autocomplete@6.4.0)(@codemirror/language@6.3.2)(@codemirror/lint@6.4.2)(@codemirror/search@6.2.3)(@codemirror/state@6.2.0)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.7.2)(codemirror@6.0.1)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-ZCHh8d7beXbF8/t7F1+yHht6A9Y6CdKeOkZq4A09lxJEnyTQrj1FMf2zvfaqc7K23KNjkTCtSlbqKKbVDgrWaw==} peerDependencies: '@babel/runtime': '>=7.11.0' @@ -18825,13 +18980,13 @@ packages: react: '>=16.8.0' react-dom: '>=16.8.0' dependencies: - '@babel/runtime': 7.26.7 + '@babel/runtime': 7.27.0 '@codemirror/commands': 6.1.3 '@codemirror/state': 6.2.0 - '@codemirror/theme-one-dark': 6.1.0 + '@codemirror/theme-one-dark': 6.1.2 '@codemirror/view': 6.7.2 '@uiw/codemirror-extensions-basic-setup': 4.19.5(@codemirror/autocomplete@6.4.0)(@codemirror/commands@6.1.3)(@codemirror/language@6.3.2)(@codemirror/lint@6.4.2)(@codemirror/search@6.2.3)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2) - codemirror: 6.0.1(@lezer/common@1.0.2) + codemirror: 6.0.1 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) transitivePeerDependencies: @@ -19215,70 +19370,70 @@ packages: tinyrainbow: 2.0.0 dev: true - /@vue/compiler-core@3.4.38: - resolution: {integrity: sha512-8IQOTCWnLFqfHzOGm9+P8OPSEDukgg3Huc92qSG49if/xI2SAwLHQO2qaPQbjCWPBcQoO1WYfXfTACUrWV3c5A==} + /@vue/compiler-core@3.5.13: + resolution: {integrity: sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==} dependencies: - '@babel/parser': 7.26.8 - '@vue/shared': 3.4.38 + '@babel/parser': 7.27.0 + '@vue/shared': 3.5.13 entities: 4.5.0 estree-walker: 2.0.2 source-map-js: 1.2.1 - /@vue/compiler-dom@3.4.38: - resolution: {integrity: sha512-Osc/c7ABsHXTsETLgykcOwIxFktHfGSUDkb05V61rocEfsFDcjDLH/IHJSNJP+/Sv9KeN2Lx1V6McZzlSb9EhQ==} + /@vue/compiler-dom@3.5.13: + resolution: {integrity: sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==} dependencies: - '@vue/compiler-core': 3.4.38 - '@vue/shared': 3.4.38 + '@vue/compiler-core': 3.5.13 + '@vue/shared': 3.5.13 - /@vue/compiler-sfc@3.4.38: - resolution: {integrity: sha512-s5QfZ+9PzPh3T5H4hsQDJtI8x7zdJaew/dCGgqZ2630XdzaZ3AD8xGZfBqpT8oaD/p2eedd+pL8tD5vvt5ZYJQ==} + /@vue/compiler-sfc@3.5.13: + resolution: {integrity: sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==} dependencies: - '@babel/parser': 7.26.8 - '@vue/compiler-core': 3.4.38 - '@vue/compiler-dom': 3.4.38 - '@vue/compiler-ssr': 3.4.38 - '@vue/shared': 3.4.38 + '@babel/parser': 7.27.0 + '@vue/compiler-core': 3.5.13 + '@vue/compiler-dom': 3.5.13 + '@vue/compiler-ssr': 3.5.13 + '@vue/shared': 3.5.13 estree-walker: 2.0.2 magic-string: 0.30.17 postcss: 8.5.3 source-map-js: 1.2.1 - /@vue/compiler-ssr@3.4.38: - 
resolution: {integrity: sha512-YXznKFQ8dxYpAz9zLuVvfcXhc31FSPFDcqr0kyujbOwNhlmaNvL2QfIy+RZeJgSn5Fk54CWoEUeW+NVBAogGaw==} + /@vue/compiler-ssr@3.5.13: + resolution: {integrity: sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==} dependencies: - '@vue/compiler-dom': 3.4.38 - '@vue/shared': 3.4.38 + '@vue/compiler-dom': 3.5.13 + '@vue/shared': 3.5.13 - /@vue/reactivity@3.4.38: - resolution: {integrity: sha512-4vl4wMMVniLsSYYeldAKzbk72+D3hUnkw9z8lDeJacTxAkXeDAP1uE9xr2+aKIN0ipOL8EG2GPouVTH6yF7Gnw==} + /@vue/reactivity@3.5.13: + resolution: {integrity: sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==} dependencies: - '@vue/shared': 3.4.38 + '@vue/shared': 3.5.13 - /@vue/runtime-core@3.4.38: - resolution: {integrity: sha512-21z3wA99EABtuf+O3IhdxP0iHgkBs1vuoCAsCKLVJPEjpVqvblwBnTj42vzHRlWDCyxu9ptDm7sI2ZMcWrQqlA==} + /@vue/runtime-core@3.5.13: + resolution: {integrity: sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==} dependencies: - '@vue/reactivity': 3.4.38 - '@vue/shared': 3.4.38 + '@vue/reactivity': 3.5.13 + '@vue/shared': 3.5.13 - /@vue/runtime-dom@3.4.38: - resolution: {integrity: sha512-afZzmUreU7vKwKsV17H1NDThEEmdYI+GCAK/KY1U957Ig2NATPVjCROv61R19fjZNzMmiU03n79OMnXyJVN0UA==} + /@vue/runtime-dom@3.5.13: + resolution: {integrity: sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==} dependencies: - '@vue/reactivity': 3.4.38 - '@vue/runtime-core': 3.4.38 - '@vue/shared': 3.4.38 + '@vue/reactivity': 3.5.13 + '@vue/runtime-core': 3.5.13 + '@vue/shared': 3.5.13 csstype: 3.1.3 - /@vue/server-renderer@3.4.38(vue@3.4.38): - resolution: {integrity: sha512-NggOTr82FbPEkkUvBm4fTGcwUY8UuTsnWC/L2YZBmvaQ4C4Jl/Ao4HHTB+l7WnFCt5M/dN3l0XLuyjzswGYVCA==} + /@vue/server-renderer@3.5.13(vue@3.5.13): + resolution: {integrity: sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==} peerDependencies: - vue: 3.4.38 + vue: 3.5.13 dependencies: - '@vue/compiler-ssr': 3.4.38 - '@vue/shared': 3.4.38 - vue: 3.4.38(typescript@5.5.4) + '@vue/compiler-ssr': 3.5.13 + '@vue/shared': 3.5.13 + vue: 3.5.13(typescript@5.5.4) - /@vue/shared@3.4.38: - resolution: {integrity: sha512-q0xCiLkuWWQLzVrecPb0RMsNWyxICOjPrcrwxTUEHb1fsnvni4dcuyG7RT/Ie7VPTvnjzIaWzRMUBsrqNj/hhw==} + /@vue/shared@3.5.13: + resolution: {integrity: sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==} /@web3-storage/multipart-parser@1.0.0: resolution: {integrity: sha512-BEO6al7BYqcnfX15W2cnGR+Q566ACXAT9UQykORCWW80lmkpWsnEob6zJS1ZVBKsSJC8+7vJkHwlp+lXG1UCdw==} @@ -19288,15 +19443,38 @@ packages: dependencies: '@webassemblyjs/helper-numbers': 1.11.5 '@webassemblyjs/helper-wasm-bytecode': 1.11.5 + dev: false + + /@webassemblyjs/ast@1.14.1: + resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==} + dependencies: + '@webassemblyjs/helper-numbers': 1.13.2 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + dev: true /@webassemblyjs/floating-point-hex-parser@1.11.5: resolution: {integrity: sha512-1j1zTIC5EZOtCplMBG/IEwLtUojtwFVwdyVMbL/hwWqbzlQoJsWCOavrdnLkemwNoC/EOwtUFch3fuo+cbcXYQ==} + dev: false + + /@webassemblyjs/floating-point-hex-parser@1.13.2: + resolution: {integrity: sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==} + dev: true /@webassemblyjs/helper-api-error@1.11.5: resolution: {integrity: 
sha512-L65bDPmfpY0+yFrsgz8b6LhXmbbs38OnwDCf6NpnMUYqa+ENfE5Dq9E42ny0qz/PdR0LJyq/T5YijPnU8AXEpA==} + dev: false + + /@webassemblyjs/helper-api-error@1.13.2: + resolution: {integrity: sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==} + dev: true /@webassemblyjs/helper-buffer@1.11.5: resolution: {integrity: sha512-fDKo1gstwFFSfacIeH5KfwzjykIE6ldh1iH9Y/8YkAZrhmu4TctqYjSh7t0K2VyDSXOZJ1MLhht/k9IvYGcIxg==} + dev: false + + /@webassemblyjs/helper-buffer@1.14.1: + resolution: {integrity: sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==} + dev: true /@webassemblyjs/helper-numbers@1.11.5: resolution: {integrity: sha512-DhykHXM0ZABqfIGYNv93A5KKDw/+ywBFnuWybZZWcuzWHfbp21wUfRkbtz7dMGwGgT4iXjWuhRMA2Mzod6W4WA==} @@ -19304,9 +19482,23 @@ packages: '@webassemblyjs/floating-point-hex-parser': 1.11.5 '@webassemblyjs/helper-api-error': 1.11.5 '@xtuc/long': 4.2.2 + dev: false + + /@webassemblyjs/helper-numbers@1.13.2: + resolution: {integrity: sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==} + dependencies: + '@webassemblyjs/floating-point-hex-parser': 1.13.2 + '@webassemblyjs/helper-api-error': 1.13.2 + '@xtuc/long': 4.2.2 + dev: true /@webassemblyjs/helper-wasm-bytecode@1.11.5: resolution: {integrity: sha512-oC4Qa0bNcqnjAowFn7MPCETQgDYytpsfvz4ujZz63Zu/a/v71HeCAAmZsgZ3YVKec3zSPYytG3/PrRCqbtcAvA==} + dev: false + + /@webassemblyjs/helper-wasm-bytecode@1.13.2: + resolution: {integrity: sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==} + dev: true /@webassemblyjs/helper-wasm-section@1.11.5: resolution: {integrity: sha512-uEoThA1LN2NA+K3B9wDo3yKlBfVtC6rh0i4/6hvbz071E8gTNZD/pT0MsBf7MeD6KbApMSkaAK0XeKyOZC7CIA==} @@ -19315,19 +19507,48 @@ packages: '@webassemblyjs/helper-buffer': 1.11.5 '@webassemblyjs/helper-wasm-bytecode': 1.11.5 '@webassemblyjs/wasm-gen': 1.11.5 + dev: false + + /@webassemblyjs/helper-wasm-section@1.14.1: + resolution: {integrity: sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==} + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/wasm-gen': 1.14.1 + dev: true /@webassemblyjs/ieee754@1.11.5: resolution: {integrity: sha512-37aGq6qVL8A8oPbPrSGMBcp38YZFXcHfiROflJn9jxSdSMMM5dS5P/9e2/TpaJuhE+wFrbukN2WI6Hw9MH5acg==} dependencies: '@xtuc/ieee754': 1.2.0 + dev: false + + /@webassemblyjs/ieee754@1.13.2: + resolution: {integrity: sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==} + dependencies: + '@xtuc/ieee754': 1.2.0 + dev: true /@webassemblyjs/leb128@1.11.5: resolution: {integrity: sha512-ajqrRSXaTJoPW+xmkfYN6l8VIeNnR4vBOTQO9HzR7IygoCcKWkICbKFbVTNMjMgMREqXEr0+2M6zukzM47ZUfQ==} dependencies: '@xtuc/long': 4.2.2 + dev: false + + /@webassemblyjs/leb128@1.13.2: + resolution: {integrity: sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==} + dependencies: + '@xtuc/long': 4.2.2 + dev: true /@webassemblyjs/utf8@1.11.5: resolution: {integrity: sha512-WiOhulHKTZU5UPlRl53gHR8OxdGsSOxqfpqWeA2FmcwBMaoEdz6b2x2si3IwC9/fSPLfe8pBMRTHVMk5nlwnFQ==} + dev: false + + /@webassemblyjs/utf8@1.13.2: + resolution: {integrity: sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==} + dev: true /@webassemblyjs/wasm-edit@1.11.5: resolution: {integrity: 
sha512-C0p9D2fAu3Twwqvygvf42iGCQ4av8MFBLiTb+08SZ4cEdwzWx9QeAHDo1E2k+9s/0w1DM40oflJOpkZ8jW4HCQ==} @@ -19340,6 +19561,20 @@ packages: '@webassemblyjs/wasm-opt': 1.11.5 '@webassemblyjs/wasm-parser': 1.11.5 '@webassemblyjs/wast-printer': 1.11.5 + dev: false + + /@webassemblyjs/wasm-edit@1.14.1: + resolution: {integrity: sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==} + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/helper-wasm-section': 1.14.1 + '@webassemblyjs/wasm-gen': 1.14.1 + '@webassemblyjs/wasm-opt': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + '@webassemblyjs/wast-printer': 1.14.1 + dev: true /@webassemblyjs/wasm-gen@1.11.5: resolution: {integrity: sha512-14vteRlRjxLK9eSyYFvw1K8Vv+iPdZU0Aebk3j6oB8TQiQYuO6hj9s4d7qf6f2HJr2khzvNldAFG13CgdkAIfA==} @@ -19349,6 +19584,17 @@ packages: '@webassemblyjs/ieee754': 1.11.5 '@webassemblyjs/leb128': 1.11.5 '@webassemblyjs/utf8': 1.11.5 + dev: false + + /@webassemblyjs/wasm-gen@1.14.1: + resolution: {integrity: sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==} + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/ieee754': 1.13.2 + '@webassemblyjs/leb128': 1.13.2 + '@webassemblyjs/utf8': 1.13.2 + dev: true /@webassemblyjs/wasm-opt@1.11.5: resolution: {integrity: sha512-tcKwlIXstBQgbKy1MlbDMlXaxpucn42eb17H29rawYLxm5+MsEmgPzeCP8B1Cl69hCice8LeKgZpRUAPtqYPgw==} @@ -19357,6 +19603,16 @@ packages: '@webassemblyjs/helper-buffer': 1.11.5 '@webassemblyjs/wasm-gen': 1.11.5 '@webassemblyjs/wasm-parser': 1.11.5 + dev: false + + /@webassemblyjs/wasm-opt@1.14.1: + resolution: {integrity: sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==} + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/wasm-gen': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + dev: true /@webassemblyjs/wasm-parser@1.11.5: resolution: {integrity: sha512-SVXUIwsLQlc8srSD7jejsfTU83g7pIGr2YYNb9oHdtldSxaOhvA5xwvIiWIfcX8PlSakgqMXsLpLfbbJ4cBYew==} @@ -19367,12 +19623,32 @@ packages: '@webassemblyjs/ieee754': 1.11.5 '@webassemblyjs/leb128': 1.11.5 '@webassemblyjs/utf8': 1.11.5 + dev: false + + /@webassemblyjs/wasm-parser@1.14.1: + resolution: {integrity: sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==} + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-api-error': 1.13.2 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/ieee754': 1.13.2 + '@webassemblyjs/leb128': 1.13.2 + '@webassemblyjs/utf8': 1.13.2 + dev: true /@webassemblyjs/wast-printer@1.11.5: resolution: {integrity: sha512-f7Pq3wvg3GSPUPzR0F6bmI89Hdb+u9WXrSKc4v+N0aV0q6r42WoF92Jp2jEorBEBRoRNXgjp53nBniDXcqZYPA==} dependencies: '@webassemblyjs/ast': 1.11.5 '@xtuc/long': 4.2.2 + dev: false + + /@webassemblyjs/wast-printer@1.14.1: + resolution: {integrity: sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==} + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@xtuc/long': 4.2.2 + dev: true /@whatwg-node/events@0.1.1: resolution: {integrity: sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==} @@ -19488,14 +19764,6 @@ packages: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 dependencies: acorn: 8.12.1 - dev: true - - 
/acorn-jsx@5.3.2(acorn@8.14.1): - resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - acorn: 8.14.1 /acorn-node@1.8.2: resolution: {integrity: sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==} @@ -19601,7 +19869,7 @@ packages: resolution: {integrity: sha512-hCOfMzbFx5IDutmWLAt6MZwOUjIfSM9G9FyVxytmE4Rs/5YDPWQrD/+IR1w+FweD9H2oOZEnv36TmkjhNURBVA==} dev: true - /ai@3.3.24(openai@4.56.0)(react@19.0.0-rc.0)(svelte@4.2.19)(vue@3.4.38)(zod@3.23.8): + /ai@3.3.24(openai@4.56.0)(react@19.0.0-rc.0)(svelte@5.28.2)(vue@3.5.13)(zod@3.23.8): resolution: {integrity: sha512-hhyczvEdCQeeEMWBWP4Af8k1YIzsheC+dHv6lAsti8NBiOnySFhnjS1sTiIrLyuCgciHXoFYLhlA2+/3AtBLAQ==} engines: {node: '>=18'} peerDependencies: @@ -19626,9 +19894,9 @@ packages: '@ai-sdk/provider-utils': 1.0.17(zod@3.23.8) '@ai-sdk/react': 0.0.53(react@19.0.0-rc.0)(zod@3.23.8) '@ai-sdk/solid': 0.0.43(zod@3.23.8) - '@ai-sdk/svelte': 0.0.45(svelte@4.2.19)(zod@3.23.8) + '@ai-sdk/svelte': 0.0.45(svelte@5.28.2)(zod@3.23.8) '@ai-sdk/ui-utils': 0.0.40(zod@3.23.8) - '@ai-sdk/vue': 0.0.45(vue@3.4.38)(zod@3.23.8) + '@ai-sdk/vue': 0.0.45(vue@3.5.13)(zod@3.23.8) '@opentelemetry/api': 1.9.0 eventsource-parser: 1.1.2 json-schema: 0.4.0 @@ -19637,7 +19905,7 @@ packages: openai: 4.56.0(zod@3.23.8) react: 19.0.0-rc.0 secure-json-parse: 2.7.0 - svelte: 4.2.19 + svelte: 5.28.2 zod: 3.23.8 zod-to-json-schema: 3.23.2(zod@3.23.8) transitivePeerDependencies: @@ -19645,7 +19913,7 @@ packages: - vue dev: false - /ai@3.4.33(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.23.8): + /ai@3.4.33(react@18.3.1)(svelte@5.28.2)(vue@3.5.13)(zod@3.23.8): resolution: {integrity: sha512-plBlrVZKwPoRTmM8+D1sJac9Bq8eaa2jiZlHLZIWekKWI1yMWYZvCCEezY9ASPwRhULYDJB2VhKOBUUeg3S5JQ==} engines: {node: '>=18'} peerDependencies: @@ -19670,16 +19938,16 @@ packages: '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) '@ai-sdk/react': 0.0.70(react@18.3.1)(zod@3.23.8) '@ai-sdk/solid': 0.0.54(zod@3.23.8) - '@ai-sdk/svelte': 0.0.57(svelte@4.2.19)(zod@3.23.8) + '@ai-sdk/svelte': 0.0.57(svelte@5.28.2)(zod@3.23.8) '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - '@ai-sdk/vue': 0.0.59(vue@3.4.38)(zod@3.23.8) + '@ai-sdk/vue': 0.0.59(vue@3.5.13)(zod@3.23.8) '@opentelemetry/api': 1.9.0 eventsource-parser: 1.1.2 json-schema: 0.4.0 jsondiffpatch: 0.6.0 react: 18.3.1 secure-json-parse: 2.7.0 - svelte: 4.2.19 + svelte: 5.28.2 zod: 3.23.8 zod-to-json-schema: 3.24.3(zod@3.23.8) transitivePeerDependencies: @@ -19761,12 +20029,24 @@ packages: ajv: 8.12.0 dev: true + /ajv-formats@2.1.1(ajv@8.17.1): + resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + dependencies: + ajv: 8.17.1 + dev: true + /ajv-keywords@3.5.2(ajv@6.12.6): resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==} peerDependencies: ajv: ^6.9.1 dependencies: ajv: 6.12.6 + dev: false /ajv-keywords@5.1.0(ajv@8.12.0): resolution: {integrity: sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==} @@ -19777,6 +20057,15 @@ packages: fast-deep-equal: 3.1.3 dev: true + /ajv-keywords@5.1.0(ajv@8.17.1): + resolution: {integrity: sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==} + 
peerDependencies: + ajv: ^8.8.2 + dependencies: + ajv: 8.17.1 + fast-deep-equal: 3.1.3 + dev: true + /ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} dependencies: @@ -19794,6 +20083,15 @@ packages: uri-js: 4.4.1 dev: true + /ajv@8.17.1: + resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.0.6 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + dev: true + /ansi-colors@4.1.3: resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} engines: {node: '>=6'} @@ -19959,6 +20257,10 @@ packages: dependencies: dequal: 2.0.3 + /aria-query@5.3.2: + resolution: {integrity: sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==} + engines: {node: '>= 0.4'} + /arktype@2.0.0-rc.17: resolution: {integrity: sha512-1m1VG9ZGcGx8OIbeA4ghw8n1QVpu7MYcel3My2Tob17mMBaLy6+M116RRwx9GvaCyGpHhgu1RK5XfhP4wX17ug==} dependencies: @@ -20264,17 +20566,17 @@ packages: /b4a@1.6.6: resolution: {integrity: sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==} - /babel-loader@9.1.3(@babel/core@7.26.8)(webpack@5.88.2): + /babel-loader@9.1.3(@babel/core@7.26.10)(webpack@5.99.7): resolution: {integrity: sha512-xG3ST4DglodGf8qSwv0MdeWLhrDsw/32QMdTO5T1ZIp9gQur0HkCyFs7Awskr10JKXFXwpAhiCuYX5oGXnRGbw==} engines: {node: '>= 14.15.0'} peerDependencies: '@babel/core': ^7.12.0 webpack: '>=5' dependencies: - '@babel/core': 7.26.8 + '@babel/core': 7.26.10 find-cache-dir: 4.0.0 schema-utils: 4.0.1 - webpack: 5.88.2(@swc/core@1.3.26)(esbuild@0.15.18) + webpack: 5.99.7(@swc/core@1.3.26)(esbuild@0.15.18) dev: true /babel-plugin-macros@3.1.0: @@ -20763,6 +21065,29 @@ packages: rc9: 2.1.2 dev: false + /c12@1.11.1(magicast@0.3.5): + resolution: {integrity: sha512-KDU0TvSvVdaYcQKQ6iPHATGz/7p/KiVjPg4vQrB6Jg/wX9R0yl5RZxWm9IoZqaIHD2+6PZd81+KMGwRr/lRIUg==} + peerDependencies: + magicast: ^0.3.4 + peerDependenciesMeta: + magicast: + optional: true + dependencies: + chokidar: 3.6.0 + confbox: 0.1.7 + defu: 6.1.4 + dotenv: 16.4.5 + giget: 1.2.3 + jiti: 1.21.6 + magicast: 0.3.5 + mlly: 1.7.1 + ohash: 1.1.3 + pathe: 1.1.2 + perfect-debounce: 1.0.0 + pkg-types: 1.1.3 + rc9: 2.1.2 + dev: true + /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -21091,6 +21416,12 @@ packages: /chrome-trace-event@1.0.3: resolution: {integrity: sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==} engines: {node: '>=6.0'} + dev: false + + /chrome-trace-event@1.0.4: + resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==} + engines: {node: '>=6.0'} + dev: true /chromium-bidi@0.6.5(devtools-protocol@0.0.1342118): resolution: {integrity: sha512-RuLrmzYrxSb0s9SgpB+QN5jJucPduZQ/9SIe76MDxYJuecPW5mxMdacJ1f4EtgiV+R0p3sCkznTMvH0MPGFqjA==} @@ -21265,34 +21596,22 @@ packages: /clsx@2.1.1: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} - dev: false /cluster-key-slot@1.1.2: resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==} engines: {node: '>=0.10.0'} 
dev: false - /code-red@1.0.4: - resolution: {integrity: sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==} - dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 - '@types/estree': 1.0.6 - acorn: 8.14.1 - estree-walker: 3.0.3 - periscopic: 3.1.0 - - /codemirror@6.0.1(@lezer/common@1.0.2): + /codemirror@6.0.1: resolution: {integrity: sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==} dependencies: - '@codemirror/autocomplete': 6.4.0(@codemirror/language@6.3.2)(@codemirror/state@6.2.0)(@codemirror/view@6.7.2)(@lezer/common@1.0.2) - '@codemirror/commands': 6.1.3 - '@codemirror/language': 6.3.2 - '@codemirror/lint': 6.4.2 - '@codemirror/search': 6.2.3 - '@codemirror/state': 6.2.0 - '@codemirror/view': 6.7.2 - transitivePeerDependencies: - - '@lezer/common' + '@codemirror/autocomplete': 6.18.6 + '@codemirror/commands': 6.8.1 + '@codemirror/language': 6.11.0 + '@codemirror/lint': 6.8.5 + '@codemirror/search': 6.5.10 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.36.6 dev: false /color-convert@1.9.3: @@ -21618,7 +21937,7 @@ packages: requiresBuild: true dependencies: buildcheck: 0.0.6 - nan: 2.20.0 + nan: 2.22.2 optional: true /cpy-cli@5.0.0: @@ -21673,6 +21992,10 @@ packages: resolution: {integrity: sha512-+BO9wPPi+DWTDcNYhr/W90myha8ptzftZT+LwcmUbbok0rcP/fequmFYCw8NMoH7pkAZQzU78b3kYrlua5a9eA==} dev: false + /crelt@1.0.6: + resolution: {integrity: sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==} + dev: false + /cron-parser@4.9.0: resolution: {integrity: sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==} engines: {node: '>=12.0.0'} @@ -21738,7 +22061,7 @@ packages: hyphenate-style-name: 1.0.4 dev: false - /css-loader@6.10.0(webpack@5.88.2): + /css-loader@6.10.0(webpack@5.99.7): resolution: {integrity: sha512-LTSA/jWbwdMlk+rhmElbDR2vbtQoTBPr7fkJE+mxrHj+7ru0hUmHafDRzWIjIHTwpitWVaqY2/UWGRca3yUgRw==} engines: {node: '>= 12.13.0'} peerDependencies: @@ -21758,7 +22081,7 @@ packages: postcss-modules-values: 4.0.0(postcss@8.4.35) postcss-value-parser: 4.2.0 semver: 7.6.3 - webpack: 5.88.2(@swc/core@1.3.26)(esbuild@0.15.18) + webpack: 5.99.7(@swc/core@1.3.26)(esbuild@0.15.18) dev: true /css-tree@1.1.3: @@ -21769,13 +22092,6 @@ packages: source-map: 0.6.1 dev: false - /css-tree@2.3.1: - resolution: {integrity: sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==} - engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} - dependencies: - mdn-data: 2.0.30 - source-map-js: 1.2.1 - /css-unit-converter@1.1.2: resolution: {integrity: sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==} dev: false @@ -22495,6 +22811,13 @@ packages: fast-check: 3.22.0 dev: false + /effect@3.14.14: + resolution: {integrity: sha512-Dbt9MAZHqM1UAip41RrZnypzLa/hJJGHXIVS9MbgU0L+UoJTFXToWIwWmHY/OcaQVNlf/1YxpMrD3xtxoDP/qw==} + dependencies: + '@standard-schema/spec': 1.0.0 + fast-check: 3.23.2 + dev: false + /effect@3.7.2: resolution: {integrity: sha512-pV7l1+LSZFvVObj4zuy4nYiBaC7qZOfrKV6s/Ef4p3KueiQwZFgamazklwyZ+x7Nyj2etRDFvHE/xkThTfQD1w==} dev: false @@ -22743,6 +23066,10 @@ packages: /es-module-lexer@1.6.0: resolution: {integrity: sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==} + /es-module-lexer@1.7.0: + resolution: {integrity: 
sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + dev: true + /es-object-atoms@1.0.0: resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} engines: {node: '>= 0.4'} @@ -23300,22 +23627,22 @@ packages: eslint: 8.31.0 dev: true - /eslint-config-prettier@9.0.0(eslint@8.49.0): + /eslint-config-prettier@9.0.0(eslint@8.31.0): resolution: {integrity: sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw==} hasBin: true peerDependencies: eslint: '>=7.0.0' dependencies: - eslint: 8.49.0 + eslint: 8.31.0 dev: false - /eslint-config-turbo@1.10.12(eslint@8.49.0): + /eslint-config-turbo@1.10.12(eslint@8.31.0): resolution: {integrity: sha512-z3jfh+D7UGYlzMWGh+Kqz++hf8LOE96q3o5R8X4HTjmxaBWlLAWG+0Ounr38h+JLR2TJno0hU9zfzoPNkR9BdA==} peerDependencies: eslint: '>6.6.0' dependencies: - eslint: 8.49.0 - eslint-plugin-turbo: 1.10.12(eslint@8.49.0) + eslint: 8.31.0 + eslint-plugin-turbo: 1.10.12(eslint@8.31.0) dev: false /eslint-import-resolver-node@0.3.7: @@ -23587,13 +23914,13 @@ packages: - typescript dev: true - /eslint-plugin-turbo@1.10.12(eslint@8.49.0): + /eslint-plugin-turbo@1.10.12(eslint@8.31.0): resolution: {integrity: sha512-uNbdj+ohZaYo4tFJ6dStRXu2FZigwulR1b3URPXe0Q8YaE7thuekKNP+54CHtZPH9Zey9dmDx5btAQl9mfzGOw==} peerDependencies: eslint: '>6.6.0' dependencies: dotenv: 16.0.3 - eslint: 8.49.0 + eslint: 8.31.0 dev: false /eslint-plugin-turbo@2.0.5(eslint@8.31.0): @@ -23605,15 +23932,6 @@ packages: eslint: 8.31.0 dev: true - /eslint-plugin-turbo@2.0.5(eslint@8.49.0): - resolution: {integrity: sha512-nCTXZdaKmdRybBdjnMrDFG+ppLc9toUqB01Hf0pfhkQw8OoC29oJIVPsCSvuL/W58RKD02CNEUrwnVt57t36IQ==} - peerDependencies: - eslint: '>6.6.0' - dependencies: - dotenv: 16.0.3 - eslint: 8.49.0 - dev: true - /eslint-scope@5.1.1: resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} engines: {node: '>=8.0.0'} @@ -23627,14 +23945,6 @@ packages: dependencies: esrecurse: 4.3.0 estraverse: 5.3.0 - dev: true - - /eslint-scope@7.2.2: - resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - esrecurse: 4.3.0 - estraverse: 5.3.0 /eslint-utils@2.1.0: resolution: {integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==} @@ -23651,7 +23961,6 @@ packages: dependencies: eslint: 8.31.0 eslint-visitor-keys: 2.1.0 - dev: true /eslint-visitor-keys@1.3.0: resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==} @@ -23661,21 +23970,15 @@ packages: /eslint-visitor-keys@2.1.0: resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} engines: {node: '>=10'} - dev: true /eslint-visitor-keys@3.3.0: resolution: {integrity: sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dev: true /eslint-visitor-keys@3.4.2: resolution: {integrity: sha512-8drBzUEyZ2llkpCA67iYrgEssKDUu68V8ChqqOfFupIaG/LCVPUT+CoGJpT77zJprs4T/W7p07LP7zAIMuweVw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - /eslint-visitor-keys@3.4.3: - resolution: {integrity: 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - /eslint@8.31.0: resolution: {integrity: sha512-0tQQEVdmPZ1UtUKXjX7EMm9BlgJ08G90IhWh0PKDCb3ZLsgAOHI8fYSIzYVZej92zsgq+ft0FGsxhJ3xo2tbuA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -23722,53 +24025,9 @@ packages: text-table: 0.2.0 transitivePeerDependencies: - supports-color - dev: true - /eslint@8.49.0: - resolution: {integrity: sha512-jw03ENfm6VJI0jA9U+8H5zfl5b+FvuU3YYvZRdZHOlU2ggJkxrlkJH4HcDrZpj6YwD8kuYqvQM8LyesoazrSOQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. - hasBin: true - dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.49.0) - '@eslint-community/regexpp': 4.12.1 - '@eslint/eslintrc': 2.1.4 - '@eslint/js': 8.49.0 - '@humanwhocodes/config-array': 0.11.14 - '@humanwhocodes/module-importer': 1.0.1 - '@nodelib/fs.walk': 1.2.8 - ajv: 6.12.6 - chalk: 4.1.2 - cross-spawn: 7.0.3 - debug: 4.4.0(supports-color@10.0.0) - doctrine: 3.0.0 - escape-string-regexp: 4.0.0 - eslint-scope: 7.2.2 - eslint-visitor-keys: 3.4.3 - espree: 9.6.1 - esquery: 1.5.0 - esutils: 2.0.3 - fast-deep-equal: 3.1.3 - file-entry-cache: 6.0.1 - find-up: 5.0.0 - glob-parent: 6.0.2 - globals: 13.19.0 - graphemer: 1.4.0 - ignore: 5.2.4 - imurmurhash: 0.1.4 - is-glob: 4.0.3 - is-path-inside: 3.0.3 - js-yaml: 4.1.0 - json-stable-stringify-without-jsonify: 1.0.1 - levn: 0.4.1 - lodash.merge: 4.6.2 - minimatch: 3.1.2 - natural-compare: 1.4.0 - optionator: 0.9.3 - strip-ansi: 6.0.1 - text-table: 0.2.0 - transitivePeerDependencies: - - supports-color + /esm-env@1.2.2: + resolution: {integrity: sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==} /espree@9.4.1: resolution: {integrity: sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==} @@ -23777,7 +24036,6 @@ packages: acorn: 8.12.1 acorn-jsx: 5.3.2(acorn@8.12.1) eslint-visitor-keys: 3.4.2 - dev: true /espree@9.6.0: resolution: {integrity: sha512-1FH/IiruXZ84tpUlm0aCUEwMl2Ho5ilqVh0VvQXw+byAz/4SAciyHLlfmL5WYqsvD38oymdUwBss0LtK8m4s/A==} @@ -23786,15 +24044,6 @@ packages: acorn: 8.12.1 acorn-jsx: 5.3.2(acorn@8.12.1) eslint-visitor-keys: 3.4.2 - dev: true - - /espree@9.6.1: - resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - acorn: 8.14.1 - acorn-jsx: 5.3.2(acorn@8.14.1) - eslint-visitor-keys: 3.4.3 /esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} @@ -23806,13 +24055,11 @@ packages: engines: {node: '>=0.10'} dependencies: estraverse: 5.3.0 - dev: true - /esquery@1.5.0: - resolution: {integrity: sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==} - engines: {node: '>=0.10'} + /esrap@1.4.6: + resolution: {integrity: sha512-F/D2mADJ9SHY3IwksD4DAXjTt7qt7GWUf3/8RhCNWmC/67tyb55dpimHmy7EplakFaflV0R/PC+fdSPqrRHAQw==} dependencies: - estraverse: 5.3.0 + '@jridgewell/sourcemap-codec': 1.5.0 /esrecurse@4.3.0: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} @@ -23887,6 +24134,7 @@ packages: resolution: {integrity: 
sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} dependencies: '@types/estree': 1.0.6 + dev: true /esutils@2.0.3: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} @@ -24123,6 +24371,13 @@ packages: pure-rand: 6.1.0 dev: false + /fast-check@3.23.2: + resolution: {integrity: sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==} + engines: {node: '>=8.0.0'} + dependencies: + pure-rand: 6.1.0 + dev: false + /fast-decode-uri-component@1.0.1: resolution: {integrity: sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==} dev: false @@ -24180,6 +24435,10 @@ packages: resolution: {integrity: sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw==} dev: false + /fast-uri@3.0.6: + resolution: {integrity: sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==} + dev: true + /fast-url-parser@1.1.3: resolution: {integrity: sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==} dependencies: @@ -24988,9 +25247,6 @@ packages: /grapheme-splitter@1.0.4: resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} - /graphemer@1.4.0: - resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} - /graphile-config@0.0.1-beta.8: resolution: {integrity: sha512-H8MinryZewvUigVLnkVDhKJgHrcNYGcLvgYWfSnR1d6l76iV9E8m4ZfN9estSHKVm6cyHhRfHBfL1G5QfXmS5A==} engines: {node: '>=16'} @@ -25829,6 +26085,12 @@ packages: resolution: {integrity: sha512-baJJdQLiYaJdvFbJqXrcGv3WU3QCzBlUcI5QhbesIm6/xPsvmO+2CDoi/GMOFBQEQm+PXkwOPrp9KK5ozZsp2w==} dependencies: '@types/estree': 1.0.6 + dev: true + + /is-reference@3.0.3: + resolution: {integrity: sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==} + dependencies: + '@types/estree': 1.0.7 /is-regex@1.1.4: resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} @@ -26069,8 +26331,8 @@ packages: '@sideway/pinpoint': 2.0.0 dev: false - /jose@4.15.4: - resolution: {integrity: sha512-W+oqK4H+r5sITxfxpSU+MMdr/YSWGvgZMQDIsNoBDGGy4i7GBPTtvFKibQzW06n3U3TqHjhvBJsirShsEJ6eeQ==} + /jose@4.15.9: + resolution: {integrity: sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==} requiresBuild: true dev: false optional: true @@ -26111,7 +26373,6 @@ packages: /js-sdsl@4.2.0: resolution: {integrity: sha512-dyBIzQBDkCqCu+0upx25Y2jGdbTGxE9fshMsCdK0ViOongpV+n5tXRcZY9v7CaVQ79AGS9KA1KHtojxiM7aXSQ==} - dev: true /js-tiktoken@1.0.14: resolution: {integrity: sha512-Pk3l3WOgM9joguZY2k52+jH82RtABRgB5RdGFZNUGbOKGMVlNmafcPA3b0ITcCZPu1L9UclP1tne6aw7ZI4Myg==} @@ -26168,6 +26429,12 @@ packages: hasBin: true dev: true + /jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + dev: true + /json-buffer@3.0.0: resolution: {integrity: sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==} dev: true @@ -27247,9 +27514,6 @@ packages: resolution: {integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==} dev: false - 
/mdn-data@2.0.30: - resolution: {integrity: sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==} - /media-query-parser@2.0.2: resolution: {integrity: sha512-1N4qp+jE0pL5Xv4uEcwVUhIkwdUO3S/9gML90nqKA7v7FcOS5vUtatfzok9S9U1EJU8dHWlcv95WLnKmmxZI9w==} dependencies: @@ -28135,6 +28399,12 @@ packages: /nan@2.20.0: resolution: {integrity: sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==} + dev: false + + /nan@2.22.2: + resolution: {integrity: sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==} + requiresBuild: true + optional: true /nano-css@5.3.5(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-vSB9X12bbNu4ALBu7nigJgRViZ6ja3OU7CeuiV1zMIbXOdmkLahgtPmh3GBOlDxbKY0CitqlPdOReGlBLSp+yg==} @@ -28299,7 +28569,7 @@ packages: - babel-plugin-macros dev: false - /next@14.2.3(@babel/core@7.24.5)(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.2.0)(react@19.0.0-rc.0): + /next@14.2.3(@babel/core@7.24.5)(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.3.1)(react@19.0.0-rc.0): resolution: {integrity: sha512-dowFkFTR8v79NPJO4QsBUtxv0g9BrS/phluVpMAt2ku7H+cbcBJlopXjkWlwxrk/xGqMemr7JkGPGemPrLLX7A==} engines: {node: '>=18.17.0'} hasBin: true @@ -28326,7 +28596,7 @@ packages: graceful-fs: 4.2.11 postcss: 8.4.31 react: 19.0.0-rc.0 - react-dom: 18.2.0(react@19.0.0-rc.0) + react-dom: 18.3.1(react@19.0.0-rc.0) styled-jsx: 5.1.1(@babel/core@7.24.5)(react@19.0.0-rc.0) optionalDependencies: '@next/swc-darwin-arm64': 14.2.3 @@ -28422,6 +28692,10 @@ packages: resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==} dev: false + /node-fetch-native@1.6.6: + resolution: {integrity: sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==} + dev: false + /node-fetch@2.6.12: resolution: {integrity: sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==} engines: {node: 4.x || >=6.0.0} @@ -28962,11 +29236,11 @@ packages: hasBin: true dev: true - /openid-client@5.6.4: - resolution: {integrity: sha512-T1h3B10BRPKfcObdBklX639tVz+xh34O7GjofqrqiAQdm7eHsQ00ih18x6wuJ/E6FxdtS2u3FmUGPDeEcMwzNA==} + /openid-client@5.7.1: + resolution: {integrity: sha512-jDBPgSVfTnkIh71Hg9pRvtJc6wTwqjRkN88+gCFtYWrlP4Yx2Dsrow8uPi3qLr/aeymPF3o2+dS+wOpglK04ew==} requiresBuild: true dependencies: - jose: 4.15.4 + jose: 4.15.9 lru-cache: 6.0.0 object-hash: 2.2.0 oidc-token-hash: 5.0.3 @@ -28990,18 +29264,6 @@ packages: prelude-ls: 1.2.1 type-check: 0.4.0 word-wrap: 1.2.3 - dev: true - - /optionator@0.9.3: - resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==} - engines: {node: '>= 0.8.0'} - dependencies: - '@aashutoshrathi/word-wrap': 1.2.6 - deep-is: 0.1.4 - fast-levenshtein: 2.0.6 - levn: 0.4.1 - prelude-ls: 1.2.1 - type-check: 0.4.0 /ora@5.4.1: resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} @@ -29452,9 +29714,10 @@ packages: '@types/estree': 1.0.6 estree-walker: 3.0.3 is-reference: 3.0.1 + dev: true - /pg-cloudflare@1.1.1: - resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} + /pg-cloudflare@1.2.5: + resolution: {integrity: sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg==} requiresBuild: 
true dev: false optional: true @@ -29521,7 +29784,7 @@ packages: pg-types: 2.2.0 pgpass: 1.0.5 optionalDependencies: - pg-cloudflare: 1.1.1 + pg-cloudflare: 1.2.5 dev: false /pgpass@1.0.5: @@ -29752,7 +30015,7 @@ packages: tsx: 4.17.0 dev: true - /postcss-loader@8.1.1(postcss@8.5.3)(typescript@5.5.4)(webpack@5.88.2): + /postcss-loader@8.1.1(postcss@8.5.3)(typescript@5.5.4)(webpack@5.99.7): resolution: {integrity: sha512-0IeqyAsG6tYiDRCYKQJLAmgQr47DX6N7sFSWvQxt6AcupX8DIdmykuk/o/tx0Lze3ErGHJEp5OSRxrelC6+NdQ==} engines: {node: '>= 18.12.0'} peerDependencies: @@ -29769,7 +30032,7 @@ packages: jiti: 1.21.0 postcss: 8.5.3 semver: 7.6.3 - webpack: 5.88.2(@swc/core@1.3.26)(esbuild@0.15.18) + webpack: 5.99.7(@swc/core@1.3.26)(esbuild@0.15.18) transitivePeerDependencies: - typescript dev: true @@ -30661,14 +30924,14 @@ packages: scheduler: 0.23.0 dev: false - /react-dom@18.2.0(react@19.0.0-rc.0): - resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==} + /react-dom@18.3.1(react@19.0.0-rc.0): + resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} peerDependencies: - react: ^18.2.0 + react: ^18.3.1 dependencies: loose-envify: 1.4.0 react: 19.0.0-rc.0 - scheduler: 0.23.0 + scheduler: 0.23.2 dev: false /react-dom@19.0.0(react@19.0.0): @@ -30689,7 +30952,7 @@ packages: scheduler: 0.25.0-rc.1 dev: false - /react-email@2.1.2(eslint@8.49.0): + /react-email@2.1.2(eslint@8.31.0): resolution: {integrity: sha512-HBHhpzEE5es9YUoo7VSj6qy1omjwndxf3/Sb44UJm/uJ2AjmqALo2yryux0CjW9QAVfitc9rxHkLvIb9H87QQw==} engines: {node: '>=18.0.0'} hasBin: true @@ -30715,8 +30978,8 @@ packages: commander: 11.1.0 debounce: 2.0.0 esbuild: 0.19.11 - eslint-config-prettier: 9.0.0(eslint@8.49.0) - eslint-config-turbo: 1.10.12(eslint@8.49.0) + eslint-config-prettier: 9.0.0(eslint@8.31.0) + eslint-config-turbo: 1.10.12(eslint@8.31.0) framer-motion: 10.17.4(react-dom@18.2.0)(react@18.3.1) glob: 10.3.4 log-symbols: 4.1.0 @@ -30752,7 +31015,7 @@ packages: - webpack-cli dev: false - /react-email@3.0.1(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.2.0)(react@19.0.0-rc.0): + /react-email@3.0.1(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.3.1)(react@19.0.0-rc.0): resolution: {integrity: sha512-G4Bkx2ULIScy/0Z8nnWywHt0W1iTkaYCdh9rWNuQ3eVZ6B3ttTUDE9uUy3VNQ8dtQbmG0cpt8+XmImw7mMBW6Q==} engines: {node: '>=18.0.0'} hasBin: true @@ -30767,7 +31030,7 @@ packages: glob: 10.3.4 log-symbols: 4.1.0 mime-types: 2.1.35 - next: 14.2.3(@babel/core@7.24.5)(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.2.0)(react@19.0.0-rc.0) + next: 14.2.3(@babel/core@7.24.5)(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.3.1)(react@19.0.0-rc.0) normalize-path: 3.0.0 ora: 5.4.1 socket.io: 4.7.5 @@ -31357,7 +31620,6 @@ packages: /regexpp@3.2.0: resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} engines: {node: '>=8'} - dev: true /regexpu-core@5.2.2: resolution: {integrity: sha512-T0+1Zp2wjF/juXMrMxHxidqGYn8U4R+zleSJhX9tQ1PUsS8a9UtYfbsF9LdiVgNX3kiX8RNaKM42nfSgvFJjmw==} @@ -31876,8 +32138,8 @@ packages: resolution: {integrity: sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==} dev: false - /rxjs@7.8.1: - resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} + /rxjs@7.8.2: + resolution: {integrity: 
sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} requiresBuild: true dependencies: tslib: 2.6.2 @@ -31932,6 +32194,12 @@ packages: dependencies: loose-envify: 1.4.0 + /scheduler@0.23.2: + resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} + dependencies: + loose-envify: 1.4.0 + dev: false + /scheduler@0.25.0: resolution: {integrity: sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==} dev: false @@ -31947,6 +32215,7 @@ packages: '@types/json-schema': 7.0.13 ajv: 6.12.6 ajv-keywords: 3.5.2(ajv@6.12.6) + dev: false /schema-utils@4.0.1: resolution: {integrity: sha512-lELhBAAly9NowEsX0yZBlw9ahZG+sK/1RJ21EpzdYHKEs13Vku3LJ+MIPhh4sMs0oCCeufZQEQbMekiA4vuVIQ==} @@ -31958,6 +32227,16 @@ packages: ajv-keywords: 5.1.0(ajv@8.12.0) dev: true + /schema-utils@4.3.2: + resolution: {integrity: sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==} + engines: {node: '>= 10.13.0'} + dependencies: + '@types/json-schema': 7.0.15 + ajv: 8.17.1 + ajv-formats: 2.1.1(ajv@8.17.1) + ajv-keywords: 5.1.0(ajv@8.17.1) + dev: true + /screenfull@5.2.0: resolution: {integrity: sha512-9BakfsO2aUQN2K9Fdbj87RJIEZ82Q9IGim7FqM5OsebfoFC6ZHXgDq/KvniuLTPdeM8wY2o6Dj3WQ7KeQCj3cA==} engines: {node: '>=0.10.0'} @@ -32052,6 +32331,13 @@ packages: resolution: {integrity: sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==} dependencies: randombytes: 2.1.0 + dev: false + + /serialize-javascript@6.0.2: + resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==} + dependencies: + randombytes: 2.1.0 + dev: true /serve-static@1.15.0: resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==} @@ -32596,7 +32882,7 @@ packages: bcrypt-pbkdf: 1.0.2 optionalDependencies: cpu-features: 0.0.10 - nan: 2.20.0 + nan: 2.22.2 /sshpk@1.18.0: resolution: {integrity: sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ==} @@ -32620,12 +32906,12 @@ packages: dependencies: minipass: 7.1.2 - /sswr@2.1.0(svelte@4.2.19): + /sswr@2.1.0(svelte@5.28.2): resolution: {integrity: sha512-Cqc355SYlTAaUt8iDPaC/4DPPXK925PePLMxyBKuWd5kKc5mwsG3nT9+Mq2tyguL5s7b4Jg+IRMpTRsNTAfpSQ==} peerDependencies: svelte: ^4.0.0 || ^5.0.0-next.0 dependencies: - svelte: 4.2.19 + svelte: 5.28.2 swrev: 4.0.0 /stack-generator@2.0.10: @@ -32906,19 +33192,23 @@ packages: resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==} dev: false - /style-loader@3.3.4(webpack@5.88.2): + /style-loader@3.3.4(webpack@5.99.7): resolution: {integrity: sha512-0WqXzrsMTyb8yjZJHDqwmnwRJvhALK9LfRtRc6B4UTWe8AijYLZYZ9thuJTZc2VfQWINADW/j+LiJnfy2RoC1w==} engines: {node: '>= 12.13.0'} peerDependencies: webpack: ^5.0.0 dependencies: - webpack: 5.88.2(@swc/core@1.3.26)(esbuild@0.15.18) + webpack: 5.99.7(@swc/core@1.3.26)(esbuild@0.15.18) dev: true /style-mod@4.0.0: resolution: {integrity: sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw==} dev: false + /style-mod@4.1.2: + resolution: {integrity: sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==} + dev: false + /style-to-js@1.1.16: resolution: {integrity: 
sha512-/Q6ld50hKYPH3d/r6nr117TZkHR0w0kGGIVfpG9N6D8NymRPM9RqCUv4pRpJ62E5DqOYx2AFpbZMyCPnjQCnOw==} dependencies: @@ -33094,24 +33384,24 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - /svelte@4.2.19: - resolution: {integrity: sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw==} - engines: {node: '>=16'} + /svelte@5.28.2: + resolution: {integrity: sha512-FbWBxgWOpQfhKvoGJv/TFwzqb4EhJbwCD17dB0tEpQiw1XyUEKZJtgm4nA4xq3LLsMo7hu5UY/BOFmroAxKTMg==} + engines: {node: '>=18'} dependencies: '@ampproject/remapping': 2.3.0 '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 - '@types/estree': 1.0.6 + '@sveltejs/acorn-typescript': 1.0.5(acorn@8.14.1) + '@types/estree': 1.0.7 acorn: 8.14.1 - aria-query: 5.3.0 + aria-query: 5.3.2 axobject-query: 4.1.0 - code-red: 1.0.4 - css-tree: 2.3.1 - estree-walker: 3.0.3 - is-reference: 3.0.1 + clsx: 2.1.1 + esm-env: 1.2.2 + esrap: 1.4.6 + is-reference: 3.0.3 locate-character: 3.0.0 magic-string: 0.30.17 - periscopic: 3.1.0 + zimmerframe: 1.1.2 /swr@2.2.5(react@18.3.1): resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} @@ -33145,12 +33435,12 @@ packages: /swrev@4.0.0: resolution: {integrity: sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==} - /swrv@1.0.4(vue@3.4.38): + /swrv@1.0.4(vue@3.5.13): resolution: {integrity: sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==} peerDependencies: vue: '>=3.2.26 < 4' dependencies: - vue: 3.4.38(typescript@5.5.4) + vue: 3.5.13(typescript@5.5.4) /sync-content@2.0.1: resolution: {integrity: sha512-NI1mo514yFhr8pV/5Etvgh+pSBUIpoAKoiBIUwALVlQQNAwb40bTw8hhPFaip/dvv0GhpHVOq0vq8iY02ppLTg==} @@ -33463,8 +33753,16 @@ packages: supports-hyperlinks: 2.3.0 dev: true - /terser-webpack-plugin@5.3.7(@swc/core@1.3.101)(esbuild@0.19.11)(webpack@5.88.2): - resolution: {integrity: sha512-AfKwIktyP7Cu50xNjXF/6Qb5lBNzYaWpU6YfoX3uZicTx0zTy0stDDCsvjDapKsSDvOeWo5MEq4TmdBy2cNoHw==} + /terminal-link@3.0.0: + resolution: {integrity: sha512-flFL3m4wuixmf6IfhFJd1YPiLiMuxEc8uHRM1buzIeZPm22Au2pDqBJQgdo7n1WfPU1ONFGv7YDwpFBmHGF6lg==} + engines: {node: '>=12'} + dependencies: + ansi-escapes: 5.0.0 + supports-hyperlinks: 2.3.0(patch_hash=xmw2etywyp5w2jf77wkqg4ob3a) + dev: false + + /terser-webpack-plugin@5.3.14(@swc/core@1.3.26)(esbuild@0.15.18)(webpack@5.99.7): + resolution: {integrity: sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw==} engines: {node: '>= 10.13.0'} peerDependencies: '@swc/core': '*' @@ -33480,16 +33778,16 @@ packages: optional: true dependencies: '@jridgewell/trace-mapping': 0.3.25 - '@swc/core': 1.3.101 - esbuild: 0.19.11 + '@swc/core': 1.3.26 + esbuild: 0.15.18 jest-worker: 27.5.1 - schema-utils: 3.3.0 - serialize-javascript: 6.0.1 - terser: 5.17.1 - webpack: 5.88.2(@swc/core@1.3.101)(esbuild@0.19.11) - dev: false + schema-utils: 4.3.2 + serialize-javascript: 6.0.2 + terser: 5.39.0 + webpack: 5.99.7(@swc/core@1.3.26)(esbuild@0.15.18) + dev: true - /terser-webpack-plugin@5.3.7(@swc/core@1.3.26)(esbuild@0.15.18)(webpack@5.88.2): + /terser-webpack-plugin@5.3.7(@swc/core@1.3.101)(esbuild@0.19.11)(webpack@5.88.2): resolution: {integrity: sha512-AfKwIktyP7Cu50xNjXF/6Qb5lBNzYaWpU6YfoX3uZicTx0zTy0stDDCsvjDapKsSDvOeWo5MEq4TmdBy2cNoHw==} engines: {node: '>= 10.13.0'} 
peerDependencies: @@ -33506,14 +33804,14 @@ packages: optional: true dependencies: '@jridgewell/trace-mapping': 0.3.25 - '@swc/core': 1.3.26 - esbuild: 0.15.18 + '@swc/core': 1.3.101 + esbuild: 0.19.11 jest-worker: 27.5.1 schema-utils: 3.3.0 serialize-javascript: 6.0.1 terser: 5.17.1 - webpack: 5.88.2(@swc/core@1.3.26)(esbuild@0.15.18) - dev: true + webpack: 5.88.2(@swc/core@1.3.101)(esbuild@0.19.11) + dev: false /terser@5.17.1: resolution: {integrity: sha512-hVl35zClmpisy6oaoKALOpS0rDYLxRFLHhRuDlEGTKey9qHjS1w9GMORjuwIMt70Wan4lwsLYyWDVnWgF+KUEw==} @@ -33524,6 +33822,18 @@ packages: acorn: 8.12.1 commander: 2.20.3 source-map-support: 0.5.21 + dev: false + + /terser@5.39.0: + resolution: {integrity: sha512-LBAhFyLho16harJoWMg/nZsQYgTrg5jXOn2nCYjRUcZZEdE3qa2zb8QEDRUGVZBW4rlazf2fxkg8tztybTaqWw==} + engines: {node: '>=10'} + hasBin: true + dependencies: + '@jridgewell/source-map': 0.3.6 + acorn: 8.14.1 + commander: 2.20.3 + source-map-support: 0.5.21 + dev: true /test-exclude@7.0.1: resolution: {integrity: sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==} @@ -34342,7 +34652,7 @@ packages: /typed-emitter@2.1.0: resolution: {integrity: sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA==} optionalDependencies: - rxjs: 7.8.1 + rxjs: 7.8.2 dev: true /typed-query-selector@2.12.0: @@ -34448,6 +34758,10 @@ packages: /ufo@1.5.4: resolution: {integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==} + /ufo@1.6.1: + resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + dev: false + /uid2@1.0.0: resolution: {integrity: sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ==} engines: {node: '>= 4.0.0'} @@ -34511,9 +34825,9 @@ packages: consola: 3.4.2 defu: 6.1.4 mime: 3.0.0 - node-fetch-native: 1.6.4 + node-fetch-native: 1.6.6 pathe: 1.1.2 - ufo: 1.5.4 + ufo: 1.6.1 dev: false /unicode-canonical-property-names-ecmascript@2.0.0: @@ -35568,25 +35882,29 @@ packages: - terser dev: true - /vue@3.4.38(typescript@5.5.4): - resolution: {integrity: sha512-f0ZgN+mZ5KFgVv9wz0f4OgVKukoXtS3nwET4c2vLBGQR50aI8G0cqbFtLlX9Yiyg3LFGBitruPHt2PxwTduJEw==} + /vue@3.5.13(typescript@5.5.4): + resolution: {integrity: sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==} peerDependencies: typescript: '*' peerDependenciesMeta: typescript: optional: true dependencies: - '@vue/compiler-dom': 3.4.38 - '@vue/compiler-sfc': 3.4.38 - '@vue/runtime-dom': 3.4.38 - '@vue/server-renderer': 3.4.38(vue@3.4.38) - '@vue/shared': 3.4.38 + '@vue/compiler-dom': 3.5.13 + '@vue/compiler-sfc': 3.5.13 + '@vue/runtime-dom': 3.5.13 + '@vue/server-renderer': 3.5.13(vue@3.5.13) + '@vue/shared': 3.5.13 typescript: 5.5.4 /w3c-keyname@2.2.6: resolution: {integrity: sha512-f+fciywl1SJEniZHD6H+kUO8gOnwIr7f4ijKA6+ZvJFjeGi1r4PDLl53Ayud9O/rk64RqgoQine0feoeOU0kXg==} dev: false + /w3c-keyname@2.2.8: + resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==} + dev: false + /walk-up-path@4.0.0: resolution: {integrity: sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A==} engines: {node: 20 || >=22} @@ -35604,6 +35922,15 @@ packages: dependencies: glob-to-regexp: 0.4.1 graceful-fs: 4.2.11 + dev: false + + /watchpack@2.4.2: + resolution: {integrity: 
sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==} + engines: {node: '>=10.13.0'} + dependencies: + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.11 + dev: true /wcwidth@1.0.1: resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} @@ -35704,8 +36031,8 @@ packages: - uglify-js dev: false - /webpack@5.88.2(@swc/core@1.3.26)(esbuild@0.15.18): - resolution: {integrity: sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ==} + /webpack@5.99.7(@swc/core@1.3.26)(esbuild@0.15.18): + resolution: {integrity: sha512-CNqKBRMQjwcmKR0idID5va1qlhrqVUKpovi+Ec79ksW8ux7iS1+A6VqzfZXgVYCFRKl7XL5ap3ZoMpwBJxcg0w==} engines: {node: '>=10.13.0'} hasBin: true peerDependencies: @@ -35714,17 +36041,17 @@ packages: webpack-cli: optional: true dependencies: - '@types/eslint-scope': 3.7.4 - '@types/estree': 1.0.6 - '@webassemblyjs/ast': 1.11.5 - '@webassemblyjs/wasm-edit': 1.11.5 - '@webassemblyjs/wasm-parser': 1.11.5 - acorn: 8.12.1 - acorn-import-assertions: 1.9.0(acorn@8.12.1) + '@types/eslint-scope': 3.7.7 + '@types/estree': 1.0.7 + '@types/json-schema': 7.0.15 + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/wasm-edit': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + acorn: 8.14.1 browserslist: 4.24.4 - chrome-trace-event: 1.0.3 + chrome-trace-event: 1.0.4 enhanced-resolve: 5.18.1 - es-module-lexer: 1.6.0 + es-module-lexer: 1.7.0 eslint-scope: 5.1.1 events: 3.3.0 glob-to-regexp: 0.4.1 @@ -35733,10 +36060,10 @@ packages: loader-runner: 4.3.0 mime-types: 2.1.35 neo-async: 2.6.2 - schema-utils: 3.3.0 + schema-utils: 4.3.2 tapable: 2.2.1 - terser-webpack-plugin: 5.3.7(@swc/core@1.3.26)(esbuild@0.15.18)(webpack@5.88.2) - watchpack: 2.4.0 + terser-webpack-plugin: 5.3.14(@swc/core@1.3.26)(esbuild@0.15.18)(webpack@5.99.7) + watchpack: 2.4.2 webpack-sources: 3.2.3 transitivePeerDependencies: - '@swc/core' @@ -35851,7 +36178,6 @@ packages: /word-wrap@1.2.3: resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} engines: {node: '>=0.10.0'} - dev: true /workerd@1.20240806.0: resolution: {integrity: sha512-yyNtyzTMgVY0sgYijHBONqZFVXsOFGj2jDjS8MF/RbO2ZdGROvs4Hkc/9QnmqFWahE0STxXeJ1yW1yVotdF0UQ==} @@ -36164,6 +36490,9 @@ packages: type-fest: 2.19.0 dev: false + /zimmerframe@1.1.2: + resolution: {integrity: sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==} + /zip-stream@4.1.1: resolution: {integrity: sha512-9qv4rlDiopXg4E69k+vMHjNN63YFMe9sZMrdlvKnCjlCRWeCBswPPMPUfx+ipsAWq1LXHe70RcbaHdJJpS6hyQ==} engines: {node: '>= 10'} From 5cd2d20a0f923765f2f3989b3a556c8d97c910f8 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 28 Apr 2025 15:04:07 +0100 Subject: [PATCH 03/33] Add some additional columns to raw_run_events_v1 --- .../schema/003_create_raw_run_events_v1.sql | 14 ++++++++ .../clickhouse/src/runEvents.test.ts | 9 +++++ internal-packages/clickhouse/src/runEvents.ts | 35 ++++++++++++------- 3 files changed, 45 insertions(+), 13 deletions(-) diff --git a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql index aa85ad2982..88490e13e5 100644 --- a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql @@ -18,6 +18,7 @@ CREATE TABLE trigger_dev.raw_run_events_v1 /* ─── ids & hierarchy 
─────────────────────────────────────── */ environment_id String, run_id String, + friendly_id String, attempt UInt8 DEFAULT 1, /* ─── enums / status ──────────────────────────────────────── */ @@ -38,6 +39,16 @@ CREATE TABLE trigger_dev.raw_run_events_v1 schedule_id Nullable(String), batch_id Nullable(String), + /* ─── related runs ─────────────────────────────────────────────── */ + root_run_id Nullable(String), + parent_run_id Nullable(String), + depth UInt8 DEFAULT 0, + + /* ─── telemetry ─────────────────────────────────────────────── */ + span_id Nullable(String), + trace_id Nullable(String), + idempotency_key Nullable(String), + /* ─── timing ─────────────────────────────────────────────── */ event_time DateTime64(3), -- when this row created created_at DateTime64(3), @@ -50,6 +61,7 @@ CREATE TABLE trigger_dev.raw_run_events_v1 queued_at Nullable(DateTime64(3)), expired_at Nullable(DateTime64(3)), duration_ms Nullable(UInt32), + expiration_ttl Nullable(String), /* ─── cost / usage ───────────────────────────────────────── */ usage_duration_ms UInt32 DEFAULT 0, @@ -66,6 +78,8 @@ CREATE TABLE trigger_dev.raw_run_events_v1 sdk_version Nullable(String) CODEC(LZ4), cli_version Nullable(String) CODEC(LZ4), machine_preset LowCardinality(Nullable(String)) CODEC(LZ4), + + is_test Nullable(UInt8) DEFAULT 0, ) ENGINE = MergeTree PARTITION BY toYYYYMM(event_time) diff --git a/internal-packages/clickhouse/src/runEvents.test.ts b/internal-packages/clickhouse/src/runEvents.test.ts index e48a19fe5c..fec990cd46 100644 --- a/internal-packages/clickhouse/src/runEvents.test.ts +++ b/internal-packages/clickhouse/src/runEvents.test.ts @@ -18,6 +18,7 @@ describe("Run Events", () => { { environment_id: "env_1234", run_id: "run_1234", + friendly_id: "friendly_1234", attempt: 1, engine: "V2", status: "PENDING", @@ -49,6 +50,14 @@ describe("Run Events", () => { sdk_version: "1.0.0", cli_version: "1.0.0", machine_preset: "small-1x", + is_test: true, + span_id: "span_1234", + trace_id: "trace_1234", + idempotency_key: "idempotency_key_1234", + expiration_ttl: "1h", + root_run_id: "root_run_1234", + parent_run_id: "parent_run_1234", + depth: 1, }, ]); diff --git a/internal-packages/clickhouse/src/runEvents.ts b/internal-packages/clickhouse/src/runEvents.ts index 11fd221c16..06bdde59db 100644 --- a/internal-packages/clickhouse/src/runEvents.ts +++ b/internal-packages/clickhouse/src/runEvents.ts @@ -6,6 +6,7 @@ import { TaskRunError } from "@trigger.dev/core/v3/schemas"; export const RawRunEventV1 = z.object({ environment_id: z.string(), run_id: z.string(), + friendly_id: z.string(), attempt: z.number().int().default(1), engine: z.enum(["V1", "V2"]), status: z.enum([ @@ -29,19 +30,19 @@ export const RawRunEventV1 = z.object({ queue: z.string(), schedule_id: z.string().optional(), batch_id: z.string().optional(), - event_time: z.coerce.number().int(), - created_at: z.coerce.number().int(), - updated_at: z.coerce.number().int(), - completed_at: z.coerce.number().int().optional(), - started_at: z.coerce.number().int().optional(), - executed_at: z.coerce.number().int().optional(), - finished_at: z.coerce.number().int().optional(), - delay_until: z.coerce.number().int().optional(), - queued_at: z.coerce.number().int().optional(), - expired_at: z.coerce.number().int().optional(), - duration_ms: z.coerce.number().int().optional(), - usage_duration_ms: z.coerce.number().int().optional(), - cost_in_cents: z.coerce.number().int().optional(), + event_time: z.number().int(), + created_at: z.number().int(), + updated_at: 
z.number().int(),
+  completed_at: z.number().int().optional(),
+  started_at: z.number().int().optional(),
+  executed_at: z.number().int().optional(),
+  finished_at: z.number().int().optional(),
+  delay_until: z.number().int().optional(),
+  queued_at: z.number().int().optional(),
+  expired_at: z.number().int().optional(),
+  duration_ms: z.number().int().optional(),
+  usage_duration_ms: z.number().int().optional(),
+  cost_in_cents: z.number().int().optional(),
   payload: z.unknown().optional(),
   output: z.unknown().optional(),
   error: TaskRunError.optional(),
@@ -50,6 +51,14 @@ export const RawRunEventV1 = z.object({
   sdk_version: z.string().optional(),
   cli_version: z.string().optional(),
   machine_preset: z.string().optional(),
+  root_run_id: z.string().optional(),
+  parent_run_id: z.string().optional(),
+  depth: z.number().int().default(0),
+  span_id: z.string().optional(),
+  trace_id: z.string().optional(),
+  idempotency_key: z.string().optional(),
+  expiration_ttl: z.string().optional(),
+  is_test: z.boolean().default(false),
 });
 
 export type RawRunEventV1 = z.infer<typeof RawRunEventV1>;

From 9a1fd64c87ac452abfd8c3e02d02bed1eb5f2763 Mon Sep 17 00:00:00 2001
From: Eric Allam
Date: Mon, 28 Apr 2025 17:30:25 +0100
Subject: [PATCH 04/33] WIP runs dashboard service

---
 apps/webapp/app/runEngine/types.ts            |   2 +-
 .../services/runsDashboardInstance.server.ts  |  36 ++
 .../services/runsDashboardService.server.ts   | 135 ++++++
 apps/webapp/package.json                      |   3 +-
 apps/webapp/test/engine/triggerTask.test.ts   | 450 +++++++++++-------
 internal-packages/clickhouse/package.json     |  10 +-
 .../schema/003_create_raw_run_events_v1.sql   |  30 +-
 .../clickhouse/src/client/client.ts           |  37 ++
 internal-packages/clickhouse/src/index.ts     |  14 +
 .../clickhouse/src/runEvents.test.ts          |   4 +-
 internal-packages/clickhouse/src/runEvents.ts |  19 +-
 .../clickhouse/tsconfig.build.json            |  21 +
 .../run-engine/src/engine/index.ts            |   6 +-
 internal-packages/testcontainers/src/index.ts |   6 +-
 pnpm-lock.yaml                                |   3 +
 15 files changed, 565 insertions(+), 211 deletions(-)
 create mode 100644 apps/webapp/app/services/runsDashboardInstance.server.ts
 create mode 100644 apps/webapp/app/services/runsDashboardService.server.ts
 create mode 100644 internal-packages/clickhouse/tsconfig.build.json

diff --git a/apps/webapp/app/runEngine/types.ts b/apps/webapp/app/runEngine/types.ts
index c6a4fe1868..439bbf3776 100644
--- a/apps/webapp/app/runEngine/types.ts
+++ b/apps/webapp/app/runEngine/types.ts
@@ -7,7 +7,7 @@ import {
   TriggerTaskRequestBody,
 } from "@trigger.dev/core/v3";
 import { AuthenticatedEnvironment } from "~/services/apiAuth.server";
-import { z } from "zod";
+import type { TaskRunStatusUpdateEnvironment } from "~/services/runsDashboardInstance.server";
 
 export type TriggerTaskServiceOptions = {
   idempotencyKey?: string;
diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts
new file mode 100644
index 0000000000..64933763eb
--- /dev/null
+++ b/apps/webapp/app/services/runsDashboardInstance.server.ts
@@ -0,0 +1,36 @@
+import { singleton } from "~/utils/singleton";
+import { ClickHouse } from "@internal/clickhouse";
+import {
+  RunDashboardEventBus,
+  RunDashboardEvents,
+  RunsDashboardService,
+} from "./runsDashboardService.server";
+import { EventEmitter } from "node:events";
+import { RuntimeEnvironmentType, TaskRun } from "@trigger.dev/database";
+
+const runDashboardEventBus: RunDashboardEventBus = new EventEmitter();
+
+export type TaskRunStatusUpdateEnvironment = {
+  type: RuntimeEnvironmentType;
+
organizationId: string; +}; + +export function emitRunStatusUpdate(run: TaskRun, environment: TaskRunStatusUpdateEnvironment) { + runDashboardEventBus.emit("runStatusUpdate", { + run, + environment, + organization: { id: environment.organizationId }, + }); +} + +export const runsDashboard = singleton("runsDashboard", () => { + const clickhouse = ClickHouse.fromEnv(); + + const service = new RunsDashboardService(clickhouse); + + runDashboardEventBus.on("runStatusUpdate", async (event) => { + await service.upsertRun(event.run, event.environment.type, event.organization.id); + }); + + return service; +}); diff --git a/apps/webapp/app/services/runsDashboardService.server.ts b/apps/webapp/app/services/runsDashboardService.server.ts new file mode 100644 index 0000000000..2c821e82b4 --- /dev/null +++ b/apps/webapp/app/services/runsDashboardService.server.ts @@ -0,0 +1,135 @@ +import type { ClickHouse } from "@internal/clickhouse"; +import { TaskRunError } from "@trigger.dev/core/v3/schemas"; +import { RuntimeEnvironmentType, TaskRun } from "@trigger.dev/database"; +import { logger } from "./logger.server"; +import { EventEmitter } from "node:events"; +import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; + +export class RunsDashboardService { + constructor(private readonly clickhouse: ClickHouse) {} + + private readonly logger = logger.child({ + service: "RunsDashboardService", + }); + + async upsertRun( + taskRun: TaskRun, + environmentType: RuntimeEnvironmentType, + organizationId: string + ) { + const [payload, output] = await Promise.all([ + this.#preparePayload(taskRun), + this.#prepareOutput(taskRun), + ]); + + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: taskRun.runtimeEnvironmentId, + environment_type: environmentType, + organization_id: organizationId, + project_id: taskRun.projectId, + run_id: taskRun.id, + friendly_id: taskRun.friendlyId, + attempt: taskRun.attemptNumber ?? 1, + engine: taskRun.engine, + status: taskRun.status, + task_identifier: taskRun.taskIdentifier, + queue: taskRun.queue, + schedule_id: taskRun.scheduleId ?? undefined, + batch_id: taskRun.batchId ?? undefined, + event_time: Date.now(), + created_at: taskRun.createdAt.getTime(), + updated_at: taskRun.updatedAt.getTime(), + completed_at: taskRun.completedAt ? taskRun.completedAt.getTime() : undefined, + started_at: taskRun.startedAt ? taskRun.startedAt.getTime() : undefined, + executed_at: taskRun.executedAt ? taskRun.executedAt.getTime() : undefined, + delay_until: taskRun.delayUntil ? taskRun.delayUntil.getTime() : undefined, + queued_at: taskRun.queuedAt ? taskRun.queuedAt.getTime() : undefined, + expired_at: taskRun.expiredAt ? taskRun.expiredAt.getTime() : undefined, + usage_duration_ms: taskRun.usageDurationMs, + tags: taskRun.runTags, + payload: payload, + output: output, + error: taskRun.error ? (taskRun.error as TaskRunError) : undefined, + task_version: taskRun.taskVersion ?? undefined, + sdk_version: taskRun.sdkVersion ?? undefined, + cli_version: taskRun.cliVersion ?? undefined, + machine_preset: taskRun.machinePreset ?? undefined, + is_test: taskRun.isTest ?? false, + root_run_id: taskRun.rootTaskRunId ?? undefined, + parent_run_id: taskRun.parentTaskRunId ?? undefined, + depth: taskRun.depth ?? 0, + span_id: taskRun.spanId ?? undefined, + trace_id: taskRun.traceId ?? undefined, + idempotency_key: taskRun.idempotencyKey ?? undefined, + expiration_ttl: taskRun.ttl ?? undefined, + cost_in_cents: taskRun.costInCents ?? 
undefined,
+      base_cost_in_cents: taskRun.baseCostInCents ?? undefined,
+    });
+
+    if (insertError) {
+      this.logger.error("RunsDashboardService: upsertRun", {
+        error: insertError,
+        taskRun,
+      });
+    } else {
+      this.logger.info("RunsDashboardService: upsertRun", {
+        id: taskRun.id,
+        friendlyId: taskRun.friendlyId,
+        status: taskRun.status,
+      });
+    }
+
+    return insertResult?.executed === true;
+  }
+
+  async #preparePayload(run: TaskRun): Promise<unknown> {
+    if (run.status !== "PENDING" && run.status !== "DELAYED") {
+      return undefined;
+    }
+
+    if (run.payloadType !== "application/json" && run.payloadType !== "application/super+json") {
+      return undefined;
+    }
+
+    const packet = {
+      data: run.payload,
+      dataType: run.payloadType,
+    };
+
+    return await parsePacket(packet);
+  }
+
+  async #prepareOutput(run: TaskRun): Promise<unknown> {
+    if (!run.output) {
+      return undefined;
+    }
+
+    if (run.outputType !== "application/json" && run.outputType !== "application/super+json") {
+      return undefined;
+    }
+
+    const packet = {
+      data: run.output,
+      dataType: run.outputType,
+    };
+
+    return await parsePacket(packet);
+  }
+}
+
+export type RunDashboardEvents = {
+  runStatusUpdate: [
+    {
+      run: TaskRun;
+      organization: {
+        id: string;
+      };
+      environment: {
+        type: RuntimeEnvironmentType;
+      };
+    }
+  ];
+};
+
+export type RunDashboardEventArgs<T extends keyof RunDashboardEvents> = RunDashboardEvents[T];
+export type RunDashboardEventBus = EventEmitter<RunDashboardEvents>;
diff --git a/apps/webapp/package.json b/apps/webapp/package.json
index 97d743f879..810649ece8 100644
--- a/apps/webapp/package.json
+++ b/apps/webapp/package.json
@@ -193,6 +193,7 @@
   },
   "devDependencies": {
     "@internal/testcontainers": "workspace:*",
+    "@internal/clickhouse": "workspace:*",
     "@remix-run/dev": "2.1.0",
     "@remix-run/eslint-config": "2.1.0",
     "@remix-run/testing": "^2.1.0",
@@ -258,4 +259,4 @@
   "engines": {
     "node": ">=16.0.0"
   }
-}
+}
\ No newline at end of file
diff --git a/apps/webapp/test/engine/triggerTask.test.ts b/apps/webapp/test/engine/triggerTask.test.ts
index 9e6a75b01d..9be547fb60 100644
--- a/apps/webapp/test/engine/triggerTask.test.ts
+++ b/apps/webapp/test/engine/triggerTask.test.ts
@@ -32,7 +32,9 @@ import {
   ValidationResult,
 } from "~/runEngine/types";
 import { RunEngineTriggerTaskService } from "../../app/runEngine/services/triggerTask.server";
-import { getEntitlement } from "~/services/platform.v3.server";
+import { DefaultRunsDashboardManager } from "~/runEngine/concerns/runsDashboard.server";
+import { ClickHouse } from "@internal/clickhouse";
+import { RunsDashboardService } from "~/services/runsDashboardService.server";
 
 vi.setConfig({ testTimeout: 30_000 }); // 30 seconds timeout
 
@@ -106,216 +108,242 @@ class MockTraceEventConcern implements TraceEventConcern {
 }
 
 describe("RunEngineTriggerTaskService", () => {
-  containerTest("should trigger a task with minimal options", async ({ prisma, redisOptions }) => {
-    const engine = new RunEngine({
-      prisma,
-      worker: {
-        redis: redisOptions,
-        workers: 1,
-        tasksPerWorker: 10,
-        pollIntervalMs: 100,
-      },
-      queue: {
-        redis: redisOptions,
-      },
-      runLock: {
-        redis: redisOptions,
-      },
-      machines: {
-        defaultMachine: "small-1x",
+  containerTest(
+    "should trigger a task with minimal options",
+    async ({ prisma, redisOptions, clickhouseContainer }) => {
+      const engine = new RunEngine({
+        prisma,
+        worker: {
+          redis: redisOptions,
+          workers: 1,
+          tasksPerWorker: 10,
+          pollIntervalMs: 100,
+        },
+        queue: {
+          redis: redisOptions,
+        },
+        runLock: {
+          redis: redisOptions,
+        },
         machines: {
-          "small-1x": {
-            name: "small-1x" as const,
-            cpu: 0.5,
- 
memory: 0.5, - centsPerMs: 0.0001, + defaultMachine: "small-1x", + machines: { + "small-1x": { + name: "small-1x" as const, + cpu: 0.5, + memory: 0.5, + centsPerMs: 0.0001, + }, }, + baseCostInCents: 0.0005, }, - baseCostInCents: 0.0005, - }, - tracer: trace.getTracer("test", "0.0.0"), - }); + tracer: trace.getTracer("test", "0.0.0"), + }); - const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); + const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); - const taskIdentifier = "test-task"; + const taskIdentifier = "test-task"; - //create background worker - await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); + //create background worker + await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); - const queuesManager = new DefaultQueueManager(prisma, engine); + const queuesManager = new DefaultQueueManager(prisma, engine); - const idempotencyKeyConcern = new IdempotencyKeyConcern( - prisma, - engine, - new MockTraceEventConcern() - ); + const idempotencyKeyConcern = new IdempotencyKeyConcern( + prisma, + engine, + new MockTraceEventConcern() + ); - const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const triggerTaskService = new RunEngineTriggerTaskService({ - engine, - prisma, - runNumberIncrementer: new MockRunNumberIncrementer(), - payloadProcessor: new MockPayloadProcessor(), - queueConcern: queuesManager, - idempotencyKeyConcern, - validator: new MockTriggerTaskValidator(), - traceEventConcern: new MockTraceEventConcern(), - runChainStateManager, - tracer: trace.getTracer("test", "0.0.0"), - }); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); - const result = await triggerTaskService.call({ - taskId: taskIdentifier, - environment: authenticatedEnvironment, - body: { payload: { test: "test" } }, - }); + const triggerTaskService = new RunEngineTriggerTaskService({ + engine, + prisma, + runNumberIncrementer: new MockRunNumberIncrementer(), + payloadProcessor: new MockPayloadProcessor(), + queueConcern: queuesManager, + idempotencyKeyConcern, + validator: new MockTriggerTaskValidator(), + traceEventConcern: new MockTraceEventConcern(), + runChainStateManager, + runsDashboardManager, + tracer: trace.getTracer("test", "0.0.0"), + }); + + const result = await triggerTaskService.call({ + taskId: taskIdentifier, + environment: authenticatedEnvironment, + body: { payload: { test: "test" } }, + }); - expect(result).toBeDefined(); - expect(result?.run.friendlyId).toBeDefined(); - expect(result?.run.status).toBe("PENDING"); - expect(result?.isCached).toBe(false); + expect(result).toBeDefined(); + expect(result?.run.friendlyId).toBeDefined(); + expect(result?.run.status).toBe("PENDING"); + expect(result?.isCached).toBe(false); - const run = await prisma.taskRun.findUnique({ - where: { - id: result?.run.id, - }, - }); + const run = await prisma.taskRun.findUnique({ + where: { + id: result?.run.id, + }, + }); + + expect(run).toBeDefined(); + expect(run?.friendlyId).toBe(result?.run.friendlyId); + expect(run?.engine).toBe("V2"); + expect(run?.queuedAt).toBeDefined(); + expect(run?.queue).toBe(`task/${taskIdentifier}`); - expect(run).toBeDefined(); - expect(run?.friendlyId).toBe(result?.run.friendlyId); - expect(run?.engine).toBe("V2"); - 
expect(run?.queuedAt).toBeDefined(); - expect(run?.queue).toBe(`task/${taskIdentifier}`); - - // Lets make sure the task is in the queue - const queueLength = await engine.runQueue.lengthOfQueue( - authenticatedEnvironment, - `task/${taskIdentifier}` - ); - expect(queueLength).toBe(1); - - await engine.quit(); - }); - - containerTest("should handle idempotency keys correctly", async ({ prisma, redisOptions }) => { - const engine = new RunEngine({ - prisma, - worker: { - redis: redisOptions, - workers: 1, - tasksPerWorker: 10, - pollIntervalMs: 100, - }, - queue: { - redis: redisOptions, - }, - runLock: { - redis: redisOptions, - }, - machines: { - defaultMachine: "small-1x", + // Lets make sure the task is in the queue + const queueLength = await engine.runQueue.lengthOfQueue( + authenticatedEnvironment, + `task/${taskIdentifier}` + ); + expect(queueLength).toBe(1); + + await engine.quit(); + } + ); + + containerTest( + "should handle idempotency keys correctly", + async ({ prisma, redisOptions, clickhouseContainer }) => { + const engine = new RunEngine({ + prisma, + worker: { + redis: redisOptions, + workers: 1, + tasksPerWorker: 10, + pollIntervalMs: 100, + }, + queue: { + redis: redisOptions, + }, + runLock: { + redis: redisOptions, + }, machines: { - "small-1x": { - name: "small-1x" as const, - cpu: 0.5, - memory: 0.5, - centsPerMs: 0.0001, + defaultMachine: "small-1x", + machines: { + "small-1x": { + name: "small-1x" as const, + cpu: 0.5, + memory: 0.5, + centsPerMs: 0.0001, + }, }, + baseCostInCents: 0.0005, }, - baseCostInCents: 0.0005, - }, - tracer: trace.getTracer("test", "0.0.0"), - }); + tracer: trace.getTracer("test", "0.0.0"), + }); - const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); + const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); - const taskIdentifier = "test-task"; + const taskIdentifier = "test-task"; - //create background worker - await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); + //create background worker + await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); - const queuesManager = new DefaultQueueManager(prisma, engine); + const queuesManager = new DefaultQueueManager(prisma, engine); - const idempotencyKeyConcern = new IdempotencyKeyConcern( - prisma, - engine, - new MockTraceEventConcern() - ); + const idempotencyKeyConcern = new IdempotencyKeyConcern( + prisma, + engine, + new MockTraceEventConcern() + ); + + const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); - const triggerTaskService = new RunEngineTriggerTaskService({ - engine, - prisma, - runNumberIncrementer: new MockRunNumberIncrementer(), - payloadProcessor: new MockPayloadProcessor(), - queueConcern: queuesManager, - idempotencyKeyConcern, - validator: new MockTriggerTaskValidator(), - traceEventConcern: new MockTraceEventConcern(), - runChainStateManager, - tracer: trace.getTracer("test", "0.0.0"), - }); + const triggerTaskService = new RunEngineTriggerTaskService({ + engine, + prisma, + runNumberIncrementer: new MockRunNumberIncrementer(), + payloadProcessor: new MockPayloadProcessor(), + queueConcern: queuesManager, + idempotencyKeyConcern, + validator: new 
MockTriggerTaskValidator(), + traceEventConcern: new MockTraceEventConcern(), + runChainStateManager, + runsDashboardManager, + tracer: trace.getTracer("test", "0.0.0"), + }); - const result = await triggerTaskService.call({ - taskId: taskIdentifier, - environment: authenticatedEnvironment, - body: { - payload: { test: "test" }, - options: { - idempotencyKey: "test-idempotency-key", + const result = await triggerTaskService.call({ + taskId: taskIdentifier, + environment: authenticatedEnvironment, + body: { + payload: { test: "test" }, + options: { + idempotencyKey: "test-idempotency-key", + }, }, - }, - }); + }); - expect(result).toBeDefined(); - expect(result?.run.friendlyId).toBeDefined(); - expect(result?.run.status).toBe("PENDING"); - expect(result?.isCached).toBe(false); + expect(result).toBeDefined(); + expect(result?.run.friendlyId).toBeDefined(); + expect(result?.run.status).toBe("PENDING"); + expect(result?.isCached).toBe(false); - const run = await prisma.taskRun.findUnique({ - where: { - id: result?.run.id, - }, - }); + const run = await prisma.taskRun.findUnique({ + where: { + id: result?.run.id, + }, + }); + + expect(run).toBeDefined(); + expect(run?.friendlyId).toBe(result?.run.friendlyId); + expect(run?.engine).toBe("V2"); + expect(run?.queuedAt).toBeDefined(); + expect(run?.queue).toBe(`task/${taskIdentifier}`); - expect(run).toBeDefined(); - expect(run?.friendlyId).toBe(result?.run.friendlyId); - expect(run?.engine).toBe("V2"); - expect(run?.queuedAt).toBeDefined(); - expect(run?.queue).toBe(`task/${taskIdentifier}`); - - // Lets make sure the task is in the queue - const queueLength = await engine.runQueue.lengthOfQueue( - authenticatedEnvironment, - `task/${taskIdentifier}` - ); - expect(queueLength).toBe(1); - - // Now lets try to trigger the same task with the same idempotency key - const cachedResult = await triggerTaskService.call({ - taskId: taskIdentifier, - environment: authenticatedEnvironment, - body: { - payload: { test: "test" }, - options: { - idempotencyKey: "test-idempotency-key", + // Lets make sure the task is in the queue + const queueLength = await engine.runQueue.lengthOfQueue( + authenticatedEnvironment, + `task/${taskIdentifier}` + ); + expect(queueLength).toBe(1); + + // Now lets try to trigger the same task with the same idempotency key + const cachedResult = await triggerTaskService.call({ + taskId: taskIdentifier, + environment: authenticatedEnvironment, + body: { + payload: { test: "test" }, + options: { + idempotencyKey: "test-idempotency-key", + }, }, - }, - }); + }); - expect(cachedResult).toBeDefined(); - expect(cachedResult?.run.friendlyId).toBe(result?.run.friendlyId); - expect(cachedResult?.isCached).toBe(true); + expect(cachedResult).toBeDefined(); + expect(cachedResult?.run.friendlyId).toBe(result?.run.friendlyId); + expect(cachedResult?.isCached).toBe(true); - await engine.quit(); - }); + await engine.quit(); + } + ); containerTest( "should resolve queue names correctly when locked to version", - async ({ prisma, redisOptions }) => { + async ({ prisma, redisOptions, clickhouseContainer }) => { const engine = new RunEngine({ prisma, worker: { @@ -390,6 +418,15 @@ describe("RunEngineTriggerTaskService", () => { const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); + const triggerTaskService = new RunEngineTriggerTaskService({ 
engine, prisma, @@ -400,6 +437,7 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, + runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -480,7 +518,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains correctly when release concurrency is enabled", - async ({ prisma, redisOptions }) => { + async ({ prisma, redisOptions, clickhouseContainer }) => { const engine = new RunEngine({ prisma, worker: { @@ -537,6 +575,15 @@ describe("RunEngineTriggerTaskService", () => { const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); + const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -547,6 +594,7 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, + runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -639,7 +687,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains with multiple queues correctly", - async ({ prisma, redisOptions }) => { + async ({ prisma, redisOptions, clickhouseContainer }) => { const engine = new RunEngine({ prisma, worker: { @@ -694,6 +742,15 @@ describe("RunEngineTriggerTaskService", () => { ); const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); + const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -704,6 +761,7 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, + runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -840,7 +898,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains with explicit releaseConcurrency option", - async ({ prisma, redisOptions }) => { + async ({ prisma, redisOptions, clickhouseContainer }) => { const engine = new RunEngine({ prisma, worker: { @@ -895,6 +953,15 @@ describe("RunEngineTriggerTaskService", () => { ); const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); + const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -905,6 +972,7 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, + runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -970,7 +1038,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains when release concurrency is disabled", - async ({ prisma, redisOptions }) => { + async ({ prisma, redisOptions, clickhouseContainer }) => { const engine = new RunEngine({ prisma, worker: { @@ -1025,6 +1093,15 @@ describe("RunEngineTriggerTaskService", () => { ); const 
runChainStateManager = new DefaultRunChainStateManager(prisma, false); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); + const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -1035,6 +1112,7 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, + runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -1099,7 +1177,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains correctly when the parent run queue doesn't have a concurrency limit", - async ({ prisma, redisOptions }) => { + async ({ prisma, redisOptions, clickhouseContainer }) => { const engine = new RunEngine({ prisma, worker: { @@ -1156,6 +1234,15 @@ describe("RunEngineTriggerTaskService", () => { const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const runsDashboardManager = new DefaultRunsDashboardManager( + new RunsDashboardService( + new ClickHouse({ + name: "Test", + url: clickhouseContainer.getConnectionUrl(), + }) + ) + ); + const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -1166,6 +1253,7 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, + runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); diff --git a/internal-packages/clickhouse/package.json b/internal-packages/clickhouse/package.json index 5deb8e32f5..a8109eec5b 100644 --- a/internal-packages/clickhouse/package.json +++ b/internal-packages/clickhouse/package.json @@ -1,9 +1,9 @@ { - "name": "@trigger.dev/clickhouse", + "name": "@internal/clickhouse", "private": true, "version": "0.0.2", - "main": "./dist/index.js", - "types": "./dist/index.d.ts", + "main": "./dist/src/index.js", + "types": "./dist/src/index.d.ts", "type": "module", "dependencies": { "@clickhouse/client": "^1.11.1", @@ -21,8 +21,8 @@ "scripts": { "clean": "rimraf dist", "typecheck": "tsc --noEmit", - "build": "pnpm run clean && tsc --noEmit false --outDir dist --declaration", - "dev": "tsc --noEmit false --outDir dist --declaration --watch", + "build": "pnpm run clean && tsc -p tsconfig.build.json", + "dev": "tsc --watch -p tsconfig.build.json", "db:migrate": "docker compose -p triggerdotdev-docker -f ../../docker/docker-compose.yml up clickhouse_migrator --build", "db:migrate:down": "GOOSE_COMMAND=down pnpm run db:migrate", "test": "vitest --sequence.concurrent=false --no-file-parallelism", diff --git a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql index 88490e13e5..6e820c361f 100644 --- a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql @@ -17,6 +17,9 @@ CREATE TABLE trigger_dev.raw_run_events_v1 ( /* ─── ids & hierarchy ─────────────────────────────────────── */ environment_id String, + environment_type LowCardinality(String), + organization_id String, + project_id String, run_id String, friendly_id String, attempt UInt8 DEFAULT 1, @@ -25,12 +28,22 @@ CREATE TABLE trigger_dev.raw_run_events_v1 engine Enum8('V1'=1,'V2'=2) CODEC(T64, LZ4), status Enum8( -- TaskRunStatus - 
'DELAYED'=1,'PENDING'=2,'PENDING_VERSION'=3, - 'WAITING_FOR_DEPLOY'=4,'WAITING_FOR_EVENT'=5, - 'RUNNING'=6,'WAITING'=7,'PAUSED'=8, - /* final */ 'COMPLETED_SUCCESSFULLY'=20,'FAILED'=21, - 'CANCELED'=22,'INTERRUPTED'=23,'CRASHED'=24, - 'EXPIRED'=25,'TIMED_OUT'=26), + 'DELAYED'=1, + 'PENDING'=2, + 'PENDING_VERSION'=3, + 'WAITING_FOR_DEPLOY'=4, + 'EXECUTING'=5, + 'WAITING_TO_RESUME'=6, + 'RETRYING_AFTER_FAILURE'=7, + 'PAUSED'=8, + 'CANCELED'=9, + 'INTERRUPTED'=10, + 'COMPLETED_SUCCESSFULLY'=11, + 'COMPLETED_WITH_ERRORS'=12, + 'SYSTEM_FAILURE'=13, + 'CRASHED'=14, + 'EXPIRED'=15, + 'TIMED_OUT'=16), /* ─── queue / concurrency / schedule ─────────────────────── */ task_identifier String, @@ -66,6 +79,7 @@ CREATE TABLE trigger_dev.raw_run_events_v1 /* ─── cost / usage ───────────────────────────────────────── */ usage_duration_ms UInt32 DEFAULT 0, cost_in_cents Float64 DEFAULT 0, + base_cost_in_cents Float64 DEFAULT 0, /* ─── payload & context ──────────────────────────────────── */ payload Nullable(JSON(max_dynamic_paths = 2048)), @@ -82,8 +96,8 @@ CREATE TABLE trigger_dev.raw_run_events_v1 is_test Nullable(UInt8) DEFAULT 0, ) ENGINE = MergeTree -PARTITION BY toYYYYMM(event_time) -ORDER BY (environment_id, event_time, run_id) +PARTITION BY toYYYYMMDD(event_time) +ORDER BY (organization_id, project_id, environment_id, event_time, run_id) SETTINGS index_granularity = 8192, vertical_merge_algorithm_min_rows_to_activate = 100000; diff --git a/internal-packages/clickhouse/src/client/client.ts b/internal-packages/clickhouse/src/client/client.ts index f2cd5f9151..d86e5a68ba 100644 --- a/internal-packages/clickhouse/src/client/client.ts +++ b/internal-packages/clickhouse/src/client/client.ts @@ -15,21 +15,25 @@ import type { ClickhouseWriter, } from "./types.js"; import { generateErrorMessage } from "zod-error"; +import { Logger } from "@trigger.dev/core/logger"; export type ClickhouseConfig = { name: string; url: string; tracer?: Tracer; clickhouseSettings?: ClickHouseSettings; + logger?: Logger; }; export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter { private readonly client: ClickHouseClient; private readonly tracer: Tracer; private readonly name: string; + private readonly logger: Logger; constructor(config: ClickhouseConfig) { this.name = config.name; + this.logger = config.logger ?? 
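
The schema change above tightens the sort key to (organization_id, project_id, environment_id, event_time, run_id) and moves partitioning from monthly to daily. Dashboard-style queries that pin an org, project, and environment now prefix-match the primary key and prune to at most a few daily partitions. A representative query shape (illustrative only; it uses just the columns visible in the migration):

// Prefix-matches the new ORDER BY, so ClickHouse reads a narrow key range
// and only the partitions covering the last day.
const recentRunsQuery = `
  SELECT run_id, status, event_time
  FROM trigger_dev.raw_run_events_v1
  WHERE organization_id = {orgId: String}
    AND project_id = {projectId: String}
    AND environment_id = {envId: String}
    AND event_time >= now() - INTERVAL 1 DAY
  ORDER BY event_time DESC
  LIMIT 100
`;
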
new Logger("ClickhouseClient", "debug"); this.client = createClient({ url: config.url, @@ -86,6 +90,13 @@ export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter { if (validParams?.error) { recordSpanError(span, validParams.error); + this.logger.error("Error parsing query params", { + name: req.name, + error: validParams.error, + query: req.query, + params, + }); + return [ new QueryError(`Bad params: ${generateErrorMessage(validParams.error.issues)}`, { query: req.query, @@ -110,6 +121,13 @@ export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter { ); if (clickhouseError) { + this.logger.error("Error querying clickhouse", { + name: req.name, + error: clickhouseError, + query: req.query, + params, + }); + recordClickhouseError(span, clickhouseError); return [ @@ -138,6 +156,13 @@ export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter { const parsed = z.array(req.schema).safeParse(unparsedRows); if (parsed.error) { + this.logger.error("Error parsing clickhouse query result", { + name: req.name, + error: parsed.error, + query: req.query, + params, + }); + const queryError = new QueryError(generateErrorMessage(parsed.error.issues), { query: req.query, }); @@ -179,6 +204,12 @@ export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter { : req.schema.safeParse(events); if (!v.success) { + this.logger.error("Error validating insert events", { + name: req.name, + table: req.table, + error: v.error, + }); + const error = new InsertError(generateErrorMessage(v.error.issues)); recordSpanError(span, error); @@ -202,6 +233,12 @@ export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter { ); if (clickhouseError) { + this.logger.error("Error inserting into clickhouse", { + name: req.name, + error: clickhouseError, + table: req.table, + }); + recordClickhouseError(span, clickhouseError); return [new InsertError(clickhouseError.message), null]; diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index b281f4bebe..f3de3aded6 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -3,6 +3,7 @@ import { ClickhouseClient } from "./client/client.js"; import { ClickhouseReader, ClickhouseWriter } from "./client/types.js"; import { NoopClient } from "./client/noop.js"; import { insertRunEvents } from "./runEvents.js"; +import { Logger } from "@trigger.dev/core/logger"; export type ClickHouseConfig = | { @@ -11,6 +12,7 @@ export type ClickHouseConfig = writerUrl?: never; readerUrl?: never; clickhouseSettings?: ClickHouseSettings; + logger?: Logger; } | { name?: never; @@ -20,18 +22,28 @@ export type ClickHouseConfig = readerName?: string; readerUrl: string; clickhouseSettings?: ClickHouseSettings; + logger?: Logger; }; export class ClickHouse { public readonly reader: ClickhouseReader; public readonly writer: ClickhouseWriter; + private readonly logger: Logger; constructor(config: ClickHouseConfig) { + this.logger = config.logger ?? new Logger("ClickHouse", "debug"); + if (config.url) { + const url = new URL(config.url); + url.password = "redacted"; + + this.logger.info("🏠 Initializing ClickHouse client with url", { url: url.toString() }); + const client = new ClickhouseClient({ name: config.name ?? "clickhouse", url: config.url, clickhouseSettings: config.clickhouseSettings, + logger: this.logger, }); this.reader = client; this.writer = client; @@ -40,11 +52,13 @@ export class ClickHouse { name: config.readerName ?? 
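
All of the logging added above sits on the client's error paths, and none of those paths throw: query and insert both resolve to an [error, result] tuple ([QueryError, null], [InsertError, null], or [null, value]). A sketch of the calling convention this implies — queryRunEvents is a stand-in name, not an export from the package:

// Errors are returned, not thrown, so callers branch on the first tuple slot.
const [queryError, rows] = await queryRunEvents({ runId: "run_1234" });

if (queryError) {
  // QueryError carries the offending SQL, which is what the new logs surface too.
  console.error("run events query failed", queryError);
} else {
  // On the success path, rows has already passed the zod schema check.
  for (const row of rows) {
    // ...consume validated rows
  }
}
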
"clickhouse-reader", url: config.readerUrl, clickhouseSettings: config.clickhouseSettings, + logger: this.logger, }); this.writer = new ClickhouseClient({ name: config.writerName ?? "clickhouse-writer", url: config.writerUrl, clickhouseSettings: config.clickhouseSettings, + logger: this.logger, }); } else { this.reader = new NoopClient(); diff --git a/internal-packages/clickhouse/src/runEvents.test.ts b/internal-packages/clickhouse/src/runEvents.test.ts index fec990cd46..6ab2ef7a0a 100644 --- a/internal-packages/clickhouse/src/runEvents.test.ts +++ b/internal-packages/clickhouse/src/runEvents.test.ts @@ -17,6 +17,9 @@ describe("Run Events", () => { const [insertError, insertResult] = await insert([ { environment_id: "env_1234", + environment_type: "DEVELOPMENT", + organization_id: "org_1234", + project_id: "project_1234", run_id: "run_1234", friendly_id: "friendly_1234", attempt: 1, @@ -43,7 +46,6 @@ describe("Run Events", () => { message: "error", stackTrace: "stack trace", }, - duration_ms: 1000, usage_duration_ms: 1000, cost_in_cents: 100, task_version: "1.0.0", diff --git a/internal-packages/clickhouse/src/runEvents.ts b/internal-packages/clickhouse/src/runEvents.ts index 06bdde59db..0d73cf3cdf 100644 --- a/internal-packages/clickhouse/src/runEvents.ts +++ b/internal-packages/clickhouse/src/runEvents.ts @@ -5,6 +5,9 @@ import { TaskRunError } from "@trigger.dev/core/v3/schemas"; export const RawRunEventV1 = z.object({ environment_id: z.string(), + environment_type: z.string(), + organization_id: z.string(), + project_id: z.string(), run_id: z.string(), friendly_id: z.string(), attempt: z.number().int().default(1), @@ -14,14 +17,15 @@ export const RawRunEventV1 = z.object({ "PENDING", "PENDING_VERSION", "WAITING_FOR_DEPLOY", - "WAITING_FOR_EVENT", - "RUNNING", - "WAITING", + "EXECUTING", + "WAITING_TO_RESUME", + "RETRYING_AFTER_FAILURE", "PAUSED", - "COMPLETED_SUCCESSFULLY", - "FAILED", "CANCELED", "INTERRUPTED", + "COMPLETED_SUCCESSFULLY", + "COMPLETED_WITH_ERRORS", + "SYSTEM_FAILURE", "CRASHED", "EXPIRED", "TIMED_OUT", @@ -36,13 +40,12 @@ export const RawRunEventV1 = z.object({ completed_at: z.number().int().optional(), started_at: z.number().int().optional(), executed_at: z.number().int().optional(), - finished_at: z.number().int().optional(), delay_until: z.number().int().optional(), queued_at: z.number().int().optional(), expired_at: z.number().int().optional(), - duration_ms: z.number().int().optional(), usage_duration_ms: z.number().int().optional(), - cost_in_cents: z.number().int().optional(), + cost_in_cents: z.number().optional(), + base_cost_in_cents: z.number().optional(), payload: z.unknown().optional(), output: z.unknown().optional(), error: TaskRunError.optional(), diff --git a/internal-packages/clickhouse/tsconfig.build.json b/internal-packages/clickhouse/tsconfig.build.json new file mode 100644 index 0000000000..619461da80 --- /dev/null +++ b/internal-packages/clickhouse/tsconfig.build.json @@ -0,0 +1,21 @@ +{ + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.test.ts"], + "compilerOptions": { + "composite": true, + "target": "ES2019", + "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", + "moduleDetection": "force", + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "preserveWatchOutput": true, + "skipLibCheck": true, + "strict": true, + "declaration": true + } +} diff --git 
a/internal-packages/run-engine/src/engine/index.ts b/internal-packages/run-engine/src/engine/index.ts index 9361a4d366..37ef0aff49 100644 --- a/internal-packages/run-engine/src/engine/index.ts +++ b/internal-packages/run-engine/src/engine/index.ts @@ -28,11 +28,7 @@ import { FairQueueSelectionStrategy } from "../run-queue/fairQueueSelectionStrat import { RunQueue } from "../run-queue/index.js"; import { RunQueueFullKeyProducer } from "../run-queue/keyProducer.js"; import { MinimalAuthenticatedEnvironment } from "../shared/index.js"; -import { - NotImplementedError, - RunDuplicateIdempotencyKeyError, - ServiceValidationError, -} from "./errors.js"; +import { NotImplementedError, RunDuplicateIdempotencyKeyError } from "./errors.js"; import { EventBus, EventBusEvents } from "./eventBus.js"; import { RunLocker } from "./locking.js"; import { BatchSystem } from "./systems/batchSystem.js"; diff --git a/internal-packages/testcontainers/src/index.ts b/internal-packages/testcontainers/src/index.ts index 913b6d70dc..a232cde5aa 100644 --- a/internal-packages/testcontainers/src/index.ts +++ b/internal-packages/testcontainers/src/index.ts @@ -35,7 +35,7 @@ type ElectricContext = { electricOrigin: string; }; -type ContainerContext = NetworkContext & PostgresContext & RedisContext; +type ContainerContext = NetworkContext & PostgresContext & RedisContext & ClickhouseContext; type ContainerWithElectricAndRedisContext = ContainerContext & ElectricContext; type ContainerWithElectricContext = NetworkContext & PostgresContext & ElectricContext; @@ -212,6 +212,8 @@ export const containerTest = test.extend({ prisma, redisContainer, redisOptions, + clickhouseContainer, + clickhouseClient, }); export const containerWithElectricTest = test.extend({ @@ -228,4 +230,6 @@ export const containerWithElectricAndRedisTest = test.extend Date: Mon, 28 Apr 2025 17:52:01 +0100 Subject: [PATCH 05/33] Create a new run engine event bus event for the runs dashboard to hook into --- .../services/runsDashboardInstance.server.ts | 10 + .../app/v3/services/triggerTaskV1.server.ts | 3 + apps/webapp/test/engine/triggerTask.test.ts | 447 +++++++----------- .../run-engine/src/engine/eventBus.ts | 11 +- .../run-engine/src/engine/index.ts | 6 + 5 files changed, 208 insertions(+), 269 deletions(-) diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts index 64933763eb..dcc4f6c517 100644 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ b/apps/webapp/app/services/runsDashboardInstance.server.ts @@ -7,6 +7,8 @@ import { } from "./runsDashboardService.server"; import { EventEmitter } from "node:events"; import { RuntimeEnvironmentType, TaskRun } from "@trigger.dev/database"; +import { engine } from "~/v3/runEngine.server"; +import { logger } from "./logger.server"; const runDashboardEventBus: RunDashboardEventBus = new EventEmitter(); @@ -32,5 +34,13 @@ export const runsDashboard = singleton("runsDashboard", () => { await service.upsertRun(event.run, event.environment.type, event.organization.id); }); + engine.eventBus.on("runStatusChanged", async (event) => { + logger.debug("RunDashboard: runStatusChanged", { + event, + }); + + await service.upsertRun(event.run, event.environment.type, event.environment.organization.id); + }); + return service; }); diff --git a/apps/webapp/app/v3/services/triggerTaskV1.server.ts b/apps/webapp/app/v3/services/triggerTaskV1.server.ts index 16038f9ddf..e17e5e74e2 100644 --- 
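
With clickhouseContainer and clickhouseClient added to the containerTest fixtures, a test opts in simply by destructuring them. A minimal sketch of the fixture contract, assuming clickhouseClient is a ready @clickhouse/client instance bound to the started container:

containerTest(
  "can reach the clickhouse container",
  async ({ clickhouseContainer, clickhouseClient }) => {
    // The container fixture exposes a URL with credentials baked in.
    const url = clickhouseContainer.getConnectionUrl();

    // The client fixture is already connected; a trivial query proves liveness.
    const result = await clickhouseClient.query({ query: "SELECT 1", format: "JSONEachRow" });
    expect(await result.json()).toEqual([{ "1": 1 }]);
  }
);
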
a/apps/webapp/app/v3/services/triggerTaskV1.server.ts +++ b/apps/webapp/app/v3/services/triggerTaskV1.server.ts @@ -43,6 +43,7 @@ import { import { getTaskEventStore } from "../taskEventStore.server"; import { enqueueRun } from "./enqueueRun.server"; import { z } from "zod"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; // This is here for backwords compatibility for v3 users const QueueOptions = z.object({ @@ -601,6 +602,8 @@ export class TriggerTaskServiceV1 extends BaseService { return; } + emitRunStatusUpdate(run, environment); + return { run, isCached: result?.isCached, diff --git a/apps/webapp/test/engine/triggerTask.test.ts b/apps/webapp/test/engine/triggerTask.test.ts index 9be547fb60..adde07a435 100644 --- a/apps/webapp/test/engine/triggerTask.test.ts +++ b/apps/webapp/test/engine/triggerTask.test.ts @@ -32,7 +32,6 @@ import { ValidationResult, } from "~/runEngine/types"; import { RunEngineTriggerTaskService } from "../../app/runEngine/services/triggerTask.server"; -import { DefaultRunsDashboardManager } from "~/runEngine/concerns/runsDashboard.server"; import { ClickHouse } from "@internal/clickhouse"; import { RunsDashboardService } from "~/services/runsDashboardService.server"; @@ -108,242 +107,216 @@ class MockTraceEventConcern implements TraceEventConcern { } describe("RunEngineTriggerTaskService", () => { - containerTest( - "should trigger a task with minimal options", - async ({ prisma, redisOptions, clickhouseContainer }) => { - const engine = new RunEngine({ - prisma, - worker: { - redis: redisOptions, - workers: 1, - tasksPerWorker: 10, - pollIntervalMs: 100, - }, - queue: { - redis: redisOptions, - }, - runLock: { - redis: redisOptions, - }, + containerTest("should trigger a task with minimal options", async ({ prisma, redisOptions }) => { + const engine = new RunEngine({ + prisma, + worker: { + redis: redisOptions, + workers: 1, + tasksPerWorker: 10, + pollIntervalMs: 100, + }, + queue: { + redis: redisOptions, + }, + runLock: { + redis: redisOptions, + }, + machines: { + defaultMachine: "small-1x", machines: { - defaultMachine: "small-1x", - machines: { - "small-1x": { - name: "small-1x" as const, - cpu: 0.5, - memory: 0.5, - centsPerMs: 0.0001, - }, + "small-1x": { + name: "small-1x" as const, + cpu: 0.5, + memory: 0.5, + centsPerMs: 0.0001, }, - baseCostInCents: 0.0005, }, - tracer: trace.getTracer("test", "0.0.0"), - }); + baseCostInCents: 0.0005, + }, + tracer: trace.getTracer("test", "0.0.0"), + }); - const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); + const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); - const taskIdentifier = "test-task"; + const taskIdentifier = "test-task"; - //create background worker - await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); - - const queuesManager = new DefaultQueueManager(prisma, engine); + //create background worker + await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); - const idempotencyKeyConcern = new IdempotencyKeyConcern( - prisma, - engine, - new MockTraceEventConcern() - ); + const queuesManager = new DefaultQueueManager(prisma, engine); - const runChainStateManager = new DefaultRunChainStateManager(prisma, true); + const idempotencyKeyConcern = new IdempotencyKeyConcern( + prisma, + engine, + new MockTraceEventConcern() + ); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - 
url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); + const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const triggerTaskService = new RunEngineTriggerTaskService({ - engine, - prisma, - runNumberIncrementer: new MockRunNumberIncrementer(), - payloadProcessor: new MockPayloadProcessor(), - queueConcern: queuesManager, - idempotencyKeyConcern, - validator: new MockTriggerTaskValidator(), - traceEventConcern: new MockTraceEventConcern(), - runChainStateManager, - runsDashboardManager, - tracer: trace.getTracer("test", "0.0.0"), - }); - - const result = await triggerTaskService.call({ - taskId: taskIdentifier, - environment: authenticatedEnvironment, - body: { payload: { test: "test" } }, - }); - - expect(result).toBeDefined(); - expect(result?.run.friendlyId).toBeDefined(); - expect(result?.run.status).toBe("PENDING"); - expect(result?.isCached).toBe(false); - - const run = await prisma.taskRun.findUnique({ - where: { - id: result?.run.id, - }, - }); + const triggerTaskService = new RunEngineTriggerTaskService({ + engine, + prisma, + runNumberIncrementer: new MockRunNumberIncrementer(), + payloadProcessor: new MockPayloadProcessor(), + queueConcern: queuesManager, + idempotencyKeyConcern, + validator: new MockTriggerTaskValidator(), + traceEventConcern: new MockTraceEventConcern(), + runChainStateManager, + tracer: trace.getTracer("test", "0.0.0"), + }); - expect(run).toBeDefined(); - expect(run?.friendlyId).toBe(result?.run.friendlyId); - expect(run?.engine).toBe("V2"); - expect(run?.queuedAt).toBeDefined(); - expect(run?.queue).toBe(`task/${taskIdentifier}`); + const result = await triggerTaskService.call({ + taskId: taskIdentifier, + environment: authenticatedEnvironment, + body: { payload: { test: "test" } }, + }); - // Lets make sure the task is in the queue - const queueLength = await engine.runQueue.lengthOfQueue( - authenticatedEnvironment, - `task/${taskIdentifier}` - ); - expect(queueLength).toBe(1); + expect(result).toBeDefined(); + expect(result?.run.friendlyId).toBeDefined(); + expect(result?.run.status).toBe("PENDING"); + expect(result?.isCached).toBe(false); - await engine.quit(); - } - ); + const run = await prisma.taskRun.findUnique({ + where: { + id: result?.run.id, + }, + }); - containerTest( - "should handle idempotency keys correctly", - async ({ prisma, redisOptions, clickhouseContainer }) => { - const engine = new RunEngine({ - prisma, - worker: { - redis: redisOptions, - workers: 1, - tasksPerWorker: 10, - pollIntervalMs: 100, - }, - queue: { - redis: redisOptions, - }, - runLock: { - redis: redisOptions, - }, + expect(run).toBeDefined(); + expect(run?.friendlyId).toBe(result?.run.friendlyId); + expect(run?.engine).toBe("V2"); + expect(run?.queuedAt).toBeDefined(); + expect(run?.queue).toBe(`task/${taskIdentifier}`); + + // Lets make sure the task is in the queue + const queueLength = await engine.runQueue.lengthOfQueue( + authenticatedEnvironment, + `task/${taskIdentifier}` + ); + expect(queueLength).toBe(1); + + await engine.quit(); + }); + + containerTest("should handle idempotency keys correctly", async ({ prisma, redisOptions }) => { + const engine = new RunEngine({ + prisma, + worker: { + redis: redisOptions, + workers: 1, + tasksPerWorker: 10, + pollIntervalMs: 100, + }, + queue: { + redis: redisOptions, + }, + runLock: { + redis: redisOptions, + }, + machines: { + defaultMachine: "small-1x", machines: { - defaultMachine: "small-1x", - machines: { - "small-1x": { - name: "small-1x" as const, - cpu: 0.5, - memory: 0.5, - 
centsPerMs: 0.0001, - }, + "small-1x": { + name: "small-1x" as const, + cpu: 0.5, + memory: 0.5, + centsPerMs: 0.0001, }, - baseCostInCents: 0.0005, }, - tracer: trace.getTracer("test", "0.0.0"), - }); + baseCostInCents: 0.0005, + }, + tracer: trace.getTracer("test", "0.0.0"), + }); - const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); + const authenticatedEnvironment = await setupAuthenticatedEnvironment(prisma, "PRODUCTION"); - const taskIdentifier = "test-task"; + const taskIdentifier = "test-task"; - //create background worker - await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); + //create background worker + await setupBackgroundWorker(engine, authenticatedEnvironment, taskIdentifier); - const queuesManager = new DefaultQueueManager(prisma, engine); + const queuesManager = new DefaultQueueManager(prisma, engine); - const idempotencyKeyConcern = new IdempotencyKeyConcern( - prisma, - engine, - new MockTraceEventConcern() - ); - - const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); + const idempotencyKeyConcern = new IdempotencyKeyConcern( + prisma, + engine, + new MockTraceEventConcern() + ); - const triggerTaskService = new RunEngineTriggerTaskService({ - engine, - prisma, - runNumberIncrementer: new MockRunNumberIncrementer(), - payloadProcessor: new MockPayloadProcessor(), - queueConcern: queuesManager, - idempotencyKeyConcern, - validator: new MockTriggerTaskValidator(), - traceEventConcern: new MockTraceEventConcern(), - runChainStateManager, - runsDashboardManager, - tracer: trace.getTracer("test", "0.0.0"), - }); + const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const result = await triggerTaskService.call({ - taskId: taskIdentifier, - environment: authenticatedEnvironment, - body: { - payload: { test: "test" }, - options: { - idempotencyKey: "test-idempotency-key", - }, - }, - }); - - expect(result).toBeDefined(); - expect(result?.run.friendlyId).toBeDefined(); - expect(result?.run.status).toBe("PENDING"); - expect(result?.isCached).toBe(false); + const triggerTaskService = new RunEngineTriggerTaskService({ + engine, + prisma, + runNumberIncrementer: new MockRunNumberIncrementer(), + payloadProcessor: new MockPayloadProcessor(), + queueConcern: queuesManager, + idempotencyKeyConcern, + validator: new MockTriggerTaskValidator(), + traceEventConcern: new MockTraceEventConcern(), + runChainStateManager, + tracer: trace.getTracer("test", "0.0.0"), + }); - const run = await prisma.taskRun.findUnique({ - where: { - id: result?.run.id, + const result = await triggerTaskService.call({ + taskId: taskIdentifier, + environment: authenticatedEnvironment, + body: { + payload: { test: "test" }, + options: { + idempotencyKey: "test-idempotency-key", }, - }); + }, + }); - expect(run).toBeDefined(); - expect(run?.friendlyId).toBe(result?.run.friendlyId); - expect(run?.engine).toBe("V2"); - expect(run?.queuedAt).toBeDefined(); - expect(run?.queue).toBe(`task/${taskIdentifier}`); + expect(result).toBeDefined(); + expect(result?.run.friendlyId).toBeDefined(); + expect(result?.run.status).toBe("PENDING"); + expect(result?.isCached).toBe(false); - // Lets make sure the task is in the queue - const queueLength = await engine.runQueue.lengthOfQueue( - authenticatedEnvironment, - 
`task/${taskIdentifier}` - ); - expect(queueLength).toBe(1); + const run = await prisma.taskRun.findUnique({ + where: { + id: result?.run.id, + }, + }); - // Now lets try to trigger the same task with the same idempotency key - const cachedResult = await triggerTaskService.call({ - taskId: taskIdentifier, - environment: authenticatedEnvironment, - body: { - payload: { test: "test" }, - options: { - idempotencyKey: "test-idempotency-key", - }, + expect(run).toBeDefined(); + expect(run?.friendlyId).toBe(result?.run.friendlyId); + expect(run?.engine).toBe("V2"); + expect(run?.queuedAt).toBeDefined(); + expect(run?.queue).toBe(`task/${taskIdentifier}`); + + // Lets make sure the task is in the queue + const queueLength = await engine.runQueue.lengthOfQueue( + authenticatedEnvironment, + `task/${taskIdentifier}` + ); + expect(queueLength).toBe(1); + + // Now lets try to trigger the same task with the same idempotency key + const cachedResult = await triggerTaskService.call({ + taskId: taskIdentifier, + environment: authenticatedEnvironment, + body: { + payload: { test: "test" }, + options: { + idempotencyKey: "test-idempotency-key", }, - }); + }, + }); - expect(cachedResult).toBeDefined(); - expect(cachedResult?.run.friendlyId).toBe(result?.run.friendlyId); - expect(cachedResult?.isCached).toBe(true); + expect(cachedResult).toBeDefined(); + expect(cachedResult?.run.friendlyId).toBe(result?.run.friendlyId); + expect(cachedResult?.isCached).toBe(true); - await engine.quit(); - } - ); + await engine.quit(); + }); containerTest( "should resolve queue names correctly when locked to version", - async ({ prisma, redisOptions, clickhouseContainer }) => { + async ({ prisma, redisOptions }) => { const engine = new RunEngine({ prisma, worker: { @@ -418,15 +391,6 @@ describe("RunEngineTriggerTaskService", () => { const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); - const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -437,7 +401,6 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, - runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -518,7 +481,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains correctly when release concurrency is enabled", - async ({ prisma, redisOptions, clickhouseContainer }) => { + async ({ prisma, redisOptions }) => { const engine = new RunEngine({ prisma, worker: { @@ -575,15 +538,6 @@ describe("RunEngineTriggerTaskService", () => { const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); - const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -594,7 +548,6 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, - runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -687,7 +640,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains with multiple queues correctly", - async ({ prisma, 
redisOptions, clickhouseContainer }) => { + async ({ prisma, redisOptions }) => { const engine = new RunEngine({ prisma, worker: { @@ -742,15 +695,6 @@ describe("RunEngineTriggerTaskService", () => { ); const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); - const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -761,7 +705,6 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, - runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -898,7 +841,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains with explicit releaseConcurrency option", - async ({ prisma, redisOptions, clickhouseContainer }) => { + async ({ prisma, redisOptions }) => { const engine = new RunEngine({ prisma, worker: { @@ -953,15 +896,6 @@ describe("RunEngineTriggerTaskService", () => { ); const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); - const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -972,7 +906,6 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, - runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -1038,7 +971,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains when release concurrency is disabled", - async ({ prisma, redisOptions, clickhouseContainer }) => { + async ({ prisma, redisOptions }) => { const engine = new RunEngine({ prisma, worker: { @@ -1093,15 +1026,6 @@ describe("RunEngineTriggerTaskService", () => { ); const runChainStateManager = new DefaultRunChainStateManager(prisma, false); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); - const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -1112,7 +1036,6 @@ describe("RunEngineTriggerTaskService", () => { validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, - runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); @@ -1177,7 +1100,7 @@ describe("RunEngineTriggerTaskService", () => { containerTest( "should handle run chains correctly when the parent run queue doesn't have a concurrency limit", - async ({ prisma, redisOptions, clickhouseContainer }) => { + async ({ prisma, redisOptions }) => { const engine = new RunEngine({ prisma, worker: { @@ -1234,15 +1157,6 @@ describe("RunEngineTriggerTaskService", () => { const runChainStateManager = new DefaultRunChainStateManager(prisma, true); - const runsDashboardManager = new DefaultRunsDashboardManager( - new RunsDashboardService( - new ClickHouse({ - name: "Test", - url: clickhouseContainer.getConnectionUrl(), - }) - ) - ); - const triggerTaskService = new RunEngineTriggerTaskService({ engine, prisma, @@ -1253,7 +1167,6 @@ describe("RunEngineTriggerTaskService", () => { 
validator: new MockTriggerTaskValidator(), traceEventConcern: new MockTraceEventConcern(), runChainStateManager, - runsDashboardManager, tracer: trace.getTracer("test", "0.0.0"), }); diff --git a/internal-packages/run-engine/src/engine/eventBus.ts b/internal-packages/run-engine/src/engine/eventBus.ts index 5662cae00c..44de5127c6 100644 --- a/internal-packages/run-engine/src/engine/eventBus.ts +++ b/internal-packages/run-engine/src/engine/eventBus.ts @@ -1,9 +1,16 @@ -import { TaskRunExecutionStatus, TaskRunStatus } from "@trigger.dev/database"; -import { AuthenticatedEnvironment } from "../shared/index.js"; +import { TaskRun, TaskRunExecutionStatus, TaskRunStatus } from "@trigger.dev/database"; +import { AuthenticatedEnvironment, MinimalAuthenticatedEnvironment } from "../shared/index.js"; import { FlushedRunMetadata, TaskRunError } from "@trigger.dev/core/v3"; import { EventEmitter } from "events"; export type EventBusEvents = { + runStatusChanged: [ + { + time: Date; + run: TaskRun; + environment: MinimalAuthenticatedEnvironment; + }, + ]; runAttemptStarted: [ { time: Date; diff --git a/internal-packages/run-engine/src/engine/index.ts b/internal-packages/run-engine/src/engine/index.ts index 37ef0aff49..ea24d73a83 100644 --- a/internal-packages/run-engine/src/engine/index.ts +++ b/internal-packages/run-engine/src/engine/index.ts @@ -548,6 +548,12 @@ export class RunEngine { } }); + this.eventBus.emit("runStatusChanged", { + time: new Date(), + run: taskRun, + environment, + }); + return taskRun; }, { From 3349897e1d529b5ce74fd5affe0d196c4931da8a Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 28 Apr 2025 22:47:12 +0100 Subject: [PATCH 06/33] Track run events in the run engine --- .../services/runsDashboardInstance.server.ts | 64 +++++++++++++------ .../services/runsDashboardService.server.ts | 12 ++-- .../app/v3/services/triggerTaskV1.server.ts | 2 + .../migration.sql | 4 ++ .../database/prisma/schema.prisma | 4 ++ .../run-engine/src/engine/eventBus.ts | 3 +- .../run-engine/src/engine/index.ts | 5 +- .../src/engine/systems/checkpointSystem.ts | 10 +++ .../src/engine/systems/delayedRunSystem.ts | 10 +++ .../src/engine/systems/dequeueSystem.ts | 10 +++ .../engine/systems/pendingVersionSystem.ts | 5 ++ .../src/engine/systems/runAttemptSystem.ts | 25 ++++++++ .../src/engine/systems/ttlSystem.ts | 14 ++-- 13 files changed, 131 insertions(+), 37 deletions(-) create mode 100644 internal-packages/database/prisma/migrations/20250428211853_add_environment_type_and_org_id_to_task_run/migration.sql diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts index dcc4f6c517..df409e1fc6 100644 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ b/apps/webapp/app/services/runsDashboardInstance.server.ts @@ -1,27 +1,21 @@ -import { singleton } from "~/utils/singleton"; import { ClickHouse } from "@internal/clickhouse"; +import { EventEmitter } from "node:events"; +import { prisma } from "~/db.server"; +import { singleton } from "~/utils/singleton"; +import { engine } from "~/v3/runEngine.server"; +import { logger } from "./logger.server"; import { RunDashboardEventBus, RunDashboardEvents, RunsDashboardService, } from "./runsDashboardService.server"; -import { EventEmitter } from "node:events"; -import { RuntimeEnvironmentType, TaskRun } from "@trigger.dev/database"; -import { engine } from "~/v3/runEngine.server"; -import { logger } from "./logger.server"; const runDashboardEventBus: RunDashboardEventBus = new 
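
The EventBusEvents type maps each event name to the listener's argument tuple, which is exactly the shape Node's generic EventEmitter accepts, so both on and emit are type-checked. A reduced sketch with only the one event; it assumes a recent @types/node where EventEmitter takes an event-map type parameter:

import { EventEmitter } from "node:events";

// Event name -> tuple of listener arguments, as in EventBusEvents.
type Events = {
  runStatusChanged: [{ time: Date; runId: string }];
};

const bus = new EventEmitter<Events>();

bus.on("runStatusChanged", ({ time, runId }) => {
  // Both fields are typed; a typo'd event name fails to compile.
});

bus.emit("runStatusChanged", { time: new Date(), runId: "run_1234" });
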
EventEmitter(); -export type TaskRunStatusUpdateEnvironment = { - type: RuntimeEnvironmentType; - organizationId: string; -}; - -export function emitRunStatusUpdate(run: TaskRun, environment: TaskRunStatusUpdateEnvironment) { +export function emitRunStatusUpdate(runId: string) { runDashboardEventBus.emit("runStatusUpdate", { - run, - environment, - organization: { id: environment.organizationId }, + time: new Date(), + runId, }); } @@ -31,16 +25,46 @@ export const runsDashboard = singleton("runsDashboard", () => { const service = new RunsDashboardService(clickhouse); runDashboardEventBus.on("runStatusUpdate", async (event) => { - await service.upsertRun(event.run, event.environment.type, event.organization.id); + await upsertRun(event.time, event.runId, service); }); engine.eventBus.on("runStatusChanged", async (event) => { - logger.debug("RunDashboard: runStatusChanged", { - event, - }); - - await service.upsertRun(event.run, event.environment.type, event.environment.organization.id); + await upsertRun(event.time, event.runId, service); }); return service; }); + +async function upsertRun(time: Date, runId: string, service: RunsDashboardService) { + const run = await prisma.taskRun.findFirst({ + where: { + id: runId, + }, + }); + + if (!run) { + logger.error("RunDashboard: upsertRun: run not found", { + runId, + }); + + return; + } + + if (!run.environmentType) { + logger.error("RunDashboard: upsertRun: run environment type not found", { + runId, + }); + + return; + } + + if (!run.organizationId) { + logger.error("RunDashboard: upsertRun: run organization id not found", { + runId, + }); + + return; + } + + await service.upsertRun(time, run, run.environmentType, run.organizationId); +} diff --git a/apps/webapp/app/services/runsDashboardService.server.ts b/apps/webapp/app/services/runsDashboardService.server.ts index 2c821e82b4..c22709321d 100644 --- a/apps/webapp/app/services/runsDashboardService.server.ts +++ b/apps/webapp/app/services/runsDashboardService.server.ts @@ -13,6 +13,7 @@ export class RunsDashboardService { }); async upsertRun( + eventTime: Date, taskRun: TaskRun, environmentType: RuntimeEnvironmentType, organizationId: string @@ -36,7 +37,7 @@ export class RunsDashboardService { queue: taskRun.queue, schedule_id: taskRun.scheduleId ?? undefined, batch_id: taskRun.batchId ?? undefined, - event_time: Date.now(), + event_time: eventTime.getTime(), created_at: taskRun.createdAt.getTime(), updated_at: taskRun.updatedAt.getTime(), completed_at: taskRun.completedAt ? taskRun.completedAt.getTime() : undefined, @@ -120,13 +121,8 @@ export class RunsDashboardService { export type RunDashboardEvents = { runStatusUpdate: [ { - run: TaskRun; - organization: { - id: string; - }; - environment: { - type: RuntimeEnvironmentType; - }; + time: Date; + runId: string; } ]; }; diff --git a/apps/webapp/app/v3/services/triggerTaskV1.server.ts b/apps/webapp/app/v3/services/triggerTaskV1.server.ts index e17e5e74e2..004b07208b 100644 --- a/apps/webapp/app/v3/services/triggerTaskV1.server.ts +++ b/apps/webapp/app/v3/services/triggerTaskV1.server.ts @@ -376,6 +376,8 @@ export class TriggerTaskServiceV1 extends BaseService { number: num, friendlyId: runFriendlyId, runtimeEnvironmentId: environment.id, + environmentType: environment.type, + organizationId: environment.organizationId, projectId: environment.projectId, idempotencyKey, idempotencyKeyExpiresAt: idempotencyKey ? 
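
Passing eventTime from the emit site down into event_time (instead of stamping Date.now() at insert) matters once events can be processed out of order. Assuming the dashboard table deduplicates per run by keeping the greatest event_time (a ReplacingMergeTree-style scheme; the patch does not show that table's engine), a late write can no longer clobber a newer snapshot. An illustration, with executingRun, pendingRun, and orgId as stand-ins:

// The older PENDING snapshot is written *after* the newer EXECUTING one.
await service.upsertRun(new Date("2025-04-29T10:00:05Z"), executingRun, "PRODUCTION", orgId);
await service.upsertRun(new Date("2025-04-29T10:00:01Z"), pendingRun, "PRODUCTION", orgId);

// With event_time as the version, merges keep the 10:00:05 EXECUTING row
// regardless of insert order; stamping wall-clock time at insert would have
// let the stale PENDING row win.
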
idempotencyKeyExpiresAt : undefined, diff --git a/internal-packages/database/prisma/migrations/20250428211853_add_environment_type_and_org_id_to_task_run/migration.sql b/internal-packages/database/prisma/migrations/20250428211853_add_environment_type_and_org_id_to_task_run/migration.sql new file mode 100644 index 0000000000..246f8cbb2d --- /dev/null +++ b/internal-packages/database/prisma/migrations/20250428211853_add_environment_type_and_org_id_to_task_run/migration.sql @@ -0,0 +1,4 @@ +-- AlterTable +ALTER TABLE "TaskRun" ADD COLUMN "environmentType" "RuntimeEnvironmentType", +ADD COLUMN "organizationId" TEXT; + diff --git a/internal-packages/database/prisma/schema.prisma b/internal-packages/database/prisma/schema.prisma index c67edbd173..e0a4534508 100644 --- a/internal-packages/database/prisma/schema.prisma +++ b/internal-packages/database/prisma/schema.prisma @@ -1731,9 +1731,13 @@ model TaskRun { runtimeEnvironment RuntimeEnvironment @relation(fields: [runtimeEnvironmentId], references: [id], onDelete: Cascade, onUpdate: Cascade) runtimeEnvironmentId String + environmentType RuntimeEnvironmentType? + project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade) projectId String + organizationId String? + // The specific queue this run is in queue String // The queueId is set when the run is locked to a specific queue diff --git a/internal-packages/run-engine/src/engine/eventBus.ts b/internal-packages/run-engine/src/engine/eventBus.ts index 44de5127c6..6047cd5694 100644 --- a/internal-packages/run-engine/src/engine/eventBus.ts +++ b/internal-packages/run-engine/src/engine/eventBus.ts @@ -7,8 +7,7 @@ export type EventBusEvents = { runStatusChanged: [ { time: Date; - run: TaskRun; - environment: MinimalAuthenticatedEnvironment; + runId: string; }, ]; runAttemptStarted: [ diff --git a/internal-packages/run-engine/src/engine/index.ts b/internal-packages/run-engine/src/engine/index.ts index ea24d73a83..daeef1b8b1 100644 --- a/internal-packages/run-engine/src/engine/index.ts +++ b/internal-packages/run-engine/src/engine/index.ts @@ -403,6 +403,8 @@ export class RunEngine { number, friendlyId, runtimeEnvironmentId: environment.id, + environmentType: environment.type, + organizationId: environment.organization.id, projectId: environment.project.id, idempotencyKey, idempotencyKeyExpiresAt, @@ -550,8 +552,7 @@ export class RunEngine { this.eventBus.emit("runStatusChanged", { time: new Date(), - run: taskRun, - environment, + runId: taskRun.id, }); return taskRun; diff --git a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts index bcaf417756..6471b591a0 100644 --- a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts @@ -143,6 +143,11 @@ export class CheckpointSystem { throw new ServiceValidationError("Run not found", 404); } + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + // Create the checkpoint const taskRunCheckpoint = await prisma.taskRunCheckpoint.create({ data: { @@ -272,6 +277,11 @@ export class CheckpointSystem { throw new ServiceValidationError("Run not found", 404); } + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + const newSnapshot = await this.executionSnapshotSystem.createExecutionSnapshot(prisma, { run, snapshot: { diff --git a/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts 
b/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts index b43ff46221..f524de14af 100644 --- a/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts @@ -68,6 +68,11 @@ export class DelayedRunSystem { await this.$.worker.reschedule(`enqueueDelayedRun:${updatedRun.id}`, delayUntil); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId: updatedRun.id, + }); + return updatedRun; }); }, @@ -109,6 +114,11 @@ export class DelayedRunSystem { }, }); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + if (run.ttl) { const expireAt = parseNaturalLanguageDuration(run.ttl); diff --git a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts index 0e5800fa9a..5f7c585b03 100644 --- a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts @@ -356,6 +356,11 @@ export class DequeueSystem { }, }); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + if (!lockedTaskRun) { this.$.logger.error( "RunEngine.dequeueFromMasterQueue(): Failed to lock task run", @@ -573,6 +578,11 @@ export class DequeueSystem { //we ack because when it's deployed it will be requeued await this.$.runQueue.acknowledgeMessage(orgId, runId); + + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); }); }, { diff --git a/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts b/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts index 8be87cca7e..f54fc031b8 100644 --- a/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts @@ -100,6 +100,11 @@ export class PendingVersionSystem { tx, }); }); + + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId: run.id, + }); } //enqueue more if needed diff --git a/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts b/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts index 2150f70ac9..f7553bb8ea 100644 --- a/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts @@ -248,6 +248,11 @@ export class RunAttemptSystem { } ); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId: taskRun.id, + }); + if (!result) { this.$.logger.error("RunEngine.createRunAttempt(): failed to create task run attempt", { runId: taskRun.id, @@ -474,6 +479,11 @@ export class RunAttemptSystem { }); const newSnapshot = await getLatestExecutionSnapshot(prisma, runId); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + await this.$.runQueue.acknowledgeMessage(run.project.organizationId, runId); // We need to manually emit this as we created the final snapshot as part of the task run update @@ -664,6 +674,11 @@ export class RunAttemptSystem { }, }); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + const nextAttemptNumber = latestSnapshot.attemptNumber === null ? 
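
The same four-line emit is now duplicated across the checkpoint, delayed-run, dequeue, pending-version, and run-attempt systems. Since each system already holds SystemResources as this.$, a one-line helper would keep the event name and payload shape from drifting; a hypothetical refactor, not part of this patch:

import type { SystemResources } from "./systems.js";

// Hypothetical shared helper; every call site above becomes one line.
export function emitRunStatusChanged($: SystemResources, runId: string) {
  $.eventBus.emit("runStatusChanged", { time: new Date(), runId });
}

// e.g. inside a system method:
// emitRunStatusChanged(this.$, runId);
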
1 : latestSnapshot.attemptNumber + 1; @@ -992,6 +1007,11 @@ export class RunAttemptSystem { }, }); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + //if the run is delayed and hasn't started yet, we need to prevent it being added to the queue in future if (isInitialState(latestSnapshot.executionStatus) && run.delayUntil) { await this.delayedRunSystem.preventDelayedRunFromBeingEnqueued({ runId }); @@ -1145,6 +1165,11 @@ export class RunAttemptSystem { }, }); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + const newSnapshot = await this.executionSnapshotSystem.createExecutionSnapshot(prisma, { run, snapshot: { diff --git a/internal-packages/run-engine/src/engine/systems/ttlSystem.ts b/internal-packages/run-engine/src/engine/systems/ttlSystem.ts index 5b40277700..e8c82ef8f7 100644 --- a/internal-packages/run-engine/src/engine/systems/ttlSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/ttlSystem.ts @@ -1,11 +1,10 @@ -import { startSpan } from "@internal/tracing"; -import { SystemResources } from "./systems.js"; -import { PrismaClientOrTransaction, TaskRun } from "@trigger.dev/database"; -import { getLatestExecutionSnapshot } from "./executionSnapshotSystem.js"; import { parseNaturalLanguageDuration } from "@trigger.dev/core/v3/isomorphic"; +import { TaskRunError } from "@trigger.dev/core/v3/schemas"; +import { PrismaClientOrTransaction } from "@trigger.dev/database"; import { ServiceValidationError } from "../errors.js"; import { isExecuting } from "../statuses.js"; -import { TaskRunError } from "@trigger.dev/core/v3/schemas"; +import { getLatestExecutionSnapshot } from "./executionSnapshotSystem.js"; +import { SystemResources } from "./systems.js"; import { WaitpointSystem } from "./waitpointSystem.js"; export type TtlSystemOptions = { @@ -102,6 +101,11 @@ export class TtlSystem { }, }); + this.$.eventBus.emit("runStatusChanged", { + time: new Date(), + runId, + }); + await this.$.runQueue.acknowledgeMessage(updatedRun.runtimeEnvironment.organizationId, runId); if (!updatedRun.associatedWaitpoint) { From 4a6e9c2e08e3c7e22e16a34d434ed4711b3dd0c9 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 29 Apr 2025 15:37:01 +0100 Subject: [PATCH 07/33] make sure engine v1 runs get synced to CH --- .../app/routes/api.v1.runs.$runId.tags.ts | 3 +++ .../runEngine/services/triggerTask.server.ts | 2 ++ .../services/runsDashboardInstance.server.ts | 19 +++++++++++++++++-- .../app/v3/marqs/devQueueConsumer.server.ts | 3 +++ .../v3/marqs/sharedQueueConsumer.server.ts | 9 ++++++++- .../app/v3/services/completeAttempt.server.ts | 3 +++ .../services/createTaskRunAttempt.server.ts | 3 +++ .../v3/services/enqueueDelayedRun.server.ts | 3 +++ .../services/executeTasksWaitingForDeploy.ts | 5 +++++ .../app/v3/services/finalizeTaskRun.server.ts | 3 +++ .../v3/services/rescheduleTaskRun.server.ts | 3 +++ .../services/triggerScheduledTask.server.ts | 17 ++++++----------- .../app/v3/services/triggerTask.server.ts | 2 ++ .../app/v3/services/triggerTaskV1.server.ts | 4 +++- .../run-engine/src/engine/index.ts | 4 ++++ .../run-engine/src/engine/types.ts | 2 ++ 16 files changed, 70 insertions(+), 15 deletions(-) diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts index b65e5b53ad..c013a1df41 100644 --- a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts +++ b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts @@ -4,6 +4,7 @@ import { z } from "zod"; import { prisma } from 
"~/db.server"; import { createTag, getTagsForRunId, MAX_TAGS_PER_RUN } from "~/models/taskRunTag.server"; import { authenticateApiRequest } from "~/services/apiAuth.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; const ParamsSchema = z.object({ @@ -95,6 +96,8 @@ export async function action({ request, params }: ActionFunctionArgs) { }, }); + emitRunStatusUpdate(taskRun.id); + return json({ message: `Successfully set ${newTags.length} new tags.` }, { status: 200 }); } catch (error) { return json( diff --git a/apps/webapp/app/runEngine/services/triggerTask.server.ts b/apps/webapp/app/runEngine/services/triggerTask.server.ts index b077daac7f..a696758404 100644 --- a/apps/webapp/app/runEngine/services/triggerTask.server.ts +++ b/apps/webapp/app/runEngine/services/triggerTask.server.ts @@ -305,6 +305,8 @@ export class RunEngineTriggerTaskService { ? parentRun.queueTimestamp ?? undefined : undefined, runChainState, + scheduleId: options.scheduleId, + scheduleInstanceId: options.scheduleInstanceId, }, this.prisma ); diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts index df409e1fc6..a6b1653faa 100644 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ b/apps/webapp/app/services/runsDashboardInstance.server.ts @@ -9,6 +9,7 @@ import { RunDashboardEvents, RunsDashboardService, } from "./runsDashboardService.server"; +import { tryCatch } from "@trigger.dev/core/utils"; const runDashboardEventBus: RunDashboardEventBus = new EventEmitter(); @@ -25,11 +26,25 @@ export const runsDashboard = singleton("runsDashboard", () => { const service = new RunsDashboardService(clickhouse); runDashboardEventBus.on("runStatusUpdate", async (event) => { - await upsertRun(event.time, event.runId, service); + const [upsertError] = await tryCatch(upsertRun(event.time, event.runId, service)); + + if (upsertError) { + logger.error("RunDashboard: runStatusUpdate: upsertRun error", { + runId: event.runId, + error: upsertError, + }); + } }); engine.eventBus.on("runStatusChanged", async (event) => { - await upsertRun(event.time, event.runId, service); + const [upsertError] = await tryCatch(upsertRun(event.time, event.runId, service)); + + if (upsertError) { + logger.error("RunDashboard: runStatusChanged: upsertRun error", { + runId: event.runId, + error: upsertError, + }); + } }); return service; diff --git a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts index e72ede2398..0d4ab32666 100644 --- a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts @@ -23,6 +23,7 @@ import { getMaxDuration } from "@trigger.dev/core/v3/isomorphic"; import { DevSubscriber, devPubSub } from "./devPubSub.server"; import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.server"; import { createRedisClient, RedisClient } from "~/redis.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; const MessageBody = z.discriminatedUnion("type", [ z.object({ @@ -539,6 +540,8 @@ export class DevQueueConsumer { messageId: message.messageId, }); + emitRunStatusUpdate(lockedTaskRun.id); + this._inProgressRuns.set(lockedTaskRun.friendlyId, message.messageId); } catch (e) { if (e instanceof Error) { diff --git a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts 
b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts index 5926002c91..a72afd4413 100644 --- a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts @@ -66,6 +66,7 @@ import { import { tracer } from "../tracer.server"; import { getMaxDuration } from "../utils/maxDuration"; import { MessagePayload } from "./types"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; const WithTraceContext = z.object({ traceparent: z.string().optional(), @@ -841,6 +842,8 @@ export class SharedQueueConsumer { }; } + emitRunStatusUpdate(lockedTaskRun.id); + return { action: "noop", reason: "restored_checkpoint", @@ -922,6 +925,8 @@ export class SharedQueueConsumer { }, }); + emitRunStatusUpdate(lockedTaskRun.id); + return { action: "noop", reason: "scheduled_attempt", @@ -1430,7 +1435,7 @@ export class SharedQueueConsumer { async #markRunAsWaitingForDeploy(runId: string) { logger.debug("Marking run as waiting for deploy", { runId }); - return await prisma.taskRun.update({ + await prisma.taskRun.update({ where: { id: runId, }, @@ -1438,6 +1443,8 @@ export class SharedQueueConsumer { status: "WAITING_FOR_DEPLOY", }, }); + + emitRunStatusUpdate(runId); } async #resolveCompletedAttemptsForResumeMessage( diff --git a/apps/webapp/app/v3/services/completeAttempt.server.ts b/apps/webapp/app/v3/services/completeAttempt.server.ts index 73c8fbc88d..1769f79250 100644 --- a/apps/webapp/app/v3/services/completeAttempt.server.ts +++ b/apps/webapp/app/v3/services/completeAttempt.server.ts @@ -34,6 +34,7 @@ import { FinalizeTaskRunService } from "./finalizeTaskRun.server"; import { RetryAttemptService } from "./retryAttempt.server"; import { getTaskEventStoreTableForRun } from "../taskEventStore.server"; import { socketIo } from "../handleSocketIo.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; type FoundAttempt = Awaited>; @@ -614,6 +615,8 @@ export class CompleteAttemptService extends BaseService { }, }); + emitRunStatusUpdate(taskRunAttempt.taskRunId); + if (environment.type === "DEVELOPMENT") { await marqs.requeueMessage(taskRunAttempt.taskRunId, {}, executionRetry.timestamp, "retry"); diff --git a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts index f8cad4fbf7..3a7fe10633 100644 --- a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts +++ b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts @@ -12,6 +12,7 @@ import { CrashTaskRunService } from "./crashTaskRun.server"; import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { FINAL_RUN_STATUSES } from "../taskStatus"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class CreateTaskRunAttemptService extends BaseService { public async call({ @@ -180,6 +181,8 @@ export class CreateTaskRunAttemptService extends BaseService { }); } + emitRunStatusUpdate(taskRun.id); + const machinePreset = machinePresetFromRun(taskRun) ?? machinePresetFromConfig(lockedBy.machineConfig ?? 
{}); diff --git a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts index be655bfdaa..44f03acae4 100644 --- a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts +++ b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts @@ -7,6 +7,7 @@ import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; import { commonWorker } from "../commonWorker.server"; import { workerQueue } from "~/services/worker.server"; import { enqueueRun } from "./enqueueRun.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class EnqueueDelayedRunService extends BaseService { public static async enqueue(runId: string, runAt?: Date) { @@ -102,6 +103,8 @@ export class EnqueueDelayedRunService extends BaseService { } }); + emitRunStatusUpdate(run.id); + await enqueueRun({ env: run.runtimeEnvironment, run: run, diff --git a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts index 0f40ef290b..4e02cf7b8c 100644 --- a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts +++ b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts @@ -4,6 +4,7 @@ import { marqs } from "~/v3/marqs/index.server"; import { BaseService } from "./baseService.server"; import { logger } from "~/services/logger.server"; import { env } from "~/env.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class ExecuteTasksWaitingForDeployService extends BaseService { public async call(backgroundWorkerId: string) { @@ -78,6 +79,10 @@ export class ExecuteTasksWaitingForDeployService extends BaseService { }); } + for (const run of runsWaitingForDeploy) { + emitRunStatusUpdate(run.id); + } + for (const run of runsWaitingForDeploy) { await marqs?.enqueueMessage( backgroundWorker.runtimeEnvironment, diff --git a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts index 7215c4348e..e321a7e773 100644 --- a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts +++ b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts @@ -19,6 +19,7 @@ import { completeBatchTaskRunItemV3 } from "./batchTriggerV3.server"; import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; import { ResumeBatchRunService } from "./resumeBatchRun.server"; import { ResumeDependentParentsService } from "./resumeDependentParents.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; type BaseInput = { id: string; @@ -100,6 +101,8 @@ export class FinalizeTaskRunService extends BaseService { ...(include ? 
{ include } : {}), }); + emitRunStatusUpdate(run.id); + if (run.ttl) { await ExpireEnqueuedRunService.ack(run.id); } diff --git a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts index 122fcc2c59..cc65e11c24 100644 --- a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts +++ b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts @@ -3,6 +3,7 @@ import { TaskRun } from "@trigger.dev/database"; import { parseDelay } from "~/utils/delays"; import { BaseService, ServiceValidationError } from "./baseService.server"; import { EnqueueDelayedRunService } from "./enqueueDelayedRun.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class RescheduleTaskRunService extends BaseService { public async call(taskRun: TaskRun, body: RescheduleRunRequestBody) { @@ -25,6 +26,8 @@ export class RescheduleTaskRunService extends BaseService { }, }); + emitRunStatusUpdate(taskRun.id); + await EnqueueDelayedRunService.reschedule(taskRun.id, delay); return updatedRun; diff --git a/apps/webapp/app/v3/services/triggerScheduledTask.server.ts b/apps/webapp/app/v3/services/triggerScheduledTask.server.ts index 2965052b02..8e27d3ec14 100644 --- a/apps/webapp/app/v3/services/triggerScheduledTask.server.ts +++ b/apps/webapp/app/v3/services/triggerScheduledTask.server.ts @@ -8,6 +8,7 @@ import { nextScheduledTimestamps } from "../utils/calculateNextSchedule.server"; import { BaseService } from "./baseService.server"; import { RegisterNextTaskScheduleInstanceService } from "./registerNextTaskScheduleInstance.server"; import { TriggerTaskService } from "./triggerTask.server"; +import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class TriggerScheduledTaskService extends BaseService { public async call(instanceId: string, finalAttempt: boolean) { @@ -147,7 +148,11 @@ export class TriggerScheduledTaskService extends BaseService { instance.taskSchedule.taskIdentifier, instance.environment, { payload: payloadPacket.data, options: { payloadType: payloadPacket.dataType } }, - { customIcon: "scheduled" } + { + customIcon: "scheduled", + scheduleId: instance.taskSchedule.id, + scheduleInstanceId: instance.id, + } ); if (!result) { @@ -157,16 +162,6 @@ export class TriggerScheduledTaskService extends BaseService { payloadPacket, }); } else { - await this._prisma.taskRun.update({ - where: { - id: result.run.id, - }, - data: { - scheduleId: instance.taskSchedule.id, - scheduleInstanceId: instance.id, - }, - }); - await this._prisma.taskSchedule.update({ where: { id: instance.taskSchedule.id, diff --git a/apps/webapp/app/v3/services/triggerTask.server.ts b/apps/webapp/app/v3/services/triggerTask.server.ts index 4affb4e748..c055ddb904 100644 --- a/apps/webapp/app/v3/services/triggerTask.server.ts +++ b/apps/webapp/app/v3/services/triggerTask.server.ts @@ -29,6 +29,8 @@ export type TriggerTaskServiceOptions = { runFriendlyId?: string; skipChecks?: boolean; oneTimeUseToken?: string; + scheduleId?: string; + scheduleInstanceId?: string; }; export class OutOfEntitlementError extends Error { diff --git a/apps/webapp/app/v3/services/triggerTaskV1.server.ts b/apps/webapp/app/v3/services/triggerTaskV1.server.ts index 004b07208b..b31f1a677b 100644 --- a/apps/webapp/app/v3/services/triggerTaskV1.server.ts +++ b/apps/webapp/app/v3/services/triggerTaskV1.server.ts @@ -437,6 +437,8 @@ export class TriggerTaskServiceV1 extends BaseService { runTags: bodyTags, oneTimeUseToken: options.oneTimeUseToken, 
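Threading scheduleId and scheduleInstanceId through TriggerTaskServiceOptions replaces the old two-step flow, where the run was created and then patched with a second taskRun.update. Carrying the ids in the options lets the run be attributed to its schedule in the initial insert, closing the window in which a run row existed without its schedule. A sketch of the call after this change, using names from the hunks above (the triggerTaskService receiver is a stand-in, since the callee sits outside the hunk context):

await triggerTaskService.call(
  instance.taskSchedule.taskIdentifier,
  instance.environment,
  { payload: payloadPacket.data, options: { payloadType: payloadPacket.dataType } },
  {
    customIcon: "scheduled",
    scheduleId: instance.taskSchedule.id, // persisted with the run itself
    scheduleInstanceId: instance.id, // no follow-up taskRun.update needed
  }
);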
machinePreset: body.options?.machine, + scheduleId: options.scheduleId, + scheduleInstanceId: options.scheduleInstanceId, }, }); @@ -604,7 +606,7 @@ export class TriggerTaskServiceV1 extends BaseService { return; } - emitRunStatusUpdate(run, environment); + emitRunStatusUpdate(run.id); return { run, diff --git a/internal-packages/run-engine/src/engine/index.ts b/internal-packages/run-engine/src/engine/index.ts index daeef1b8b1..75c6635ae9 100644 --- a/internal-packages/run-engine/src/engine/index.ts +++ b/internal-packages/run-engine/src/engine/index.ts @@ -365,6 +365,8 @@ export class RunEngine { runnerId, releaseConcurrency, runChainState, + scheduleId, + scheduleInstanceId, }: TriggerParams, tx?: PrismaClientOrTransaction ): Promise { @@ -453,6 +455,8 @@ export class RunEngine { maxDurationInSeconds, machinePreset: machine, runChainState, + scheduleId, + scheduleInstanceId, executionSnapshots: { create: { engine: "V2", diff --git a/internal-packages/run-engine/src/engine/types.ts b/internal-packages/run-engine/src/engine/types.ts index a89a7d1fe3..14b38dc0e9 100644 --- a/internal-packages/run-engine/src/engine/types.ts +++ b/internal-packages/run-engine/src/engine/types.ts @@ -118,6 +118,8 @@ export type TriggerParams = { runnerId?: string; releaseConcurrency?: boolean; runChainState?: RunChainState; + scheduleId?: string; + scheduleInstanceId?: string; }; export type EngineWorker = Worker; From cc6695cf05fefea5ce01538eee97c0e93b842007 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 29 Apr 2025 15:48:41 +0100 Subject: [PATCH 08/33] Update the attemptNumber of v3 task runs --- apps/webapp/app/v3/services/createTaskRunAttempt.server.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts index 3a7fe10633..9727e85bea 100644 --- a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts +++ b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts @@ -160,6 +160,7 @@ export class CreateTaskRunAttemptService extends BaseService { data: { status: setToExecuting ? "EXECUTING" : undefined, executedAt: taskRun.executedAt ?? 
new Date(), + attemptNumber: nextAttemptNumber, }, }); From 04b2039d0d6e99fe1410263283fc7ce21a1b9b84 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Wed, 30 Apr 2025 15:15:16 +0100 Subject: [PATCH 09/33] Restructure the run events to be more sparse --- .../route.tsx | 506 ++++++++++++++++++ .../route.tsx | 10 + .../app/routes/api.v1.runs.$runId.tags.ts | 22 +- apps/webapp/app/runEngine/types.ts | 1 - .../services/runsDashboardInstance.server.ts | 221 +++++++- .../services/runsDashboardService.server.ts | 359 ++++++++++++- apps/webapp/app/utils/pathBuilder.ts | 11 + .../app/v3/marqs/devQueueConsumer.server.ts | 44 +- .../v3/marqs/sharedQueueConsumer.server.ts | 83 ++- .../app/v3/services/completeAttempt.server.ts | 35 +- .../services/createTaskRunAttempt.server.ts | 28 +- .../v3/services/enqueueDelayedRun.server.ts | 61 ++- .../services/executeTasksWaitingForDeploy.ts | 24 +- .../app/v3/services/finalizeTaskRun.server.ts | 63 ++- .../v3/services/rescheduleTaskRun.server.ts | 23 +- .../services/triggerScheduledTask.server.ts | 1 - .../app/v3/services/triggerTaskV1.server.ts | 4 +- .../clickhouse/004_create_run_latest_v1.sql | 101 ++++ .../schema/003_create_raw_run_events_v1.sql | 16 +- .../clickhouse/src/runEvents.test.ts | 1 + internal-packages/clickhouse/src/runEvents.ts | 25 +- .../run-engine/src/engine/eventBus.ts | 165 +++++- .../run-engine/src/engine/index.ts | 2 +- .../src/engine/systems/checkpointSystem.ts | 34 +- .../src/engine/systems/delayedRunSystem.ts | 36 +- .../src/engine/systems/dequeueSystem.ts | 61 ++- .../engine/systems/pendingVersionSystem.ts | 15 +- .../src/engine/systems/runAttemptSystem.ts | 113 ++-- .../src/engine/systems/ttlSystem.ts | 18 +- internal-packages/run-engine/src/index.ts | 2 +- references/hello-world/package.json | 5 +- 31 files changed, 1907 insertions(+), 183 deletions(-) create mode 100644 apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx create mode 100644 apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx create mode 100644 internal-packages/clickhouse/004_create_run_latest_v1.sql diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx new file mode 100644 index 0000000000..970e73f6ac --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx @@ -0,0 +1,506 @@ +import { ArrowPathIcon, StopCircleIcon } from "@heroicons/react/20/solid"; +import { BeakerIcon, BookOpenIcon } from "@heroicons/react/24/solid"; +import { Form, type MetaFunction, useNavigation } from "@remix-run/react"; +import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { IconCircleX } from "@tabler/icons-react"; +import { AnimatePresence, motion } from "framer-motion"; +import { ListChecks, ListX } from "lucide-react"; +import { Suspense, useState } from "react"; +import { TypedAwait, typeddefer, useTypedLoaderData } from "remix-typedjson"; +import { TaskIcon } from "~/assets/icons/TaskIcon"; +import { DevDisconnectedBanner, useDevPresence } from "~/components/DevPresence"; +import { StepContentContainer } from "~/components/StepContentContainer"; +import { MainCenteredContainer, PageBody } from "~/components/layout/AppLayout"; +import { Button, LinkButton } from 
"~/components/primitives/Buttons"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTrigger, +} from "~/components/primitives/Dialog"; +import { Header1, Header2 } from "~/components/primitives/Headers"; +import { InfoPanel } from "~/components/primitives/InfoPanel"; +import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { + SelectedItemsProvider, + useSelectedItems, +} from "~/components/primitives/SelectedItemsProvider"; +import { Spinner, SpinnerWhite } from "~/components/primitives/Spinner"; +import { StepNumber } from "~/components/primitives/StepNumber"; +import { TextLink } from "~/components/primitives/TextLink"; +import { RunsFilters, TaskRunListSearchFilters } from "~/components/runs/v3/RunFilters"; +import { TaskRunsTable } from "~/components/runs/v3/TaskRunsTable"; +import { BULK_ACTION_RUN_LIMIT } from "~/consts"; +import { useEnvironment } from "~/hooks/useEnvironment"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import { findProjectBySlug } from "~/models/project.server"; +import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { RunListPresenter } from "~/presenters/v3/RunListPresenter.server"; +import { + getRootOnlyFilterPreference, + setRootOnlyFilterPreference, + uiPreferencesStorage, +} from "~/services/preferences/uiPreferences.server"; +import { requireUserId } from "~/services/session.server"; +import { cn } from "~/utils/cn"; +import { + docsPath, + EnvironmentParamSchema, + v3ProjectPath, + v3RunsNextPath, + v3TestPath, +} from "~/utils/pathBuilder"; +import { ListPagination } from "../../components/ListPagination"; + +export const meta: MetaFunction = () => { + return [ + { + title: `Runs | Trigger.dev`, + }, + ]; +}; + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); + + const url = new URL(request.url); + + let rootOnlyValue = false; + if (url.searchParams.has("rootOnly")) { + rootOnlyValue = url.searchParams.get("rootOnly") === "true"; + } else { + rootOnlyValue = await getRootOnlyFilterPreference(request); + } + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Error("Project not found"); + } + + const environment = await findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Error("Environment not found"); + } + + const s = { + cursor: url.searchParams.get("cursor") ?? undefined, + direction: url.searchParams.get("direction") ?? undefined, + statuses: url.searchParams.getAll("statuses"), + environments: [environment.id], + tasks: url.searchParams.getAll("tasks"), + period: url.searchParams.get("period") ?? undefined, + bulkId: url.searchParams.get("bulkId") ?? undefined, + tags: url.searchParams.getAll("tags").map((t) => decodeURIComponent(t)), + from: url.searchParams.get("from") ?? undefined, + to: url.searchParams.get("to") ?? undefined, + rootOnly: rootOnlyValue, + runId: url.searchParams.get("runId") ?? undefined, + batchId: url.searchParams.get("batchId") ?? undefined, + scheduleId: url.searchParams.get("scheduleId") ?? 
undefined, + }; + const { + tasks, + versions, + statuses, + environments, + tags, + period, + bulkId, + from, + to, + cursor, + direction, + rootOnly, + runId, + batchId, + scheduleId, + } = TaskRunListSearchFilters.parse(s); + + const presenter = new RunListPresenter(); + const list = presenter.call({ + userId, + projectId: project.id, + tasks, + versions, + statuses, + environments, + tags, + period, + bulkId, + from, + to, + batchId, + runIds: runId ? [runId] : undefined, + scheduleId, + rootOnly, + direction: direction, + cursor: cursor, + }); + + const session = await setRootOnlyFilterPreference(rootOnlyValue, request); + const cookieValue = await uiPreferencesStorage.commitSession(session); + + return typeddefer( + { + data: list, + rootOnlyDefault: rootOnlyValue, + }, + { + headers: { + "Set-Cookie": cookieValue, + }, + } + ); +}; + +export default function Page() { + const { data, rootOnlyDefault } = useTypedLoaderData(); + const navigation = useNavigation(); + const isLoading = navigation.state !== "idle"; + const { isConnected } = useDevPresence(); + const project = useProject(); + const environment = useEnvironment(); + + return ( + <> + + + {environment.type === "DEVELOPMENT" && project.engine === "V2" && ( + + )} + + + Runs docs + + + + + + {({ selectedItems }) => ( +
+ + Loading runs +
+ } + > + + {(list) => ( + <> + {list.runs.length === 0 && !list.hasAnyRuns ? ( + list.possibleTasks.length === 0 ? ( + + ) : ( + + ) + ) : ( +
+ )} + + )} +
+ + + + )} +
+
+ + ); +} + +function BulkActionBar() { + const { selectedItems, deselectAll } = useSelectedItems(); + const [barState, setBarState] = useState<"none" | "replay" | "cancel">("none"); + + const hasSelectedMaximum = selectedItems.size >= BULK_ACTION_RUN_LIMIT; + + return ( + + {selectedItems.size > 0 && ( + +
+ + Bulk actions: + {hasSelectedMaximum ? ( + + Maximum of {selectedItems.size} runs selected + + ) : ( + {selectedItems.size} runs selected + )} +
+
+ { + if (o) { + setBarState("cancel"); + } else { + setBarState("none"); + } + }} + /> + { + if (o) { + setBarState("replay"); + } else { + setBarState("none"); + } + }} + /> + +
+
+ )} +
+ ); +} + +function CancelRuns({ onOpen }: { onOpen: (open: boolean) => void }) { + const { selectedItems } = useSelectedItems(); + + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + const failedRedirect = v3RunsNextPath(organization, project, environment); + + const formAction = `/resources/taskruns/bulk/cancel`; + + const navigation = useNavigation(); + const isLoading = navigation.formAction === formAction; + + return ( + onOpen(o)}> + + + + + Cancel {selectedItems.size} runs? + + Canceling these runs will stop them from running. Only runs that are not already finished + will be canceled, the others will remain in their existing state. + + +
+ + + + + {[...selectedItems].map((runId) => ( + + ))} + +
+
+
+
+ ); +} + +function ReplayRuns({ onOpen }: { onOpen: (open: boolean) => void }) { + const { selectedItems } = useSelectedItems(); + + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + const failedRedirect = v3RunsNextPath(organization, project, environment); + + const formAction = `/resources/taskruns/bulk/replay`; + + const navigation = useNavigation(); + const isLoading = navigation.formAction === formAction; + + return ( + onOpen(o)}> + + + + + Replay runs? + + Replaying these runs will create a new run for each with the same payload and environment + as the original. It will use the latest version of the code for each task. + + +
+ + + + + {[...selectedItems].map((runId) => ( + + ))} + +
+
+
+
+ ); +} + +function CreateFirstTaskInstructions() { + const organization = useOrganization(); + const project = useProject(); + return ( + + + Create a task + + } + > + + Before running a task, you must first create one. Follow the instructions on the{" "} + Tasks page to create a + task, then return here to run it. + + + + ); +} + +function RunTaskInstructions() { + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + return ( + + How to run your tasks + + + + Perform a test run with a payload directly from the dashboard. + + + Test + +
+
+ OR +
+
+
+ + + + + Performing a real run depends on the type of trigger your task is using. + + + How to trigger a task + + +
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx new file mode 100644 index 0000000000..f6723ddeba --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx @@ -0,0 +1,10 @@ +import { Outlet } from "@remix-run/react"; +import { PageContainer } from "~/components/layout/AppLayout"; + +export default function Page() { + return ( + + + + ); +} diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts index c013a1df41..9018fe952a 100644 --- a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts +++ b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts @@ -4,8 +4,7 @@ import { z } from "zod"; import { prisma } from "~/db.server"; import { createTag, getTagsForRunId, MAX_TAGS_PER_RUN } from "~/models/taskRunTag.server"; import { authenticateApiRequest } from "~/services/apiAuth.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; -import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; +import { emitRunTagsUpdated } from "~/services/runsDashboardInstance.server"; const ParamsSchema = z.object({ runId: z.string(), @@ -96,7 +95,24 @@ export async function action({ request, params }: ActionFunctionArgs) { }, }); - emitRunStatusUpdate(taskRun.id); + emitRunTagsUpdated({ + time: new Date(), + run: { + id: taskRun.id, + tags: taskRun.runTags, + status: taskRun.status, + updatedAt: taskRun.updatedAt, + }, + organization: { + id: authenticationResult.environment.organizationId, + }, + project: { + id: authenticationResult.environment.projectId, + }, + environment: { + id: authenticationResult.environment.id, + }, + }); return json({ message: `Successfully set ${newTags.length} new tags.` }, { status: 200 }); } catch (error) { diff --git a/apps/webapp/app/runEngine/types.ts b/apps/webapp/app/runEngine/types.ts index 439bbf3776..e953f53169 100644 --- a/apps/webapp/app/runEngine/types.ts +++ b/apps/webapp/app/runEngine/types.ts @@ -7,7 +7,6 @@ import { TriggerTaskRequestBody, } from "@trigger.dev/core/v3"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; -import type { TaskRunStatusUpdateEnvironment } from "~/services/runsDashboardInstance.server"; export type TriggerTaskServiceOptions = { idempotencyKey?: string; diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts index a6b1653faa..cdf26dd14e 100644 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ b/apps/webapp/app/services/runsDashboardInstance.server.ts @@ -6,6 +6,17 @@ import { engine } from "~/v3/runEngine.server"; import { logger } from "./logger.server"; import { RunDashboardEventBus, + RunDashboardEventRunAttemptStarted, + RunDashboardEventRunCancelled, + RunDashboardEventRunDelayRescheduled, + RunDashboardEventRunEnqueuedAfterDelay, + RunDashboardEventRunExpired, + RunDashboardEventRunFailed, + RunDashboardEventRunLocked, + RunDashboardEventRunRetryScheduled, + RunDashboardEventRunStatusChanged, + RunDashboardEventRunSucceeded, + RunDashboardEventRunTagsUpdated, RunDashboardEvents, RunsDashboardService, } from "./runsDashboardService.server"; @@ -13,44 +24,216 @@ import { tryCatch } from "@trigger.dev/core/utils"; const runDashboardEventBus: RunDashboardEventBus = new EventEmitter(); 
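The bus itself is plain Node EventEmitter made type-safe by a generic event map: each key names an event and maps to the tuple of arguments emit requires. The technique in isolation (assuming a @types/node recent enough to ship the EventEmitter type parameter):

import { EventEmitter } from "node:events";

// Event name -> argument tuple; one object argument per event, as above.
type DemoEvents = {
  runCreated: [{ time: Date; runId: string }];
};

const bus: EventEmitter<DemoEvents> = new EventEmitter();

// The listener parameter type is inferred from the map...
bus.on("runCreated", ({ runId }) => {
  console.log(`created ${runId}`);
});

// ...and emit() rejects payloads that do not match the tuple.
bus.emit("runCreated", { time: new Date(), runId: "run_123" });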
-export function emitRunStatusUpdate(runId: string) { - runDashboardEventBus.emit("runStatusUpdate", { - time: new Date(), +export function emitRunStatusChanged(event: RunDashboardEventRunStatusChanged) { + runDashboardEventBus.emit("runStatusChanged", event); +} + +export function emitRunCreated(time: Date, runId: string) { + runDashboardEventBus.emit("runCreated", { + time, runId, }); } +export function emitRunAttemptStarted(event: RunDashboardEventRunAttemptStarted) { + runDashboardEventBus.emit("runAttemptStarted", event); +} + +export function emitRunFailed(event: RunDashboardEventRunFailed) { + runDashboardEventBus.emit("runFailed", event); +} + +export function emitRunSucceeded(event: RunDashboardEventRunSucceeded) { + runDashboardEventBus.emit("runSucceeded", event); +} + +export function emitRunCancelled(event: RunDashboardEventRunCancelled) { + runDashboardEventBus.emit("runCancelled", event); +} + +export function emitRunRetryScheduled(event: RunDashboardEventRunRetryScheduled) { + runDashboardEventBus.emit("runRetryScheduled", event); +} + +export function emitRunDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { + runDashboardEventBus.emit("runDelayRescheduled", event); +} + +export function emitRunLocked(event: RunDashboardEventRunLocked) { + runDashboardEventBus.emit("runLocked", event); +} + +export function emitRunExpired(event: RunDashboardEventRunExpired) { + runDashboardEventBus.emit("runExpired", event); +} + +export function emitRunTagsUpdated(event: RunDashboardEventRunTagsUpdated) { + runDashboardEventBus.emit("runTagsUpdated", event); +} + +export function emitRunEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { + runDashboardEventBus.emit("runEnqueuedAfterDelay", event); +} + export const runsDashboard = singleton("runsDashboard", () => { const clickhouse = ClickHouse.fromEnv(); const service = new RunsDashboardService(clickhouse); - runDashboardEventBus.on("runStatusUpdate", async (event) => { - const [upsertError] = await tryCatch(upsertRun(event.time, event.runId, service)); + runDashboardEventBus.on("runCreated", async (event) => { + const [runCreatedError] = await tryCatch(runCreated(event.time, event.runId, service)); - if (upsertError) { - logger.error("RunDashboard: runStatusUpdate: upsertRun error", { + if (runCreatedError) { + logger.error("RunDashboard: runCreated: runCreated error", { runId: event.runId, - error: upsertError, + error: runCreatedError, }); } }); - engine.eventBus.on("runStatusChanged", async (event) => { - const [upsertError] = await tryCatch(upsertRun(event.time, event.runId, service)); + runDashboardEventBus.on("runAttemptStarted", async (event) => { + const [runAttemptStartedError] = await tryCatch(service.runAttemptStarted(event)); - if (upsertError) { - logger.error("RunDashboard: runStatusChanged: upsertRun error", { - runId: event.runId, - error: upsertError, + if (runAttemptStartedError) { + logger.error("RunDashboard: runAttemptStarted: runAttemptStarted error", { + runId: event.run.id, + error: runAttemptStartedError, }); } }); + runDashboardEventBus.on("runStatusChanged", async (event) => { + const [runStatusChangedError] = await tryCatch(service.runStatusChanged(event)); + + if (runStatusChangedError) { + logger.error("RunDashboard: runStatusChanged: runStatusChanged error", { + runId: event.run.id, + error: runStatusChangedError, + }); + } + }); + + runDashboardEventBus.on("runFailed", async (event) => { + const [runFailedError] = await tryCatch(service.runFailed(event)); + + if 
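Every subscription above follows the same tryCatch convention from @trigger.dev/core: the listener receives an [error, result] tuple, logs the error together with the run id, and never rethrows, so one failed ClickHouse write cannot take down the listener chain. The convention reduced to a hypothetical wrapper:

import { tryCatch } from "@trigger.dev/core/utils";

async function handleSafely<T>(label: string, work: Promise<T>): Promise<T | undefined> {
  const [error, result] = await tryCatch(work);

  if (error) {
    // Mirrors the logger.error calls above: record and move on.
    console.error(`${label} failed`, error);
    return undefined;
  }

  return result;
}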
(runFailedError) { + logger.error("RunDashboard: runFailed: runFailed error", { + runId: event.run.id, + error: runFailedError, + }); + } + }); + + runDashboardEventBus.on("runSucceeded", async (event) => { + const [runSucceededError] = await tryCatch(service.runSucceeded(event)); + + if (runSucceededError) { + logger.error("RunDashboard: runSucceeded: runSucceeded error", { + runId: event.run.id, + error: runSucceededError, + }); + } + }); + + runDashboardEventBus.on("runCancelled", async (event) => { + const [runCancelledError] = await tryCatch(service.runCancelled(event)); + + if (runCancelledError) { + logger.error("RunDashboard: runCancelled: runCancelled error", { + runId: event.run.id, + error: runCancelledError, + }); + } + }); + + runDashboardEventBus.on("runRetryScheduled", async (event) => { + const [runRetryScheduledError] = await tryCatch(service.runRetryScheduled(event)); + + if (runRetryScheduledError) { + logger.error("RunDashboard: runRetryScheduled: runRetryScheduled error", { + runId: event.run.id, + error: runRetryScheduledError, + }); + } + }); + + runDashboardEventBus.on("runDelayRescheduled", async (event) => { + const [runDelayRescheduledError] = await tryCatch(service.runDelayRescheduled(event)); + + if (runDelayRescheduledError) { + logger.error("RunDashboard: runDelayRescheduled: runDelayRescheduled error", { + runId: event.run.id, + error: runDelayRescheduledError, + }); + } + }); + + runDashboardEventBus.on("runLocked", async (event) => { + const [runLockedError] = await tryCatch(service.runLocked(event)); + + if (runLockedError) { + logger.error("RunDashboard: runLocked: runLocked error", { + runId: event.run.id, + error: runLockedError, + }); + } + }); + + runDashboardEventBus.on("runExpired", async (event) => { + const [runExpiredError] = await tryCatch(service.runExpired(event)); + + if (runExpiredError) { + logger.error("RunDashboard: runExpired: runExpired error", { + runId: event.run.id, + error: runExpiredError, + }); + } + }); + + engine.eventBus.on("runCreated", async (event) => { + runDashboardEventBus.emit("runCreated", event); + }); + + engine.eventBus.on("runAttemptStarted", async (event) => { + runDashboardEventBus.emit("runAttemptStarted", event); + }); + + engine.eventBus.on("runStatusChanged", async (event) => { + runDashboardEventBus.emit("runStatusChanged", event); + }); + + engine.eventBus.on("runFailed", async (event) => { + runDashboardEventBus.emit("runFailed", event); + }); + + engine.eventBus.on("runSucceeded", async (event) => { + runDashboardEventBus.emit("runSucceeded", event); + }); + + engine.eventBus.on("runCancelled", async (event) => { + runDashboardEventBus.emit("runCancelled", event); + }); + + engine.eventBus.on("runRetryScheduled", async (event) => { + runDashboardEventBus.emit("runRetryScheduled", event); + }); + + engine.eventBus.on("runDelayRescheduled", async (event) => { + runDashboardEventBus.emit("runDelayRescheduled", event); + }); + + engine.eventBus.on("runLocked", async (event) => { + runDashboardEventBus.emit("runLocked", event); + }); + + engine.eventBus.on("runExpired", async (event) => { + runDashboardEventBus.emit("runExpired", event); + }); + return service; }); -async function upsertRun(time: Date, runId: string, service: RunsDashboardService) { +async function runCreated(time: Date, runId: string, service: RunsDashboardService) { const run = await prisma.taskRun.findFirst({ where: { id: runId, @@ -58,7 +241,7 @@ async function upsertRun(time: Date, runId: string, service: RunsDashboardServic }); if (!run) 
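The ten engine.eventBus.on blocks above forward each event verbatim onto the dashboard bus. Writing them out by hand keeps full type inference per event; the loop below is the compact alternative and shows what that spelling-out buys, since the loop version has to give up the per-event tuple types (this condensed forwarder is not in the patch):

const forwardedEvents = [
  "runCreated",
  "runAttemptStarted",
  "runStatusChanged",
  "runFailed",
  "runSucceeded",
  "runCancelled",
  "runRetryScheduled",
  "runDelayRescheduled",
  "runLocked",
  "runExpired",
] as const;

for (const name of forwardedEvents) {
  // `any` is the cost of the loop: the argument tuple per event is lost.
  engine.eventBus.on(name, (event: any) => {
    runDashboardEventBus.emit(name, event);
  });
}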
{ - logger.error("RunDashboard: upsertRun: run not found", { + logger.error("RunDashboard: runCreated: run not found", { runId, }); @@ -66,7 +249,7 @@ async function upsertRun(time: Date, runId: string, service: RunsDashboardServic } if (!run.environmentType) { - logger.error("RunDashboard: upsertRun: run environment type not found", { + logger.error("RunDashboard: runCreated: run environment type not found", { runId, }); @@ -74,12 +257,12 @@ async function upsertRun(time: Date, runId: string, service: RunsDashboardServic } if (!run.organizationId) { - logger.error("RunDashboard: upsertRun: run organization id not found", { + logger.error("RunDashboard: runCreated: run organization id not found", { runId, }); return; } - await service.upsertRun(time, run, run.environmentType, run.organizationId); + await service.runCreated(time, run, run.environmentType, run.organizationId); } diff --git a/apps/webapp/app/services/runsDashboardService.server.ts b/apps/webapp/app/services/runsDashboardService.server.ts index c22709321d..77e031b6c9 100644 --- a/apps/webapp/app/services/runsDashboardService.server.ts +++ b/apps/webapp/app/services/runsDashboardService.server.ts @@ -1,9 +1,10 @@ import type { ClickHouse } from "@internal/clickhouse"; import { TaskRunError } from "@trigger.dev/core/v3/schemas"; -import { RuntimeEnvironmentType, TaskRun } from "@trigger.dev/database"; +import { RuntimeEnvironmentType, TaskRun, TaskRunStatus } from "@trigger.dev/database"; import { logger } from "./logger.server"; import { EventEmitter } from "node:events"; import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; +import { EventBusEvents } from "@internal/run-engine"; export class RunsDashboardService { constructor(private readonly clickhouse: ClickHouse) {} @@ -12,16 +13,289 @@ export class RunsDashboardService { service: "RunsDashboardService", }); - async upsertRun( + async runAttemptStarted(event: RunDashboardEventRunAttemptStarted) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + attempt: event.run.attemptNumber ?? 1, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + base_cost_in_cents: event.run.baseCostInCents, + executed_at: event.run.executedAt ? 
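Each service method below reduces to the same ClickHouse call: build a flat row, convert every Date to epoch milliseconds with getTime(), insert, and report success via insertResult?.executed. One row as runEvents.insert receives it, for illustration (column names are taken from these calls; the epoch-millisecond convention is an assumption about how the raw_run_events schema ingests DateTime values, since the full SQL is not shown in this part of the series):

const row = {
  environment_id: "env_1",
  organization_id: "org_1",
  project_id: "proj_1",
  run_id: "run_1",
  status: "EXECUTING",
  attempt: 1,
  event_time: Date.now(), // Dates always cross the wire as epoch ms
  updated_at: Date.now(),
  event_name: "attempt_started",
};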
event.run.executedAt.getTime() : undefined, + event_name: "attempt_started", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runAttemptStarted", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + event_name: "enqueued_after_delay", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runEnqueuedAfterDelay", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + delay_until: event.run.delayUntil ? event.run.delayUntil.getTime() : undefined, + event_name: "delay_rescheduled", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runDelayRescheduled", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runLocked(event: RunDashboardEventRunLocked) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + base_cost_in_cents: event.run.baseCostInCents, + task_version: event.run.taskVersion ?? undefined, + sdk_version: event.run.sdkVersion ?? undefined, + cli_version: event.run.cliVersion ?? undefined, + machine_preset: event.run.machinePreset ?? undefined, + executed_at: event.run.startedAt ? 
event.run.startedAt.getTime() : undefined, + event_name: "locked", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runLocked", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runStatusChanged(event: RunDashboardEventRunStatusChanged) { + if (!event.organization.id || !event.project.id || !event.environment.id) { + return false; + } + + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + event_name: "status_changed", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runStatusChanged", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runExpired(event: RunDashboardEventRunExpired) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + expired_at: event.run.expiredAt ? event.run.expiredAt.getTime() : undefined, + event_name: "run_expired", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runExpired", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runSucceeded(event: RunDashboardEventRunSucceeded) { + const output = await this.#prepareOutput(event.run); + + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, + usage_duration_ms: event.run.usageDurationMs, + cost_in_cents: event.run.costInCents, + output: output, + attempt: event.run.attemptNumber, + event_name: "succeeded", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runSucceeded", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runFailed(event: RunDashboardEventRunFailed) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + completed_at: event.run.completedAt ? 
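This is the "more sparse" restructure from the commit message in practice: instead of one wide upsert per status change, each event_name carries only the columns that changed at that moment, while the identifying columns ride on every row. A sketch of the resulting shape as a discriminated union (variants abbreviated; the real column set is whatever each method sends):

type SparseRunEvent =
  | { event_name: "created"; payload?: unknown; tags?: string[] }
  | { event_name: "locked"; task_version?: string; machine_preset?: string }
  | { event_name: "attempt_started"; attempt: number }
  | { event_name: "succeeded"; output?: unknown; usage_duration_ms?: number }
  | { event_name: "failed"; error?: unknown; usage_duration_ms?: number }
  | { event_name: "status_changed" };

// Every variant still carries enough to route and order the event.
type RunEventRow = SparseRunEvent & {
  run_id: string;
  status: string;
  event_time: number; // epoch ms
  updated_at: number; // epoch ms; lets downstream tables pick the latest state
};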
event.run.completedAt.getTime() : undefined, + error: event.run.error, + attempt: event.run.attemptNumber, + usage_duration_ms: event.run.usageDurationMs, + cost_in_cents: event.run.costInCents, + event_name: "failed", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runFailed", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runRetryScheduled(event: RunDashboardEventRunRetryScheduled) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.environment.projectId, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + machine_preset: event.run.nextMachineAfterOOM ?? undefined, + attempt: event.run.attemptNumber, + error: event.run.error, + event_name: "retry_scheduled", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runRetryScheduled", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runCancelled(event: RunDashboardEventRunCancelled) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, + error: event.run.error ? (event.run.error as TaskRunError) : undefined, + attempt: event.run.attemptNumber, + event_name: "cancelled", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runCancelled", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runTagsUpdated(event: RunDashboardEventRunTagsUpdated) { + const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ + environment_id: event.environment.id, + organization_id: event.organization.id, + project_id: event.project.id, + run_id: event.run.id, + status: event.run.status, + event_time: event.time.getTime(), + updated_at: event.run.updatedAt.getTime(), + tags: event.run.tags, + event_name: "tags_updated", + }); + + if (insertError) { + this.logger.error("RunsDashboardService: runTagsUpdated", { + error: insertError, + event, + }); + } + + return insertResult?.executed === true; + } + + async runCreated( eventTime: Date, taskRun: TaskRun, environmentType: RuntimeEnvironmentType, organizationId: string ) { - const [payload, output] = await Promise.all([ - this.#preparePayload(taskRun), - this.#prepareOutput(taskRun), - ]); + const payload = await this.#preparePayload(taskRun); const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ environment_id: taskRun.runtimeEnvironmentId, @@ -49,8 +323,6 @@ export class RunsDashboardService { usage_duration_ms: taskRun.usageDurationMs, tags: taskRun.runTags, payload: payload, - output: output, - error: taskRun.error ? (taskRun.error as TaskRunError) : undefined, task_version: taskRun.taskVersion ?? undefined, sdk_version: taskRun.sdkVersion ?? undefined, cli_version: taskRun.cliVersion ?? undefined, @@ -65,6 +337,7 @@ export class RunsDashboardService { expiration_ttl: taskRun.ttl ?? undefined, cost_in_cents: taskRun.costInCents ?? undefined, base_cost_in_cents: taskRun.baseCostInCents ?? 
undefined, + event_name: "created", }); if (insertError) { @@ -100,7 +373,10 @@ export class RunsDashboardService { return await parsePacket(packet); } - async #prepareOutput(run: TaskRun): Promise { + async #prepareOutput(run: { + output: string | undefined; + outputType: string; + }): Promise { if (!run.output) { return undefined; } @@ -116,16 +392,79 @@ export class RunsDashboardService { return await parsePacket(packet); } + + async #prepareMetadata(run: { + metadata: string | undefined; + metadataType: string; + }): Promise { + if (!run.metadata) { + return undefined; + } + + if (run.metadataType !== "application/json" && run.metadataType !== "application/super+json") { + return undefined; + } + + const packet = { + data: run.metadata, + dataType: run.metadataType, + }; + + return await parsePacket(packet); + } } export type RunDashboardEvents = { - runStatusUpdate: [ + runCreated: [ { time: Date; runId: string; } ]; + runEnqueuedAfterDelay: EventBusEvents["runEnqueuedAfterDelay"]; + runDelayRescheduled: EventBusEvents["runDelayRescheduled"]; + runLocked: EventBusEvents["runLocked"]; + runStatusChanged: EventBusEvents["runStatusChanged"]; + runAttemptStarted: EventBusEvents["runAttemptStarted"]; + runExpired: EventBusEvents["runExpired"]; + runSucceeded: EventBusEvents["runSucceeded"]; + runFailed: EventBusEvents["runFailed"]; + runRetryScheduled: EventBusEvents["runRetryScheduled"]; + runCancelled: EventBusEvents["runCancelled"]; + runTagsUpdated: [ + { + time: Date; + run: { + id: string; + tags: string[]; + status: TaskRunStatus; + updatedAt: Date; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; + }; + } + ]; }; export type RunDashboardEventArgs = RunDashboardEvents[T]; export type RunDashboardEventBus = EventEmitter; +export type RunDashboardEventRunAttemptStarted = RunDashboardEventArgs<"runAttemptStarted">[0]; +export type RunDashboardEventRunCreated = RunDashboardEventArgs<"runCreated">[0]; +export type RunDashboardEventRunEnqueuedAfterDelay = + RunDashboardEventArgs<"runEnqueuedAfterDelay">[0]; +export type RunDashboardEventRunDelayRescheduled = RunDashboardEventArgs<"runDelayRescheduled">[0]; +export type RunDashboardEventRunLocked = RunDashboardEventArgs<"runLocked">[0]; +export type RunDashboardEventRunStatusChanged = RunDashboardEventArgs<"runStatusChanged">[0]; +export type RunDashboardEventRunExpired = RunDashboardEventArgs<"runExpired">[0]; +export type RunDashboardEventRunSucceeded = RunDashboardEventArgs<"runSucceeded">[0]; +export type RunDashboardEventRunFailed = RunDashboardEventArgs<"runFailed">[0]; +export type RunDashboardEventRunRetryScheduled = RunDashboardEventArgs<"runRetryScheduled">[0]; +export type RunDashboardEventRunCancelled = RunDashboardEventArgs<"runCancelled">[0]; +export type RunDashboardEventRunTagsUpdated = RunDashboardEventArgs<"runTagsUpdated">[0]; diff --git a/apps/webapp/app/utils/pathBuilder.ts b/apps/webapp/app/utils/pathBuilder.ts index c4d48c3438..c36204f7a1 100644 --- a/apps/webapp/app/utils/pathBuilder.ts +++ b/apps/webapp/app/utils/pathBuilder.ts @@ -233,6 +233,17 @@ export function v3RunsPath( return `${v3EnvironmentPath(organization, project, environment)}/runs${query}`; } +export function v3RunsNextPath( + organization: OrgForPath, + project: ProjectForPath, + environment: EnvironmentForPath, + filters?: TaskRunListSearchFilters +) { + const searchParams = objectToSearchParams(filters); + const query = searchParams ? 
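The exported event types at the bottom of the file are derived rather than written twice: indexed access into the event map pulls out the argument tuple, and [0] selects its single element. The trick in isolation:

type Events = {
  runLocked: [{ time: Date; run: { id: string } }];
};

type EventArgs<T extends keyof Events> = Events[T];

// Resolves to { time: Date; run: { id: string } }, and stays in sync with
// the event map automatically, which is the point of deriving it.
type RunLockedEvent = EventArgs<"runLocked">[0];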
`?${searchParams.toString()}` : ""; + return `${v3EnvironmentPath(organization, project, environment)}/runs/next${query}`; +} + export function v3RunPath( organization: OrgForPath, project: ProjectForPath, diff --git a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts index 0d4ab32666..aaacbee7fb 100644 --- a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts @@ -6,24 +6,24 @@ import { TaskRunFailedExecutionResult, serverWebsocketMessages, } from "@trigger.dev/core/v3"; +import { getMaxDuration } from "@trigger.dev/core/v3/isomorphic"; import { ZodMessageSender } from "@trigger.dev/core/v3/zodMessageHandler"; import { BackgroundWorker, BackgroundWorkerTask } from "@trigger.dev/database"; import { z } from "zod"; import { prisma } from "~/db.server"; import { createNewSession, disconnectSession } from "~/models/runtimeEnvironment.server"; +import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.server"; +import { RedisClient, createRedisClient } from "~/redis.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; +import { emitRunLocked } from "~/services/runsDashboardInstance.server"; import { marqs } from "~/v3/marqs/index.server"; import { resolveVariablesForEnvironment } from "../environmentVariables/environmentVariablesRepository.server"; import { FailedTaskRunService } from "../failedTaskRun.server"; import { CancelDevSessionRunsService } from "../services/cancelDevSessionRuns.server"; import { CompleteAttemptService } from "../services/completeAttempt.server"; import { attributesFromAuthenticatedEnv, tracer } from "../tracer.server"; -import { getMaxDuration } from "@trigger.dev/core/v3/isomorphic"; import { DevSubscriber, devPubSub } from "./devPubSub.server"; -import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.server"; -import { createRedisClient, RedisClient } from "~/redis.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; const MessageBody = z.discriminatedUnion("type", [ z.object({ @@ -441,19 +441,22 @@ export class DevQueueConsumer { return; } + const lockedAt = new Date(); + const startedAt = existingTaskRun.startedAt ?? new Date(); + const lockedTaskRun = await prisma.taskRun.update({ where: { id: message.messageId, }, data: { - lockedAt: new Date(), + lockedAt, lockedById: backgroundTask.id, status: "EXECUTING", lockedToVersionId: backgroundWorker.id, taskVersion: backgroundWorker.version, sdkVersion: backgroundWorker.sdkVersion, cliVersion: backgroundWorker.cliVersion, - startedAt: existingTaskRun.startedAt ?? new Date(), + startedAt, maxDurationInSeconds: getMaxDuration( existingTaskRun.maxDurationInSeconds, backgroundTask.maxDurationInSeconds @@ -540,7 +543,34 @@ export class DevQueueConsumer { messageId: message.messageId, }); - emitRunStatusUpdate(lockedTaskRun.id); + emitRunLocked({ + time: new Date(), + run: { + id: lockedTaskRun.id, + updatedAt: lockedTaskRun.updatedAt, + status: lockedTaskRun.status, + lockedAt, + lockedById: backgroundTask.id, + lockedToVersionId: backgroundWorker.id, + lockedQueueId: queue.id, + startedAt, + maxDurationInSeconds: lockedTaskRun.maxDurationInSeconds ?? 
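The devQueueConsumer hunk hoists lockedAt and startedAt into locals before the update so the UPDATE and the emitted runLocked event are built from the same values and cannot drift apart. The lock step distilled to a standalone sketch with persistence and emit stubbed out (all names hypothetical):

type LockPatch = { lockedAt: Date; startedAt: Date; status: "EXECUTING" };

async function lockRun(
  runId: string,
  previousStartedAt: Date | null,
  persist: (id: string, patch: LockPatch) => Promise<void>,
  emitLocked: (event: { runId: string; lockedAt: Date; startedAt: Date }) => void
) {
  const lockedAt = new Date();
  const startedAt = previousStartedAt ?? new Date(); // keep the original start on re-lock

  await persist(runId, { lockedAt, startedAt, status: "EXECUTING" });

  // The very same Date objects the write used; no second new Date() here.
  emitLocked({ runId, lockedAt, startedAt });
}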
undefined, + taskVersion: backgroundWorker.version, + sdkVersion: backgroundWorker.sdkVersion, + cliVersion: backgroundWorker.cliVersion, + baseCostInCents: lockedTaskRun.baseCostInCents, + machinePreset: lockedTaskRun.machinePreset ?? "small-1x", + }, + organization: { + id: this.env.organizationId, + }, + project: { + id: this.env.projectId, + }, + environment: { + id: this.env.id, + }, + }); this._inProgressRuns.set(lockedTaskRun.friendlyId, message.messageId); } catch (e) { diff --git a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts index a72afd4413..c2a0b39bb0 100644 --- a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts @@ -37,6 +37,7 @@ import { findEnvironmentById } from "~/models/runtimeEnvironment.server"; import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.server"; import { generateJWTTokenForEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; +import { emitRunLocked, emitRunStatusChanged } from "~/services/runsDashboardInstance.server"; import { singleton } from "~/utils/singleton"; import { marqs } from "~/v3/marqs/index.server"; import { @@ -66,7 +67,6 @@ import { import { tracer } from "../tracer.server"; import { getMaxDuration } from "../utils/maxDuration"; import { MessagePayload } from "./types"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; const WithTraceContext = z.object({ traceparent: z.string().optional(), @@ -708,26 +708,32 @@ export class SharedQueueConsumer { }; } + const lockedAt = new Date(); + const machinePreset = + existingTaskRun.machinePreset ?? + machinePresetFromConfig(backgroundTask.machineConfig ?? {}).name; + const maxDurationInSeconds = getMaxDuration( + existingTaskRun.maxDurationInSeconds, + backgroundTask.maxDurationInSeconds + ); + const startedAt = existingTaskRun.startedAt ?? dequeuedAt; + const baseCostInCents = env.CENTS_PER_RUN; + const lockedTaskRun = await prisma.taskRun.update({ where: { id: message.messageId, }, data: { - lockedAt: new Date(), + lockedAt, lockedById: backgroundTask.id, lockedToVersionId: worker.id, taskVersion: worker.version, sdkVersion: worker.sdkVersion, cliVersion: worker.cliVersion, - startedAt: existingTaskRun.startedAt ?? dequeuedAt, - baseCostInCents: env.CENTS_PER_RUN, - machinePreset: - existingTaskRun.machinePreset ?? - machinePresetFromConfig(backgroundTask.machineConfig ?? 
{}).name, - maxDurationInSeconds: getMaxDuration( - existingTaskRun.maxDurationInSeconds, - backgroundTask.maxDurationInSeconds - ), + startedAt: startedAt, + baseCostInCents: baseCostInCents, + machinePreset: machinePreset, + maxDurationInSeconds, }, include: { runtimeEnvironment: true, @@ -842,8 +848,6 @@ export class SharedQueueConsumer { }; } - emitRunStatusUpdate(lockedTaskRun.id); - return { action: "noop", reason: "restored_checkpoint", @@ -925,7 +929,36 @@ export class SharedQueueConsumer { }, }); - emitRunStatusUpdate(lockedTaskRun.id); + if (lockedTaskRun.organizationId) { + emitRunLocked({ + time: new Date(), + run: { + id: lockedTaskRun.id, + status: lockedTaskRun.status, + updatedAt: lockedTaskRun.updatedAt, + lockedAt, + lockedById: backgroundTask.id, + lockedToVersionId: worker.id, + lockedQueueId: queue.id, + startedAt, + baseCostInCents, + machinePreset, + maxDurationInSeconds, + taskVersion: worker.version, + sdkVersion: worker.sdkVersion, + cliVersion: worker.cliVersion, + }, + organization: { + id: lockedTaskRun.organizationId, + }, + project: { + id: lockedTaskRun.projectId, + }, + environment: { + id: lockedTaskRun.runtimeEnvironmentId, + }, + }); + } return { action: "noop", @@ -1435,7 +1468,7 @@ export class SharedQueueConsumer { async #markRunAsWaitingForDeploy(runId: string) { logger.debug("Marking run as waiting for deploy", { runId }); - await prisma.taskRun.update({ + const run = await prisma.taskRun.update({ where: { id: runId, }, @@ -1444,7 +1477,25 @@ export class SharedQueueConsumer { }, }); - emitRunStatusUpdate(runId); + if (run.organizationId) { + emitRunStatusChanged({ + time: new Date(), + run: { + id: runId, + status: "WAITING_FOR_DEPLOY", + updatedAt: run.updatedAt, + }, + organization: { + id: run.organizationId, + }, + project: { + id: run.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, + }, + }); + } } async #resolveCompletedAttemptsForResumeMessage( diff --git a/apps/webapp/app/v3/services/completeAttempt.server.ts b/apps/webapp/app/v3/services/completeAttempt.server.ts index 1769f79250..5367afce12 100644 --- a/apps/webapp/app/v3/services/completeAttempt.server.ts +++ b/apps/webapp/app/v3/services/completeAttempt.server.ts @@ -10,7 +10,6 @@ import { TaskRunFailedExecutionResult, TaskRunSuccessfulExecutionResult, flattenAttributes, - isManualOutOfMemoryError, isOOMRunError, sanitizeError, shouldRetryError, @@ -22,19 +21,19 @@ import { PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; +import { emitRunRetryScheduled } from "~/services/runsDashboardInstance.server"; import { safeJsonParse } from "~/utils/json"; import { marqs } from "~/v3/marqs/index.server"; import { createExceptionPropertiesFromError, eventRepository } from "../eventRepository.server"; import { FailedTaskRunRetryHelper } from "../failedTaskRun.server"; +import { socketIo } from "../handleSocketIo.server"; +import { getTaskEventStoreTableForRun } from "../taskEventStore.server"; import { FAILED_RUN_STATUSES, isFinalAttemptStatus, isFinalRunStatus } from "../taskStatus"; import { BaseService } from "./baseService.server"; import { CancelAttemptService } from "./cancelAttempt.server"; import { CreateCheckpointService } from "./createCheckpoint.server"; import { FinalizeTaskRunService } from "./finalizeTaskRun.server"; import { RetryAttemptService } from "./retryAttempt.server"; -import { 
getTaskEventStoreTableForRun } from "../taskEventStore.server"; -import { socketIo } from "../handleSocketIo.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; type FoundAttempt = Awaited>; @@ -314,6 +313,7 @@ export class CompleteAttemptService extends BaseService { checkpoint, forceRequeue: isOOMRetry, oomMachine, + error: sanitizedError, }); } @@ -560,6 +560,7 @@ export class CompleteAttemptService extends BaseService { checkpoint, forceRequeue = false, oomMachine, + error, }: { execution: TaskRunExecution; executionRetry: TaskRunExecutionRetry; @@ -570,6 +571,7 @@ export class CompleteAttemptService extends BaseService { forceRequeue?: boolean; /** Setting this will also alter the retry span message */ oomMachine?: MachinePresetName; + error: TaskRunError; }) { const retryAt = new Date(executionRetry.timestamp); @@ -615,7 +617,30 @@ export class CompleteAttemptService extends BaseService { }, }); - emitRunStatusUpdate(taskRunAttempt.taskRunId); + emitRunRetryScheduled({ + time: new Date(), + run: { + id: taskRunAttempt.taskRunId, + status: "RETRYING_AFTER_FAILURE", + friendlyId: taskRunAttempt.taskRun.friendlyId, + spanId: taskRunAttempt.taskRun.spanId, + attemptNumber: execution.attempt.number, + queue: taskRunAttempt.taskRun.queue, + traceContext: taskRunAttempt.taskRun.traceContext as Record, + taskIdentifier: taskRunAttempt.taskRun.taskIdentifier, + baseCostInCents: taskRunAttempt.taskRun.baseCostInCents, + updatedAt: taskRunAttempt.taskRun.updatedAt, + error, + }, + organization: { + id: environment.organizationId, + }, + environment: { + ...environment, + orgMember: environment.orgMember ?? null, + }, + retryAt, + }); if (environment.type === "DEVELOPMENT") { await marqs.requeueMessage(taskRunAttempt.taskRunId, {}, executionRetry.timestamp, "retry"); diff --git a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts index 9727e85bea..7965a01e5a 100644 --- a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts +++ b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts @@ -2,17 +2,17 @@ import { parsePacket, TaskRunExecution } from "@trigger.dev/core/v3"; import { TaskRun, TaskRunAttempt } from "@trigger.dev/database"; import { MAX_TASK_RUN_ATTEMPTS } from "~/consts"; import { $transaction, prisma, PrismaClientOrTransaction } from "~/db.server"; +import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { reportInvocationUsage } from "~/services/platform.v3.server"; +import { emitRunAttemptStarted } from "~/services/runsDashboardInstance.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { machinePresetFromConfig, machinePresetFromRun } from "../machinePresets.server"; +import { FINAL_RUN_STATUSES } from "../taskStatus"; import { BaseService, ServiceValidationError } from "./baseService.server"; import { CrashTaskRunService } from "./crashTaskRun.server"; import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; -import { findQueueInEnvironment } from "~/models/taskQueue.server"; -import { FINAL_RUN_STATUSES } from "../taskStatus"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class CreateTaskRunAttemptService extends BaseService { public async call({ @@ -182,7 +182,27 @@ export class CreateTaskRunAttemptService extends BaseService { }); } - 
emitRunStatusUpdate(taskRun.id); + emitRunAttemptStarted({ + time: new Date(), + run: { + id: taskRun.id, + status: taskRun.status, + createdAt: taskRun.createdAt, + updatedAt: taskRun.updatedAt, + attemptNumber: taskRunAttempt.number, + baseCostInCents: taskRun.baseCostInCents, + executedAt: taskRun.executedAt ?? undefined, + }, + organization: { + id: environment.organizationId, + }, + project: { + id: environment.projectId, + }, + environment: { + id: environment.id, + }, + }); const machinePreset = machinePresetFromRun(taskRun) ?? machinePresetFromConfig(lockedBy.machineConfig ?? {}); diff --git a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts index 44f03acae4..357be06d16 100644 --- a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts +++ b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts @@ -1,13 +1,11 @@ import { parseNaturalLanguageDuration } from "@trigger.dev/core/v3/isomorphic"; -import { $transaction } from "~/db.server"; import { logger } from "~/services/logger.server"; -import { marqs } from "~/v3/marqs/index.server"; -import { BaseService } from "./baseService.server"; -import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; -import { commonWorker } from "../commonWorker.server"; +import { emitRunEnqueuedAfterDelay } from "~/services/runsDashboardInstance.server"; import { workerQueue } from "~/services/worker.server"; +import { commonWorker } from "../commonWorker.server"; +import { BaseService } from "./baseService.server"; import { enqueueRun } from "./enqueueRun.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; +import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; export class EnqueueDelayedRunService extends BaseService { public static async enqueue(runId: string, runAt?: Date) { @@ -83,27 +81,44 @@ export class EnqueueDelayedRunService extends BaseService { return; } - await $transaction(this._prisma, "delayed run enqueue", async (tx) => { - await tx.taskRun.update({ - where: { - id: run.id, - }, - data: { - status: "PENDING", - queuedAt: new Date(), - }, - }); + await this._prisma.taskRun.update({ + where: { + id: run.id, + }, + data: { + status: "PENDING", + queuedAt: new Date(), + }, + }); - if (run.ttl) { - const expireAt = parseNaturalLanguageDuration(run.ttl); + if (run.ttl) { + const expireAt = parseNaturalLanguageDuration(run.ttl); - if (expireAt) { - await ExpireEnqueuedRunService.enqueue(run.id, expireAt); - } + if (expireAt) { + await ExpireEnqueuedRunService.enqueue(run.id, expireAt); } - }); + } - emitRunStatusUpdate(run.id); + if (run.organizationId) { + emitRunEnqueuedAfterDelay({ + time: new Date(), + run: { + id: run.id, + status: run.status, + queuedAt: run.queuedAt ?? 
new Date(), + updatedAt: run.updatedAt, + }, + organization: { + id: run.organizationId, + }, + project: { + id: run.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, + }, + }); + } await enqueueRun({ env: run.runtimeEnvironment, diff --git a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts index 4e02cf7b8c..39ffcb2655 100644 --- a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts +++ b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts @@ -1,10 +1,10 @@ import { PrismaClientOrTransaction } from "~/db.server"; +import { env } from "~/env.server"; +import { logger } from "~/services/logger.server"; +import { emitRunStatusChanged } from "~/services/runsDashboardInstance.server"; import { workerQueue } from "~/services/worker.server"; import { marqs } from "~/v3/marqs/index.server"; import { BaseService } from "./baseService.server"; -import { logger } from "~/services/logger.server"; -import { env } from "~/env.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class ExecuteTasksWaitingForDeployService extends BaseService { public async call(backgroundWorkerId: string) { @@ -80,7 +80,23 @@ export class ExecuteTasksWaitingForDeployService extends BaseService { } for (const run of runsWaitingForDeploy) { - emitRunStatusUpdate(run.id); + emitRunStatusChanged({ + time: new Date(), + run: { + id: run.id, + status: run.status, + updatedAt: new Date(), + }, + organization: { + id: backgroundWorker.runtimeEnvironment.organizationId, + }, + project: { + id: backgroundWorker.runtimeEnvironment.projectId, + }, + environment: { + id: backgroundWorker.runtimeEnvironment.id, + }, + }); } for (const run of runsWaitingForDeploy) { diff --git a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts index e321a7e773..c572b35a29 100644 --- a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts +++ b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts @@ -4,6 +4,7 @@ import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { updateMetadataService } from "~/services/metadata/updateMetadata.server"; +import { emitRunFailed, emitRunSucceeded } from "~/services/runsDashboardInstance.server"; import { marqs } from "~/v3/marqs/index.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { socketIo } from "../handleSocketIo.server"; @@ -19,7 +20,6 @@ import { completeBatchTaskRunItemV3 } from "./batchTriggerV3.server"; import { ExpireEnqueuedRunService } from "./expireEnqueuedRun.server"; import { ResumeBatchRunService } from "./resumeBatchRun.server"; import { ResumeDependentParentsService } from "./resumeDependentParents.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; type BaseInput = { id: string; @@ -95,13 +95,70 @@ export class FinalizeTaskRunService extends BaseService { // - A single update is more efficient than two // - If the status updates to a final status, realtime will receive that status and then shut down the stream // before the error is updated, which would cause the error to be lost + const taskRunError = error ? sanitizeError(error) : undefined; + const run = await this._prisma.taskRun.update({ where: { id }, - data: { status, expiredAt, completedAt, error: error ? 
sanitizeError(error) : undefined }, + data: { status, expiredAt, completedAt, error: taskRunError }, ...(include ? { include } : {}), }); - emitRunStatusUpdate(run.id); + if (run.organizationId) { + if (status === "COMPLETED_SUCCESSFULLY") { + emitRunSucceeded({ + time: new Date(), + run: { + id: run.id, + status: run.status, + spanId: run.spanId, + output: run.output ?? undefined, + outputType: run.outputType, + taskEventStore: run.taskEventStore, + createdAt: run.createdAt, + completedAt: run.completedAt, + updatedAt: run.updatedAt, + attemptNumber: run.attemptNumber ?? 1, + usageDurationMs: run.usageDurationMs, + costInCents: run.costInCents, + }, + organization: { + id: run.organizationId, + }, + project: { + id: run.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, + }, + }); + } else if (taskRunError) { + emitRunFailed({ + time: new Date(), + run: { + id: run.id, + status: run.status, + spanId: run.spanId, + error: taskRunError, + taskEventStore: run.taskEventStore, + createdAt: run.createdAt, + completedAt: run.completedAt, + updatedAt: run.updatedAt, + attemptNumber: run.attemptNumber ?? 1, + usageDurationMs: run.usageDurationMs, + costInCents: run.costInCents, + }, + organization: { + id: run.organizationId, + }, + project: { + id: run.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, + }, + }); + } + } if (run.ttl) { await ExpireEnqueuedRunService.ack(run.id); diff --git a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts index cc65e11c24..7cf00603ab 100644 --- a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts +++ b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts @@ -1,9 +1,9 @@ import { RescheduleRunRequestBody } from "@trigger.dev/core/v3"; import { TaskRun } from "@trigger.dev/database"; +import { emitRunDelayRescheduled } from "~/services/runsDashboardInstance.server"; import { parseDelay } from "~/utils/delays"; import { BaseService, ServiceValidationError } from "./baseService.server"; import { EnqueueDelayedRunService } from "./enqueueDelayedRun.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; export class RescheduleTaskRunService extends BaseService { public async call(taskRun: TaskRun, body: RescheduleRunRequestBody) { @@ -26,7 +26,26 @@ export class RescheduleTaskRunService extends BaseService { }, }); - emitRunStatusUpdate(taskRun.id); + if (taskRun.organizationId) { + emitRunDelayRescheduled({ + time: new Date(), + run: { + id: taskRun.id, + status: taskRun.status, + delayUntil: delay, + updatedAt: updatedRun.updatedAt, + }, + organization: { + id: taskRun.organizationId, + }, + project: { + id: taskRun.projectId, + }, + environment: { + id: taskRun.runtimeEnvironmentId, + }, + }); + } await EnqueueDelayedRunService.reschedule(taskRun.id, delay); diff --git a/apps/webapp/app/v3/services/triggerScheduledTask.server.ts b/apps/webapp/app/v3/services/triggerScheduledTask.server.ts index 8e27d3ec14..f20b04c3db 100644 --- a/apps/webapp/app/v3/services/triggerScheduledTask.server.ts +++ b/apps/webapp/app/v3/services/triggerScheduledTask.server.ts @@ -8,7 +8,6 @@ import { nextScheduledTimestamps } from "../utils/calculateNextSchedule.server"; import { BaseService } from "./baseService.server"; import { RegisterNextTaskScheduleInstanceService } from "./registerNextTaskScheduleInstance.server"; import { TriggerTaskService } from "./triggerTask.server"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; 
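// Illustrative sketch, not part of this patch: the finalize path above folds
// status, completedAt and error into a single taskRun UPDATE so a realtime
// subscriber can never observe a final status without its error, and only
// emits to the dashboard from the row returned by that one write. A minimal
// standalone version of that ordering (the `updateRun` and `emitFinal`
// parameters are hypothetical stand-ins, not the webapp's actual API):

type FinalRun = { id: string; status: string; error?: string };

async function finalizeThenEmit(
  updateRun: (data: FinalRun) => Promise<FinalRun>,
  emitFinal: (run: FinalRun) => void,
  id: string,
  status: string,
  rawError?: string
) {
  // One write: status and error become visible to readers atomically.
  const run = await updateRun({ id, status, error: rawError });
  // Emit only after the write, from the persisted row rather than the inputs.
  emitFinal(run);
  return run;
}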
export class TriggerScheduledTaskService extends BaseService { public async call(instanceId: string, finalAttempt: boolean) { diff --git a/apps/webapp/app/v3/services/triggerTaskV1.server.ts b/apps/webapp/app/v3/services/triggerTaskV1.server.ts index b31f1a677b..5bf6971245 100644 --- a/apps/webapp/app/v3/services/triggerTaskV1.server.ts +++ b/apps/webapp/app/v3/services/triggerTaskV1.server.ts @@ -43,7 +43,7 @@ import { import { getTaskEventStore } from "../taskEventStore.server"; import { enqueueRun } from "./enqueueRun.server"; import { z } from "zod"; -import { emitRunStatusUpdate } from "~/services/runsDashboardInstance.server"; +import { emitRunCreated } from "~/services/runsDashboardInstance.server"; // This is here for backwards compatibility for v3 users const QueueOptions = z.object({ @@ -606,7 +606,7 @@ export class TriggerTaskServiceV1 extends BaseService { return; } - emitRunStatusUpdate(run.id); + emitRunCreated(run.createdAt, run.id); return { run, diff --git a/internal-packages/clickhouse/004_create_run_latest_v1.sql b/internal-packages/clickhouse/004_create_run_latest_v1.sql new file mode 100644 index 0000000000..db2770b68a --- /dev/null +++ b/internal-packages/clickhouse/004_create_run_latest_v1.sql @@ -0,0 +1,101 @@ +-- +goose Up +/* one immutable row = the latest state we know about a run */ +CREATE TABLE trigger_dev.run_latest_v1 +( + -- identifiers / partition keys + organization_id String, + project_id String, + environment_id String, + run_id String, + friendly_id String, + last_event_time DateTime64(3), + + -- user-visible fields + status Enum8( + 'DELAYED'=1,'PENDING'=2,'PENDING_VERSION'=3, + 'WAITING_FOR_DEPLOY'=4,'EXECUTING'=5,'WAITING_TO_RESUME'=6, + 'RETRYING_AFTER_FAILURE'=7,'PAUSED'=8, + 'CANCELED'=9,'INTERRUPTED'=10, + 'COMPLETED_SUCCESSFULLY'=11,'COMPLETED_WITH_ERRORS'=12, + 'SYSTEM_FAILURE'=13,'CRASHED'=14,'EXPIRED'=15,'TIMED_OUT'=16), + task_identifier String, + task_version Nullable(String), + queue String, + schedule_id Nullable(String), + batch_id Nullable(String), + + root_run_id Nullable(String), + depth UInt8, + is_test UInt8, + + created_at DateTime64(3), + updated_at DateTime64(3), + started_at Nullable(DateTime64(3)), + completed_at Nullable(DateTime64(3)), + delay_until Nullable(DateTime64(3)), + + usage_duration_ms UInt32, + cost_in_cents Float64, + base_cost_in_cents Float64, + + ttl Nullable(String), + expired_at Nullable(DateTime64(3)), + + span_id Nullable(String), + idempotency_key Nullable(String), + + tags Array(String) CODEC(ZSTD(1)), + + _version DateTime64(3) -- used by ReplacingMergeTree dedupe +) +ENGINE = ReplacingMergeTree(_version) +PARTITION BY toYYYYMMDD(last_event_time) +ORDER BY (project_id, environment_id, last_event_time, run_id); + +CREATE MATERIALIZED VIEW trigger_dev.mv_run_latest_v1 +TO trigger_dev.run_latest_v1 +AS +SELECT + organization_id, + project_id, + environment_id, + run_id, + anyLast(friendly_id) AS friendly_id, + anyLast(status) AS status, + anyLast(task_identifier) AS task_identifier, + argMax(task_version, event_time) AS task_version, + argMax(queue, event_time) AS queue, + argMax(schedule_id, event_time) AS schedule_id, + argMax(batch_id, event_time) AS batch_id, + anyLast(root_run_id) AS root_run_id, + anyLast(depth) AS depth, + anyLast(is_test) AS is_test, + + min(created_at) AS created_at, + argMax(updated_at, event_time) AS updated_at, + argMax(started_at, event_time) AS started_at, + argMax(completed_at, event_time) AS completed_at, + argMax(delay_until, event_time) AS delay_until, + + 
argMax(usage_duration_ms,event_time) AS usage_duration_ms, + argMax(cost_in_cents, event_time) AS cost_in_cents, + argMax(base_cost_in_cents,event_time) AS base_cost_in_cents, + argMax(ttl, event_time) AS ttl, + argMax(expired_at, event_time) AS expired_at, + argMax(span_id, event_time) AS span_id, + argMax(idempotency_key, event_time) AS idempotency_key, + argMaxIf(tags, updated_at, notEmpty(tags)) AS tags, + + max(event_time) AS last_event_time, + max(event_time) AS _version -- for RMTree +FROM trigger_dev.raw_run_events_v1 +GROUP BY + organization_id, + project_id, + environment_id, + run_id; + +-- +goose Down + +DROP MATERIALIZED VIEW trigger_dev.mv_run_latest_v1; +DROP TABLE trigger_dev.run_latest_v1; diff --git a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql index 6e820c361f..984370748b 100644 --- a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql @@ -17,15 +17,17 @@ CREATE TABLE trigger_dev.raw_run_events_v1 ( /* ─── ids & hierarchy ─────────────────────────────────────── */ environment_id String, - environment_type LowCardinality(String), organization_id String, project_id String, run_id String, - friendly_id String, + event_name LowCardinality(String), + + environment_type LowCardinality(Nullable(String)), + friendly_id Nullable(String), attempt UInt8 DEFAULT 1, /* ─── enums / status ──────────────────────────────────────── */ - engine Enum8('V1'=1,'V2'=2) + engine Nullable(Enum8('V1'=1,'V2'=2)) CODEC(T64, LZ4), status Enum8( -- TaskRunStatus 'DELAYED'=1, @@ -46,8 +48,8 @@ CREATE TABLE trigger_dev.raw_run_events_v1 'TIMED_OUT'=16), /* ─── queue / concurrency / schedule ─────────────────────── */ - task_identifier String, - queue String, + task_identifier Nullable(String), + queue Nullable(String), schedule_id Nullable(String), batch_id Nullable(String), @@ -64,7 +66,7 @@ CREATE TABLE trigger_dev.raw_run_events_v1 /* ─── timing ─────────────────────────────────────────────── */ event_time DateTime64(3), -- when this row created - created_at DateTime64(3), + created_at Nullable(DateTime64(3)), updated_at DateTime64(3), started_at Nullable(DateTime64(3)), executed_at Nullable(DateTime64(3)), @@ -93,7 +95,7 @@ CREATE TABLE trigger_dev.raw_run_events_v1 cli_version Nullable(String) CODEC(LZ4), machine_preset LowCardinality(Nullable(String)) CODEC(LZ4), - is_test Nullable(UInt8) DEFAULT 0, + is_test UInt8 DEFAULT 0, ) ENGINE = MergeTree PARTITION BY toYYYYMMDD(event_time) diff --git a/internal-packages/clickhouse/src/runEvents.test.ts b/internal-packages/clickhouse/src/runEvents.test.ts index 6ab2ef7a0a..b34dbb390b 100644 --- a/internal-packages/clickhouse/src/runEvents.test.ts +++ b/internal-packages/clickhouse/src/runEvents.test.ts @@ -17,6 +17,7 @@ describe("Run Events", () => { const [insertError, insertResult] = await insert([ { environment_id: "env_1234", + event_name: "RUN_STATUS_CHANGED", environment_type: "DEVELOPMENT", organization_id: "org_1234", project_id: "project_1234", diff --git a/internal-packages/clickhouse/src/runEvents.ts b/internal-packages/clickhouse/src/runEvents.ts index 0d73cf3cdf..a7abcc4c2f 100644 --- a/internal-packages/clickhouse/src/runEvents.ts +++ b/internal-packages/clickhouse/src/runEvents.ts @@ -5,13 +5,12 @@ import { TaskRunError } from "@trigger.dev/core/v3/schemas"; export const RawRunEventV1 = z.object({ environment_id: z.string(), - environment_type: 
z.string(), organization_id: z.string(), project_id: z.string(), run_id: z.string(), - friendly_id: z.string(), - attempt: z.number().int().default(1), - engine: z.enum(["V1", "V2"]), + updated_at: z.number().int(), + event_time: z.number().int(), + event_name: z.string(), status: z.enum([ "DELAYED", "PENDING", @@ -30,13 +29,16 @@ export const RawRunEventV1 = z.object({ "EXPIRED", "TIMED_OUT", ]), - task_identifier: z.string(), - queue: z.string(), + /* ─── optional fields ─────────────────────────────────────────────── */ + created_at: z.number().int().optional(), + environment_type: z.string().optional(), + friendly_id: z.string().optional(), + attempt: z.number().int().default(1), + engine: z.enum(["V1", "V2"]).optional(), + task_identifier: z.string().optional(), + queue: z.string().optional(), schedule_id: z.string().optional(), batch_id: z.string().optional(), - event_time: z.number().int(), - created_at: z.number().int(), - updated_at: z.number().int(), completed_at: z.number().int().optional(), started_at: z.number().int().optional(), executed_at: z.number().int().optional(), @@ -49,7 +51,10 @@ export const RawRunEventV1 = z.object({ payload: z.unknown().optional(), output: z.unknown().optional(), error: TaskRunError.optional(), - tags: z.array(z.string()).transform((arr) => arr.sort()), + tags: z + .array(z.string()) + .transform((arr) => arr.sort()) + .optional(), task_version: z.string().optional(), sdk_version: z.string().optional(), cli_version: z.string().optional(), diff --git a/internal-packages/run-engine/src/engine/eventBus.ts b/internal-packages/run-engine/src/engine/eventBus.ts index 6047cd5694..963841a75e 100644 --- a/internal-packages/run-engine/src/engine/eventBus.ts +++ b/internal-packages/run-engine/src/engine/eventBus.ts @@ -1,26 +1,130 @@ -import { TaskRun, TaskRunExecutionStatus, TaskRunStatus } from "@trigger.dev/database"; -import { AuthenticatedEnvironment, MinimalAuthenticatedEnvironment } from "../shared/index.js"; import { FlushedRunMetadata, TaskRunError } from "@trigger.dev/core/v3"; +import { + RuntimeEnvironmentType, + TaskRunExecutionStatus, + TaskRunStatus, +} from "@trigger.dev/database"; import { EventEmitter } from "events"; +import { AuthenticatedEnvironment } from "../shared/index.js"; export type EventBusEvents = { - runStatusChanged: [ + runCreated: [ { time: Date; runId: string; }, ]; + runEnqueuedAfterDelay: [ + { + time: Date; + run: { + id: string; + status: TaskRunStatus; + queuedAt: Date; + updatedAt: Date; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; + }; + }, + ]; + runDelayRescheduled: [ + { + time: Date; + run: { + id: string; + status: TaskRunStatus; + delayUntil: Date; + updatedAt: Date; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; + }; + }, + ]; + runLocked: [ + { + time: Date; + run: { + id: string; + updatedAt: Date; + status: TaskRunStatus; + lockedAt: Date; + lockedById: string; + lockedToVersionId: string; + lockedQueueId: string; + startedAt: Date; + baseCostInCents: number; + machinePreset: string; + taskVersion: string; + sdkVersion: string; + cliVersion: string; + maxDurationInSeconds?: number; + maxAttempts?: number; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; + }; + }, + ]; + runStatusChanged: [ + { + time: Date; + run: { + id: string; + status: TaskRunStatus; + updatedAt: Date; + }; + organization: { + id?: string; + }; + project: { + id: 
string; + }; + environment: { + id: string; + }; + }, + ]; runAttemptStarted: [ { time: Date; run: { id: string; + status: TaskRunStatus; + createdAt: Date; + updatedAt: Date; attemptNumber: number; baseCostInCents: number; + executedAt: Date | undefined; }; organization: { id: string; }; + project: { + id: string; + }; + environment: { + id: string; + }; }, ]; runAttemptFailed: [ @@ -35,6 +139,7 @@ export type EventBusEvents = { taskEventStore: string; createdAt: Date; completedAt: Date | null; + updatedAt: Date; }; }, ]; @@ -43,11 +148,23 @@ export type EventBusEvents = { time: Date; run: { id: string; + status: TaskRunStatus; spanId: string; ttl: string | null; taskEventStore: string; createdAt: Date; completedAt: Date | null; + expiredAt: Date | null; + updatedAt: Date; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; }; }, ]; @@ -56,12 +173,26 @@ export type EventBusEvents = { time: Date; run: { id: string; + status: TaskRunStatus; spanId: string; output: string | undefined; outputType: string; taskEventStore: string; createdAt: Date; completedAt: Date | null; + usageDurationMs: number; + costInCents: number; + updatedAt: Date; + attemptNumber: number; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; }; }, ]; @@ -76,6 +207,19 @@ export type EventBusEvents = { taskEventStore: string; createdAt: Date; completedAt: Date | null; + updatedAt: Date; + attemptNumber: number; + usageDurationMs: number; + costInCents: number; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; }; }, ]; @@ -84,6 +228,7 @@ export type EventBusEvents = { time: Date; run: { id: string; + status: TaskRunStatus; friendlyId: string; spanId: string; attemptNumber: number; @@ -92,6 +237,8 @@ export type EventBusEvents = { taskIdentifier: string; baseCostInCents: number; nextMachineAfterOOM?: string; + updatedAt: Date; + error: TaskRunError; }; organization: { id: string; @@ -105,12 +252,24 @@ export type EventBusEvents = { time: Date; run: { id: string; + status: TaskRunStatus; friendlyId: string; spanId: string; error: TaskRunError; taskEventStore: string; createdAt: Date; completedAt: Date | null; + updatedAt: Date; + attemptNumber: number; + }; + organization: { + id: string; + }; + project: { + id: string; + }; + environment: { + id: string; }; }, ]; diff --git a/internal-packages/run-engine/src/engine/index.ts b/internal-packages/run-engine/src/engine/index.ts index 75c6635ae9..e1d1e0419b 100644 --- a/internal-packages/run-engine/src/engine/index.ts +++ b/internal-packages/run-engine/src/engine/index.ts @@ -554,7 +554,7 @@ export class RunEngine { } }); - this.eventBus.emit("runStatusChanged", { + this.eventBus.emit("runCreated", { time: new Date(), runId: taskRun.id, }); diff --git a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts index 6471b591a0..f806db4ec3 100644 --- a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts @@ -145,7 +145,20 @@ export class CheckpointSystem { this.$.eventBus.emit("runStatusChanged", { time: new Date(), - runId, + run: { + id: runId, + status: run.status, + updatedAt: run.updatedAt, + }, + organization: { + id: run.runtimeEnvironment.organizationId, + }, + project: { + id: run.runtimeEnvironment.projectId, + }, + environment: { + id: 
run.runtimeEnvironment.id, + }, }); // Create the checkpoint @@ -266,6 +279,10 @@ export class CheckpointSystem { id: true, status: true, attemptNumber: true, + organizationId: true, + runtimeEnvironmentId: true, + projectId: true, + updatedAt: true, }, }); @@ -279,7 +296,20 @@ export class CheckpointSystem { this.$.eventBus.emit("runStatusChanged", { time: new Date(), - runId, + run: { + id: runId, + status: run.status, + updatedAt: run.updatedAt, + }, + organization: { + id: run.organizationId ?? undefined, + }, + project: { + id: run.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, + }, }); const newSnapshot = await this.executionSnapshotSystem.createExecutionSnapshot(prisma, { diff --git a/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts b/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts index f524de14af..eef0ab4406 100644 --- a/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts @@ -68,9 +68,23 @@ export class DelayedRunSystem { await this.$.worker.reschedule(`enqueueDelayedRun:${updatedRun.id}`, delayUntil); - this.$.eventBus.emit("runStatusChanged", { + this.$.eventBus.emit("runDelayRescheduled", { time: new Date(), - runId: updatedRun.id, + run: { + id: updatedRun.id, + status: updatedRun.status, + delayUntil: delayUntil, + updatedAt: updatedRun.updatedAt, + }, + organization: { + id: snapshot.organizationId, + }, + project: { + id: updatedRun.projectId, + }, + environment: { + id: updatedRun.runtimeEnvironmentId, + }, }); return updatedRun; @@ -114,9 +128,23 @@ export class DelayedRunSystem { }, }); - this.$.eventBus.emit("runStatusChanged", { + this.$.eventBus.emit("runEnqueuedAfterDelay", { time: new Date(), - runId, + run: { + id: runId, + status: "PENDING", + queuedAt: new Date(), + updatedAt: new Date(), + }, + organization: { + id: run.runtimeEnvironment.organizationId, + }, + project: { + id: run.runtimeEnvironment.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, + }, }); if (run.ttl) { diff --git a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts index 5f7c585b03..7853abcb66 100644 --- a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts @@ -329,25 +329,29 @@ export class DequeueSystem { maxAttempts = parsedConfig.data?.maxAttempts; } //update the run + const lockedAt = new Date(); + const startedAt = result.run.startedAt ?? lockedAt; + const maxDurationInSeconds = getMaxDuration( + result.run.maxDurationInSeconds, + result.task.maxDurationInSeconds + ); + const lockedTaskRun = await prisma.taskRun.update({ where: { id: runId, }, data: { - lockedAt: new Date(), + lockedAt, lockedById: result.task.id, lockedToVersionId: result.worker.id, lockedQueueId: result.queue.id, - startedAt: result.run.startedAt ?? new Date(), + startedAt, baseCostInCents: this.options.machines.baseCostInCents, machinePreset: machinePreset.name, taskVersion: result.worker.version, sdkVersion: result.worker.sdkVersion, cliVersion: result.worker.cliVersion, - maxDurationInSeconds: getMaxDuration( - result.run.maxDurationInSeconds, - result.task.maxDurationInSeconds - ), + maxDurationInSeconds, maxAttempts: maxAttempts ?? 
undefined, }, include: { @@ -356,9 +360,34 @@ export class DequeueSystem { }, }); - this.$.eventBus.emit("runStatusChanged", { + this.$.eventBus.emit("runLocked", { time: new Date(), - runId, + run: { + id: runId, + status: lockedTaskRun.status, + lockedAt, + lockedById: result.task.id, + lockedToVersionId: result.worker.id, + lockedQueueId: result.queue.id, + startedAt, + baseCostInCents: this.options.machines.baseCostInCents, + machinePreset: machinePreset.name, + taskVersion: result.worker.version, + sdkVersion: result.worker.sdkVersion, + cliVersion: result.worker.cliVersion, + maxDurationInSeconds: lockedTaskRun.maxDurationInSeconds ?? undefined, + maxAttempts: lockedTaskRun.maxAttempts ?? undefined, + updatedAt: lockedTaskRun.updatedAt, + }, + organization: { + id: orgId, + }, + project: { + id: lockedTaskRun.projectId, + }, + environment: { + id: lockedTaskRun.runtimeEnvironmentId, + }, }); if (!lockedTaskRun) { @@ -544,6 +573,7 @@ export class DequeueSystem { id: true, status: true, attemptNumber: true, + updatedAt: true, runtimeEnvironment: { select: { id: true, @@ -581,7 +611,20 @@ export class DequeueSystem { this.$.eventBus.emit("runStatusChanged", { time: new Date(), - runId, + run: { + id: runId, + status: run.status, + updatedAt: run.updatedAt, + }, + organization: { + id: run.runtimeEnvironment.project.organizationId, + }, + project: { + id: run.runtimeEnvironment.projectId, + }, + environment: { + id: run.runtimeEnvironment.id, + }, }); }); }, diff --git a/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts b/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts index f54fc031b8..6d6899bc1c 100644 --- a/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts @@ -103,7 +103,20 @@ export class PendingVersionSystem { this.$.eventBus.emit("runStatusChanged", { time: new Date(), - runId: run.id, + run: { + id: run.id, + status: "PENDING", + updatedAt: run.updatedAt, + }, + organization: { + id: backgroundWorker.runtimeEnvironment.organizationId, + }, + project: { + id: backgroundWorker.runtimeEnvironment.projectId, + }, + environment: { + id: backgroundWorker.runtimeEnvironmentId, + }, }); } diff --git a/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts b/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts index f7553bb8ea..0517fd9f9d 100644 --- a/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts @@ -175,18 +175,6 @@ export class RunAttemptSystem { throw new ServiceValidationError("Max attempts reached", 400); } - this.$.eventBus.emit("runAttemptStarted", { - time: new Date(), - run: { - id: taskRun.id, - attemptNumber: nextAttemptNumber, - baseCostInCents: taskRun.baseCostInCents, - }, - organization: { - id: environment.organization.id, - }, - }); - const result = await $transaction( prisma, async (tx) => { @@ -248,11 +236,6 @@ export class RunAttemptSystem { } ); - this.$.eventBus.emit("runStatusChanged", { - time: new Date(), - runId: taskRun.id, - }); - if (!result) { this.$.logger.error("RunEngine.createRunAttempt(): failed to create task run attempt", { runId: taskRun.id, @@ -263,6 +246,28 @@ export class RunAttemptSystem { const { run, snapshot } = result; + this.$.eventBus.emit("runAttemptStarted", { + time: new Date(), + run: { + id: run.id, + status: run.status, + createdAt: run.createdAt, + updatedAt: 
run.updatedAt, + attemptNumber: nextAttemptNumber, + baseCostInCents: run.baseCostInCents, + executedAt: run.executedAt ?? undefined, + }, + organization: { + id: environment.organization.id, + }, + project: { + id: environment.project.id, + }, + environment: { + id: environment.id, + }, + }); + const machinePreset = getMachinePreset({ machines: this.options.machines.machines, defaultMachine: this.options.machines.defaultMachine, @@ -460,6 +465,7 @@ export class RunAttemptSystem { status: true, attemptNumber: true, spanId: true, + updatedAt: true, associatedWaitpoint: { select: { id: true, @@ -475,15 +481,14 @@ export class RunAttemptSystem { completedAt: true, taskEventStore: true, parentTaskRunId: true, + usageDurationMs: true, + costInCents: true, + runtimeEnvironmentId: true, + projectId: true, }, }); const newSnapshot = await getLatestExecutionSnapshot(prisma, runId); - this.$.eventBus.emit("runStatusChanged", { - time: new Date(), - runId, - }); - await this.$.runQueue.acknowledgeMessage(run.project.organizationId, runId); // We need to manually emit this as we created the final snapshot as part of the task run update @@ -513,12 +518,26 @@ export class RunAttemptSystem { time: completedAt, run: { id: runId, + status: run.status, spanId: run.spanId, output: completion.output, outputType: completion.outputType, createdAt: run.createdAt, completedAt: run.completedAt, taskEventStore: run.taskEventStore, + usageDurationMs: run.usageDurationMs, + costInCents: run.costInCents, + updatedAt: run.updatedAt, + attemptNumber: run.attemptNumber ?? 1, + }, + organization: { + id: run.project.organizationId, + }, + project: { + id: run.projectId, + }, + environment: { + id: run.runtimeEnvironmentId, }, }); @@ -603,6 +622,7 @@ export class RunAttemptSystem { taskEventStore: true, createdAt: true, completedAt: true, + updatedAt: true, }, }); @@ -621,6 +641,7 @@ export class RunAttemptSystem { createdAt: minimalRun.createdAt, completedAt: minimalRun.completedAt, taskEventStore: minimalRun.taskEventStore, + updatedAt: minimalRun.updatedAt, }, }); } @@ -674,11 +695,6 @@ export class RunAttemptSystem { }, }); - this.$.eventBus.emit("runStatusChanged", { - time: new Date(), - runId, - }); - const nextAttemptNumber = latestSnapshot.attemptNumber === null ? 
1 : latestSnapshot.attemptNumber + 1; @@ -694,6 +710,7 @@ export class RunAttemptSystem { createdAt: run.createdAt, completedAt: run.completedAt, taskEventStore: run.taskEventStore, + updatedAt: run.updatedAt, }, }); } @@ -702,6 +719,7 @@ export class RunAttemptSystem { time: failedAt, run: { id: run.id, + status: run.status, friendlyId: run.friendlyId, attemptNumber: nextAttemptNumber, queue: run.queue, @@ -710,6 +728,8 @@ export class RunAttemptSystem { baseCostInCents: run.baseCostInCents, spanId: run.spanId, nextMachineAfterOOM: retryResult.machine, + updatedAt: run.updatedAt, + error: completion.error, }, organization: { id: run.runtimeEnvironment.organizationId, @@ -989,6 +1009,7 @@ export class RunAttemptSystem { taskEventStore: true, parentTaskRunId: true, delayUntil: true, + updatedAt: true, runtimeEnvironment: { select: { organizationId: true, @@ -1007,11 +1028,6 @@ export class RunAttemptSystem { }, }); - this.$.eventBus.emit("runStatusChanged", { - time: new Date(), - runId, - }); - //if the run is delayed and hasn't started yet, we need to prevent it being added to the queue in future if (isInitialState(latestSnapshot.executionStatus) && run.delayUntil) { await this.delayedRunSystem.preventDelayedRunFromBeingEnqueued({ runId }); @@ -1076,12 +1092,24 @@ export class RunAttemptSystem { time: new Date(), run: { id: run.id, + status: run.status, friendlyId: run.friendlyId, spanId: run.spanId, taskEventStore: run.taskEventStore, createdAt: run.createdAt, completedAt: run.completedAt, error, + updatedAt: run.updatedAt, + attemptNumber: run.attemptNumber ?? 1, + }, + organization: { + id: latestSnapshot.organizationId, + }, + project: { + id: latestSnapshot.projectId, + }, + environment: { + id: latestSnapshot.environmentId, }, }); @@ -1141,6 +1169,9 @@ export class RunAttemptSystem { spanId: true, batchId: true, parentTaskRunId: true, + updatedAt: true, + usageDurationMs: true, + costInCents: true, associatedWaitpoint: { select: { id: true, @@ -1165,11 +1196,6 @@ export class RunAttemptSystem { }, }); - this.$.eventBus.emit("runStatusChanged", { - time: new Date(), - runId, - }); - const newSnapshot = await this.executionSnapshotSystem.createExecutionSnapshot(prisma, { run, snapshot: { @@ -1206,6 +1232,19 @@ export class RunAttemptSystem { taskEventStore: run.taskEventStore, createdAt: run.createdAt, completedAt: run.completedAt, + updatedAt: run.updatedAt, + attemptNumber: run.attemptNumber ?? 
1, + usageDurationMs: run.usageDurationMs, + costInCents: run.costInCents, + }, + organization: { + id: run.runtimeEnvironment.project.organizationId, + }, + project: { + id: run.runtimeEnvironment.project.id, + }, + environment: { + id: run.runtimeEnvironment.id, }, }); diff --git a/internal-packages/run-engine/src/engine/systems/ttlSystem.ts b/internal-packages/run-engine/src/engine/systems/ttlSystem.ts index e8c82ef8f7..e9dc5e143b 100644 --- a/internal-packages/run-engine/src/engine/systems/ttlSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/ttlSystem.ts @@ -84,6 +84,7 @@ export class TtlSystem { id: true, spanId: true, ttl: true, + updatedAt: true, associatedWaitpoint: { select: { id: true, @@ -92,20 +93,19 @@ export class TtlSystem { runtimeEnvironment: { select: { organizationId: true, + projectId: true, + id: true, }, }, createdAt: true, completedAt: true, taskEventStore: true, parentTaskRunId: true, + expiredAt: true, + status: true, }, }); - this.$.eventBus.emit("runStatusChanged", { - time: new Date(), - runId, - }); - await this.$.runQueue.acknowledgeMessage(updatedRun.runtimeEnvironment.organizationId, runId); if (!updatedRun.associatedWaitpoint) { @@ -117,7 +117,13 @@ export class TtlSystem { output: { value: JSON.stringify(error), isError: true }, }); - this.$.eventBus.emit("runExpired", { run: updatedRun, time: new Date() }); + this.$.eventBus.emit("runExpired", { + run: updatedRun, + time: new Date(), + organization: { id: updatedRun.runtimeEnvironment.organizationId }, + project: { id: updatedRun.runtimeEnvironment.projectId }, + environment: { id: updatedRun.runtimeEnvironment.id }, + }); }); } diff --git a/internal-packages/run-engine/src/index.ts b/internal-packages/run-engine/src/index.ts index bdb8379d87..845fb48e6e 100644 --- a/internal-packages/run-engine/src/index.ts +++ b/internal-packages/run-engine/src/index.ts @@ -1,4 +1,4 @@ export { RunEngine } from "./engine/index.js"; export { RunDuplicateIdempotencyKeyError, RunOneTimeUseTokenError } from "./engine/errors.js"; -export type { EventBusEventArgs } from "./engine/eventBus.js"; +export type { EventBusEventArgs, EventBusEvents } from "./engine/eventBus.js"; export type { AuthenticatedEnvironment } from "./shared/index.js"; diff --git a/references/hello-world/package.json b/references/hello-world/package.json index 67d93c4fdc..e0617645b8 100644 --- a/references/hello-world/package.json +++ b/references/hello-world/package.json @@ -12,6 +12,7 @@ "zod": "3.23.8" }, "scripts": { - "dev": "trigger dev" + "dev": "trigger dev", + "deploy": "trigger deploy" } -} +} \ No newline at end of file From 56aeaf02851fca3b0efc27061c6ced09cf798b3e Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 1 May 2025 13:27:59 +0100 Subject: [PATCH 10/33] emit more stuff --- .../app/routes/api.v1.runs.$runId.tags.ts | 5 +- .../services/runsDashboardInstance.server.ts | 42 +- .../services/runsDashboardService.server.ts | 12 + .../app/v3/marqs/devQueueConsumer.server.ts | 5 +- .../v3/marqs/sharedQueueConsumer.server.ts | 8 +- .../app/v3/services/completeAttempt.server.ts | 5 +- .../services/createTaskRunAttempt.server.ts | 4 +- .../v3/services/enqueueDelayedRun.server.ts | 5 +- .../services/executeTasksWaitingForDeploy.ts | 9 +- .../app/v3/services/finalizeTaskRun.server.ts | 6 +- .../v3/services/rescheduleTaskRun.server.ts | 5 +- .../app/v3/services/triggerTaskV1.server.ts | 4 +- .../clickhouse/004_create_run_latest_v1.sql | 101 --- .../schema/003_create_raw_run_events_v1.sql | 4 +- .../schema/004_create_run_latest_v1.sql | 
127 +++ .../clickhouse/src/client/client.ts | 2 +- .../clickhouse/src/runEvents.test.ts | 746 +++++++++++++++++- internal-packages/clickhouse/src/runEvents.ts | 62 +- .../run-engine/src/engine/eventBus.ts | 5 + .../src/engine/systems/checkpointSystem.ts | 3 + .../src/engine/systems/delayedRunSystem.ts | 12 +- .../src/engine/systems/dequeueSystem.ts | 3 + .../engine/systems/pendingVersionSystem.ts | 1 + .../src/engine/systems/runAttemptSystem.ts | 1 + 24 files changed, 997 insertions(+), 180 deletions(-) delete mode 100644 internal-packages/clickhouse/004_create_run_latest_v1.sql create mode 100644 internal-packages/clickhouse/schema/004_create_run_latest_v1.sql diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts index 9018fe952a..8660a2178d 100644 --- a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts +++ b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts @@ -4,7 +4,7 @@ import { z } from "zod"; import { prisma } from "~/db.server"; import { createTag, getTagsForRunId, MAX_TAGS_PER_RUN } from "~/models/taskRunTag.server"; import { authenticateApiRequest } from "~/services/apiAuth.server"; -import { emitRunTagsUpdated } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; const ParamsSchema = z.object({ runId: z.string(), @@ -95,13 +95,14 @@ export async function action({ request, params }: ActionFunctionArgs) { }, }); - emitRunTagsUpdated({ + runsDashboard.emit.runTagsUpdated({ time: new Date(), run: { id: taskRun.id, tags: taskRun.runTags, status: taskRun.status, updatedAt: taskRun.updatedAt, + createdAt: taskRun.createdAt, }, organization: { id: authenticationResult.environment.organizationId, diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts index cdf26dd14e..6d58577a2d 100644 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ b/apps/webapp/app/services/runsDashboardInstance.server.ts @@ -24,54 +24,54 @@ import { tryCatch } from "@trigger.dev/core/utils"; const runDashboardEventBus: RunDashboardEventBus = new EventEmitter(); -export function emitRunStatusChanged(event: RunDashboardEventRunStatusChanged) { +function emitRunStatusChanged(event: RunDashboardEventRunStatusChanged) { runDashboardEventBus.emit("runStatusChanged", event); } -export function emitRunCreated(time: Date, runId: string) { +function emitRunCreated(time: Date, runId: string) { runDashboardEventBus.emit("runCreated", { time, runId, }); } -export function emitRunAttemptStarted(event: RunDashboardEventRunAttemptStarted) { +function emitRunAttemptStarted(event: RunDashboardEventRunAttemptStarted) { runDashboardEventBus.emit("runAttemptStarted", event); } -export function emitRunFailed(event: RunDashboardEventRunFailed) { +function emitRunFailed(event: RunDashboardEventRunFailed) { runDashboardEventBus.emit("runFailed", event); } -export function emitRunSucceeded(event: RunDashboardEventRunSucceeded) { +function emitRunSucceeded(event: RunDashboardEventRunSucceeded) { runDashboardEventBus.emit("runSucceeded", event); } -export function emitRunCancelled(event: RunDashboardEventRunCancelled) { +function emitRunCancelled(event: RunDashboardEventRunCancelled) { runDashboardEventBus.emit("runCancelled", event); } -export function emitRunRetryScheduled(event: RunDashboardEventRunRetryScheduled) { +function emitRunRetryScheduled(event: RunDashboardEventRunRetryScheduled) { 
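// Illustrative sketch, not part of this patch: this hunk un-exports the
// individual emit helpers and routes every caller through a single
// `runsDashboard.emit.*` facade built inside the singleton. The shape of that
// pattern, reduced to a self-contained example (all names here hypothetical):
//
//   import { EventEmitter } from "events";
//
//   type BusEvents = { runCreated: [{ time: Date; runId: string }] };
//
//   const bus = new EventEmitter();
//
//   function buildDashboard() {
//     return {
//       emit: {
//         runCreated(event: BusEvents["runCreated"][0]) {
//           bus.emit("runCreated", event);
//         },
//       },
//     };
//   }
//
//   // Call sites hold the facade, not the bus or the free functions:
//   const dashboard = buildDashboard();
//   dashboard.emit.runCreated({ time: new Date(), runId: "run_123" });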
runDashboardEventBus.emit("runRetryScheduled", event); } -export function emitRunDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { +function emitRunDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { runDashboardEventBus.emit("runDelayRescheduled", event); } -export function emitRunLocked(event: RunDashboardEventRunLocked) { +function emitRunLocked(event: RunDashboardEventRunLocked) { runDashboardEventBus.emit("runLocked", event); } -export function emitRunExpired(event: RunDashboardEventRunExpired) { +function emitRunExpired(event: RunDashboardEventRunExpired) { runDashboardEventBus.emit("runExpired", event); } -export function emitRunTagsUpdated(event: RunDashboardEventRunTagsUpdated) { +function emitRunTagsUpdated(event: RunDashboardEventRunTagsUpdated) { runDashboardEventBus.emit("runTagsUpdated", event); } -export function emitRunEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { +function emitRunEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { runDashboardEventBus.emit("runEnqueuedAfterDelay", event); } @@ -230,7 +230,23 @@ export const runsDashboard = singleton("runsDashboard", () => { runDashboardEventBus.emit("runExpired", event); }); - return service; + return { + service, + emit: { + runStatusChanged: emitRunStatusChanged, + runCreated: emitRunCreated, + runAttemptStarted: emitRunAttemptStarted, + runFailed: emitRunFailed, + runSucceeded: emitRunSucceeded, + runCancelled: emitRunCancelled, + runRetryScheduled: emitRunRetryScheduled, + runDelayRescheduled: emitRunDelayRescheduled, + runLocked: emitRunLocked, + runExpired: emitRunExpired, + runTagsUpdated: emitRunTagsUpdated, + runEnqueuedAfterDelay: emitRunEnqueuedAfterDelay, + }, + }; }); async function runCreated(time: Date, runId: string, service: RunsDashboardService) { diff --git a/apps/webapp/app/services/runsDashboardService.server.ts b/apps/webapp/app/services/runsDashboardService.server.ts index 77e031b6c9..79c4697592 100644 --- a/apps/webapp/app/services/runsDashboardService.server.ts +++ b/apps/webapp/app/services/runsDashboardService.server.ts @@ -23,6 +23,7 @@ export class RunsDashboardService { attempt: event.run.attemptNumber ?? 1, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), base_cost_in_cents: event.run.baseCostInCents, executed_at: event.run.executedAt ? event.run.executedAt.getTime() : undefined, event_name: "attempt_started", @@ -47,6 +48,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), event_name: "enqueued_after_delay", }); @@ -69,6 +71,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), delay_until: event.run.delayUntil ? event.run.delayUntil.getTime() : undefined, event_name: "delay_rescheduled", }); @@ -92,6 +95,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), base_cost_in_cents: event.run.baseCostInCents, task_version: event.run.taskVersion ?? undefined, sdk_version: event.run.sdkVersion ?? 
undefined, @@ -124,6 +128,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), event_name: "status_changed", }); @@ -146,6 +151,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), expired_at: event.run.expiredAt ? event.run.expiredAt.getTime() : undefined, event_name: "run_expired", }); @@ -171,6 +177,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, usage_duration_ms: event.run.usageDurationMs, cost_in_cents: event.run.costInCents, @@ -198,6 +205,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, error: event.run.error, attempt: event.run.attemptNumber, @@ -225,6 +233,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), machine_preset: event.run.nextMachineAfterOOM ?? undefined, attempt: event.run.attemptNumber, error: event.run.error, @@ -250,6 +259,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, error: event.run.error ? 
(event.run.error as TaskRunError) : undefined, attempt: event.run.attemptNumber, @@ -275,6 +285,7 @@ export class RunsDashboardService { status: event.run.status, event_time: event.time.getTime(), updated_at: event.run.updatedAt.getTime(), + created_at: event.run.createdAt.getTime(), tags: event.run.tags, event_name: "tags_updated", }); @@ -439,6 +450,7 @@ export type RunDashboardEvents = { tags: string[]; status: TaskRunStatus; updatedAt: Date; + createdAt: Date; }; organization: { id: string; diff --git a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts index aaacbee7fb..d5f0036fd6 100644 --- a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts @@ -16,7 +16,7 @@ import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.se import { RedisClient, createRedisClient } from "~/redis.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { emitRunLocked } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { marqs } from "~/v3/marqs/index.server"; import { resolveVariablesForEnvironment } from "../environmentVariables/environmentVariablesRepository.server"; import { FailedTaskRunService } from "../failedTaskRun.server"; @@ -543,11 +543,12 @@ export class DevQueueConsumer { messageId: message.messageId, }); - emitRunLocked({ + runsDashboard.emit.runLocked({ time: new Date(), run: { id: lockedTaskRun.id, updatedAt: lockedTaskRun.updatedAt, + createdAt: lockedTaskRun.createdAt, status: lockedTaskRun.status, lockedAt, lockedById: backgroundTask.id, diff --git a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts index c2a0b39bb0..d1925f8bc6 100644 --- a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts @@ -37,7 +37,7 @@ import { findEnvironmentById } from "~/models/runtimeEnvironment.server"; import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.server"; import { generateJWTTokenForEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { emitRunLocked, emitRunStatusChanged } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { singleton } from "~/utils/singleton"; import { marqs } from "~/v3/marqs/index.server"; import { @@ -930,12 +930,13 @@ export class SharedQueueConsumer { }); if (lockedTaskRun.organizationId) { - emitRunLocked({ + runsDashboard.emit.runLocked({ time: new Date(), run: { id: lockedTaskRun.id, status: lockedTaskRun.status, updatedAt: lockedTaskRun.updatedAt, + createdAt: lockedTaskRun.createdAt, lockedAt, lockedById: backgroundTask.id, lockedToVersionId: worker.id, @@ -1478,12 +1479,13 @@ export class SharedQueueConsumer { }); if (run.organizationId) { - emitRunStatusChanged({ + runsDashboard.emit.runStatusChanged({ time: new Date(), run: { id: runId, status: "WAITING_FOR_DEPLOY", updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: run.organizationId, diff --git a/apps/webapp/app/v3/services/completeAttempt.server.ts b/apps/webapp/app/v3/services/completeAttempt.server.ts index 5367afce12..576b1dbcf5 100644 --- a/apps/webapp/app/v3/services/completeAttempt.server.ts +++ 
b/apps/webapp/app/v3/services/completeAttempt.server.ts @@ -21,7 +21,7 @@ import { PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { emitRunRetryScheduled } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { safeJsonParse } from "~/utils/json"; import { marqs } from "~/v3/marqs/index.server"; import { createExceptionPropertiesFromError, eventRepository } from "../eventRepository.server"; @@ -617,7 +617,7 @@ export class CompleteAttemptService extends BaseService { }, }); - emitRunRetryScheduled({ + runsDashboard.emit.runRetryScheduled({ time: new Date(), run: { id: taskRunAttempt.taskRunId, @@ -630,6 +630,7 @@ export class CompleteAttemptService extends BaseService { taskIdentifier: taskRunAttempt.taskRun.taskIdentifier, baseCostInCents: taskRunAttempt.taskRun.baseCostInCents, updatedAt: taskRunAttempt.taskRun.updatedAt, + createdAt: taskRunAttempt.taskRun.createdAt, error, }, organization: { diff --git a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts index 7965a01e5a..242cf343bd 100644 --- a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts +++ b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts @@ -6,7 +6,7 @@ import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { reportInvocationUsage } from "~/services/platform.v3.server"; -import { emitRunAttemptStarted } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { machinePresetFromConfig, machinePresetFromRun } from "../machinePresets.server"; import { FINAL_RUN_STATUSES } from "../taskStatus"; @@ -182,7 +182,7 @@ export class CreateTaskRunAttemptService extends BaseService { }); } - emitRunAttemptStarted({ + runsDashboard.emit.runAttemptStarted({ time: new Date(), run: { id: taskRun.id, diff --git a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts index 357be06d16..4ee4db562e 100644 --- a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts +++ b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts @@ -1,6 +1,6 @@ import { parseNaturalLanguageDuration } from "@trigger.dev/core/v3/isomorphic"; import { logger } from "~/services/logger.server"; -import { emitRunEnqueuedAfterDelay } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { workerQueue } from "~/services/worker.server"; import { commonWorker } from "../commonWorker.server"; import { BaseService } from "./baseService.server"; @@ -100,13 +100,14 @@ export class EnqueueDelayedRunService extends BaseService { } if (run.organizationId) { - emitRunEnqueuedAfterDelay({ + runsDashboard.emit.runEnqueuedAfterDelay({ time: new Date(), run: { id: run.id, status: run.status, queuedAt: run.queuedAt ?? 
new Date(), updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: run.organizationId, diff --git a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts index 39ffcb2655..68c3a34c65 100644 --- a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts +++ b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts @@ -1,7 +1,7 @@ import { PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { logger } from "~/services/logger.server"; -import { emitRunStatusChanged } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { workerQueue } from "~/services/worker.server"; import { marqs } from "~/v3/marqs/index.server"; import { BaseService } from "./baseService.server"; @@ -52,6 +52,8 @@ export class ExecuteTasksWaitingForDeployService extends BaseService { taskIdentifier: true, concurrencyKey: true, queue: true, + updatedAt: true, + createdAt: true, }, take: maxCount + 1, }); @@ -80,12 +82,13 @@ export class ExecuteTasksWaitingForDeployService extends BaseService { } for (const run of runsWaitingForDeploy) { - emitRunStatusChanged({ + runsDashboard.emit.runStatusChanged({ time: new Date(), run: { id: run.id, status: run.status, - updatedAt: new Date(), + updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: backgroundWorker.runtimeEnvironment.organizationId, diff --git a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts index c572b35a29..8781a454b2 100644 --- a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts +++ b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts @@ -4,7 +4,7 @@ import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { updateMetadataService } from "~/services/metadata/updateMetadata.server"; -import { emitRunFailed, emitRunSucceeded } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { marqs } from "~/v3/marqs/index.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { socketIo } from "../handleSocketIo.server"; @@ -105,7 +105,7 @@ export class FinalizeTaskRunService extends BaseService { if (run.organizationId) { if (status === "COMPLETED_SUCCESSFULLY") { - emitRunSucceeded({ + runsDashboard.emit.runSucceeded({ time: new Date(), run: { id: run.id, @@ -132,7 +132,7 @@ export class FinalizeTaskRunService extends BaseService { }, }); } else if (taskRunError) { - emitRunFailed({ + runsDashboard.emit.runFailed({ time: new Date(), run: { id: run.id, diff --git a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts index 7cf00603ab..00c51d5000 100644 --- a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts +++ b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts @@ -1,6 +1,6 @@ import { RescheduleRunRequestBody } from "@trigger.dev/core/v3"; import { TaskRun } from "@trigger.dev/database"; -import { emitRunDelayRescheduled } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { parseDelay } from "~/utils/delays"; import { BaseService, ServiceValidationError } from "./baseService.server"; import { 
EnqueueDelayedRunService } from "./enqueueDelayedRun.server"; @@ -27,13 +27,14 @@ export class RescheduleTaskRunService extends BaseService { }); if (taskRun.organizationId) { - emitRunDelayRescheduled({ + runsDashboard.emit.runDelayRescheduled({ time: new Date(), run: { id: taskRun.id, status: taskRun.status, delayUntil: delay, updatedAt: updatedRun.updatedAt, + createdAt: updatedRun.createdAt, }, organization: { id: taskRun.organizationId, diff --git a/apps/webapp/app/v3/services/triggerTaskV1.server.ts b/apps/webapp/app/v3/services/triggerTaskV1.server.ts index 5bf6971245..b5f847af87 100644 --- a/apps/webapp/app/v3/services/triggerTaskV1.server.ts +++ b/apps/webapp/app/v3/services/triggerTaskV1.server.ts @@ -43,7 +43,7 @@ import { import { getTaskEventStore } from "../taskEventStore.server"; import { enqueueRun } from "./enqueueRun.server"; import { z } from "zod"; -import { emitRunCreated } from "~/services/runsDashboardInstance.server"; +import { runsDashboard } from "~/services/runsDashboardInstance.server"; // This is here for backwards compatibility for v3 users const QueueOptions = z.object({ @@ -606,7 +606,7 @@ export class TriggerTaskServiceV1 extends BaseService { return; } - emitRunCreated(run.createdAt, run.id); + runsDashboard.emit.runCreated(run.createdAt, run.id); return { run, diff --git a/internal-packages/clickhouse/004_create_run_latest_v1.sql b/internal-packages/clickhouse/004_create_run_latest_v1.sql deleted file mode 100644 index db2770b68a..0000000000 --- a/internal-packages/clickhouse/004_create_run_latest_v1.sql +++ /dev/null @@ -1,101 +0,0 @@ --- +goose Up -/* one immutable row = the latest state we know about a run */ -CREATE TABLE trigger_dev.run_latest_v1 -( - -- identifiers / partition keys - organization_id String, - project_id String, - environment_id String, - run_id String, - friendly_id String, - last_event_time DateTime64(3), - - -- user-visible fields - status Enum8( - 'DELAYED'=1,'PENDING'=2,'PENDING_VERSION'=3, - 'WAITING_FOR_DEPLOY'=4,'EXECUTING'=5,'WAITING_TO_RESUME'=6, - 'RETRYING_AFTER_FAILURE'=7,'PAUSED'=8, - 'CANCELED'=9,'INTERRUPTED'=10, - 'COMPLETED_SUCCESSFULLY'=11,'COMPLETED_WITH_ERRORS'=12, - 'SYSTEM_FAILURE'=13,'CRASHED'=14,'EXPIRED'=15,'TIMED_OUT'=16), - task_identifier String, - task_version Nullable(String), - queue String, - schedule_id Nullable(String), - batch_id Nullable(String), - - root_run_id Nullable(String), - depth UInt8, - is_test UInt8, - - created_at DateTime64(3), - updated_at DateTime64(3), - started_at Nullable(DateTime64(3)), - completed_at Nullable(DateTime64(3)), - delay_until Nullable(DateTime64(3)), - - usage_duration_ms UInt32, - cost_in_cents Float64, - base_cost_in_cents Float64, - - ttl Nullable(String), - expired_at Nullable(DateTime64(3)), - - span_id Nullable(String), - idempotency_key Nullable(String), - - tags Array(String) CODEC(ZSTD(1)), - - _version DateTime64(3) -- used by ReplacingMergeTree dedupe -) -ENGINE = ReplacingMergeTree(_version) -PARTITION BY toYYYYMMDD(last_event_time) -ORDER BY (project_id, environment_id, last_event_time, run_id); - -CREATE MATERIALIZED VIEW trigger_dev.mv_run_latest_v1 -TO trigger_dev.run_latest_v1 -AS -SELECT - organization_id, - project_id, - environment_id, - run_id, - anyLast(friendly_id) AS friendly_id, - anyLast(status) AS status, - anyLast(task_identifier) AS task_identifier, - argMax(task_version, event_time) AS task_version, - argMax(queue, event_time) AS queue, - argMax(schedule_id, event_time) AS schedule_id, - argMax(batch_id, event_time) AS batch_id, - 
-    anyLast(root_run_id) AS root_run_id,
-    anyLast(depth) AS depth,
-    anyLast(is_test) AS is_test,
-
-    min(created_at) AS created_at,
-    argMax(updated_at, event_time) AS updated_at,
-    argMax(started_at, event_time) AS started_at,
-    argMax(completed_at, event_time) AS completed_at,
-    argMax(delay_until, event_time) AS delay_until,
-
-    argMax(usage_duration_ms,event_time) AS usage_duration_ms,
-    argMax(cost_in_cents, event_time) AS cost_in_cents,
-    argMax(base_cost_in_cents,event_time) AS base_cost_in_cents,
-    argMax(ttl, event_time) AS ttl,
-    argMax(expired_at, event_time) AS expired_at,
-    argMax(span_id, event_time) AS span_id,
-    argMax(idempotency_key, event_time) AS idempotency_key,
-    argMaxIf(tags, updated_at, arrayLength(tags) > 0) AS tags
-
-    max(event_time) AS last_event_time,
-    max(event_time) AS _version -- for RMTree
-FROM trigger_dev.raw_run_events_v1
-GROUP BY
-    organization_id,
-    project_id,
-    environment_id,
-    run_id;
-
--- +goose Down
-
-DROP MATERIALIZED VIEW trigger_dev.mv_run_latest_v1;
-DROP TABLE trigger_dev.run_latest_v1;
diff --git a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql
index 984370748b..7864ff1173 100644
--- a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql
+++ b/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql
@@ -66,16 +66,14 @@ CREATE TABLE trigger_dev.raw_run_events_v1
     /* ─── timing ─────────────────────────────────────────────── */
     event_time DateTime64(3), -- when this row created
-    created_at Nullable(DateTime64(3)),
+    created_at DateTime64(3),
     updated_at DateTime64(3),
     started_at Nullable(DateTime64(3)),
     executed_at Nullable(DateTime64(3)),
     completed_at Nullable(DateTime64(3)),
-    finished_at Nullable(DateTime64(3)), -- end of *this* status
     delay_until Nullable(DateTime64(3)),
     queued_at Nullable(DateTime64(3)),
     expired_at Nullable(DateTime64(3)),
-    duration_ms Nullable(UInt32),
     expiration_ttl Nullable(String),
 
     /* ─── cost / usage ───────────────────────────────────────── */
diff --git a/internal-packages/clickhouse/schema/004_create_run_latest_v1.sql b/internal-packages/clickhouse/schema/004_create_run_latest_v1.sql
new file mode 100644
index 0000000000..f05d669fd6
--- /dev/null
+++ b/internal-packages/clickhouse/schema/004_create_run_latest_v1.sql
@@ -0,0 +1,127 @@
+-- +goose Up
+/* one immutable row = the latest state we know about a run */
+CREATE TABLE trigger_dev.run_latest_v1
+(
+    -- identifiers / partition keys
+    environment_id String,
+    run_id String,
+    last_event_time DateTime64(3),
+
+    updated_at DateTime64(3),
+    created_at DateTime64(3),
+
+    environment_type LowCardinality(Nullable(String)),
+    friendly_id Nullable(String),
+    attempt UInt8 DEFAULT 1,
+
+    -- user-visible fields
+    engine Nullable(Enum8('V1'=1,'V2'=2))
+        CODEC(T64, LZ4),
+    status Enum8(
+        'DELAYED'=1,'PENDING'=2,'PENDING_VERSION'=3,
+        'WAITING_FOR_DEPLOY'=4,'EXECUTING'=5,'WAITING_TO_RESUME'=6,
+        'RETRYING_AFTER_FAILURE'=7,'PAUSED'=8,
+        'CANCELED'=9,'INTERRUPTED'=10,
+        'COMPLETED_SUCCESSFULLY'=11,'COMPLETED_WITH_ERRORS'=12,
+        'SYSTEM_FAILURE'=13,'CRASHED'=14,'EXPIRED'=15,'TIMED_OUT'=16),
+    task_identifier Nullable(String),
+    task_version Nullable(String),
+
+    sdk_version Nullable(String) CODEC(LZ4),
+    cli_version Nullable(String) CODEC(LZ4),
+    machine_preset LowCardinality(Nullable(String)) CODEC(LZ4),
+
+    queue Nullable(String),
+    schedule_id Nullable(String),
+    batch_id Nullable(String),
+
+    root_run_id Nullable(String),
+    depth UInt8 DEFAULT 0,
+    is_test UInt8 DEFAULT 0,
+
+    started_at Nullable(DateTime64(3)),
+    completed_at Nullable(DateTime64(3)),
+    delay_until Nullable(DateTime64(3)),
+
+    usage_duration_ms UInt32 DEFAULT 0,
+    cost_in_cents Float64 DEFAULT 0,
+    base_cost_in_cents Float64 DEFAULT 0,
+
+    expiration_ttl Nullable(String),
+    expired_at Nullable(DateTime64(3)),
+
+    span_id Nullable(String),
+    idempotency_key Nullable(String),
+
+    tags Array(String) CODEC(ZSTD(1)),
+
+    /* ─── payload & context ──────────────────────────────────── */
+    payload Nullable(JSON(max_dynamic_paths = 2048)),
+    output Nullable(JSON(max_dynamic_paths = 2048)),
+    error Nullable(JSON(max_dynamic_paths = 64)),
+
+    _version DateTime64(3) -- used by ReplacingMergeTree dedupe
+)
+ENGINE = ReplacingMergeTree(_version)
+PARTITION BY toYYYYMMDD(created_at)
+ORDER BY (toDate(created_at), environment_id, run_id);
+
+CREATE MATERIALIZED VIEW trigger_dev.run_latest_mv_v1
+TO trigger_dev.run_latest_v1
+AS
+SELECT
+    environment_id,
+    run_id,
+    argMax(status, event_time) AS status,
+    argMax(updated_at, event_time) AS updated_at,
+
+    argMaxIf(tags, event_time, notEmpty(tags) > 0) AS tags,
+
+    max(attempt) AS attempt,
+
+    anyLast(created_at) AS created_at,
+    anyLast(engine) AS engine,
+    anyLast(sdk_version) AS sdk_version,
+    anyLast(cli_version) AS cli_version,
+    anyLast(machine_preset) AS machine_preset,
+
+    anyLast(environment_type) AS environment_type,
+    anyLast(friendly_id) AS friendly_id,
+    anyLast(task_identifier) AS task_identifier,
+    anyLast(task_version) AS task_version,
+    anyLast(queue) AS queue,
+    anyLast(schedule_id) AS schedule_id,
+    anyLast(batch_id) AS batch_id,
+    anyLast(root_run_id) AS root_run_id,
+    anyLast(depth) AS depth,
+    anyLast(is_test) AS is_test,
+
+    anyLast(started_at) AS started_at,
+    anyLast(completed_at) AS completed_at,
+    anyLast(delay_until) AS delay_until,
+
+    max(usage_duration_ms) AS usage_duration_ms,
+    max(cost_in_cents) AS cost_in_cents,
+    max(base_cost_in_cents) AS base_cost_in_cents,
+    anyLast(expiration_ttl) AS expiration_ttl,
+    anyLast(expired_at) AS expired_at,
+    anyLast(span_id) AS span_id,
+    anyLast(idempotency_key) AS idempotency_key,
+
+    anyLast(payload) AS payload,
+    anyLast(output) AS output,
+    argMax(error, event_time) AS error,
+
+    max(event_time) AS last_event_time,
+    max(event_time) AS _version -- for RMTree
+FROM trigger_dev.raw_run_events_v1
+GROUP BY
+    organization_id,
+    project_id,
+    environment_id,
+    run_id;
+
+-- +goose Down
+
+DROP TABLE trigger_dev.run_latest_mv_v1;
+DROP TABLE trigger_dev.run_latest_v1;
diff --git a/internal-packages/clickhouse/src/client/client.ts b/internal-packages/clickhouse/src/client/client.ts
index d86e5a68ba..dc1554e52d 100644
--- a/internal-packages/clickhouse/src/client/client.ts
+++ b/internal-packages/clickhouse/src/client/client.ts
@@ -26,7 +26,7 @@ export type ClickhouseConfig = {
 };
 
 export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter {
-  private readonly client: ClickHouseClient;
+  public readonly client: ClickHouseClient;
   private readonly tracer: Tracer;
   private readonly name: string;
   private readonly logger: Logger;
diff --git a/internal-packages/clickhouse/src/runEvents.test.ts b/internal-packages/clickhouse/src/runEvents.test.ts
index b34dbb390b..2730ea80d9 100644
--- a/internal-packages/clickhouse/src/runEvents.test.ts
+++ b/internal-packages/clickhouse/src/runEvents.test.ts
@@ -17,7 +17,7 @@ describe("Run Events", () => {
     const [insertError, insertResult] = await insert([
       {
         environment_id: "env_1234",
-        event_name: "RUN_STATUS_CHANGED",
+
event_name: "created", environment_type: "DEVELOPMENT", organization_id: "org_1234", project_id: "project_1234", @@ -33,7 +33,7 @@ describe("Run Events", () => { event_time: Date.now(), created_at: Date.now(), updated_at: Date.now(), - completed_at: Date.now(), + completed_at: undefined, tags: ["tag1", "tag2"], payload: { key: "value", @@ -66,7 +66,7 @@ describe("Run Events", () => { expect(insertError).toBeNull(); expect(insertResult).toEqual(expect.objectContaining({ executed: true })); - expect(insertResult?.summary?.written_rows).toEqual("1"); + expect(insertResult?.summary?.written_rows).toEqual("2"); const query = client.query({ name: "query-run-events", @@ -75,9 +75,12 @@ describe("Run Events", () => { environment_id: z.string(), run_id: z.string(), }), + params: z.object({ + run_id: z.string(), + }), }); - const [queryError, result] = await query({}); + const [queryError, result] = await query({ run_id: "run_1234" }); expect(queryError).toBeNull(); expect(result).toEqual( @@ -89,4 +92,739 @@ describe("Run Events", () => { ]) ); }); + + clickhouseTest( + "should be able to handle multiple run events", + async ({ clickhouseContainer }) => { + const client = new ClickhouseClient({ + name: "test", + url: clickhouseContainer.getConnectionUrl(), + }); + + const insert = insertRunEvents(client, { + async_insert: 0, // turn off async insert for this test + }); + + const [insertError, insertResult] = await insert([ + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "created", + environment_type: "PRODUCTION", + friendly_id: "run_cma45oli70002qrdy47w0j4n7", + attempt: 1, + engine: "V2", + status: "PENDING", + task_identifier: "retry-task", + queue: "task/retry-task", + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: "538677637f937f54", + trace_id: "20a28486b0b9f50c647b35e8863e36a5", + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:04.341").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:04.312").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: new Date("2025-04-30 16:34:04.311").getTime(), + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: { failCount: "3" }, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: true, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "locked", + environment_type: null, + friendly_id: null, + attempt: 1, + engine: null, + status: "PENDING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:05.402").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:05.378").getTime(), + started_at: null, + executed_at: new Date("2025-04-30 16:34:05.377").getTime(), + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, 
+ cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: "20250430.3", + sdk_version: "4.0.0-v4-beta.7", + cli_version: "4.0.0-v4-beta.7", + machine_preset: "small-1x", + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "attempt_started", + environment_type: null, + friendly_id: null, + attempt: 1, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:08.129").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:08.111").getTime(), + started_at: null, + executed_at: new Date("2025-04-30 16:34:08.112").getTime(), + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "usage", + environment_type: null, + friendly_id: null, + attempt: 1, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:13.084").getTime(), + created_at: new Date("2025-04-30 16:34:04.000").getTime(), + updated_at: new Date("2025-04-30 16:34:13.000").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 2635, + cost_in_cents: 0.008893125, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "retry_scheduled", + environment_type: null, + friendly_id: null, + attempt: 2, + engine: null, + status: "RETRYING_AFTER_FAILURE", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:13.748").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:13.751").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: { + message: "Intentionally failing attempt 1", + name: "Error", + stackTrace: + "Error: Intentionally failing attempt 1\n at run 
(file:///src/trigger/retry.ts:26:21)\n at _tracer.startActiveSpan.attributes (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:414:40)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:141:24\n at AsyncLocalStorage.run (node:async_hooks:346:14)\n at AsyncLocalStorageContextManager.with (file:///node_modules/.pnpm/@opentelemetry+context-async-hooks@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/context-async-hooks/src/AsyncLocalStorageContextManager.ts:40:36)\n at ContextAPI2.with (file:///node_modules/.pnpm/@opentelemetry+api@1.9.0/node_modules/@opentelemetry/api/src/api/context.ts:77:42)\n at Tracer.startActiveSpan (file:///node_modules/.pnpm/@opentelemetry+sdk-trace-base@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/sdk-trace-base/src/Tracer.ts:241:24)\n at TriggerTracer.startActiveSpan (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:85:24)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:409:33\n at _RunTimelineMetricsAPI.measureMetric (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/runTimelineMetrics/index.ts:67:28)", + type: "BUILT_IN_ERROR", + }, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "attempt_started", + environment_type: null, + friendly_id: null, + attempt: 2, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:13.846").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:13.831").getTime(), + started_at: null, + executed_at: new Date("2025-04-30 16:34:08.112").getTime(), + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "usage", + environment_type: null, + friendly_id: null, + attempt: 2, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:18.385").getTime(), + created_at: new Date("2025-04-30 16:34:04.000").getTime(), + updated_at: new Date("2025-04-30 16:34:18.000").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 5419, + cost_in_cents: 
0.018289125, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "retry_scheduled", + environment_type: null, + friendly_id: null, + attempt: 3, + engine: null, + status: "RETRYING_AFTER_FAILURE", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:18.832").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:18.834").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: { + message: "Intentionally failing attempt 2", + name: "Error", + stackTrace: + "Error: Intentionally failing attempt 2\n at run (file:///src/trigger/retry.ts:26:21)\n at _tracer.startActiveSpan.attributes (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:414:40)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:141:24\n at AsyncLocalStorage.run (node:async_hooks:346:14)\n at AsyncLocalStorageContextManager.with (file:///node_modules/.pnpm/@opentelemetry+context-async-hooks@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/context-async-hooks/src/AsyncLocalStorageContextManager.ts:40:36)\n at ContextAPI2.with (file:///node_modules/.pnpm/@opentelemetry+api@1.9.0/node_modules/@opentelemetry/api/src/api/context.ts:77:42)\n at Tracer.startActiveSpan (file:///node_modules/.pnpm/@opentelemetry+sdk-trace-base@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/sdk-trace-base/src/Tracer.ts:241:24)\n at TriggerTracer.startActiveSpan (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:85:24)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:409:33\n at _RunTimelineMetricsAPI.measureMetric (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/runTimelineMetrics/index.ts:67:28)", + type: "BUILT_IN_ERROR", + }, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "attempt_started", + environment_type: null, + friendly_id: null, + attempt: 3, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:18.946").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:18.931").getTime(), + 
started_at: null, + executed_at: new Date("2025-04-30 16:34:08.112").getTime(), + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "usage", + environment_type: null, + friendly_id: null, + attempt: 3, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:23.559").getTime(), + created_at: new Date("2025-04-30 16:34:04.000").getTime(), + updated_at: new Date("2025-04-30 16:34:23.000").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 8217, + cost_in_cents: 0.027732375, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "retry_scheduled", + environment_type: null, + friendly_id: null, + attempt: 4, + engine: null, + status: "RETRYING_AFTER_FAILURE", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:24.045").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:24.047").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: { + message: "Intentionally failing attempt 3", + name: "Error", + stackTrace: + "Error: Intentionally failing attempt 3\n at run (file:///src/trigger/retry.ts:26:21)\n at _tracer.startActiveSpan.attributes (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:414:40)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:141:24\n at AsyncLocalStorage.run (node:async_hooks:346:14)\n at AsyncLocalStorageContextManager.with (file:///node_modules/.pnpm/@opentelemetry+context-async-hooks@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/context-async-hooks/src/AsyncLocalStorageContextManager.ts:40:36)\n at ContextAPI2.with (file:///node_modules/.pnpm/@opentelemetry+api@1.9.0/node_modules/@opentelemetry/api/src/api/context.ts:77:42)\n at Tracer.startActiveSpan (file:///node_modules/.pnpm/@opentelemetry+sdk-trace-base@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/sdk-trace-base/src/Tracer.ts:241:24)\n at 
TriggerTracer.startActiveSpan (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:85:24)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:409:33\n at _RunTimelineMetricsAPI.measureMetric (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/runTimelineMetrics/index.ts:67:28)", + type: "BUILT_IN_ERROR", + }, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "attempt_started", + environment_type: null, + friendly_id: null, + attempt: 4, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:24.135").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:24.123").getTime(), + started_at: null, + executed_at: new Date("2025-04-30 16:34:08.112").getTime(), + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "usage", + environment_type: null, + friendly_id: null, + attempt: 4, + engine: null, + status: "EXECUTING", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:25.895").getTime(), + created_at: new Date("2025-04-30 16:34:04.000").getTime(), + updated_at: new Date("2025-04-30 16:34:25.000").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 8326, + cost_in_cents: 0.02810025, + base_cost_in_cents: 0, + payload: null, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + event_name: "succeeded", + environment_type: null, + friendly_id: null, + attempt: 4, + engine: null, + status: "COMPLETED_SUCCESSFULLY", + task_identifier: null, + queue: null, + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: null, + trace_id: null, + idempotency_key: null, + event_time: new Date("2025-04-30 16:34:26.139").getTime(), + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:26.140").getTime(), + started_at: null, + executed_at: 
null, + completed_at: new Date("2025-04-30 16:34:26.139").getTime(), + delay_until: null, + queued_at: null, + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 8326, + cost_in_cents: 0.02810025, + base_cost_in_cents: 0, + payload: null, + output: { attemptsTaken: "4" }, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: false, + }, + ]); + + expect(insertError).toBeNull(); + expect(insertResult).toEqual(expect.objectContaining({ executed: true })); + expect(insertResult?.summary?.written_rows).toEqual("15"); + + const query = client.query({ + name: "query-run-events", + query: "SELECT * FROM trigger_dev.raw_run_events_v1", + schema: z.object({ + environment_id: z.string(), + run_id: z.string(), + }), + params: z.object({ + run_id: z.string(), + }), + }); + + const [queryError, result] = await query({ run_id: "run_1234" }); + + expect(queryError).toBeNull(); + expect(result).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + run_id: "cma45oli70002qrdy47w0j4n7", + }), + ]) + ); + + // Query the materialized view to check the final state + const latestQuery = client.query({ + name: "query-run-latest", + query: "SELECT * FROM trigger_dev.run_latest_v1 FINAL", + schema: z.any(), + }); + + const [latestQueryError, latestResult] = await latestQuery({}); + + console.log(latestResult); + + expect(latestQueryError).toBeNull(); + expect(latestResult).not.toBeNull(); + if (!latestResult) throw new Error("Expected latestResult to not be null"); + expect(latestResult).toHaveLength(1); + expect(latestResult[0]).toEqual( + expect.objectContaining({ + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + run_id: "cma45oli70002qrdy47w0j4n7", + status: "COMPLETED_SUCCESSFULLY", + attempt: 4, + task_identifier: "retry-task", + task_version: "20250430.3", + sdk_version: "4.0.0-v4-beta.7", + cli_version: "4.0.0-v4-beta.7", + machine_preset: "small-1x", + usage_duration_ms: 8326, + cost_in_cents: 0.02810025, + base_cost_in_cents: 0, + payload: { failCount: "3" }, + output: { attemptsTaken: "4" }, + error: null, + completed_at: "2025-04-30 15:34:26.139", + is_test: 0, + environment_type: "PRODUCTION", + friendly_id: "run_cma45oli70002qrdy47w0j4n7", + queue: "task/retry-task", + schedule_id: null, + batch_id: null, + root_run_id: null, + depth: 0, + started_at: null, + delay_until: null, + expiration_ttl: null, + expired_at: null, + span_id: "538677637f937f54", + idempotency_key: null, + tags: [], + created_at: "2025-04-30 15:34:04.312", + _version: "2025-04-30 15:34:26.139", + last_event_time: "2025-04-30 15:34:26.139", + updated_at: "2025-04-30 15:34:26.140", + engine: "V2", + }) + ); + } + ); }); diff --git a/internal-packages/clickhouse/src/runEvents.ts b/internal-packages/clickhouse/src/runEvents.ts index a7abcc4c2f..8549c737bb 100644 --- a/internal-packages/clickhouse/src/runEvents.ts +++ b/internal-packages/clickhouse/src/runEvents.ts @@ -9,6 +9,7 @@ export const RawRunEventV1 = z.object({ project_id: z.string(), run_id: z.string(), updated_at: z.number().int(), + created_at: z.number().int(), event_time: z.number().int(), event_name: z.string(), status: z.enum([ @@ -30,42 +31,41 @@ export const RawRunEventV1 = z.object({ "TIMED_OUT", ]), /* ─── optional fields ─────────────────────────────────────────────── */ - created_at: z.number().int().optional(), - environment_type: z.string().optional(), - friendly_id: z.string().optional(), + environment_type: 
z.string().nullish(), + friendly_id: z.string().nullish(), attempt: z.number().int().default(1), - engine: z.enum(["V1", "V2"]).optional(), - task_identifier: z.string().optional(), - queue: z.string().optional(), - schedule_id: z.string().optional(), - batch_id: z.string().optional(), - completed_at: z.number().int().optional(), - started_at: z.number().int().optional(), - executed_at: z.number().int().optional(), - delay_until: z.number().int().optional(), - queued_at: z.number().int().optional(), - expired_at: z.number().int().optional(), - usage_duration_ms: z.number().int().optional(), - cost_in_cents: z.number().optional(), - base_cost_in_cents: z.number().optional(), - payload: z.unknown().optional(), - output: z.unknown().optional(), - error: TaskRunError.optional(), + engine: z.enum(["V1", "V2"]).nullish(), + task_identifier: z.string().nullish(), + queue: z.string().nullish(), + schedule_id: z.string().nullish(), + batch_id: z.string().nullish(), + completed_at: z.number().int().nullish(), + started_at: z.number().int().nullish(), + executed_at: z.number().int().nullish(), + delay_until: z.number().int().nullish(), + queued_at: z.number().int().nullish(), + expired_at: z.number().int().nullish(), + usage_duration_ms: z.number().int().nullish(), + cost_in_cents: z.number().nullish(), + base_cost_in_cents: z.number().nullish(), + payload: z.unknown().nullish(), + output: z.unknown().nullish(), + error: TaskRunError.nullish(), tags: z .array(z.string()) .transform((arr) => arr.sort()) - .optional(), - task_version: z.string().optional(), - sdk_version: z.string().optional(), - cli_version: z.string().optional(), - machine_preset: z.string().optional(), - root_run_id: z.string().optional(), - parent_run_id: z.string().optional(), + .nullish(), + task_version: z.string().nullish(), + sdk_version: z.string().nullish(), + cli_version: z.string().nullish(), + machine_preset: z.string().nullish(), + root_run_id: z.string().nullish(), + parent_run_id: z.string().nullish(), depth: z.number().int().default(0), - span_id: z.string().optional(), - trace_id: z.string().optional(), - idempotency_key: z.string().optional(), - expiration_ttl: z.string().optional(), + span_id: z.string().nullish(), + trace_id: z.string().nullish(), + idempotency_key: z.string().nullish(), + expiration_ttl: z.string().nullish(), is_test: z.boolean().default(false), }); diff --git a/internal-packages/run-engine/src/engine/eventBus.ts b/internal-packages/run-engine/src/engine/eventBus.ts index 963841a75e..178bdd5b3b 100644 --- a/internal-packages/run-engine/src/engine/eventBus.ts +++ b/internal-packages/run-engine/src/engine/eventBus.ts @@ -22,6 +22,7 @@ export type EventBusEvents = { status: TaskRunStatus; queuedAt: Date; updatedAt: Date; + createdAt: Date; }; organization: { id: string; @@ -42,6 +43,7 @@ export type EventBusEvents = { status: TaskRunStatus; delayUntil: Date; updatedAt: Date; + createdAt: Date; }; organization: { id: string; @@ -73,6 +75,7 @@ export type EventBusEvents = { cliVersion: string; maxDurationInSeconds?: number; maxAttempts?: number; + createdAt: Date; }; organization: { id: string; @@ -92,6 +95,7 @@ export type EventBusEvents = { id: string; status: TaskRunStatus; updatedAt: Date; + createdAt: Date; }; organization: { id?: string; @@ -238,6 +242,7 @@ export type EventBusEvents = { baseCostInCents: number; nextMachineAfterOOM?: string; updatedAt: Date; + createdAt: Date; error: TaskRunError; }; organization: { diff --git a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts 
b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts index f806db4ec3..8677d55ff4 100644 --- a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts @@ -149,6 +149,7 @@ export class CheckpointSystem { id: runId, status: run.status, updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: run.runtimeEnvironment.organizationId, @@ -283,6 +284,7 @@ export class CheckpointSystem { runtimeEnvironmentId: true, projectId: true, updatedAt: true, + createdAt: true, }, }); @@ -300,6 +302,7 @@ export class CheckpointSystem { id: runId, status: run.status, updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: run.organizationId ?? undefined, diff --git a/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts b/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts index eef0ab4406..9a45977442 100644 --- a/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/delayedRunSystem.ts @@ -75,6 +75,7 @@ export class DelayedRunSystem { status: updatedRun.status, delayUntil: delayUntil, updatedAt: updatedRun.updatedAt, + createdAt: updatedRun.createdAt, }, organization: { id: snapshot.organizationId, @@ -120,11 +121,13 @@ export class DelayedRunSystem { batchId: run.batchId ?? undefined, }); - await this.$.prisma.taskRun.update({ + const queuedAt = new Date(); + + const updatedRun = await this.$.prisma.taskRun.update({ where: { id: runId }, data: { status: "PENDING", - queuedAt: new Date(), + queuedAt, }, }); @@ -133,8 +136,9 @@ export class DelayedRunSystem { run: { id: runId, status: "PENDING", - queuedAt: new Date(), - updatedAt: new Date(), + queuedAt, + updatedAt: updatedRun.updatedAt, + createdAt: updatedRun.createdAt, }, organization: { id: run.runtimeEnvironment.organizationId, diff --git a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts index 7853abcb66..91e47c7ec4 100644 --- a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts @@ -378,6 +378,7 @@ export class DequeueSystem { maxDurationInSeconds: lockedTaskRun.maxDurationInSeconds ?? undefined, maxAttempts: lockedTaskRun.maxAttempts ?? 
undefined, updatedAt: lockedTaskRun.updatedAt, + createdAt: lockedTaskRun.createdAt, }, organization: { id: orgId, @@ -574,6 +575,7 @@ export class DequeueSystem { status: true, attemptNumber: true, updatedAt: true, + createdAt: true, runtimeEnvironment: { select: { id: true, @@ -615,6 +617,7 @@ export class DequeueSystem { id: runId, status: run.status, updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: run.runtimeEnvironment.project.organizationId, diff --git a/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts b/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts index 6d6899bc1c..6dcb4f6eb0 100644 --- a/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/pendingVersionSystem.ts @@ -107,6 +107,7 @@ export class PendingVersionSystem { id: run.id, status: "PENDING", updatedAt: run.updatedAt, + createdAt: run.createdAt, }, organization: { id: backgroundWorker.runtimeEnvironment.organizationId, diff --git a/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts b/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts index 0517fd9f9d..bb3dce4fea 100644 --- a/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/runAttemptSystem.ts @@ -730,6 +730,7 @@ export class RunAttemptSystem { nextMachineAfterOOM: retryResult.machine, updatedAt: run.updatedAt, error: completion.error, + createdAt: run.createdAt, }, organization: { id: run.runtimeEnvironment.organizationId, From a8533e064ed7a319f89a3e043c0dc6af873f1be6 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 1 May 2025 13:47:44 +0100 Subject: [PATCH 11/33] Setup replication package --- internal-packages/replication/README.md | 1 + internal-packages/replication/package.json | 28 ++ internal-packages/replication/src/index.ts | 1 + .../replication/tsconfig.build.json | 21 ++ internal-packages/replication/tsconfig.json | 8 + .../replication/tsconfig.src.json | 20 ++ .../replication/tsconfig.test.json | 21 ++ .../replication/vitest.config.ts | 19 ++ pnpm-lock.yaml | 263 ++++++++++-------- 9 files changed, 268 insertions(+), 114 deletions(-) create mode 100644 internal-packages/replication/README.md create mode 100644 internal-packages/replication/package.json create mode 100644 internal-packages/replication/src/index.ts create mode 100644 internal-packages/replication/tsconfig.build.json create mode 100644 internal-packages/replication/tsconfig.json create mode 100644 internal-packages/replication/tsconfig.src.json create mode 100644 internal-packages/replication/tsconfig.test.json create mode 100644 internal-packages/replication/vitest.config.ts diff --git a/internal-packages/replication/README.md b/internal-packages/replication/README.md new file mode 100644 index 0000000000..6e6401b7ee --- /dev/null +++ b/internal-packages/replication/README.md @@ -0,0 +1 @@ +# Replication diff --git a/internal-packages/replication/package.json b/internal-packages/replication/package.json new file mode 100644 index 0000000000..bf4ac8664f --- /dev/null +++ b/internal-packages/replication/package.json @@ -0,0 +1,28 @@ +{ + "name": "@internal/replication", + "private": true, + "version": "0.0.1", + "main": "./dist/src/index.js", + "types": "./dist/src/index.d.ts", + "type": "module", + "dependencies": { + "@internal/tracing": "workspace:*", + "@trigger.dev/core": "workspace:*", + "pg": "8.15.6" + }, + "devDependencies": { + 
"@internal/testcontainers": "workspace:*", + "@vitest/coverage-v8": "^3.0.8", + "rimraf": "6.0.1", + "vitest": "^3.0.8", + "@types/pg": "8.11.14" + }, + "scripts": { + "clean": "rimraf dist", + "typecheck": "tsc --noEmit", + "build": "pnpm run clean && tsc -p tsconfig.build.json", + "dev": "tsc --watch -p tsconfig.build.json", + "test": "vitest --sequence.concurrent=false --no-file-parallelism", + "test:coverage": "vitest --sequence.concurrent=false --no-file-parallelism --coverage.enabled" + } +} \ No newline at end of file diff --git a/internal-packages/replication/src/index.ts b/internal-packages/replication/src/index.ts new file mode 100644 index 0000000000..cb0ff5c3b5 --- /dev/null +++ b/internal-packages/replication/src/index.ts @@ -0,0 +1 @@ +export {}; diff --git a/internal-packages/replication/tsconfig.build.json b/internal-packages/replication/tsconfig.build.json new file mode 100644 index 0000000000..619461da80 --- /dev/null +++ b/internal-packages/replication/tsconfig.build.json @@ -0,0 +1,21 @@ +{ + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.test.ts"], + "compilerOptions": { + "composite": true, + "target": "ES2019", + "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", + "moduleDetection": "force", + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "preserveWatchOutput": true, + "skipLibCheck": true, + "strict": true, + "declaration": true + } +} diff --git a/internal-packages/replication/tsconfig.json b/internal-packages/replication/tsconfig.json new file mode 100644 index 0000000000..af630abe1f --- /dev/null +++ b/internal-packages/replication/tsconfig.json @@ -0,0 +1,8 @@ +{ + "references": [{ "path": "./tsconfig.src.json" }, { "path": "./tsconfig.test.json" }], + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16", + "customConditions": ["@triggerdotdev/source"] + } +} diff --git a/internal-packages/replication/tsconfig.src.json b/internal-packages/replication/tsconfig.src.json new file mode 100644 index 0000000000..6043e02ad2 --- /dev/null +++ b/internal-packages/replication/tsconfig.src.json @@ -0,0 +1,20 @@ +{ + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "src/**/*.test.ts"], + "compilerOptions": { + "composite": true, + "target": "ES2019", + "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "module": "Node16", + "moduleResolution": "Node16", + "moduleDetection": "force", + "verbatimModuleSyntax": false, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "preserveWatchOutput": true, + "skipLibCheck": true, + "strict": true, + "customConditions": ["@triggerdotdev/source"] + } +} diff --git a/internal-packages/replication/tsconfig.test.json b/internal-packages/replication/tsconfig.test.json new file mode 100644 index 0000000000..99db8eb7c9 --- /dev/null +++ b/internal-packages/replication/tsconfig.test.json @@ -0,0 +1,21 @@ +{ + "include": ["src/**/*.test.ts", "vitest.config.ts"], + "references": [{ "path": "./tsconfig.src.json" }], + "compilerOptions": { + "composite": true, + "target": "ES2019", + "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "module": "Node16", + "moduleResolution": "Node16", + "moduleDetection": "force", + "verbatimModuleSyntax": false, + "types": ["vitest/globals"], + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": 
true, + "preserveWatchOutput": true, + "skipLibCheck": true, + "strict": true, + "customConditions": ["@triggerdotdev/source"] + } +} diff --git a/internal-packages/replication/vitest.config.ts b/internal-packages/replication/vitest.config.ts new file mode 100644 index 0000000000..1d779c0957 --- /dev/null +++ b/internal-packages/replication/vitest.config.ts @@ -0,0 +1,19 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + include: ["**/*.test.ts"], + globals: true, + isolate: true, + fileParallelism: false, + poolOptions: { + threads: { + singleThread: true, + }, + }, + testTimeout: 60_000, + coverage: { + provider: "v8", + }, + }, +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 77c207e6c3..f8a5453a58 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -975,6 +975,34 @@ importers: specifier: ^1.4.0 version: 1.6.0(@types/node@20.14.14) + internal-packages/replication: + dependencies: + '@internal/tracing': + specifier: workspace:* + version: link:../tracing + '@trigger.dev/core': + specifier: workspace:* + version: link:../../packages/core + pg: + specifier: 8.15.6 + version: 8.15.6 + devDependencies: + '@internal/testcontainers': + specifier: workspace:* + version: link:../testcontainers + '@types/pg': + specifier: 8.11.14 + version: 8.11.14 + '@vitest/coverage-v8': + specifier: ^3.0.8 + version: 3.0.8(vitest@3.0.8) + rimraf: + specifier: 6.0.1 + version: 6.0.1 + vitest: + specifier: ^3.0.8 + version: 3.0.8(@types/node@20.14.14) + internal-packages/run-engine: dependencies: '@internal/redis': @@ -2730,13 +2758,6 @@ packages: resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} - /@ampproject/remapping@2.2.1: - resolution: {integrity: sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/gen-mapping': 0.3.5 - '@jridgewell/trace-mapping': 0.3.25 - /@ampproject/remapping@2.3.0: resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} @@ -3801,7 +3822,7 @@ packages: resolution: {integrity: sha512-2EENLmhpwplDux5PSsZnSbnSkB3tZ6QTksgO25xwEL7pIDcNOMhF5v/s6RzwjMZzZzw9Ofc30gHv5ChCC8pifQ==} engines: {node: '>=6.9.0'} dependencies: - '@ampproject/remapping': 2.2.1 + '@ampproject/remapping': 2.3.0 '@babel/code-frame': 7.22.13 '@babel/generator': 7.22.15 '@babel/helper-compilation-targets': 7.22.15 @@ -3812,7 +3833,7 @@ packages: '@babel/traverse': 7.24.7 '@babel/types': 7.24.0 convert-source-map: 1.9.0 - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -3829,10 +3850,10 @@ packages: '@babel/helper-compilation-targets': 7.25.2 '@babel/helper-module-transforms': 7.25.2(@babel/core@7.24.5) '@babel/helpers': 7.25.6 - '@babel/parser': 7.26.8 + '@babel/parser': 7.27.0 '@babel/template': 7.24.7 '@babel/traverse': 7.24.7 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 convert-source-map: 2.0.0 debug: 4.4.0(supports-color@10.0.0) gensync: 1.0.0-beta.2 @@ -3892,8 +3913,8 @@ packages: resolution: {integrity: sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 - '@jridgewell/gen-mapping': 0.3.5 + '@babel/types': 7.27.0 + '@jridgewell/gen-mapping': 0.3.8 '@jridgewell/trace-mapping': 0.3.25 jsesc: 2.5.2 @@ -3901,8 
+3922,8 @@ packages: resolution: {integrity: sha512-VPC82gr1seXOpkjAAKoLhP50vx4vGNlF4msF64dSFq1P8RfB+QAuJWGHPXXPc8QyfVWwwB/TNNU4+ayZmHNbZw==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 - '@jridgewell/gen-mapping': 0.3.5 + '@babel/types': 7.27.0 + '@jridgewell/gen-mapping': 0.3.8 '@jridgewell/trace-mapping': 0.3.25 jsesc: 2.5.2 dev: false @@ -3922,7 +3943,7 @@ packages: resolution: {integrity: sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-builder-binary-assignment-operator-visitor@7.18.9: @@ -3930,7 +3951,7 @@ packages: engines: {node: '>=6.9.0'} dependencies: '@babel/helper-explode-assignable-expression': 7.18.6 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-compilation-targets@7.22.15: @@ -4058,13 +4079,13 @@ packages: resolution: {integrity: sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-explode-assignable-expression@7.18.6: resolution: {integrity: sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-function-name@7.22.5: @@ -4072,7 +4093,7 @@ packages: engines: {node: '>=6.9.0'} dependencies: '@babel/template': 7.22.15 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-function-name@7.23.0: @@ -4080,7 +4101,7 @@ packages: engines: {node: '>=6.9.0'} dependencies: '@babel/template': 7.24.7 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-function-name@7.24.7: @@ -4088,47 +4109,47 @@ packages: engines: {node: '>=6.9.0'} dependencies: '@babel/template': 7.24.7 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-hoist-variables@7.22.5: resolution: {integrity: sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-hoist-variables@7.24.7: resolution: {integrity: sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-member-expression-to-functions@7.21.5: resolution: {integrity: sha512-nIcGfgwpH2u4n9GG1HpStW5Ogx7x7ekiFHbjjFRKXbn5zUvqO9ZgotCO4x1aNbKn/x/xOUaXEhyNHCwtFCpxWg==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-member-expression-to-functions@7.23.0: resolution: {integrity: sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-module-imports@7.22.15: resolution: {integrity: sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-module-imports@7.24.7: resolution: {integrity: sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==} engines: {node: '>=6.9.0'} dependencies: '@babel/traverse': 
7.25.6 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color dev: false @@ -4189,14 +4210,14 @@ packages: resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-optimise-call-expression@7.22.5: resolution: {integrity: sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-plugin-utils@7.22.5: @@ -4219,7 +4240,7 @@ packages: '@babel/helper-annotate-as-pure': 7.22.5 '@babel/helper-environment-visitor': 7.22.20 '@babel/helper-wrap-function': 7.20.5 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color dev: true @@ -4233,7 +4254,7 @@ packages: '@babel/helper-optimise-call-expression': 7.22.5 '@babel/template': 7.24.7 '@babel/traverse': 7.24.7 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color dev: true @@ -4254,14 +4275,14 @@ packages: resolution: {integrity: sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-simple-access@7.24.7: resolution: {integrity: sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==} engines: {node: '>=6.9.0'} dependencies: '@babel/traverse': 7.25.6 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color dev: false @@ -4270,27 +4291,27 @@ packages: resolution: {integrity: sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-skip-transparent-expression-wrappers@7.22.5: resolution: {integrity: sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/helper-split-export-declaration@7.22.6: resolution: {integrity: sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-split-export-declaration@7.24.7: resolution: {integrity: sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==} engines: {node: '>=6.9.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 /@babel/helper-string-parser@7.24.7: resolution: {integrity: sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==} @@ -4329,7 +4350,7 @@ packages: '@babel/helper-function-name': 7.24.7 '@babel/template': 7.24.7 '@babel/traverse': 7.24.7 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color dev: true @@ -4340,7 +4361,7 @@ packages: dependencies: '@babel/template': 7.22.15 '@babel/traverse': 7.24.7 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 transitivePeerDependencies: - supports-color @@ -4349,7 +4370,7 @@ packages: engines: {node: '>=6.9.0'} dependencies: '@babel/template': 7.25.0 - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: false 
/@babel/helpers@7.27.0: @@ -4390,7 +4411,7 @@ packages: engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: false /@babel/parser@7.24.5: @@ -4398,7 +4419,7 @@ packages: engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: false /@babel/parser@7.24.7: @@ -4413,7 +4434,8 @@ packages: engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 + dev: true /@babel/parser@7.27.0: resolution: {integrity: sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==} @@ -5149,7 +5171,7 @@ packages: '@babel/helper-module-imports': 7.22.15 '@babel/helper-plugin-utils': 7.24.0 '@babel/plugin-syntax-jsx': 7.22.5(@babel/core@7.22.17) - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: true /@babel/plugin-transform-react-pure-annotations@7.18.6(@babel/core@7.22.17): @@ -5382,7 +5404,7 @@ packages: '@babel/helper-plugin-utils': 7.24.0 '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/core@7.22.17) '@babel/plugin-transform-dotall-regex': 7.18.6(@babel/core@7.22.17) - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 esutils: 2.0.3 dev: true @@ -5451,24 +5473,24 @@ packages: engines: {node: '>=6.9.0'} dependencies: '@babel/code-frame': 7.22.13 - '@babel/parser': 7.26.8 - '@babel/types': 7.26.8 + '@babel/parser': 7.27.0 + '@babel/types': 7.27.0 /@babel/template@7.24.7: resolution: {integrity: sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==} engines: {node: '>=6.9.0'} dependencies: '@babel/code-frame': 7.24.7 - '@babel/parser': 7.26.8 - '@babel/types': 7.26.8 + '@babel/parser': 7.27.0 + '@babel/types': 7.27.0 /@babel/template@7.25.0: resolution: {integrity: sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==} engines: {node: '>=6.9.0'} dependencies: '@babel/code-frame': 7.24.7 - '@babel/parser': 7.26.8 - '@babel/types': 7.26.8 + '@babel/parser': 7.27.0 + '@babel/types': 7.27.0 dev: false /@babel/template@7.27.0: @@ -5492,7 +5514,7 @@ packages: '@babel/helper-split-export-declaration': 7.22.6 '@babel/parser': 7.24.7 '@babel/types': 7.24.0 - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5508,9 +5530,9 @@ packages: '@babel/helper-function-name': 7.24.7 '@babel/helper-hoist-variables': 7.24.7 '@babel/helper-split-export-declaration': 7.24.7 - '@babel/parser': 7.26.8 - '@babel/types': 7.26.8 - debug: 4.4.0(supports-color@10.0.0) + '@babel/parser': 7.27.0 + '@babel/types': 7.27.0 + debug: 4.4.0 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5521,10 +5543,10 @@ packages: dependencies: '@babel/code-frame': 7.24.7 '@babel/generator': 7.25.6 - '@babel/parser': 7.26.8 + '@babel/parser': 7.27.0 '@babel/template': 7.25.0 - '@babel/types': 7.26.8 - debug: 4.4.0(supports-color@10.0.0) + '@babel/types': 7.27.0 + debug: 4.4.0 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5567,6 +5589,7 @@ packages: dependencies: '@babel/helper-string-parser': 7.25.9 '@babel/helper-validator-identifier': 7.25.9 + dev: true /@babel/types@7.27.0: resolution: {integrity: sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==} @@ -5843,6 +5866,7 @@ packages: '@clack/core': 0.4.1 picocolors: 1.1.1 sisteransi: 1.0.5 + dev: false /@clickhouse/client-common@1.11.1: resolution: {integrity: 
sha512-bme0le2yhDSAh13d2fxhSW5ZrNoVqZ3LTyac8jK6hNH0qkksXnjYkLS6KQalPU6NMpffxHmpI4+/Gi2MnX0NCA==} @@ -6305,6 +6329,7 @@ packages: resolution: {integrity: sha512-Ei9jN3pDoGzc+a/bGqnB5ajb52IvSv7/n2btuyzUlcOHIR2kM9fqtYTJXPwZYKLkGZlHWlpHgWyRtrinkP2nHg==} optionalDependencies: '@rollup/rollup-darwin-arm64': 4.40.1 + dev: false /@electric-sql/react@0.3.5(react@18.2.0): resolution: {integrity: sha512-qPrlF3BsRg5L8zAn1sLGzc3pkswfEHyQI3lNOu7Xllv1DBx85RvHR1zgGGPAUfC8iwyWupQu9pFPE63GdbeuhA==} @@ -8037,7 +8062,7 @@ packages: engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: ajv: 6.12.6 - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 espree: 9.6.0 globals: 13.19.0 ignore: 5.2.4 @@ -8332,7 +8357,7 @@ packages: engines: {node: '>=10.10.0'} dependencies: '@humanwhocodes/object-schema': 1.2.1 - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -8790,7 +8815,6 @@ packages: '@jridgewell/set-array': 1.2.1 '@jridgewell/sourcemap-codec': 1.5.0 '@jridgewell/trace-mapping': 0.3.25 - dev: true /@jridgewell/resolve-uri@3.1.0: resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} @@ -8803,7 +8827,7 @@ packages: /@jridgewell/source-map@0.3.3: resolution: {integrity: sha512-b+fsZXeLYi9fEULmfBrhxn4IrPlINf8fiNarzTof004v3lFdntdwa9PF7vFJqm3mg7s+ScJMxXaE3Acp1irZcg==} dependencies: - '@jridgewell/gen-mapping': 0.3.5 + '@jridgewell/gen-mapping': 0.3.8 '@jridgewell/trace-mapping': 0.3.25 dev: false @@ -16342,6 +16366,7 @@ packages: cpu: [arm64] os: [darwin] requiresBuild: true + dev: false optional: true /@rollup/rollup-darwin-x64@4.36.0: @@ -18083,7 +18108,7 @@ packages: /@types/acorn@4.0.6: resolution: {integrity: sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 dev: true /@types/aria-query@5.0.1: @@ -18243,7 +18268,7 @@ packages: resolution: {integrity: sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==} dependencies: '@types/eslint': 8.4.10 - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 dev: false /@types/eslint-scope@3.7.7: @@ -18269,7 +18294,7 @@ packages: /@types/estree-jsx@1.0.0: resolution: {integrity: sha512-3qvGd0z8F2ENTGr/GG1yViqfiKmRfrXVx5sJyHGFu3z7m5g5utCQtGp/g29JnjflhtQJBv1WDQukHiT58xPcYQ==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 /@types/estree@1.0.0: resolution: {integrity: sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==} @@ -18518,6 +18543,14 @@ packages: resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} dev: true + /@types/pg@8.11.14: + resolution: {integrity: sha512-qyD11E5R3u0eJmd1lB0WnWKXJGA7s015nyARWljfz5DcX83TKAIlY+QrmvzQTsbIe+hkiFtkyL2gHC6qwF6Fbg==} + dependencies: + '@types/node': 20.14.14 + pg-protocol: 1.9.5 + pg-types: 4.0.2 + dev: true + /@types/pg@8.11.6: resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} dependencies: @@ -18890,7 +18923,7 @@ packages: dependencies: '@typescript-eslint/typescript-estree': 5.59.6(typescript@5.5.4) '@typescript-eslint/utils': 5.59.6(eslint@8.31.0)(typescript@5.5.4) - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 eslint: 8.31.0 tsutils: 3.21.0(typescript@5.5.4) typescript: 5.5.4 @@ -18914,7 +18947,7 @@ packages: dependencies: 
'@typescript-eslint/types': 5.59.6 '@typescript-eslint/visitor-keys': 5.59.6 - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.3 @@ -20695,7 +20728,7 @@ packages: resolution: {integrity: sha512-fdRxJkQ9MUSEi4jH2DcV3FAPFktk0wefilxrwNyUuWpoWawQGN7G7cB+fOYTtFfI6XNkFgwqJ/D3G18BoJJ/jg==} engines: {node: '>= 10.0.0'} dependencies: - '@babel/types': 7.26.8 + '@babel/types': 7.27.0 dev: false /bail@2.0.2: @@ -21068,29 +21101,6 @@ packages: rc9: 2.1.2 dev: false - /c12@1.11.1(magicast@0.3.5): - resolution: {integrity: sha512-KDU0TvSvVdaYcQKQ6iPHATGz/7p/KiVjPg4vQrB6Jg/wX9R0yl5RZxWm9IoZqaIHD2+6PZd81+KMGwRr/lRIUg==} - peerDependencies: - magicast: ^0.3.4 - peerDependenciesMeta: - magicast: - optional: true - dependencies: - chokidar: 3.6.0 - confbox: 0.1.7 - defu: 6.1.4 - dotenv: 16.4.5 - giget: 1.2.3 - jiti: 1.21.6 - magicast: 0.3.5 - mlly: 1.7.1 - ohash: 1.1.3 - pathe: 1.1.2 - perfect-debounce: 1.0.0 - pkg-types: 1.1.3 - rc9: 2.1.2 - dev: true - /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -22619,7 +22629,7 @@ packages: resolution: {integrity: sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==} engines: {node: '>= 8.0'} dependencies: - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 readable-stream: 3.6.0 split-ca: 1.0.1 ssh2: 1.16.0 @@ -23068,10 +23078,10 @@ packages: /es-module-lexer@1.6.0: resolution: {integrity: sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==} + dev: true /es-module-lexer@1.7.0: resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} - dev: true /es-object-atoms@1.0.0: resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} @@ -23675,7 +23685,7 @@ packages: eslint: '*' eslint-plugin-import: '*' dependencies: - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 enhanced-resolve: 5.15.0 eslint: 8.31.0 eslint-module-utils: 2.7.4(@typescript-eslint/parser@5.59.6)(eslint-import-resolver-node@0.3.7)(eslint-import-resolver-typescript@3.5.5)(eslint@8.31.0) @@ -24081,7 +24091,7 @@ packages: /estree-util-attach-comments@2.1.0: resolution: {integrity: sha512-rJz6I4L0GaXYtHpoMScgDIwM0/Vwbu5shbMeER596rB2D1EWF6+Gj0e0UKzJPZrpoOc87+Q2kgVFHfjAymIqmw==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 dev: true /estree-util-build-jsx@2.2.2: @@ -24136,7 +24146,7 @@ packages: /estree-walker@3.0.3: resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 dev: true /esutils@2.0.3: @@ -25406,7 +25416,7 @@ packages: /hast-util-to-estree@2.1.0: resolution: {integrity: sha512-Vwch1etMRmm89xGgz+voWXvVHba2iiMdGMKmaMfYt35rbVtFDq8JNwwAIvi8zHMkO6Gvqo9oTMwJTmzVRfXh4g==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 '@types/estree-jsx': 1.0.0 '@types/hast': 2.3.4 '@types/unist': 2.0.6 @@ -26087,7 +26097,7 @@ packages: /is-reference@3.0.1: resolution: {integrity: sha512-baJJdQLiYaJdvFbJqXrcGv3WU3QCzBlUcI5QhbesIm6/xPsvmO+2CDoi/GMOFBQEQm+PXkwOPrp9KK5ozZsp2w==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 dev: true /is-reference@3.0.3: @@ -27882,7 +27892,7 @@ packages: resolution: {integrity: 
sha512-WWp3bf7xT9MppNuw3yPjpnOxa8cj5ACivEzXJKu0WwnjBYfzaBvIAT9KfeyI0Qkll+bfQtfftSwdgTH6QhTOKw==} dependencies: '@types/acorn': 4.0.6 - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 estree-util-visit: 1.2.0 micromark-util-types: 1.0.2 uvu: 0.5.6 @@ -28307,7 +28317,7 @@ packages: acorn: 8.14.1 pathe: 2.0.3 pkg-types: 1.3.1 - ufo: 1.5.4 + ufo: 1.6.1 dev: false /module-details-from-path@1.0.3: @@ -28927,7 +28937,7 @@ packages: execa: 8.0.1 pathe: 1.1.2 pkg-types: 1.1.3 - ufo: 1.5.4 + ufo: 1.6.1 dev: false /nypm@0.5.4: @@ -29064,7 +29074,6 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - dev: false /ohash@1.1.3: resolution: {integrity: sha512-zuHHiGTYTA1sYJ/wZN+t5HKZaH23i4yI1HMwbuXm24Nid7Dv0KcuRlKoNKS9UNfAVSBlnGLcuQrnOKWOZoEGaw==} @@ -29714,7 +29723,7 @@ packages: /periscopic@3.1.0: resolution: {integrity: sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==} dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 estree-walker: 3.0.3 is-reference: 3.0.1 dev: true @@ -29729,6 +29738,10 @@ packages: resolution: {integrity: sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==} dev: false + /pg-connection-string@2.8.5: + resolution: {integrity: sha512-Ni8FuZ8yAF+sWZzojvtLE2b03cqjO5jNULcHFfM9ZZ0/JXrgom5pBREbtnAw7oxsxJqHw9Nz/XWORUEL3/IFow==} + dev: false + /pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -29736,7 +29749,6 @@ packages: /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - dev: false /pg-pool@3.6.2(pg@8.11.5): resolution: {integrity: sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==} @@ -29746,9 +29758,20 @@ packages: pg: 8.11.5 dev: false + /pg-pool@3.9.6(pg@8.15.6): + resolution: {integrity: sha512-rFen0G7adh1YmgvrmE5IPIqbb+IgEzENUm+tzm6MLLDSlPRoZVhzU1WdML9PV2W5GOdRA9qBKURlbt1OsXOsPw==} + peerDependencies: + pg: '>=8.0' + dependencies: + pg: 8.15.6 + dev: false + /pg-protocol@1.6.1: resolution: {integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==} + /pg-protocol@1.9.5: + resolution: {integrity: sha512-DYTWtWpfd5FOro3UnAfwvhD8jh59r2ig8bPtc9H8Ds7MscE/9NYruUQWFAOuraRl29jwcT2kyMFQ3MxeaVjUhg==} + /pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} engines: {node: '>=4'} @@ -29770,7 +29793,6 @@ packages: postgres-date: 2.1.0 postgres-interval: 3.0.0 postgres-range: 1.1.4 - dev: false /pg@8.11.5: resolution: {integrity: sha512-jqgNHSKL5cbDjFlHyYsCXmQDrfIX/3RsNwYqpd4N0Kt8niLuNoRNH+aazv6cOd43gPh9Y4DjQCtb+X0MH0Hvnw==} @@ -29790,6 +29812,24 @@ packages: pg-cloudflare: 1.2.5 dev: false + /pg@8.15.6: + resolution: {integrity: sha512-yvao7YI3GdmmrslNVsZgx9PfntfWrnXwtR+K/DjI0I/sTKif4Z623um+sjVZ1hk5670B+ODjvHDAckKdjmPTsg==} + engines: {node: '>= 8.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + dependencies: + pg-connection-string: 2.8.5 + pg-pool: 3.9.6(pg@8.15.6) + pg-protocol: 1.9.5 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.2.5 + dev: false + /pgpass@1.0.5: resolution: {integrity: 
sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} dependencies: @@ -30252,7 +30292,6 @@ packages: /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} - dev: false /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} @@ -30263,7 +30302,6 @@ packages: engines: {node: '>= 6'} dependencies: obuf: 1.1.2 - dev: false /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} @@ -30272,7 +30310,6 @@ packages: /postgres-date@2.1.0: resolution: {integrity: sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==} engines: {node: '>=12'} - dev: false /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} @@ -30283,11 +30320,9 @@ packages: /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} - dev: false /postgres-range@1.1.4: resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==} - dev: false /posthog-js@1.93.3: resolution: {integrity: sha512-jEOWwaQpTRbqLPrDLY6eZr7t95h+LyXqN7Yq1/K6u3V0Y1C9xHtYhpuGzYamirVnCDTbVq22RM++OBUaIpp9Wg==} @@ -33305,7 +33340,7 @@ packages: dependencies: component-emitter: 1.3.1 cookiejar: 2.1.4 - debug: 4.3.7(supports-color@10.0.0) + debug: 4.4.0 fast-safe-stringify: 2.1.1 form-data: 4.0.0 formidable: 3.5.1 @@ -36005,7 +36040,7 @@ packages: optional: true dependencies: '@types/eslint-scope': 3.7.4 - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 '@webassemblyjs/ast': 1.11.5 '@webassemblyjs/wasm-edit': 1.11.5 '@webassemblyjs/wasm-parser': 1.11.5 @@ -36014,7 +36049,7 @@ packages: browserslist: 4.24.4 chrome-trace-event: 1.0.3 enhanced-resolve: 5.18.1 - es-module-lexer: 1.6.0 + es-module-lexer: 1.7.0 eslint-scope: 5.1.1 events: 3.3.0 glob-to-regexp: 0.4.1 From 1c00d9871f4015a8a0a022ecf706d4f06743ded2 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 1 May 2025 16:15:44 +0100 Subject: [PATCH 12/33] scaffold the replication package --- internal-packages/replication/package.json | 4 +- .../replication/src/client.test.ts | 89 +++ internal-packages/replication/src/client.ts | 567 ++++++++++++++++++ internal-packages/replication/src/errors.ts | 5 + internal-packages/replication/src/pgoutput.ts | 385 ++++++++++++ .../replication/tsconfig.build.json | 4 +- .../replication/tsconfig.src.json | 4 +- .../replication/tsconfig.test.json | 4 +- internal-packages/testcontainers/src/index.ts | 9 + pnpm-lock.yaml | 6 + 10 files changed, 1070 insertions(+), 7 deletions(-) create mode 100644 internal-packages/replication/src/client.test.ts create mode 100644 internal-packages/replication/src/client.ts create mode 100644 internal-packages/replication/src/errors.ts create mode 100644 internal-packages/replication/src/pgoutput.ts diff --git a/internal-packages/replication/package.json b/internal-packages/replication/package.json index bf4ac8664f..adfc1bfe50 100644 --- a/internal-packages/replication/package.json +++ b/internal-packages/replication/package.json @@ -6,9 +6,11 @@ "types": "./dist/src/index.d.ts", "type": "module", "dependencies": { + 
"@internal/redis": "workspace:*", "@internal/tracing": "workspace:*", "@trigger.dev/core": "workspace:*", - "pg": "8.15.6" + "pg": "8.15.6", + "redlock": "5.0.0-beta.2" }, "devDependencies": { "@internal/testcontainers": "workspace:*", diff --git a/internal-packages/replication/src/client.test.ts b/internal-packages/replication/src/client.test.ts new file mode 100644 index 0000000000..c938d662ae --- /dev/null +++ b/internal-packages/replication/src/client.test.ts @@ -0,0 +1,89 @@ +import { postgresAndRedisTest } from "@internal/testcontainers"; +import { LogicalReplicationClient } from "./client.js"; +import { setTimeout } from "timers/promises"; + +describe("Replication Client", () => { + postgresAndRedisTest( + "should be able to subscribe to changes on a table", + async ({ postgresContainer, prisma, redisOptions }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const client = new LogicalReplicationClient({ + name: "test", + slotName: "test_slot", + publicationName: "test_publication", + redisOptions, + table: "TaskRun", + pgConfig: { + connectionString: postgresContainer.getConnectionUri(), + }, + }); + + const logs: Array<{ + lsn: string; + log: unknown; + }> = []; + + client.events.on("data", (data) => { + console.log(data); + logs.push(data); + }); + + client.events.on("error", (error) => { + console.error(error); + }); + + await client.subscribe(); + + const organization = await prisma.organization.create({ + data: { + title: "test", + slug: "test", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test", + slug: "test", + organizationId: organization.id, + externalRef: "test", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test", + pkApiKey: "test", + shortcode: "test", + }, + }); + + // Now we insert a row into the table + await prisma.taskRun.create({ + data: { + friendlyId: "run_1234", + taskIdentifier: "my-task", + payload: JSON.stringify({ foo: "bar" }), + traceId: "1234", + spanId: "1234", + queue: "test", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + }, + }); + + // Wait for a bit of time + await setTimeout(50); + + // Now we should see the row in the logs + expect(logs.length).toBeGreaterThan(0); + + await client.stop(); + } + ); +}); diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts new file mode 100644 index 0000000000..8df3ca8aff --- /dev/null +++ b/internal-packages/replication/src/client.ts @@ -0,0 +1,567 @@ +import { tryCatch } from "@trigger.dev/core/utils"; +import { Redis, type RedisOptions } from "@internal/redis"; +import EventEmitter from "node:events"; +import { Client, ClientConfig, Connection } from "pg"; +import Redlock, { Lock } from "redlock"; +import { createRedisClient } from "@internal/redis"; +import { Logger } from "@trigger.dev/core/logger"; +import { LogicalReplicationClientError } from "./errors.js"; +import { PgoutputParser, getPgoutputStartReplicationSQL } from "./pgoutput.js"; + +export interface LogicalReplicationClientOptions { + /** + * The pg client config. + */ + pgConfig: ClientConfig; + + /** + * The name of this LogicalReplicationClient instance, used for leader election. + */ + name: string; + /** + * The table to replicate (for publication creation). + */ + table: string; + /** + * The name of the replication slot to use. 
+ */ + slotName: string; + /** + * The name of the publication to use. + */ + publicationName: string; + /** + * A connected Redis client instance for Redlock. + */ + redisOptions: RedisOptions; + /** + * Whether to automatically acknowledge messages. + */ + autoAcknowledge?: boolean; + /** + * A logger instance for logging. + */ + logger?: Logger; + /** + * The initial leader lock timeout in ms (default: 30000) + */ + leaderLockTimeoutMs?: number; + /** + * The interval in ms to extend the leader lock (default: 10000) + */ + leaderLockExtendIntervalMs?: number; + /** + * The interval in seconds to automatically acknowledge the last LSN if no ack has been sent (default: 10) + */ + ackIntervalSeconds?: number; + + /** + * The actions to publish to the publication. + */ + publicationActions?: Array<"insert" | "update" | "delete" | "truncate">; +} + +export type LogicalReplicationClientEvents = { + leaderElection: [boolean]; + error: [Error]; + data: [{ lsn: string; log: unknown }]; + start: []; + acknowledge: [{ lsn: string }]; + heartbeat: [{ lsn: string; timestamp: number; shouldRespond: boolean }]; +}; + +export class LogicalReplicationClient { + private readonly options: LogicalReplicationClientOptions; + private client: Client | null = null; + private connection: Connection | null = null; + private redis: Redis; + private redlock: Redlock; + private leaderLock: Lock | null = null; + public readonly events: EventEmitter; + private logger: Logger; + private autoAcknowledge: boolean; + private lastAcknowledgedLsn: string | null = null; + private leaderLockTimeoutMs: number; + private leaderLockExtendIntervalMs: number; + private leaderLockHeartbeatTimer: NodeJS.Timeout | null = null; + private ackIntervalSeconds: number; + private lastAckTimestamp: number = 0; + private ackIntervalTimer: NodeJS.Timeout | null = null; + private _isStopped: boolean = false; + + public get lastLsn(): string { + return this.lastAcknowledgedLsn ?? "0/00000000"; + } + + public get isStopped(): boolean { + return this._isStopped; + } + + constructor(options: LogicalReplicationClientOptions) { + this.options = options; + this.logger = options.logger ?? new Logger("LogicalReplicationClient", "info"); + + this.autoAcknowledge = + typeof options.autoAcknowledge === "boolean" ? options.autoAcknowledge : true; + + this.leaderLockTimeoutMs = options.leaderLockTimeoutMs ?? 30000; + this.leaderLockExtendIntervalMs = options.leaderLockExtendIntervalMs ?? 10000; + this.ackIntervalSeconds = options.ackIntervalSeconds ?? 
10; + + this.redis = createRedisClient( + { + ...options.redisOptions, + keyPrefix: `${options.redisOptions.keyPrefix}logical-replication-client:`, + }, + { + onError: (error) => { + this.logger.error(`RunLock redis client error:`, { + error, + keyPrefix: options.redisOptions.keyPrefix, + }); + }, + } + ); + + this.redlock = new Redlock([this.redis], { + retryCount: 0, + }); + this.events = new EventEmitter(); + } + + public async stop(): Promise { + if (this._isStopped) return this; + this._isStopped = true; + // Clean up leader lock heartbeat + if (this.leaderLockHeartbeatTimer) { + clearInterval(this.leaderLockHeartbeatTimer); + this.leaderLockHeartbeatTimer = null; + } + // Clean up ack interval + if (this.ackIntervalTimer) { + clearInterval(this.ackIntervalTimer); + this.ackIntervalTimer = null; + } + // Release leader lock if held + if (this.leaderLock) { + const [releaseError] = await tryCatch(this.leaderLock.release()); + + if (releaseError) { + this.logger.error("Failed to release leader lock", { + name: this.options.name, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + error: releaseError, + }); + } else { + this.logger.info("Released leader lock", { + name: this.options.name, + slotName: this.options.slotName, + }); + } + + this.leaderLock = null; + } + + this.connection?.removeAllListeners(); + this.connection = null; + + if (this.client) { + this.client.removeAllListeners(); + + const [endError] = await tryCatch(this.client.end()); + + if (endError) { + this.logger.error("Failed to end client", { + name: this.options.name, + error: endError, + }); + } else { + this.logger.info("Ended client", { + name: this.options.name, + }); + } + this.client = null; + } + + // clear any intervals + if (this.leaderLockHeartbeatTimer) { + clearInterval(this.leaderLockHeartbeatTimer); + this.leaderLockHeartbeatTimer = null; + } + + if (this.ackIntervalTimer) { + clearInterval(this.ackIntervalTimer); + this.ackIntervalTimer = null; + } + + return this; + } + + public async subscribe(startLsn?: string): Promise { + await this.stop(); + + this.lastAcknowledgedLsn = startLsn ?? this.lastAcknowledgedLsn; + + this.logger.info("Subscribing to logical replication", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + startLsn, + }); + + // 1. Leader election + try { + this.leaderLock = await this.redlock.acquire( + [`logical-replication-client:${this.options.name}`], + this.leaderLockTimeoutMs, + { + retryCount: 60, + retryDelay: 1000, + retryJitter: 100, + } + ); + } catch (err) { + this.logger.error("Leader election failed", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + startLsn, + error: err, + }); + + this.events.emit("leaderElection", false); + + return this.stop(); + } + + this.events.emit("leaderElection", true); + + this.logger.info("Leader election successful", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + startLsn, + }); + + // Start leader lock heartbeat + this.#startLeaderLockHeartbeat(); + + // Start auto-acknowledge interval + this.#startAckInterval(); + + // 2. 
Connect pg client + this.client = new Client({ + ...this.options.pgConfig, + // @ts-expect-error + replication: "database", + application_name: this.options.name, + }); + await this.client.connect(); + // @ts-ignore + this.connection = this.client.connection; + + const publicationCreated = await this.#createPublication(); + + if (!publicationCreated) { + return this.stop(); + } + + this.logger.info("Publication created", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + startLsn, + }); + + const slotCreated = await this.#createSlot(); + + if (!slotCreated) { + return this.stop(); + } + + this.logger.info("Slot created", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + startLsn, + }); + + // 5. Start replication (pgoutput) + const parser = new PgoutputParser(); + const sql = getPgoutputStartReplicationSQL(this.options.slotName, this.lastLsn, { + protoVersion: 1, + publicationNames: [this.options.publicationName], + messages: false, + }); + + // 6. Listen for replication events (copyData, etc.) + if (!this.connection) { + this.events.emit( + "error", + new LogicalReplicationClientError("No connection after starting replication") + ); + return this.stop(); + } + + this.connection.once("replicationStart", () => { + this._isStopped = false; + this.events.emit("start"); + }); + + this.connection.on( + "copyData", + async ({ chunk: buffer }: { length: number; chunk: Buffer; name: string }) => { + // pgoutput protocol: 0x77 = XLogData, 0x6b = Primary keepalive + if (buffer[0] !== 0x77 && buffer[0] !== 0x6b) { + this.logger.warn("Unknown replication message type", { byte: buffer[0] }); + return; + } + const lsn = + buffer.readUInt32BE(1).toString(16).toUpperCase() + + "/" + + buffer.readUInt32BE(5).toString(16).toUpperCase(); + + if (buffer[0] === 0x77) { + // XLogData + try { + const log = parser.parse(buffer.subarray(25)); + this.events.emit("data", { lsn, log }); + await this.#acknowledge(lsn); + } catch (err) { + this.logger.error("Failed to parse XLogData", { error: err }); + this.events.emit("error", err instanceof Error ? err : new Error(String(err))); + } + } else if (buffer[0] === 0x6b) { + // Primary keepalive message + const timestamp = Math.floor( + buffer.readUInt32BE(9) * 4294967.296 + buffer.readUInt32BE(13) / 1000 + 946080000000 + ); + const shouldRespond = !!buffer.readInt8(17); + this.events.emit("heartbeat", { lsn, timestamp, shouldRespond }); + if (shouldRespond) { + await this.#acknowledge(lsn); + } + } + + this.lastAcknowledgedLsn = lsn; + } + ); + + // 7. 
Handle errors and cleanup + this.client.on("error", (err) => { + this.events.emit("error", err); + }); + + this.logger.info("Started replication", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + startLsn, + sql: sql.replace(/\s+/g, " "), + }); + + // Start the replication stream + this.client.query(sql).catch((err) => { + this.logger.error("Failed to start replication", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + error: err, + }); + + this.events.emit("error", err); + return this.stop(); + }); + + return this; + } + + async #createPublication(): Promise { + if (!this.client) { + this.events.emit("error", new LogicalReplicationClientError("Client not connected")); + return false; + } + + if (await this.#doesPublicationExist()) { + return true; + } + + const [createError] = await tryCatch( + this.client.query( + `CREATE PUBLICATION "${this.options.publicationName}" FOR TABLE "${this.options.table}" ${ + this.options.publicationActions + ? `WITH (publish = '${this.options.publicationActions.join(", ")}')` + : "" + };` + ) + ); + + if (createError) { + this.logger.error("Failed to create publication", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + error: createError, + }); + + this.events.emit("error", createError); + return false; + } + + return true; + } + + async #doesPublicationExist(): Promise { + if (!this.client) { + this.events.emit( + "error", + new LogicalReplicationClientError("Cannot check if publication exists") + ); + return false; + } + + const res = await this.client.query( + `SELECT EXISTS (SELECT 1 FROM pg_publication WHERE pubname = '${this.options.publicationName}');` + ); + + return res.rows[0].exists; + } + + async #createSlot(): Promise { + if (!this.client) { + this.events.emit("error", new LogicalReplicationClientError("Cannot create slot")); + return false; + } + + if (await this.#doesSlotExist()) { + return true; + } + + const [createError] = await tryCatch( + this.client.query( + `SELECT * FROM pg_create_logical_replication_slot('${this.options.slotName}', 'pgoutput')` + ) + ); + + if (createError) { + this.logger.error("Failed to create slot", { + name: this.options.name, + table: this.options.table, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + error: createError, + }); + + this.events.emit("error", createError); + return false; + } + + return true; + } + + async #doesSlotExist(): Promise { + if (!this.client) { + this.events.emit("error", new LogicalReplicationClientError("Cannot check if slot exists")); + return false; + } + + const res = await this.client.query( + `SELECT EXISTS (SELECT 1 FROM pg_replication_slots WHERE slot_name = '${this.options.slotName}');` + ); + + return res.rows[0].exists; + } + + async #acknowledge(lsn: string): Promise { + if (!this.autoAcknowledge) return; + this.events.emit("acknowledge", { lsn }); + await this.acknowledge(lsn); + } + + public async acknowledge(lsn: string): Promise { + if (this._isStopped) return false; + if (!this.connection) return false; + // WAL LSN split + const slice = lsn.split("/"); + let [upperWAL, lowerWAL]: [number, number] = [parseInt(slice[0], 16), parseInt(slice[1], 16)]; + // Timestamp as microseconds since midnight 2000-01-01 + const now = Date.now() - 946080000000; + const 
upperTimestamp = Math.floor(now / 4294967.296); + const lowerTimestamp = Math.floor(now - upperTimestamp * 4294967.296); + if (lowerWAL === 4294967295) { + upperWAL = upperWAL + 1; + lowerWAL = 0; + } else { + lowerWAL = lowerWAL + 1; + } + const response = Buffer.alloc(34); + response.fill(0x72); // 'r' + response.writeUInt32BE(upperWAL, 1); + response.writeUInt32BE(lowerWAL, 5); + response.writeUInt32BE(upperWAL, 9); + response.writeUInt32BE(lowerWAL, 13); + response.writeUInt32BE(upperWAL, 17); + response.writeUInt32BE(lowerWAL, 21); + response.writeUInt32BE(upperTimestamp, 25); + response.writeUInt32BE(lowerTimestamp, 29); + response.writeInt8(0, 33); + // @ts-ignore + this.connection.sendCopyFromChunk(response); + this.lastAckTimestamp = Date.now(); + return true; + } + + async #startLeaderLockHeartbeat() { + if (this.leaderLockHeartbeatTimer) { + clearInterval(this.leaderLockHeartbeatTimer); + } + if (!this.leaderLock) return; + this.leaderLockHeartbeatTimer = setInterval(async () => { + if (!this.leaderLock) return; + if (this._isStopped) return; + try { + this.leaderLock = await this.leaderLock.extend(this.leaderLockTimeoutMs); + this.logger.debug("Extended leader lock", { + name: this.options.name, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + }); + } catch (err) { + this.logger.error("Failed to extend leader lock", { + name: this.options.name, + slotName: this.options.slotName, + publicationName: this.options.publicationName, + error: err, + }); + // Optionally emit an error or handle loss of leadership + this.events.emit("error", err instanceof Error ? err : new Error(String(err))); + } + }, this.leaderLockExtendIntervalMs); + } + + #startAckInterval() { + if (this.ackIntervalTimer) { + clearInterval(this.ackIntervalTimer); + } + if (!this.autoAcknowledge || this.ackIntervalSeconds <= 0) return; + this.ackIntervalTimer = setInterval(async () => { + if (this._isStopped) return; + const now = Date.now(); + if ( + this.lastAcknowledgedLsn && + now - this.lastAckTimestamp > this.ackIntervalSeconds * 1000 + ) { + await this.acknowledge(this.lastAcknowledgedLsn); + } + }, 1000); + } +} diff --git a/internal-packages/replication/src/errors.ts b/internal-packages/replication/src/errors.ts new file mode 100644 index 0000000000..0521305a5e --- /dev/null +++ b/internal-packages/replication/src/errors.ts @@ -0,0 +1,5 @@ +export class LogicalReplicationClientError extends Error { + constructor(message: string) { + super(message); + } +} diff --git a/internal-packages/replication/src/pgoutput.ts b/internal-packages/replication/src/pgoutput.ts new file mode 100644 index 0000000000..74790c213b --- /dev/null +++ b/internal-packages/replication/src/pgoutput.ts @@ -0,0 +1,385 @@ +// NOTE: This file requires ES2020 or higher for BigInt literals (used in BinaryReader.readTime) +import { Client } from "pg"; +import { types } from "pg"; + +export interface PgoutputOptions { + protoVersion: 1 | 2; + publicationNames: string[]; + messages?: boolean; +} + +export type PgoutputMessage = + | MessageBegin + | MessageCommit + | MessageDelete + | MessageInsert + | MessageMessage + | MessageOrigin + | MessageRelation + | MessageTruncate + | MessageType + | MessageUpdate; + +export interface MessageBegin { + tag: "begin"; + commitLsn: string | null; + commitTime: bigint; + xid: number; +} +export interface MessageCommit { + tag: "commit"; + flags: number; + commitLsn: string | null; + commitEndLsn: string | null; + commitTime: bigint; +} +export interface MessageDelete { 
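+  // Exactly one of "key"/"old" is populated, depending on the table's
+  // REPLICA IDENTITY: the default identity ships only the key columns ("key"),
+  // while REPLICA IDENTITY FULL ships the whole old row ("old"). That is why
+  // the tests run ALTER TABLE ... REPLICA IDENTITY FULL before subscribing.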
+ tag: "delete"; + relation: MessageRelation; + key: Record | null; + old: Record | null; +} +export interface MessageInsert { + tag: "insert"; + relation: MessageRelation; + new: Record; +} +export interface MessageMessage { + tag: "message"; + flags: number; + transactional: boolean; + messageLsn: string | null; + prefix: string; + content: Uint8Array; +} +export interface MessageOrigin { + tag: "origin"; + originLsn: string | null; + originName: string; +} +export interface MessageRelation { + tag: "relation"; + relationOid: number; + schema: string; + name: string; + replicaIdentity: "default" | "nothing" | "full" | "index"; + columns: RelationColumn[]; + keyColumns: string[]; +} +export interface RelationColumn { + name: string; + flags: number; + typeOid: number; + typeMod: number; + typeSchema: string | null; + typeName: string | null; + parser: (raw: any) => any; +} +export interface MessageTruncate { + tag: "truncate"; + cascade: boolean; + restartIdentity: boolean; + relations: MessageRelation[]; +} +export interface MessageType { + tag: "type"; + typeOid: number; + typeSchema: string; + typeName: string; +} +export interface MessageUpdate { + tag: "update"; + relation: MessageRelation; + key: Record | null; + old: Record | null; + new: Record; +} + +class BinaryReader { + private offset = 0; + constructor(private buf: Buffer) {} + readUint8(): number { + return this.buf.readUInt8(this.offset++); + } + readInt16(): number { + const v = this.buf.readInt16BE(this.offset); + this.offset += 2; + return v; + } + readInt32(): number { + const v = this.buf.readInt32BE(this.offset); + this.offset += 4; + return v; + } + readString(): string { + let end = this.buf.indexOf(0, this.offset); + if (end === -1) throw new Error("Null-terminated string not found"); + const str = this.buf.toString("utf8", this.offset, end); + this.offset = end + 1; + return str; + } + read(len: number): Buffer { + const b = this.buf.subarray(this.offset, this.offset + len); + this.offset += len; + return b; + } + decodeText(buf: Buffer): string { + return buf.toString("utf8"); + } + array(n: number, fn: () => T): T[] { + return Array.from({ length: n }, fn); + } + readLsn(): string { + const upper = this.readInt32(); + const lower = this.readInt32(); + return upper.toString(16).toUpperCase() + "/" + lower.toString(16).toUpperCase(); + } + readTime(): bigint { + // microseconds since 2000-01-01 + const high = this.readInt32(); + const low = this.readInt32(); + return BigInt(high) * 4294967296n + BigInt(low); + } +} + +export class PgoutputParser { + private _typeCache = new Map(); + private _relationCache = new Map(); + + public parse(buf: Buffer): PgoutputMessage { + const reader = new BinaryReader(buf); + const tag = reader.readUint8(); + switch (tag) { + case 0x42: + return this.msgBegin(reader); + case 0x4f: + return this.msgOrigin(reader); + case 0x59: + return this.msgType(reader); + case 0x52: + return this.msgRelation(reader); + case 0x49: + return this.msgInsert(reader); + case 0x55: + return this.msgUpdate(reader); + case 0x44: + return this.msgDelete(reader); + case 0x54: + return this.msgTruncate(reader); + case 0x4d: + return this.msgMessage(reader); + case 0x43: + return this.msgCommit(reader); + default: + throw Error("unknown pgoutput message"); + } + } + + private msgBegin(reader: BinaryReader): MessageBegin { + return { + tag: "begin", + commitLsn: reader.readLsn(), + commitTime: reader.readTime(), + xid: reader.readInt32(), + }; + } + private msgOrigin(reader: BinaryReader): MessageOrigin { + 
return { + tag: "origin", + originLsn: reader.readLsn(), + originName: reader.readString(), + }; + } + private msgType(reader: BinaryReader): MessageType { + const typeOid = reader.readInt32(); + const typeSchema = reader.readString(); + const typeName = reader.readString(); + this._typeCache.set(typeOid, { typeSchema, typeName }); + return { tag: "type", typeOid, typeSchema, typeName }; + } + private msgRelation(reader: BinaryReader): MessageRelation { + const relationOid = reader.readInt32(); + const schema = reader.readString(); + const name = reader.readString(); + const replicaIdentity = this.readRelationReplicaIdentity(reader); + const columns = reader.array(reader.readInt16(), () => this.readRelationColumn(reader)); + const keyColumns = columns.filter((it) => it.flags & 0b1).map((it) => it.name); + const msg: MessageRelation = { + tag: "relation", + relationOid, + schema, + name, + replicaIdentity, + columns, + keyColumns, + }; + this._relationCache.set(relationOid, msg); + return msg; + } + private readRelationReplicaIdentity(reader: BinaryReader) { + const ident = reader.readUint8(); + switch (ident) { + case 0x64: + return "default"; + case 0x6e: + return "nothing"; + case 0x66: + return "full"; + case 0x69: + return "index"; + default: + throw Error(`unknown replica identity ${String.fromCharCode(ident)}`); + } + } + private readRelationColumn(reader: BinaryReader): RelationColumn { + const flags = reader.readUint8(); + const name = reader.readString(); + const typeOid = reader.readInt32(); + const typeMod = reader.readInt32(); + return { + flags, + name, + typeOid, + typeMod, + typeSchema: null, + typeName: null, + ...this._typeCache.get(typeOid), + parser: types.getTypeParser(typeOid), + }; + } + private msgInsert(reader: BinaryReader): MessageInsert { + const relation = this._relationCache.get(reader.readInt32()); + if (!relation) throw Error("missing relation"); + reader.readUint8(); // consume the 'N' key + return { + tag: "insert", + relation, + new: this.readTuple(reader, relation), + }; + } + private msgUpdate(reader: BinaryReader): MessageUpdate { + const relation = this._relationCache.get(reader.readInt32()); + if (!relation) throw Error("missing relation"); + let key: Record | null = null; + let old: Record | null = null; + let new_: Record | null = null; + const subMsgKey = reader.readUint8(); + if (subMsgKey === 0x4b) { + key = this.readKeyTuple(reader, relation); + reader.readUint8(); + new_ = this.readTuple(reader, relation); + } else if (subMsgKey === 0x4f) { + old = this.readTuple(reader, relation); + reader.readUint8(); + new_ = this.readTuple(reader, relation, old); + } else if (subMsgKey === 0x4e) { + new_ = this.readTuple(reader, relation); + } else { + throw Error(`unknown submessage key ${String.fromCharCode(subMsgKey)}`); + } + return { tag: "update", relation, key, old, new: new_ }; + } + private msgDelete(reader: BinaryReader): MessageDelete { + const relation = this._relationCache.get(reader.readInt32()); + if (!relation) throw Error("missing relation"); + let key: Record | null = null; + let old: Record | null = null; + const subMsgKey = reader.readUint8(); + if (subMsgKey === 0x4b) { + key = this.readKeyTuple(reader, relation); + } else if (subMsgKey === 0x4f) { + old = this.readTuple(reader, relation); + } else { + throw Error(`unknown submessage key ${String.fromCharCode(subMsgKey)}`); + } + return { tag: "delete", relation, key, old }; + } + private readKeyTuple(reader: BinaryReader, relation: MessageRelation): Record { + const tuple = 
this.readTuple(reader, relation); + const key = Object.create(null); + for (const k of relation.keyColumns) { + key[k] = tuple[k] === null ? undefined : tuple[k]; + } + return key; + } + private readTuple( + reader: BinaryReader, + { columns }: MessageRelation, + unchangedToastFallback?: Record | null + ): Record { + const nfields = reader.readInt16(); + const tuple = Object.create(null); + for (let i = 0; i < nfields; i++) { + const { name, parser } = columns[i]; + const kind = reader.readUint8(); + switch (kind) { + case 0x62: // 'b' binary + const bsize = reader.readInt32(); + const bval = reader.read(bsize); + tuple[name] = bval; + break; + case 0x74: // 't' text + const valsize = reader.readInt32(); + const valbuf = reader.read(valsize); + const valtext = reader.decodeText(valbuf); + tuple[name] = parser(valtext); + break; + case 0x6e: // 'n' null + tuple[name] = null; + break; + case 0x75: // 'u' unchanged toast datum + tuple[name] = unchangedToastFallback?.[name]; + break; + default: + throw Error(`unknown attribute kind ${String.fromCharCode(kind)}`); + } + } + return tuple; + } + private msgTruncate(reader: BinaryReader): MessageTruncate { + const nrels = reader.readInt32(); + const flags = reader.readUint8(); + return { + tag: "truncate", + cascade: Boolean(flags & 0b1), + restartIdentity: Boolean(flags & 0b10), + relations: reader.array( + nrels, + () => this._relationCache.get(reader.readInt32()) as MessageRelation + ), + }; + } + private msgMessage(reader: BinaryReader): MessageMessage { + const flags = reader.readUint8(); + return { + tag: "message", + flags, + transactional: Boolean(flags & 0b1), + messageLsn: reader.readLsn(), + prefix: reader.readString(), + content: reader.read(reader.readInt32()), + }; + } + private msgCommit(reader: BinaryReader): MessageCommit { + return { + tag: "commit", + flags: reader.readUint8(), + commitLsn: reader.readLsn(), + commitEndLsn: reader.readLsn(), + commitTime: reader.readTime(), + }; + } +} + +export function getPgoutputStartReplicationSQL( + slotName: string, + lastLsn: string, + options: PgoutputOptions +): string { + const opts = [ + `proto_version '${options.protoVersion}'`, + `publication_names '${options.publicationNames.join(",")}'`, + `messages '${options.messages ?? 
false}'`, + ]; + return `START_REPLICATION SLOT "${slotName}" LOGICAL ${lastLsn} (${opts.join(", ")});`; +} diff --git a/internal-packages/replication/tsconfig.build.json b/internal-packages/replication/tsconfig.build.json index 619461da80..89c87a3dc6 100644 --- a/internal-packages/replication/tsconfig.build.json +++ b/internal-packages/replication/tsconfig.build.json @@ -3,8 +3,8 @@ "exclude": ["src/**/*.test.ts"], "compilerOptions": { "composite": true, - "target": "ES2019", - "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "target": "ES2020", + "lib": ["ES2020", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], "outDir": "dist", "module": "Node16", "moduleResolution": "Node16", diff --git a/internal-packages/replication/tsconfig.src.json b/internal-packages/replication/tsconfig.src.json index 6043e02ad2..0df3d2d222 100644 --- a/internal-packages/replication/tsconfig.src.json +++ b/internal-packages/replication/tsconfig.src.json @@ -3,8 +3,8 @@ "exclude": ["node_modules", "src/**/*.test.ts"], "compilerOptions": { "composite": true, - "target": "ES2019", - "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "target": "ES2020", + "lib": ["ES2020", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], "module": "Node16", "moduleResolution": "Node16", "moduleDetection": "force", diff --git a/internal-packages/replication/tsconfig.test.json b/internal-packages/replication/tsconfig.test.json index 99db8eb7c9..37b885fc8a 100644 --- a/internal-packages/replication/tsconfig.test.json +++ b/internal-packages/replication/tsconfig.test.json @@ -3,8 +3,8 @@ "references": [{ "path": "./tsconfig.src.json" }], "compilerOptions": { "composite": true, - "target": "ES2019", - "lib": ["ES2019", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], + "target": "ES2020", + "lib": ["ES2020", "DOM", "DOM.Iterable", "DOM.AsyncIterable"], "module": "Node16", "moduleResolution": "Node16", "moduleDetection": "force", diff --git a/internal-packages/testcontainers/src/index.ts b/internal-packages/testcontainers/src/index.ts index a232cde5aa..9892e0962d 100644 --- a/internal-packages/testcontainers/src/index.ts +++ b/internal-packages/testcontainers/src/index.ts @@ -36,6 +36,7 @@ type ElectricContext = { }; type ContainerContext = NetworkContext & PostgresContext & RedisContext & ClickhouseContext; +type PostgresAndRedisContext = NetworkContext & PostgresContext & RedisContext; type ContainerWithElectricAndRedisContext = ContainerContext & ElectricContext; type ContainerWithElectricContext = NetworkContext & PostgresContext & ElectricContext; @@ -206,6 +207,14 @@ export const clickhouseTest = test.extend({ clickhouseClient, }); +export const postgresAndRedisTest = test.extend({ + network, + postgresContainer, + prisma, + redisContainer, + redisOptions, +}); + export const containerTest = test.extend({ network, postgresContainer, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f8a5453a58..77ce6ff477 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -977,6 +977,9 @@ importers: internal-packages/replication: dependencies: + '@internal/redis': + specifier: workspace:* + version: link:../redis '@internal/tracing': specifier: workspace:* version: link:../tracing @@ -986,6 +989,9 @@ importers: pg: specifier: 8.15.6 version: 8.15.6 + redlock: + specifier: 5.0.0-beta.2 + version: 5.0.0-beta.2(patch_hash=rwyegdki7iserrd7fgjwxkhnlu) devDependencies: '@internal/testcontainers': specifier: workspace:* From 1c141256bb1ed36d6640355b56e68cf6d5fe4cca Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 2 May 2025 08:59:53 
+0100 Subject: [PATCH 13/33] replication wip --- internal-packages/replication/src/client.ts | 4 +- .../replication/src/stream.test.ts | 100 ++++++++++ internal-packages/replication/src/stream.ts | 179 ++++++++++++++++++ .../src/v3/streams/asyncIterableStream.ts | 10 +- 4 files changed, 290 insertions(+), 3 deletions(-) create mode 100644 internal-packages/replication/src/stream.test.ts create mode 100644 internal-packages/replication/src/stream.ts diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts index 8df3ca8aff..b208d3f115 100644 --- a/internal-packages/replication/src/client.ts +++ b/internal-packages/replication/src/client.ts @@ -6,7 +6,7 @@ import Redlock, { Lock } from "redlock"; import { createRedisClient } from "@internal/redis"; import { Logger } from "@trigger.dev/core/logger"; import { LogicalReplicationClientError } from "./errors.js"; -import { PgoutputParser, getPgoutputStartReplicationSQL } from "./pgoutput.js"; +import { PgoutputMessage, PgoutputParser, getPgoutputStartReplicationSQL } from "./pgoutput.js"; export interface LogicalReplicationClientOptions { /** @@ -64,7 +64,7 @@ export interface LogicalReplicationClientOptions { export type LogicalReplicationClientEvents = { leaderElection: [boolean]; error: [Error]; - data: [{ lsn: string; log: unknown }]; + data: [{ lsn: string; log: PgoutputMessage }]; start: []; acknowledge: [{ lsn: string }]; heartbeat: [{ lsn: string; timestamp: number; shouldRespond: boolean }]; diff --git a/internal-packages/replication/src/stream.test.ts b/internal-packages/replication/src/stream.test.ts new file mode 100644 index 0000000000..0a4683d995 --- /dev/null +++ b/internal-packages/replication/src/stream.test.ts @@ -0,0 +1,100 @@ +import { postgresAndRedisTest } from "@internal/testcontainers"; +import { createSubscription, Transaction } from "./stream.js"; +import { setTimeout } from "timers/promises"; + +describe("LogicalReplicationStream", () => { + postgresAndRedisTest( + "should group changes by transaction and filter relevant events", + async ({ postgresContainer, prisma, redisOptions }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + type TaskRunData = { + friendlyId: string; + taskIdentifier: string; + payload: string; + traceId: string; + spanId: string; + queue: string; + runtimeEnvironmentId: string; + projectId: string; + }; + + const received: Transaction[] = []; + + const subscription = createSubscription({ + name: "test_stream", + publicationName: "test_publication_stream", + slotName: "test_slot_stream", + pgConfig: { + connectionString: postgresContainer.getConnectionUri(), + }, + table: "TaskRun", + redisOptions, + filterTags: ["insert"], + abortSignal: AbortSignal.timeout(10000), + }); + + const organization = await prisma.organization.create({ + data: { + title: "test", + slug: "test", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test", + slug: "test", + organizationId: organization.id, + externalRef: "test", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test", + pkApiKey: "test", + shortcode: "test", + }, + }); + + // Insert a row into the table + new Promise(async (resolve) => { + await setTimeout(2000); + + await prisma.taskRun.create({ + data: { + friendlyId: "run_5678", + taskIdentifier: "my-task", + payload: 
JSON.stringify({ foo: "bar" }), + traceId: "5678", + spanId: "5678", + queue: "test", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + }, + }); + + resolve(undefined); + }).then(() => {}); + // Now we want to read from the stream + for await (const transaction of subscription.stream) { + received.push(transaction); + } + + console.log(received); + + expect(received.length).toBeGreaterThan(0); + const transaction = received[0]; + expect(transaction.events.length).toBeGreaterThan(0); + expect(transaction.events[0].data.friendlyId).toBe("run_5678"); + + // Clean up + await subscription.client.stop(); + } + ); +}); diff --git a/internal-packages/replication/src/stream.ts b/internal-packages/replication/src/stream.ts new file mode 100644 index 0000000000..9d3d547e88 --- /dev/null +++ b/internal-packages/replication/src/stream.ts @@ -0,0 +1,179 @@ +import { createAsyncIterableStreamFromAsyncIterable } from "@trigger.dev/core/v3"; +import { Readable } from "node:stream"; +import type { ClientConfig } from "pg"; +import { LogicalReplicationClient, LogicalReplicationClientOptions } from "./client.js"; +import type { MessageDelete, MessageInsert, MessageUpdate, PgoutputMessage } from "./pgoutput.js"; + +export interface LogicalReplicationStreamOptions extends LogicalReplicationClientOptions { + onError?: (err: Error) => void; + filterTags?: Array<"insert" | "update" | "delete">; + abortSignal?: AbortSignal; + highWaterMark?: number; +} + +export interface TransactionEvent { + tag: "insert" | "update" | "delete"; + data: T; + raw: MessageInsert | MessageUpdate | MessageDelete; +} + +export interface Transaction { + commitLsn: string | null; + commitEndLsn: string | null; + xid: number; + events: TransactionEvent[]; + replicationLagMs: number; +} + +export function createLogicalReplicationStream( + client: LogicalReplicationClient, + highWaterMark?: number, + signal?: AbortSignal +) { + let lastLsn: string | null = null; + let isSubscribed = false; + + const source = new ReadableStream<{ lsn: string; message: PgoutputMessage }>( + { + async start(controller) { + console.log("ReadableStream.start"); + + if (signal) { + signal.addEventListener("abort", () => { + controller.close(); + }); + } + + client.events.on("data", async ({ lsn, log }) => { + console.log("ReadableStream.data"); + lastLsn = lsn; + + if (signal?.aborted) { + return; + } + + if (isRelevantTag(log.tag)) { + controller.enqueue({ lsn, message: log }); + } + + if (typeof controller.desiredSize === "number" && controller.desiredSize <= 0) { + await client.stop(); + } + }); + }, + async cancel() { + console.log("ReadableStream.cancel"); + await client.stop(); + }, + async pull() { + if (!isSubscribed) { + isSubscribed = true; + console.log("ReadableStream.pull"); + await client.subscribe(lastLsn ?? undefined); + } + }, + }, + new CountQueuingStrategy({ highWaterMark: highWaterMark ?? 
1 }) + ); + + return createAsyncIterableStreamFromAsyncIterable>(groupByTransaction(source)); +} + +export async function* groupByTransaction( + stream: ReadableStream<{ + lsn: string; + message: PgoutputMessage; + }> +) { + let currentTransaction: Omit, "commitEndLsn" | "replicationLagMs"> & { + commitEndLsn?: string | null; + replicationLagMs?: number; + } = { + commitLsn: null, + xid: 0, + events: [], + }; + for await (const { lsn, message } of stream as AsyncIterable<{ + lsn: string; + message: PgoutputMessage; + }>) { + console.log("groupByTransaction.for await"); + console.log(message); + switch (message.tag) { + case "begin": { + currentTransaction = { + commitLsn: message.commitLsn, + xid: message.xid, + events: [], + }; + break; + } + case "insert": { + currentTransaction.events.push({ + tag: message.tag, + data: message.new as T, + raw: message, + }); + break; + } + case "update": { + currentTransaction.events.push({ + tag: message.tag, + data: message.new as T, + raw: message, + }); + break; + } + case "delete": { + currentTransaction.events.push({ + tag: message.tag, + data: message.old as T, + raw: message, + }); + break; + } + case "commit": { + const replicationLagMs = Date.now() - Number(message.commitTime / 1000n); + currentTransaction.commitEndLsn = message.commitEndLsn; + currentTransaction.replicationLagMs = replicationLagMs; + yield currentTransaction as Transaction; + break; + } + } + } +} + +export function createSubscription(opts: LogicalReplicationStreamOptions) { + const client = new LogicalReplicationClient({ + name: opts.name, + publicationName: opts.publicationName, + slotName: opts.slotName, + pgConfig: opts.pgConfig, + table: opts.table, + redisOptions: opts.redisOptions, + publicationActions: opts.filterTags, + }); + + client.events.on("error", (err) => { + if (opts.onError) opts.onError(err); + }); + + client.events.on("heartbeat", async ({ lsn, shouldRespond }) => { + if (shouldRespond) { + await client.acknowledge(lsn); + } + }); + + const stream = createLogicalReplicationStream(client, opts.highWaterMark, opts.abortSignal); + + return { + stream, + client, + }; +} + +function isRelevantTag(tag: string): tag is "insert" | "update" | "delete" | "begin" | "commit" { + return ( + tag === "insert" || tag === "update" || tag === "delete" || tag === "begin" || tag === "commit" + ); +} diff --git a/packages/core/src/v3/streams/asyncIterableStream.ts b/packages/core/src/v3/streams/asyncIterableStream.ts index 6c9ad1ea12..1ca8ad6da0 100644 --- a/packages/core/src/v3/streams/asyncIterableStream.ts +++ b/packages/core/src/v3/streams/asyncIterableStream.ts @@ -51,7 +51,7 @@ export function createAsyncIterableReadable( export function createAsyncIterableStreamFromAsyncIterable( asyncIterable: AsyncIterable, - transformer: Transformer, + transformer?: Transformer, signal?: AbortSignal ): AsyncIterableStream { const stream = new ReadableStream({ @@ -95,3 +95,11 @@ export function createAsyncIterableStreamFromAsyncIterable( return transformedStream as AsyncIterableStream; } + +export function createAsyncIterableStreamFromAsyncGenerator( + asyncGenerator: AsyncGenerator, + transformer: Transformer, + signal?: AbortSignal +): AsyncIterableStream { + return createAsyncIterableStreamFromAsyncIterable(asyncGenerator, transformer, signal); +} From cde1bfbf3fe3b3c1468f3c2eebd12fbf8bac6eb7 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Sat, 3 May 2025 20:36:29 +0100 Subject: [PATCH 14/33] resolve conflicts --- pnpm-lock.yaml | 64 
++++++++++---------------------------------------- 1 file changed, 12 insertions(+), 52 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 77ce6ff477..1848634e9b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -3839,7 +3839,7 @@ packages: '@babel/traverse': 7.24.7 '@babel/types': 7.24.0 convert-source-map: 1.9.0 - debug: 4.4.0 + debug: 4.4.0(supports-color@10.0.0) gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -5520,7 +5520,7 @@ packages: '@babel/helper-split-export-declaration': 7.22.6 '@babel/parser': 7.24.7 '@babel/types': 7.24.0 - debug: 4.4.0 + debug: 4.4.0(supports-color@10.0.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5538,7 +5538,7 @@ packages: '@babel/helper-split-export-declaration': 7.24.7 '@babel/parser': 7.27.0 '@babel/types': 7.27.0 - debug: 4.4.0 + debug: 4.4.0(supports-color@10.0.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5552,7 +5552,7 @@ packages: '@babel/parser': 7.27.0 '@babel/template': 7.25.0 '@babel/types': 7.27.0 - debug: 4.4.0 + debug: 4.4.0(supports-color@10.0.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -5567,7 +5567,7 @@ packages: '@babel/parser': 7.27.0 '@babel/template': 7.27.0 '@babel/types': 7.27.0 - debug: 4.4.0 + debug: 4.4.0(supports-color@10.0.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -8066,27 +8066,10 @@ packages: /@eslint/eslintrc@1.4.1: resolution: {integrity: sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - ajv: 6.12.6 - debug: 4.4.0 - espree: 9.6.0 - globals: 13.19.0 - ignore: 5.2.4 - import-fresh: 3.3.0 - js-yaml: 4.1.0 - minimatch: 3.1.2 - strip-json-comments: 3.1.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@eslint/eslintrc@2.1.4: - resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: ajv: 6.12.6 debug: 4.4.0(supports-color@10.0.0) - espree: 9.6.1 + espree: 9.6.0 globals: 13.19.0 ignore: 5.2.4 import-fresh: 3.3.0 @@ -8096,10 +8079,6 @@ packages: transitivePeerDependencies: - supports-color - /@eslint/js@8.49.0: - resolution: {integrity: sha512-1S8uAY/MTJqVx0SC4epBq+N2yhuwtNwLbJYNZyhL2pO1ZVKn5HFXav5T41Ryzy9K9V7ZId2JB2oy/W4aCd9/2w==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - /@fal-ai/serverless-client@0.15.0: resolution: {integrity: sha512-4Vuocu0342OijAN6xO/lwohDV7h90LbkTnOAEwH+pYvMFVC6RYmHS4GILc/wnOWBTw+iFlZFEKlljEVolkjVfg==} engines: {node: '>=18.0.0'} @@ -8347,23 +8326,12 @@ packages: - utf-8-validate dev: true - /@humanwhocodes/config-array@0.11.14: - resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==} - engines: {node: '>=10.10.0'} - deprecated: Use @eslint/config-array instead - dependencies: - '@humanwhocodes/object-schema': 2.0.3 - debug: 4.4.0(supports-color@10.0.0) - minimatch: 3.1.2 - transitivePeerDependencies: - - supports-color - /@humanwhocodes/config-array@0.11.8: resolution: {integrity: sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==} engines: {node: '>=10.10.0'} dependencies: '@humanwhocodes/object-schema': 1.2.1 - debug: 4.4.0 + debug: 4.4.0(supports-color@10.0.0) minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -18929,7 +18897,7 @@ packages: dependencies: '@typescript-eslint/typescript-estree': 
       '@typescript-eslint/utils': 5.59.6(eslint@8.31.0)(typescript@5.5.4)
-      debug: 4.4.0
+      debug: 4.4.0(supports-color@10.0.0)
       eslint: 8.31.0
       tsutils: 3.21.0(typescript@5.5.4)
       typescript: 5.5.4
@@ -18953,7 +18921,7 @@ packages:
     dependencies:
       '@typescript-eslint/types': 5.59.6
       '@typescript-eslint/visitor-keys': 5.59.6
-      debug: 4.4.0
+      debug: 4.4.0(supports-color@10.0.0)
       globby: 11.1.0
       is-glob: 4.0.3
       semver: 7.6.3
@@ -22635,7 +22603,7 @@ packages:
     resolution: {integrity: sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==}
     engines: {node: '>= 8.0'}
     dependencies:
-      debug: 4.4.0
+      debug: 4.4.0(supports-color@10.0.0)
       readable-stream: 3.6.0
       split-ca: 1.0.1
       ssh2: 1.16.0
@@ -23691,7 +23659,7 @@ packages:
       eslint: '*'
       eslint-plugin-import: '*'
     dependencies:
-      debug: 4.4.0
+      debug: 4.4.0(supports-color@10.0.0)
       enhanced-resolve: 5.15.0
       eslint: 8.31.0
       eslint-module-utils: 2.7.4(@typescript-eslint/parser@5.59.6)(eslint-import-resolver-node@0.3.7)(eslint-import-resolver-typescript@3.5.5)(eslint@8.31.0)
@@ -33346,7 +33314,7 @@ packages:
     dependencies:
       component-emitter: 1.3.1
       cookiejar: 2.1.4
-      debug: 4.4.0
+      debug: 4.4.0(supports-color@10.0.0)
       fast-safe-stringify: 2.1.1
       form-data: 4.0.0
       formidable: 3.5.1
@@ -33797,14 +33765,6 @@ packages:
       supports-hyperlinks: 2.3.0
     dev: true
 
-  /terminal-link@3.0.0:
-    resolution: {integrity: sha512-flFL3m4wuixmf6IfhFJd1YPiLiMuxEc8uHRM1buzIeZPm22Au2pDqBJQgdo7n1WfPU1ONFGv7YDwpFBmHGF6lg==}
-    engines: {node: '>=12'}
-    dependencies:
-      ansi-escapes: 5.0.0
-      supports-hyperlinks: 2.3.0(patch_hash=xmw2etywyp5w2jf77wkqg4ob3a)
-    dev: false
-
   /terser-webpack-plugin@5.3.14(@swc/core@1.3.26)(esbuild@0.15.18)(webpack@5.99.7):
     resolution: {integrity: sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw==}
     engines: {node: '>= 10.13.0'}

From f3dc43b6ebca932938fac07379871b0fb3a0520a Mon Sep 17 00:00:00 2001
From: Eric Allam
Date: Tue, 6 May 2025 13:46:11 +0100
Subject: [PATCH 15/33] more replication stuff

---
 internal-packages/replication/src/client.ts   | 124 ++++++++++++------
 internal-packages/replication/src/pgoutput.ts |  18 ++-
 .../replication/src/stream.test.ts            | 103 +++++++++++++++
 3 files changed, 202 insertions(+), 43 deletions(-)

diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts
index b208d3f115..7cf1c9e633 100644
--- a/internal-packages/replication/src/client.ts
+++ b/internal-packages/replication/src/client.ts
@@ -143,25 +143,7 @@ export class LogicalReplicationClient {
       this.ackIntervalTimer = null;
     }
     // Release leader lock if held
-    if (this.leaderLock) {
-      const [releaseError] = await tryCatch(this.leaderLock.release());
-
-      if (releaseError) {
-        this.logger.error("Failed to release leader lock", {
-          name: this.options.name,
-          slotName: this.options.slotName,
-          publicationName: this.options.publicationName,
-          error: releaseError,
-        });
-      } else {
-        this.logger.info("Released leader lock", {
-          name: this.options.name,
-          slotName: this.options.slotName,
-        });
-      }
-
-      this.leaderLock = null;
-    }
+    await this.#releaseLeaderLock();
 
     this.connection?.removeAllListeners();
     this.connection = null;
@@ -198,6 +180,24 @@ export class LogicalReplicationClient {
     return this;
   }
 
+  public async teardown(): Promise<boolean> {
+    await this.stop();
+
+    // Acquire the leaderLock
+    const leaderLockAcquired = await this.#acquireLeaderLock();
+
+    if (!leaderLockAcquired) {
+      return false;
+    }
+
+    // Drop the slot
+    const slotDropped = await this.#dropSlot();
+
+    await this.#releaseLeaderLock();
+
+    return slotDropped;
+  }
+
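Roughly, the intended lifecycle for this new teardown path is the sketch below (a hedged illustration, not part of the patch; the option names mirror the tests later in this series). Note that teardown() re-runs leader election so that only one client drops the slot:

const client = new LogicalReplicationClient({ /* name, slotName, publicationName, ... */ });
await client.subscribe();
// ... consume replication events ...
const slotDropped = await client.teardown(); // stop -> acquire leader lock -> drop slot -> release lock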
   public async subscribe(startLsn?: string): Promise<this> {
     await this.stop();
 
@@ -212,28 +212,10 @@ export class LogicalReplicationClient {
     });
 
     // 1. Leader election
-    try {
-      this.leaderLock = await this.redlock.acquire(
-        [`logical-replication-client:${this.options.name}`],
-        this.leaderLockTimeoutMs,
-        {
-          retryCount: 60,
-          retryDelay: 1000,
-          retryJitter: 100,
-        }
-      );
-    } catch (err) {
-      this.logger.error("Leader election failed", {
-        name: this.options.name,
-        table: this.options.table,
-        slotName: this.options.slotName,
-        publicationName: this.options.publicationName,
-        startLsn,
-        error: err,
-      });
+    const leaderLockAcquired = await this.#acquireLeaderLock();
 
+    if (!leaderLockAcquired) {
       this.events.emit("leaderElection", false);
-
       return this.stop();
     }
@@ -481,6 +463,33 @@ export class LogicalReplicationClient {
     return res.rows[0].exists;
   }
 
+  async #dropSlot(): Promise<boolean> {
+    if (!this.client) {
+      this.events.emit("error", new LogicalReplicationClientError("Cannot drop slot"));
+      return false;
+    }
+
+    const [dropError] = await tryCatch(
+      this.client.query(`SELECT pg_drop_replication_slot('${this.options.slotName}');`)
+    );
+
+    if (dropError) {
+      this.logger.error("Failed to drop slot", {
+        name: this.options.name,
+        table: this.options.table,
+        slotName: this.options.slotName,
+        publicationName: this.options.publicationName,
+        error: dropError,
+      });
+
+      this.events.emit("error", dropError);
+
+      // Surface the failure to teardown() instead of reporting success
+      return false;
+    }
+
+    return true;
+  }
+
   async #acknowledge(lsn: string): Promise<void> {
     if (!this.autoAcknowledge) return;
     this.events.emit("acknowledge", { lsn });
@@ -520,6 +529,45 @@ export class LogicalReplicationClient {
     return true;
   }
 
+  async #acquireLeaderLock(): Promise<boolean> {
+    try {
+      this.leaderLock = await this.redlock.acquire(
+        [`logical-replication-client:${this.options.name}`],
+        this.leaderLockTimeoutMs,
+        {
+          retryCount: 60,
+          retryDelay: 1000,
+          retryJitter: 100,
+        }
+      );
+    } catch (err) {
+      this.logger.error("Leader election failed", {
+        name: this.options.name,
+        table: this.options.table,
+        slotName: this.options.slotName,
+        publicationName: this.options.publicationName,
+        error: err,
+      });
+
+      return false;
+    }
+
+    return true;
+  }
+
+  async #releaseLeaderLock() {
+    if (!this.leaderLock) return;
+    const [releaseError] = await tryCatch(this.leaderLock.release());
+    this.leaderLock = null;
+
+    if (releaseError) {
+      this.logger.error("Failed to release leader lock", {
+        name: this.options.name,
+        error: releaseError,
+      });
+    }
+  }
+
   async #startLeaderLockHeartbeat() {
     if (this.leaderLockHeartbeatTimer) {
       clearInterval(this.leaderLockHeartbeatTimer);
diff --git a/internal-packages/replication/src/pgoutput.ts b/internal-packages/replication/src/pgoutput.ts
index 74790c213b..1bcab2c656 100644
--- a/internal-packages/replication/src/pgoutput.ts
+++ b/internal-packages/replication/src/pgoutput.ts
@@ -134,11 +134,21 @@ class BinaryReader {
     const lower = this.readInt32();
     return upper.toString(16).toUpperCase() + "/" + lower.toString(16).toUpperCase();
   }
+
+  readUint32(): number {
+    // >>> 0 ensures unsigned
+    return this.readInt32() >>> 0;
+  }
+
+  readUint64(): bigint {
+    // Combine two unsigned 32-bit ints into a 64-bit bigint
+    return (BigInt(this.readUint32()) << 32n) | BigInt(this.readUint32());
+  }
+
   readTime(): bigint {
-    // microseconds since 2000-01-01
-    const high = this.readInt32();
-    const low = this.readInt32();
-    return BigInt(high) * 4294967296n + BigInt(low);
+    // (POSTGRES_EPOCH_JDATE - UNIX_EPOCH_JDATE) * USECS_PER_DAY == 946684800000000
+    const microsSinceUnixEpoch = this.readUint64() + 946684800000000n;
+    // Microseconds since the Unix epoch; callers divide by 1000n for millisecond values
+    return microsSinceUnixEpoch;
+  }
 }
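The readTime() change above feeds the lag calculation in groupByTransaction: pgoutput commit timestamps are microseconds since the Postgres epoch (2000-01-01), and 946684800000000 microseconds is the offset back to the Unix epoch. A small worked sketch of the arithmetic (the literal commit time is an illustrative value, not from the patch):

const POSTGRES_TO_UNIX_EPOCH_US = 946684800000000n;

// What readTime() now returns: microseconds since the Unix epoch.
const commitTime = 796047600000000n + POSTGRES_TO_UNIX_EPOCH_US;

// The consumer converts to milliseconds, exactly as groupByTransaction does:
const replicationLagMs = Date.now() - Number(commitTime / 1000n);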
diff --git a/internal-packages/replication/src/stream.test.ts b/internal-packages/replication/src/stream.test.ts
index 0a4683d995..8eb3be57fa 100644
--- a/internal-packages/replication/src/stream.test.ts
+++ b/internal-packages/replication/src/stream.test.ts
@@ -81,6 +81,7 @@
       resolve(undefined);
     }).then(() => {});
 
+    // Now we want to read from the stream
     for await (const transaction of subscription.stream) {
       received.push(transaction);
@@ -97,4 +98,106 @@
       await subscription.client.stop();
     }
   );
+
+  postgresAndRedisTest(
+    "should respect highWaterMark and not pull more data than allowed",
+    async ({ postgresContainer, prisma, redisOptions }) => {
+      await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`);
+
+      type TaskRunData = {
+        friendlyId: string;
+        taskIdentifier: string;
+        payload: string;
+        traceId: string;
+        spanId: string;
+        queue: string;
+        runtimeEnvironmentId: string;
+        projectId: string;
+      };
+
+      const subscription = createSubscription<TaskRunData>({
+        name: "test_stream",
+        publicationName: "test_publication_stream",
+        slotName: "test_slot_stream",
+        pgConfig: {
+          connectionString: postgresContainer.getConnectionUri(),
+        },
+        table: "TaskRun",
+        redisOptions,
+        filterTags: ["insert"],
+        abortSignal: AbortSignal.timeout(10000),
+      });
+
+      const organization = await prisma.organization.create({
+        data: {
+          title: "test",
+          slug: "test",
+        },
+      });
+
+      const project = await prisma.project.create({
+        data: {
+          name: "test",
+          slug: "test",
+          organizationId: organization.id,
+          externalRef: "test",
+        },
+      });
+
+      const runtimeEnvironment = await prisma.runtimeEnvironment.create({
+        data: {
+          slug: "test",
+          type: "DEVELOPMENT",
+          projectId: project.id,
+          organizationId: organization.id,
+          apiKey: "test",
+          pkApiKey: "test",
+          shortcode: "test",
+        },
+      });
+
+      // Insert rows into the table after a delay, without awaiting the promise
+      new Promise(async (resolve) => {
+        await setTimeout(2000);
+
+        for (let i = 0; i < 5; i++) {
+          await prisma.taskRun.create({
+            data: {
+              friendlyId: `run_${i}`,
+              taskIdentifier: "my-task",
+              payload: JSON.stringify({ foo: "bar" }),
+              traceId: `${i}`,
+              spanId: `${i}`,
+              queue: "test",
+              runtimeEnvironmentId: runtimeEnvironment.id,
+              projectId: project.id,
+            },
+          });
+        }
+
+        resolve(undefined);
+      }).then(() => {});
+
+      const received: Transaction<TaskRunData>[] = [];
+      const iterator = subscription.stream[Symbol.asyncIterator]();
+
+      // Pull the first item, then wait before pulling the next
+      const first = await iterator.next();
+      received.push(first.value);
+
+      // Wait to simulate slow consumer
+      await setTimeout(2000);
+
+      // Pull the next item
+      const second = await iterator.next();
+      received.push(second.value);
+
+      // Optionally, check internal state or spy on client.subscribe/data to ensure only 1 item was buffered at a time
+
+      expect(received.length).toBe(2);
+
+      // Clean up
+      await subscription.client.stop();
+    }
+  );
 });

From fa4185bbe26b5770161caaa171d6146d56afa3d0 Mon Sep 17 00:00:00 2001
From: Eric Allam
Date: Tue, 6 May 2025 13:56:39 +0100
Subject: [PATCH 16/33] Add ability to drop the replication slot completely on
 teardown

---
 .../replication/src/client.test.ts          | 95 +++++++++++++++++++
 internal-packages/replication/src/client.ts | 11 +++
 2 files changed, 106 insertions(+)

diff --git 
a/internal-packages/replication/src/client.test.ts b/internal-packages/replication/src/client.test.ts index c938d662ae..2f5c1d3e48 100644 --- a/internal-packages/replication/src/client.test.ts +++ b/internal-packages/replication/src/client.test.ts @@ -86,4 +86,99 @@ describe("Replication Client", () => { await client.stop(); } ); + + postgresAndRedisTest( + "should be able to teardown", + async ({ postgresContainer, prisma, redisOptions }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const client = new LogicalReplicationClient({ + name: "test", + slotName: "test_slot", + publicationName: "test_publication", + redisOptions, + table: "TaskRun", + pgConfig: { + connectionString: postgresContainer.getConnectionUri(), + }, + }); + + const logs: Array<{ + lsn: string; + log: unknown; + }> = []; + + client.events.on("data", (data) => { + console.log(data); + logs.push(data); + }); + + client.events.on("error", (error) => { + console.error(error); + }); + + await client.subscribe(); + + const organization = await prisma.organization.create({ + data: { + title: "test", + slug: "test", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test", + slug: "test", + organizationId: organization.id, + externalRef: "test", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test", + pkApiKey: "test", + shortcode: "test", + }, + }); + + // Now we insert a row into the table + await prisma.taskRun.create({ + data: { + friendlyId: "run_1234", + taskIdentifier: "my-task", + payload: JSON.stringify({ foo: "bar" }), + traceId: "1234", + spanId: "1234", + queue: "test", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + }, + }); + + // Wait for a bit of time + await setTimeout(50); + + // Now we should see the row in the logs + expect(logs.length).toBeGreaterThan(0); + + const slotDropped = await client.teardown(); + + expect(slotDropped).toBe(true); + + // Now the replication slot should be gone + const slotExists = await prisma.$queryRaw< + { exists: boolean }[] + >`SELECT EXISTS (SELECT 1 FROM pg_replication_slots WHERE slot_name = 'test_slot');`; + + console.log(slotExists); + + expect(slotExists[0].exists).toBe(false); + } + ); }); diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts index 7cf1c9e633..c4e73daac3 100644 --- a/internal-packages/replication/src/client.ts +++ b/internal-packages/replication/src/client.ts @@ -190,9 +190,20 @@ export class LogicalReplicationClient { return false; } + this.client = new Client({ + ...this.options.pgConfig, + // @ts-expect-error + replication: "database", + application_name: this.options.name, + }); + await this.client.connect(); + // Drop the slot const slotDropped = await this.#dropSlot(); + await this.client.end(); + this.client = null; + await this.#releaseLeaderLock(); return slotDropped; From ee13ebb21935b00be2a2460461dd54baa7956852 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 6 May 2025 16:21:24 +0100 Subject: [PATCH 17/33] Use the new single replacingmergetree task events table for replication --- .../services/runsDashboardInstance.server.ts | 33 +- .../services/runsDashboardService.server.ts | 352 +------- apps/webapp/package.json | 1 + ...nts_v1.sql => 003_create_task_runs_v1.sql} | 81 +- .../schema/004_create_run_latest_v1.sql | 127 --- 
internal-packages/clickhouse/src/index.ts | 6 +- .../clickhouse/src/runEvents.test.ts | 830 ------------------ .../clickhouse/src/taskRuns.test.ts | 223 +++++ .../src/{runEvents.ts => taskRuns.ts} | 38 +- pnpm-lock.yaml | 3 + 10 files changed, 298 insertions(+), 1396 deletions(-) rename internal-packages/clickhouse/schema/{003_create_raw_run_events_v1.sql => 003_create_task_runs_v1.sql} (59%) delete mode 100644 internal-packages/clickhouse/schema/004_create_run_latest_v1.sql delete mode 100644 internal-packages/clickhouse/src/runEvents.test.ts create mode 100644 internal-packages/clickhouse/src/taskRuns.test.ts rename internal-packages/clickhouse/src/{runEvents.ts => taskRuns.ts} (66%) diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts index 6d58577a2d..725c07bf31 100644 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ b/apps/webapp/app/services/runsDashboardInstance.server.ts @@ -1,6 +1,5 @@ import { ClickHouse } from "@internal/clickhouse"; import { EventEmitter } from "node:events"; -import { prisma } from "~/db.server"; import { singleton } from "~/utils/singleton"; import { engine } from "~/v3/runEngine.server"; import { logger } from "./logger.server"; @@ -250,35 +249,5 @@ export const runsDashboard = singleton("runsDashboard", () => { }); async function runCreated(time: Date, runId: string, service: RunsDashboardService) { - const run = await prisma.taskRun.findFirst({ - where: { - id: runId, - }, - }); - - if (!run) { - logger.error("RunDashboard: runCreated: run not found", { - runId, - }); - - return; - } - - if (!run.environmentType) { - logger.error("RunDashboard: runCreated: run environment type not found", { - runId, - }); - - return; - } - - if (!run.organizationId) { - logger.error("RunDashboard: runCreated: run organization id not found", { - runId, - }); - - return; - } - - await service.runCreated(time, run, run.environmentType, run.organizationId); + // Noop for now } diff --git a/apps/webapp/app/services/runsDashboardService.server.ts b/apps/webapp/app/services/runsDashboardService.server.ts index 79c4697592..b1eebd6fc6 100644 --- a/apps/webapp/app/services/runsDashboardService.server.ts +++ b/apps/webapp/app/services/runsDashboardService.server.ts @@ -1,10 +1,9 @@ import type { ClickHouse } from "@internal/clickhouse"; -import { TaskRunError } from "@trigger.dev/core/v3/schemas"; +import { EventBusEvents } from "@internal/run-engine"; +import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; import { RuntimeEnvironmentType, TaskRun, TaskRunStatus } from "@trigger.dev/database"; -import { logger } from "./logger.server"; import { EventEmitter } from "node:events"; -import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; -import { EventBusEvents } from "@internal/run-engine"; +import { logger } from "./logger.server"; export class RunsDashboardService { constructor(private readonly clickhouse: ClickHouse) {} @@ -14,290 +13,47 @@ export class RunsDashboardService { }); async runAttemptStarted(event: RunDashboardEventRunAttemptStarted) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - attempt: event.run.attemptNumber ?? 
1, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - base_cost_in_cents: event.run.baseCostInCents, - executed_at: event.run.executedAt ? event.run.executedAt.getTime() : undefined, - event_name: "attempt_started", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runAttemptStarted", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - event_name: "enqueued_after_delay", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runEnqueuedAfterDelay", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - delay_until: event.run.delayUntil ? event.run.delayUntil.getTime() : undefined, - event_name: "delay_rescheduled", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runDelayRescheduled", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runLocked(event: RunDashboardEventRunLocked) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - base_cost_in_cents: event.run.baseCostInCents, - task_version: event.run.taskVersion ?? undefined, - sdk_version: event.run.sdkVersion ?? undefined, - cli_version: event.run.cliVersion ?? undefined, - machine_preset: event.run.machinePreset ?? undefined, - executed_at: event.run.startedAt ? 
event.run.startedAt.getTime() : undefined, - event_name: "locked", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runLocked", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runStatusChanged(event: RunDashboardEventRunStatusChanged) { - if (!event.organization.id || !event.project.id || !event.environment.id) { - return false; - } - - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - event_name: "status_changed", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runStatusChanged", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runExpired(event: RunDashboardEventRunExpired) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - expired_at: event.run.expiredAt ? event.run.expiredAt.getTime() : undefined, - event_name: "run_expired", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runExpired", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runSucceeded(event: RunDashboardEventRunSucceeded) { - const output = await this.#prepareOutput(event.run); - - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, - usage_duration_ms: event.run.usageDurationMs, - cost_in_cents: event.run.costInCents, - output: output, - attempt: event.run.attemptNumber, - event_name: "succeeded", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runSucceeded", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runFailed(event: RunDashboardEventRunFailed) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - completed_at: event.run.completedAt ? 
event.run.completedAt.getTime() : undefined, - error: event.run.error, - attempt: event.run.attemptNumber, - usage_duration_ms: event.run.usageDurationMs, - cost_in_cents: event.run.costInCents, - event_name: "failed", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runFailed", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runRetryScheduled(event: RunDashboardEventRunRetryScheduled) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.environment.projectId, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - machine_preset: event.run.nextMachineAfterOOM ?? undefined, - attempt: event.run.attemptNumber, - error: event.run.error, - event_name: "retry_scheduled", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runRetryScheduled", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runCancelled(event: RunDashboardEventRunCancelled) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - completed_at: event.run.completedAt ? event.run.completedAt.getTime() : undefined, - error: event.run.error ? (event.run.error as TaskRunError) : undefined, - attempt: event.run.attemptNumber, - event_name: "cancelled", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runCancelled", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runTagsUpdated(event: RunDashboardEventRunTagsUpdated) { - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: event.environment.id, - organization_id: event.organization.id, - project_id: event.project.id, - run_id: event.run.id, - status: event.run.status, - event_time: event.time.getTime(), - updated_at: event.run.updatedAt.getTime(), - created_at: event.run.createdAt.getTime(), - tags: event.run.tags, - event_name: "tags_updated", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: runTagsUpdated", { - error: insertError, - event, - }); - } - - return insertResult?.executed === true; + // Noop for now } async runCreated( @@ -306,65 +62,7 @@ export class RunsDashboardService { environmentType: RuntimeEnvironmentType, organizationId: string ) { - const payload = await this.#preparePayload(taskRun); - - const [insertError, insertResult] = await this.clickhouse.runEvents.insert({ - environment_id: taskRun.runtimeEnvironmentId, - environment_type: environmentType, - organization_id: organizationId, - project_id: taskRun.projectId, - run_id: taskRun.id, - friendly_id: taskRun.friendlyId, - attempt: taskRun.attemptNumber ?? 1, - engine: taskRun.engine, - status: taskRun.status, - task_identifier: taskRun.taskIdentifier, - queue: taskRun.queue, - schedule_id: taskRun.scheduleId ?? undefined, - batch_id: taskRun.batchId ?? 
undefined, - event_time: eventTime.getTime(), - created_at: taskRun.createdAt.getTime(), - updated_at: taskRun.updatedAt.getTime(), - completed_at: taskRun.completedAt ? taskRun.completedAt.getTime() : undefined, - started_at: taskRun.startedAt ? taskRun.startedAt.getTime() : undefined, - executed_at: taskRun.executedAt ? taskRun.executedAt.getTime() : undefined, - delay_until: taskRun.delayUntil ? taskRun.delayUntil.getTime() : undefined, - queued_at: taskRun.queuedAt ? taskRun.queuedAt.getTime() : undefined, - expired_at: taskRun.expiredAt ? taskRun.expiredAt.getTime() : undefined, - usage_duration_ms: taskRun.usageDurationMs, - tags: taskRun.runTags, - payload: payload, - task_version: taskRun.taskVersion ?? undefined, - sdk_version: taskRun.sdkVersion ?? undefined, - cli_version: taskRun.cliVersion ?? undefined, - machine_preset: taskRun.machinePreset ?? undefined, - is_test: taskRun.isTest ?? false, - root_run_id: taskRun.rootTaskRunId ?? undefined, - parent_run_id: taskRun.parentTaskRunId ?? undefined, - depth: taskRun.depth ?? 0, - span_id: taskRun.spanId ?? undefined, - trace_id: taskRun.traceId ?? undefined, - idempotency_key: taskRun.idempotencyKey ?? undefined, - expiration_ttl: taskRun.ttl ?? undefined, - cost_in_cents: taskRun.costInCents ?? undefined, - base_cost_in_cents: taskRun.baseCostInCents ?? undefined, - event_name: "created", - }); - - if (insertError) { - this.logger.error("RunsDashboardService: upsertRun", { - error: insertError, - taskRun, - }); - } else { - this.logger.info("RunsDashboardService: upsertRun", { - id: taskRun.id, - friendlyId: taskRun.friendlyId, - status: taskRun.status, - }); - } - - return insertResult?.executed === true; + // Noop for now } async #preparePayload(run: TaskRun): Promise { @@ -403,26 +101,6 @@ export class RunsDashboardService { return await parsePacket(packet); } - - async #prepareMetadata(run: { - metadata: string | undefined; - metadataType: string; - }): Promise { - if (!run.metadata) { - return undefined; - } - - if (run.metadataType !== "application/json" && run.metadataType !== "application/super+json") { - return undefined; - } - - const packet = { - data: run.metadata, - dataType: run.metadataType, - }; - - return await parsePacket(packet); - } } export type RunDashboardEvents = { diff --git a/apps/webapp/package.json b/apps/webapp/package.json index 810649ece8..ab30ad5739 100644 --- a/apps/webapp/package.json +++ b/apps/webapp/package.json @@ -193,6 +193,7 @@ }, "devDependencies": { "@internal/testcontainers": "workspace:*", + "@internal/replication": "workspace:*", "@internal/clickhouse": "workspace:*", "@remix-run/dev": "2.1.0", "@remix-run/eslint-config": "2.1.0", diff --git a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql similarity index 59% rename from internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql rename to internal-packages/clickhouse/schema/003_create_task_runs_v1.sql index 7864ff1173..442d947d3f 100644 --- a/internal-packages/clickhouse/schema/003_create_raw_run_events_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql @@ -1,55 +1,42 @@ -- +goose Up SET enable_json_type = 1; -/* ───────────────────────────────────────────────────────────── - RAW EVENT STREAM trigger_dev.raw_run_events_v1 - ───────────────────────────────────────────────────────────── - • One row for every status change / retry / metric emission - • All TaskRun scalar columns duplicated in each row - – they 
compress brilliantly and remove JOINs later - • Heavy blobs → ZSTD - • High-cardinality enums / strings → LowCardinality + LZ4 - • Array / JSON fields → ZSTD + late-materialised - • Bloom-filter index on tags for instant “has(tag)” - ----------------------------------------------------------------- */ - -CREATE TABLE trigger_dev.raw_run_events_v1 +CREATE TABLE trigger_dev.task_runs_v1 ( /* ─── ids & hierarchy ─────────────────────────────────────── */ environment_id String, organization_id String, project_id String, run_id String, - event_name LowCardinality(String), - environment_type LowCardinality(Nullable(String)), - friendly_id Nullable(String), + environment_type LowCardinality(String), + friendly_id String, attempt UInt8 DEFAULT 1, /* ─── enums / status ──────────────────────────────────────── */ - engine Nullable(Enum8('V1'=1,'V2'=2)) - CODEC(T64, LZ4), - status Enum8( -- TaskRunStatus - 'DELAYED'=1, - 'PENDING'=2, - 'PENDING_VERSION'=3, - 'WAITING_FOR_DEPLOY'=4, - 'EXECUTING'=5, - 'WAITING_TO_RESUME'=6, - 'RETRYING_AFTER_FAILURE'=7, - 'PAUSED'=8, - 'CANCELED'=9, - 'INTERRUPTED'=10, - 'COMPLETED_SUCCESSFULLY'=11, - 'COMPLETED_WITH_ERRORS'=12, - 'SYSTEM_FAILURE'=13, - 'CRASHED'=14, - 'EXPIRED'=15, - 'TIMED_OUT'=16), + engine Enum8('V1'=1,'V2'=2) CODEC(T64, LZ4), + status Enum8( + 'DELAYED'=1, + 'PENDING'=2, + 'PENDING_VERSION'=3, + 'WAITING_FOR_DEPLOY'=4, + 'EXECUTING'=5, + 'WAITING_TO_RESUME'=6, + 'RETRYING_AFTER_FAILURE'=7, + 'PAUSED'=8, + 'CANCELED'=9, + 'INTERRUPTED'=10, + 'COMPLETED_SUCCESSFULLY'=11, + 'COMPLETED_WITH_ERRORS'=12, + 'SYSTEM_FAILURE'=13, + 'CRASHED'=14, + 'EXPIRED'=15, + 'TIMED_OUT'=16 + ), /* ─── queue / concurrency / schedule ─────────────────────── */ - task_identifier Nullable(String), - queue Nullable(String), + task_identifier String, + queue String, schedule_id Nullable(String), batch_id Nullable(String), @@ -60,8 +47,8 @@ CREATE TABLE trigger_dev.raw_run_events_v1 depth UInt8 DEFAULT 0, /* ─── telemetry ─────────────────────────────────────────────── */ - span_id Nullable(String), - trace_id Nullable(String), + span_id String, + trace_id String, idempotency_key Nullable(String), /* ─── timing ─────────────────────────────────────────────── */ @@ -94,19 +81,19 @@ CREATE TABLE trigger_dev.raw_run_events_v1 machine_preset LowCardinality(Nullable(String)) CODEC(LZ4), is_test UInt8 DEFAULT 0, + + /* ─── commit lsn ─────────────────────────────────────────────── */ + _version UInt64 ) -ENGINE = MergeTree -PARTITION BY toYYYYMMDD(event_time) -ORDER BY (organization_id, project_id, environment_id, event_time, run_id) -SETTINGS - index_granularity = 8192, - vertical_merge_algorithm_min_rows_to_activate = 100000; +ENGINE = ReplacingMergeTree(_version) +PARTITION BY toYYYYMMDD(created_at) +ORDER BY (toDate(created_at), environment_id, task_identifier, created_at, run_id); /* Fast tag filtering */ -ALTER TABLE trigger_dev.raw_run_events_v1 +ALTER TABLE trigger_dev.task_runs_v1 ADD INDEX idx_tags tags TYPE tokenbf_v1(32768, 3, 0) GRANULARITY 4; -- +goose Down SET enable_json_type = 0; -DROP TABLE IF EXISTS trigger_dev.raw_run_events_v1; +DROP TABLE IF EXISTS trigger_dev.task_runs_v1; diff --git a/internal-packages/clickhouse/schema/004_create_run_latest_v1.sql b/internal-packages/clickhouse/schema/004_create_run_latest_v1.sql deleted file mode 100644 index f05d669fd6..0000000000 --- a/internal-packages/clickhouse/schema/004_create_run_latest_v1.sql +++ /dev/null @@ -1,127 +0,0 @@ --- +goose Up -/* one immutable row = the latest state we know about a run */ -CREATE 
TABLE trigger_dev.run_latest_v1 -( - -- identifiers / partition keys - environment_id String, - run_id String, - last_event_time DateTime64(3), - - updated_at DateTime64(3), - created_at DateTime64(3), - - environment_type LowCardinality(Nullable(String)), - friendly_id Nullable(String), - attempt UInt8 DEFAULT 1, - - -- user-visible fields - engine Nullable(Enum8('V1'=1,'V2'=2)) - CODEC(T64, LZ4), - status Enum8( - 'DELAYED'=1,'PENDING'=2,'PENDING_VERSION'=3, - 'WAITING_FOR_DEPLOY'=4,'EXECUTING'=5,'WAITING_TO_RESUME'=6, - 'RETRYING_AFTER_FAILURE'=7,'PAUSED'=8, - 'CANCELED'=9,'INTERRUPTED'=10, - 'COMPLETED_SUCCESSFULLY'=11,'COMPLETED_WITH_ERRORS'=12, - 'SYSTEM_FAILURE'=13,'CRASHED'=14,'EXPIRED'=15,'TIMED_OUT'=16), - task_identifier Nullable(String), - task_version Nullable(String), - - sdk_version Nullable(String) CODEC(LZ4), - cli_version Nullable(String) CODEC(LZ4), - machine_preset LowCardinality(Nullable(String)) CODEC(LZ4), - - queue Nullable(String), - schedule_id Nullable(String), - batch_id Nullable(String), - - root_run_id Nullable(String), - depth UInt8 DEFAULT 0, - is_test UInt8 DEFAULT 0, - - started_at Nullable(DateTime64(3)), - completed_at Nullable(DateTime64(3)), - delay_until Nullable(DateTime64(3)), - - usage_duration_ms UInt32 DEFAULT 0, - cost_in_cents Float64 DEFAULT 0, - base_cost_in_cents Float64 DEFAULT 0, - - expiration_ttl Nullable(String), - expired_at Nullable(DateTime64(3)), - - span_id Nullable(String), - idempotency_key Nullable(String), - - tags Array(String) CODEC(ZSTD(1)), - - /* ─── payload & context ──────────────────────────────────── */ - payload Nullable(JSON(max_dynamic_paths = 2048)), - output Nullable(JSON(max_dynamic_paths = 2048)), - error Nullable(JSON(max_dynamic_paths = 64)), - - _version DateTime64(3) -- used by ReplacingMergeTree dedupe -) -ENGINE = ReplacingMergeTree(_version) -PARTITION BY toYYYYMMDD(created_at) -ORDER BY (toDate(created_at), environment_id, run_id); - -CREATE MATERIALIZED VIEW trigger_dev.run_latest_mv_v1 -TO trigger_dev.run_latest_v1 -AS -SELECT - environment_id, - run_id, - argMax(status, event_time) AS status, - argMax(updated_at, event_time) AS updated_at, - - argMaxIf(tags, event_time, notEmpty(tags) > 0) AS tags, - - max(attempt) AS attempt, - - anyLast(created_at) AS created_at, - anyLast(engine) AS engine, - anyLast(sdk_version) AS sdk_version, - anyLast(cli_version) AS cli_version, - anyLast(machine_preset) AS machine_preset, - - anyLast(environment_type) AS environment_type, - anyLast(friendly_id) AS friendly_id, - anyLast(task_identifier) AS task_identifier, - anyLast(task_version) AS task_version, - anyLast(queue) AS queue, - anyLast(schedule_id) AS schedule_id, - anyLast(batch_id) AS batch_id, - anyLast(root_run_id) AS root_run_id, - anyLast(depth) AS depth, - anyLast(is_test) AS is_test, - - anyLast(started_at) AS started_at, - anyLast(completed_at) AS completed_at, - anyLast(delay_until) AS delay_until, - - max(usage_duration_ms) AS usage_duration_ms, - max(cost_in_cents) AS cost_in_cents, - max(base_cost_in_cents) AS base_cost_in_cents, - anyLast(expiration_ttl) AS expiration_ttl, - anyLast(expired_at) AS expired_at, - anyLast(span_id) AS span_id, - anyLast(idempotency_key) AS idempotency_key, - - anyLast(payload) AS payload, - anyLast(output) AS output, - argMax(error, event_time) AS error, - - max(event_time) AS last_event_time, - max(event_time) AS _version -- for RMTree -FROM trigger_dev.raw_run_events_v1 -GROUP BY - organization_id, - project_id, - environment_id, - run_id; - --- +goose Down - -DROP 
TABLE trigger_dev.run_latest_mv_v1; -DROP TABLE trigger_dev.run_latest_v1; diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index f3de3aded6..90e59166cd 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -2,7 +2,7 @@ import { ClickHouseSettings } from "@clickhouse/client"; import { ClickhouseClient } from "./client/client.js"; import { ClickhouseReader, ClickhouseWriter } from "./client/types.js"; import { NoopClient } from "./client/noop.js"; -import { insertRunEvents } from "./runEvents.js"; +import { insertTaskRuns } from "./taskRuns.js"; import { Logger } from "@trigger.dev/core/logger"; export type ClickHouseConfig = @@ -85,9 +85,9 @@ export class ClickHouse { }); } - get runEvents() { + get taskRuns() { return { - insert: insertRunEvents(this.writer), + insert: insertTaskRuns(this.writer), }; } } diff --git a/internal-packages/clickhouse/src/runEvents.test.ts b/internal-packages/clickhouse/src/runEvents.test.ts deleted file mode 100644 index 2730ea80d9..0000000000 --- a/internal-packages/clickhouse/src/runEvents.test.ts +++ /dev/null @@ -1,830 +0,0 @@ -import { clickhouseTest } from "@internal/testcontainers"; -import { z } from "zod"; -import { ClickhouseClient } from "./client/client.js"; -import { insertRunEvents } from "./runEvents.js"; - -describe("Run Events", () => { - clickhouseTest("should be able to insert run events", async ({ clickhouseContainer }) => { - const client = new ClickhouseClient({ - name: "test", - url: clickhouseContainer.getConnectionUrl(), - }); - - const insert = insertRunEvents(client, { - async_insert: 0, // turn off async insert for this test - }); - - const [insertError, insertResult] = await insert([ - { - environment_id: "env_1234", - event_name: "created", - environment_type: "DEVELOPMENT", - organization_id: "org_1234", - project_id: "project_1234", - run_id: "run_1234", - friendly_id: "friendly_1234", - attempt: 1, - engine: "V2", - status: "PENDING", - task_identifier: "my-task", - queue: "my-queue", - schedule_id: "schedule_1234", - batch_id: "batch_1234", - event_time: Date.now(), - created_at: Date.now(), - updated_at: Date.now(), - completed_at: undefined, - tags: ["tag1", "tag2"], - payload: { - key: "value", - }, - output: { - key: "value", - }, - error: { - type: "BUILT_IN_ERROR", - name: "Error", - message: "error", - stackTrace: "stack trace", - }, - usage_duration_ms: 1000, - cost_in_cents: 100, - task_version: "1.0.0", - sdk_version: "1.0.0", - cli_version: "1.0.0", - machine_preset: "small-1x", - is_test: true, - span_id: "span_1234", - trace_id: "trace_1234", - idempotency_key: "idempotency_key_1234", - expiration_ttl: "1h", - root_run_id: "root_run_1234", - parent_run_id: "parent_run_1234", - depth: 1, - }, - ]); - - expect(insertError).toBeNull(); - expect(insertResult).toEqual(expect.objectContaining({ executed: true })); - expect(insertResult?.summary?.written_rows).toEqual("2"); - - const query = client.query({ - name: "query-run-events", - query: "SELECT * FROM trigger_dev.raw_run_events_v1", - schema: z.object({ - environment_id: z.string(), - run_id: z.string(), - }), - params: z.object({ - run_id: z.string(), - }), - }); - - const [queryError, result] = await query({ run_id: "run_1234" }); - - expect(queryError).toBeNull(); - expect(result).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - environment_id: "env_1234", - run_id: "run_1234", - }), - ]) - ); - }); - - clickhouseTest( - "should be able to 
handle multiple run events", - async ({ clickhouseContainer }) => { - const client = new ClickhouseClient({ - name: "test", - url: clickhouseContainer.getConnectionUrl(), - }); - - const insert = insertRunEvents(client, { - async_insert: 0, // turn off async insert for this test - }); - - const [insertError, insertResult] = await insert([ - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "created", - environment_type: "PRODUCTION", - friendly_id: "run_cma45oli70002qrdy47w0j4n7", - attempt: 1, - engine: "V2", - status: "PENDING", - task_identifier: "retry-task", - queue: "task/retry-task", - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: "538677637f937f54", - trace_id: "20a28486b0b9f50c647b35e8863e36a5", - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:04.341").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:04.312").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: new Date("2025-04-30 16:34:04.311").getTime(), - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: { failCount: "3" }, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: true, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "locked", - environment_type: null, - friendly_id: null, - attempt: 1, - engine: null, - status: "PENDING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:05.402").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:05.378").getTime(), - started_at: null, - executed_at: new Date("2025-04-30 16:34:05.377").getTime(), - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: "20250430.3", - sdk_version: "4.0.0-v4-beta.7", - cli_version: "4.0.0-v4-beta.7", - machine_preset: "small-1x", - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "attempt_started", - environment_type: null, - friendly_id: null, - attempt: 1, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:08.129").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:08.111").getTime(), - started_at: null, - executed_at: new Date("2025-04-30 16:34:08.112").getTime(), - completed_at: null, - delay_until: null, - queued_at: null, - 
expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "usage", - environment_type: null, - friendly_id: null, - attempt: 1, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:13.084").getTime(), - created_at: new Date("2025-04-30 16:34:04.000").getTime(), - updated_at: new Date("2025-04-30 16:34:13.000").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 2635, - cost_in_cents: 0.008893125, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "retry_scheduled", - environment_type: null, - friendly_id: null, - attempt: 2, - engine: null, - status: "RETRYING_AFTER_FAILURE", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:13.748").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:13.751").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: { - message: "Intentionally failing attempt 1", - name: "Error", - stackTrace: - "Error: Intentionally failing attempt 1\n at run (file:///src/trigger/retry.ts:26:21)\n at _tracer.startActiveSpan.attributes (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:414:40)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:141:24\n at AsyncLocalStorage.run (node:async_hooks:346:14)\n at AsyncLocalStorageContextManager.with (file:///node_modules/.pnpm/@opentelemetry+context-async-hooks@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/context-async-hooks/src/AsyncLocalStorageContextManager.ts:40:36)\n at ContextAPI2.with (file:///node_modules/.pnpm/@opentelemetry+api@1.9.0/node_modules/@opentelemetry/api/src/api/context.ts:77:42)\n at Tracer.startActiveSpan (file:///node_modules/.pnpm/@opentelemetry+sdk-trace-base@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/sdk-trace-base/src/Tracer.ts:241:24)\n at TriggerTracer.startActiveSpan (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:85:24)\n at 
file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:409:33\n at _RunTimelineMetricsAPI.measureMetric (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/runTimelineMetrics/index.ts:67:28)", - type: "BUILT_IN_ERROR", - }, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "attempt_started", - environment_type: null, - friendly_id: null, - attempt: 2, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:13.846").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:13.831").getTime(), - started_at: null, - executed_at: new Date("2025-04-30 16:34:08.112").getTime(), - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "usage", - environment_type: null, - friendly_id: null, - attempt: 2, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:18.385").getTime(), - created_at: new Date("2025-04-30 16:34:04.000").getTime(), - updated_at: new Date("2025-04-30 16:34:18.000").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 5419, - cost_in_cents: 0.018289125, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "retry_scheduled", - environment_type: null, - friendly_id: null, - attempt: 3, - engine: null, - status: "RETRYING_AFTER_FAILURE", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:18.832").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:18.834").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - 
cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: { - message: "Intentionally failing attempt 2", - name: "Error", - stackTrace: - "Error: Intentionally failing attempt 2\n at run (file:///src/trigger/retry.ts:26:21)\n at _tracer.startActiveSpan.attributes (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:414:40)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:141:24\n at AsyncLocalStorage.run (node:async_hooks:346:14)\n at AsyncLocalStorageContextManager.with (file:///node_modules/.pnpm/@opentelemetry+context-async-hooks@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/context-async-hooks/src/AsyncLocalStorageContextManager.ts:40:36)\n at ContextAPI2.with (file:///node_modules/.pnpm/@opentelemetry+api@1.9.0/node_modules/@opentelemetry/api/src/api/context.ts:77:42)\n at Tracer.startActiveSpan (file:///node_modules/.pnpm/@opentelemetry+sdk-trace-base@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/sdk-trace-base/src/Tracer.ts:241:24)\n at TriggerTracer.startActiveSpan (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:85:24)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:409:33\n at _RunTimelineMetricsAPI.measureMetric (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/runTimelineMetrics/index.ts:67:28)", - type: "BUILT_IN_ERROR", - }, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "attempt_started", - environment_type: null, - friendly_id: null, - attempt: 3, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:18.946").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:18.931").getTime(), - started_at: null, - executed_at: new Date("2025-04-30 16:34:08.112").getTime(), - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "usage", - environment_type: null, - friendly_id: null, - attempt: 3, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:23.559").getTime(), - created_at: new Date("2025-04-30 16:34:04.000").getTime(), - updated_at: new Date("2025-04-30 
16:34:23.000").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 8217, - cost_in_cents: 0.027732375, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "retry_scheduled", - environment_type: null, - friendly_id: null, - attempt: 4, - engine: null, - status: "RETRYING_AFTER_FAILURE", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:24.045").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:24.047").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: { - message: "Intentionally failing attempt 3", - name: "Error", - stackTrace: - "Error: Intentionally failing attempt 3\n at run (file:///src/trigger/retry.ts:26:21)\n at _tracer.startActiveSpan.attributes (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:414:40)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:141:24\n at AsyncLocalStorage.run (node:async_hooks:346:14)\n at AsyncLocalStorageContextManager.with (file:///node_modules/.pnpm/@opentelemetry+context-async-hooks@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/context-async-hooks/src/AsyncLocalStorageContextManager.ts:40:36)\n at ContextAPI2.with (file:///node_modules/.pnpm/@opentelemetry+api@1.9.0/node_modules/@opentelemetry/api/src/api/context.ts:77:42)\n at Tracer.startActiveSpan (file:///node_modules/.pnpm/@opentelemetry+sdk-trace-base@1.25.1_@opentelemetry+api@1.9.0/node_modules/@opentelemetry/sdk-trace-base/src/Tracer.ts:241:24)\n at TriggerTracer.startActiveSpan (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/tracer.ts:85:24)\n at file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/workers/taskExecutor.ts:409:33\n at _RunTimelineMetricsAPI.measureMetric (file:///node_modules/.pnpm/@trigger.dev+core@4.0.0-v4-beta.7/node_modules/@trigger.dev/core/src/v3/runTimelineMetrics/index.ts:67:28)", - type: "BUILT_IN_ERROR", - }, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "attempt_started", - environment_type: null, - friendly_id: null, - attempt: 4, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - 
idempotency_key: null, - event_time: new Date("2025-04-30 16:34:24.135").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:24.123").getTime(), - started_at: null, - executed_at: new Date("2025-04-30 16:34:08.112").getTime(), - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 0, - cost_in_cents: 0, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "usage", - environment_type: null, - friendly_id: null, - attempt: 4, - engine: null, - status: "EXECUTING", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:25.895").getTime(), - created_at: new Date("2025-04-30 16:34:04.000").getTime(), - updated_at: new Date("2025-04-30 16:34:25.000").getTime(), - started_at: null, - executed_at: null, - completed_at: null, - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 8326, - cost_in_cents: 0.02810025, - base_cost_in_cents: 0, - payload: null, - output: null, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - { - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - organization_id: "cm8zs78wb0002dy616dg75tv3", - project_id: "cm9kddfbz01zpdy88t9dstecu", - run_id: "cma45oli70002qrdy47w0j4n7", - event_name: "succeeded", - environment_type: null, - friendly_id: null, - attempt: 4, - engine: null, - status: "COMPLETED_SUCCESSFULLY", - task_identifier: null, - queue: null, - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, - depth: 0, - span_id: null, - trace_id: null, - idempotency_key: null, - event_time: new Date("2025-04-30 16:34:26.139").getTime(), - created_at: new Date("2025-04-30 16:34:04.312").getTime(), - updated_at: new Date("2025-04-30 16:34:26.140").getTime(), - started_at: null, - executed_at: null, - completed_at: new Date("2025-04-30 16:34:26.139").getTime(), - delay_until: null, - queued_at: null, - expired_at: null, - expiration_ttl: null, - usage_duration_ms: 8326, - cost_in_cents: 0.02810025, - base_cost_in_cents: 0, - payload: null, - output: { attemptsTaken: "4" }, - error: null, - tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, - is_test: false, - }, - ]); - - expect(insertError).toBeNull(); - expect(insertResult).toEqual(expect.objectContaining({ executed: true })); - expect(insertResult?.summary?.written_rows).toEqual("15"); - - const query = client.query({ - name: "query-run-events", - query: "SELECT * FROM trigger_dev.raw_run_events_v1", - schema: z.object({ - environment_id: z.string(), - run_id: z.string(), - }), - params: z.object({ - run_id: z.string(), - }), - }); - - const [queryError, result] = await query({ run_id: "run_1234" }); - - expect(queryError).toBeNull(); - expect(result).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - run_id: 
"cma45oli70002qrdy47w0j4n7", - }), - ]) - ); - - // Query the materialized view to check the final state - const latestQuery = client.query({ - name: "query-run-latest", - query: "SELECT * FROM trigger_dev.run_latest_v1 FINAL", - schema: z.any(), - }); - - const [latestQueryError, latestResult] = await latestQuery({}); - - console.log(latestResult); - - expect(latestQueryError).toBeNull(); - expect(latestResult).not.toBeNull(); - if (!latestResult) throw new Error("Expected latestResult to not be null"); - expect(latestResult).toHaveLength(1); - expect(latestResult[0]).toEqual( - expect.objectContaining({ - environment_id: "cm9kddfcs01zqdy88ld9mmrli", - run_id: "cma45oli70002qrdy47w0j4n7", - status: "COMPLETED_SUCCESSFULLY", - attempt: 4, - task_identifier: "retry-task", - task_version: "20250430.3", - sdk_version: "4.0.0-v4-beta.7", - cli_version: "4.0.0-v4-beta.7", - machine_preset: "small-1x", - usage_duration_ms: 8326, - cost_in_cents: 0.02810025, - base_cost_in_cents: 0, - payload: { failCount: "3" }, - output: { attemptsTaken: "4" }, - error: null, - completed_at: "2025-04-30 15:34:26.139", - is_test: 0, - environment_type: "PRODUCTION", - friendly_id: "run_cma45oli70002qrdy47w0j4n7", - queue: "task/retry-task", - schedule_id: null, - batch_id: null, - root_run_id: null, - depth: 0, - started_at: null, - delay_until: null, - expiration_ttl: null, - expired_at: null, - span_id: "538677637f937f54", - idempotency_key: null, - tags: [], - created_at: "2025-04-30 15:34:04.312", - _version: "2025-04-30 15:34:26.139", - last_event_time: "2025-04-30 15:34:26.139", - updated_at: "2025-04-30 15:34:26.140", - engine: "V2", - }) - ); - } - ); -}); diff --git a/internal-packages/clickhouse/src/taskRuns.test.ts b/internal-packages/clickhouse/src/taskRuns.test.ts new file mode 100644 index 0000000000..a1488062e5 --- /dev/null +++ b/internal-packages/clickhouse/src/taskRuns.test.ts @@ -0,0 +1,223 @@ +import { clickhouseTest } from "@internal/testcontainers"; +import { z } from "zod"; +import { ClickhouseClient } from "./client/client.js"; +import { insertTaskRuns } from "./taskRuns.js"; + +describe("Task Runs V1", () => { + clickhouseTest("should be able to insert task runs", async ({ clickhouseContainer }) => { + const client = new ClickhouseClient({ + name: "test", + url: clickhouseContainer.getConnectionUrl(), + }); + + const insert = insertTaskRuns(client, { + async_insert: 0, // turn off async insert for this test + }); + + const [insertError, insertResult] = await insert([ + { + environment_id: "env_1234", + environment_type: "DEVELOPMENT", + organization_id: "org_1234", + project_id: "project_1234", + run_id: "run_1234", + friendly_id: "friendly_1234", + attempt: 1, + engine: "V2", + status: "PENDING", + task_identifier: "my-task", + queue: "my-queue", + schedule_id: "schedule_1234", + batch_id: "batch_1234", + created_at: Date.now(), + updated_at: Date.now(), + completed_at: undefined, + tags: ["tag1", "tag2"], + payload: { + key: "value", + }, + output: { + key: "value", + }, + error: { + type: "BUILT_IN_ERROR", + name: "Error", + message: "error", + stackTrace: "stack trace", + }, + usage_duration_ms: 1000, + cost_in_cents: 100, + task_version: "1.0.0", + sdk_version: "1.0.0", + cli_version: "1.0.0", + machine_preset: "small-1x", + is_test: true, + span_id: "span_1234", + trace_id: "trace_1234", + idempotency_key: "idempotency_key_1234", + expiration_ttl: "1h", + root_run_id: "root_run_1234", + parent_run_id: "parent_run_1234", + depth: 1, + _version: "1", + }, + ]); + + 
expect(insertError).toBeNull(); + expect(insertResult).toEqual(expect.objectContaining({ executed: true })); + expect(insertResult?.summary?.written_rows).toEqual("1"); + + const query = client.query({ + name: "query-task-runs", + query: "SELECT * FROM trigger_dev.task_runs_v1", + schema: z.object({ + environment_id: z.string(), + run_id: z.string(), + }), + params: z.object({ + run_id: z.string(), + }), + }); + + const [queryError, result] = await query({ run_id: "run_1234" }); + + expect(queryError).toBeNull(); + expect(result).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + environment_id: "env_1234", + run_id: "run_1234", + }), + ]) + ); + }); + + clickhouseTest("should deduplicate on the _version column", async ({ clickhouseContainer }) => { + const client = new ClickhouseClient({ + name: "test", + url: clickhouseContainer.getConnectionUrl(), + }); + + const insert = insertTaskRuns(client, { + async_insert: 0, // turn off async insert for this test + }); + + const [insertError, insertResult] = await insert([ + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + environment_type: "PRODUCTION", + friendly_id: "run_cma45oli70002qrdy47w0j4n7", + attempt: 1, + engine: "V2", + status: "PENDING", + task_identifier: "retry-task", + queue: "task/retry-task", + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: "538677637f937f54", + trace_id: "20a28486b0b9f50c647b35e8863e36a5", + idempotency_key: null, + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:04.312").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: new Date("2025-04-30 16:34:04.311").getTime(), + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: { failCount: "3" }, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: true, + _version: "1", + }, + { + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + organization_id: "cm8zs78wb0002dy616dg75tv3", + project_id: "cm9kddfbz01zpdy88t9dstecu", + run_id: "cma45oli70002qrdy47w0j4n7", + environment_type: "PRODUCTION", + friendly_id: "run_cma45oli70002qrdy47w0j4n7", + attempt: 1, + engine: "V2", + status: "COMPLETED_SUCCESSFULLY", + task_identifier: "retry-task", + queue: "task/retry-task", + schedule_id: null, + batch_id: null, + root_run_id: null, + parent_run_id: null, + depth: 0, + span_id: "538677637f937f54", + trace_id: "20a28486b0b9f50c647b35e8863e36a5", + idempotency_key: null, + created_at: new Date("2025-04-30 16:34:04.312").getTime(), + updated_at: new Date("2025-04-30 16:34:04.312").getTime(), + started_at: null, + executed_at: null, + completed_at: null, + delay_until: null, + queued_at: new Date("2025-04-30 16:34:04.311").getTime(), + expired_at: null, + expiration_ttl: null, + usage_duration_ms: 0, + cost_in_cents: 0, + base_cost_in_cents: 0, + payload: { failCount: "3" }, + output: null, + error: null, + tags: [], + task_version: null, + sdk_version: null, + cli_version: null, + machine_preset: null, + is_test: true, + _version: "2", + }, + ]); + + expect(insertError).toBeNull(); + expect(insertResult).toEqual(expect.objectContaining({ executed: true })); + + const query = client.query({ + name: "query-run-events", + 
query: "SELECT * FROM trigger_dev.task_runs_v1 FINAL", + schema: z.object({ + environment_id: z.string(), + run_id: z.string(), + status: z.string(), + }), + params: z.object({ + run_id: z.string(), + }), + }); + + const [queryError, result] = await query({ run_id: "cma45oli70002qrdy47w0j4n7" }); + + expect(queryError).toBeNull(); + expect(result).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + environment_id: "cm9kddfcs01zqdy88ld9mmrli", + run_id: "cma45oli70002qrdy47w0j4n7", + status: "COMPLETED_SUCCESSFULLY", + }), + ]) + ); + }); +}); diff --git a/internal-packages/clickhouse/src/runEvents.ts b/internal-packages/clickhouse/src/taskRuns.ts similarity index 66% rename from internal-packages/clickhouse/src/runEvents.ts rename to internal-packages/clickhouse/src/taskRuns.ts index 8549c737bb..d2a0d6efcc 100644 --- a/internal-packages/clickhouse/src/runEvents.ts +++ b/internal-packages/clickhouse/src/taskRuns.ts @@ -3,15 +3,13 @@ import { ClickhouseWriter } from "./client/types.js"; import { ClickHouseSettings } from "@clickhouse/client"; import { TaskRunError } from "@trigger.dev/core/v3/schemas"; -export const RawRunEventV1 = z.object({ +export const TaskRunV1 = z.object({ environment_id: z.string(), organization_id: z.string(), project_id: z.string(), run_id: z.string(), updated_at: z.number().int(), created_at: z.number().int(), - event_time: z.number().int(), - event_name: z.string(), status: z.enum([ "DELAYED", "PENDING", @@ -30,13 +28,12 @@ export const RawRunEventV1 = z.object({ "EXPIRED", "TIMED_OUT", ]), - /* ─── optional fields ─────────────────────────────────────────────── */ - environment_type: z.string().nullish(), - friendly_id: z.string().nullish(), + environment_type: z.string(), + friendly_id: z.string(), attempt: z.number().int().default(1), - engine: z.enum(["V1", "V2"]).nullish(), - task_identifier: z.string().nullish(), - queue: z.string().nullish(), + engine: z.enum(["V1", "V2"]), + task_identifier: z.string(), + queue: z.string(), schedule_id: z.string().nullish(), batch_id: z.string().nullish(), completed_at: z.number().int().nullish(), @@ -45,16 +42,16 @@ export const RawRunEventV1 = z.object({ delay_until: z.number().int().nullish(), queued_at: z.number().int().nullish(), expired_at: z.number().int().nullish(), - usage_duration_ms: z.number().int().nullish(), - cost_in_cents: z.number().nullish(), - base_cost_in_cents: z.number().nullish(), + usage_duration_ms: z.number().int().default(0), + cost_in_cents: z.number().default(0), + base_cost_in_cents: z.number().default(0), payload: z.unknown().nullish(), output: z.unknown().nullish(), error: TaskRunError.nullish(), tags: z .array(z.string()) .transform((arr) => arr.sort()) - .nullish(), + .default([]), task_version: z.string().nullish(), sdk_version: z.string().nullish(), cli_version: z.string().nullish(), @@ -62,20 +59,21 @@ export const RawRunEventV1 = z.object({ root_run_id: z.string().nullish(), parent_run_id: z.string().nullish(), depth: z.number().int().default(0), - span_id: z.string().nullish(), - trace_id: z.string().nullish(), + span_id: z.string(), + trace_id: z.string(), idempotency_key: z.string().nullish(), expiration_ttl: z.string().nullish(), is_test: z.boolean().default(false), + _version: z.string(), }); -export type RawRunEventV1 = z.infer; +export type TaskRunV1 = z.infer; -export function insertRunEvents(ch: ClickhouseWriter, settings?: ClickHouseSettings) { +export function insertTaskRuns(ch: ClickhouseWriter, settings?: ClickHouseSettings) { return ch.insert({ - name: 
"insertRunEvents", - table: "trigger_dev.raw_run_events_v1", - schema: RawRunEventV1, + name: "insertTaskRuns", + table: "trigger_dev.task_runs_v1", + schema: TaskRunV1, settings: { async_insert: 1, wait_for_async_insert: 0, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1848634e9b..ef230b4ca8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -673,6 +673,9 @@ importers: '@internal/clickhouse': specifier: workspace:* version: link:../../internal-packages/clickhouse + '@internal/replication': + specifier: workspace:* + version: link:../../internal-packages/replication '@internal/testcontainers': specifier: workspace:* version: link:../../internal-packages/testcontainers From c30a014c51163f6807407c63fc39dc9716697558 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Wed, 7 May 2025 22:23:21 +0100 Subject: [PATCH 18/33] get it working --- .configs/prometheus.yml | 7 + apps/webapp/app/env.server.ts | 29 + apps/webapp/app/metrics.server.ts | 4 +- .../admin.api.v1.runs-replication.start.ts | 37 ++ .../admin.api.v1.runs-replication.stop.ts | 37 ++ .../admin.api.v1.runs-replication.teardown.ts | 37 ++ .../runsReplicationInstance.server.ts | 38 ++ .../services/runsReplicationService.server.ts | 579 ++++++++++++++++++ apps/webapp/package.json | 1 + apps/webapp/remix.config.js | 1 + apps/webapp/tsconfig.json | 4 +- .../schema/003_create_task_runs_v1.sql | 1 - internal-packages/clickhouse/src/index.ts | 2 + internal-packages/clickhouse/src/taskRuns.ts | 5 +- internal-packages/replication/src/index.ts | 5 +- internal-packages/replication/src/pgoutput.ts | 17 +- internal-packages/replication/src/stream.ts | 2 - pnpm-lock.yaml | 3 + scripts/start-prometheus.sh | 5 + 19 files changed, 799 insertions(+), 15 deletions(-) create mode 100644 .configs/prometheus.yml create mode 100644 apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts create mode 100644 apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts create mode 100644 apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts create mode 100644 apps/webapp/app/services/runsReplicationInstance.server.ts create mode 100644 apps/webapp/app/services/runsReplicationService.server.ts create mode 100755 scripts/start-prometheus.sh diff --git a/.configs/prometheus.yml b/.configs/prometheus.yml new file mode 100644 index 0000000000..bfc97660cc --- /dev/null +++ b/.configs/prometheus.yml @@ -0,0 +1,7 @@ +global: + scrape_interval: 15s # how often to scrape targets + +scrape_configs: + - job_name: "trigger-dev" + static_configs: + - targets: ["localhost:3030"] diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index ec7072b8e3..f79036bf43 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -725,6 +725,35 @@ const EnvironmentSchema = z.object({ // BetterStack BETTERSTACK_API_KEY: z.string().optional(), BETTERSTACK_STATUS_PAGE_ID: z.string().optional(), + + RUN_REPLICATION_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RUN_REPLICATION_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + RUN_REPLICATION_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RUN_REPLICATION_REDIS_PORT: z.coerce + .number() + .optional() + .transform((v) => v ?? (process.env.REDIS_PORT ? 
parseInt(process.env.REDIS_PORT) : undefined)), + RUN_REPLICATION_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RUN_REPLICATION_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + RUN_REPLICATION_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), }); export type Environment = z.infer; diff --git a/apps/webapp/app/metrics.server.ts b/apps/webapp/app/metrics.server.ts index 16cbbf07f9..1fe43687db 100644 --- a/apps/webapp/app/metrics.server.ts +++ b/apps/webapp/app/metrics.server.ts @@ -4,7 +4,9 @@ import { env } from "./env.server"; export const metricsRegister = singleton("metricsRegister", initializeMetricsRegister); -function initializeMetricsRegister() { +export type MetricsRegister = Registry; + +function initializeMetricsRegister(): MetricsRegister { const registry = new Registry(); register.setDefaultLabels({ diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts new file mode 100644 index 0000000000..108ae7db91 --- /dev/null +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts @@ -0,0 +1,37 @@ +import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; +import { prisma } from "~/db.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { runsReplicationInstance } from "~/services/runsReplicationInstance.server"; + +export async function action({ request }: ActionFunctionArgs) { + // Next authenticate the request + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + const user = await prisma.user.findUnique({ + where: { + id: authenticationResult.userId, + }, + }); + + if (!user) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + if (!user.admin) { + return json({ error: "You must be an admin to perform this action" }, { status: 403 }); + } + + try { + await runsReplicationInstance.start(); + + return json({ + success: true, + }); + } catch (error) { + return json({ error: error instanceof Error ? 
error.message : error }, { status: 400 }); + } +} diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts new file mode 100644 index 0000000000..ae198fe883 --- /dev/null +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts @@ -0,0 +1,37 @@ +import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; +import { prisma } from "~/db.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { runsReplicationInstance } from "~/services/runsReplicationInstance.server"; + +export async function action({ request }: ActionFunctionArgs) { + // Next authenticate the request + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + const user = await prisma.user.findUnique({ + where: { + id: authenticationResult.userId, + }, + }); + + if (!user) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + if (!user.admin) { + return json({ error: "You must be an admin to perform this action" }, { status: 403 }); + } + + try { + await runsReplicationInstance.stop(); + + return json({ + success: true, + }); + } catch (error) { + return json({ error: error instanceof Error ? error.message : error }, { status: 400 }); + } +} diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts new file mode 100644 index 0000000000..9d17d72742 --- /dev/null +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts @@ -0,0 +1,37 @@ +import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; +import { prisma } from "~/db.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { runsReplicationInstance } from "~/services/runsReplicationInstance.server"; + +export async function action({ request }: ActionFunctionArgs) { + // Next authenticate the request + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + const user = await prisma.user.findUnique({ + where: { + id: authenticationResult.userId, + }, + }); + + if (!user) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + if (!user.admin) { + return json({ error: "You must be an admin to perform this action" }, { status: 403 }); + } + + try { + await runsReplicationInstance.teardown(); + + return json({ + success: true, + }); + } catch (error) { + return json({ error: error instanceof Error ? 
error.message : error }, { status: 400 });
+  }
+}
diff --git a/apps/webapp/app/services/runsReplicationInstance.server.ts b/apps/webapp/app/services/runsReplicationInstance.server.ts
new file mode 100644
index 0000000000..5a0c110e51
--- /dev/null
+++ b/apps/webapp/app/services/runsReplicationInstance.server.ts
@@ -0,0 +1,38 @@
+import { ClickHouse } from "@internal/clickhouse";
+import { RunsReplicationService } from "./runsReplicationService.server";
+import { singleton } from "~/utils/singleton";
+import invariant from "tiny-invariant";
+import { env } from "~/env.server";
+import { metricsRegister } from "~/metrics.server";
+
+export const runsReplicationInstance = singleton(
+  "runsReplicationInstance",
+  initializeRunsReplicationInstance
+);
+
+function initializeRunsReplicationInstance() {
+  const { DATABASE_URL } = process.env;
+  invariant(typeof DATABASE_URL === "string", "DATABASE_URL env var not set");
+
+  const clickhouse = ClickHouse.fromEnv();
+
+  const service = new RunsReplicationService({
+    clickhouse: clickhouse,
+    pgConnectionUrl: DATABASE_URL,
+    serviceName: "runs-replication",
+    slotName: "task_runs_to_clickhouse_v1",
+    publicationName: "task_runs_to_clickhouse_v1_publication",
+    redisOptions: {
+      keyPrefix: "runs-replication:",
+      port: env.RUN_REPLICATION_REDIS_PORT ?? undefined,
+      host: env.RUN_REPLICATION_REDIS_HOST ?? undefined,
+      username: env.RUN_REPLICATION_REDIS_USERNAME ?? undefined,
+      password: env.RUN_REPLICATION_REDIS_PASSWORD ?? undefined,
+      enableAutoPipelining: true,
+      ...(env.RUN_REPLICATION_REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }),
+    },
+    metricsRegister: metricsRegister,
+  });
+
+  return service;
+}
diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts
new file mode 100644
index 0000000000..91d74b4e41
--- /dev/null
+++ b/apps/webapp/app/services/runsReplicationService.server.ts
@@ -0,0 +1,579 @@
+import type { ClickHouse, TaskRunV1 } from "@internal/clickhouse";
+import { RedisOptions } from "@internal/redis";
+import { LogicalReplicationClient, Transaction, type PgoutputMessage } from "@internal/replication";
+import { Logger } from "@trigger.dev/core/logger";
+import { tryCatch } from "@trigger.dev/core/utils";
+import { TaskRunError } from "@trigger.dev/core/v3/schemas";
+import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization";
+import { TaskRun } from "@trigger.dev/database";
+import { nanoid } from "nanoid";
+import pLimit from "p-limit";
+import { Counter, Gauge } from "prom-client";
+import type { MetricsRegister } from "~/metrics.server";
+
+export type RunsReplicationServiceOptions = {
+  clickhouse: ClickHouse;
+  pgConnectionUrl: string;
+  serviceName: string;
+  slotName: string;
+  publicationName: string;
+  redisOptions: RedisOptions;
+  metricsRegister?: MetricsRegister;
+  insertStrategy?: "streaming" | "batching";
+  maxFlushConcurrency?: number;
+  flushIntervalMs?: number;
+  flushBatchSize?: number;
+};
+
+export class RunsReplicationService {
+  private _lastLsn: string | null = null;
+  private _isSubscribed = false;
+  private _currentTransaction:
+    | (Omit<Transaction<TaskRun>, "commitEndLsn" | "replicationLagMs"> & {
+        commitEndLsn?: string | null;
+        replicationLagMs?: number;
+      })
+    | null = null;
+
+  private _replicationClient: LogicalReplicationClient;
+  private _concurrentFlushScheduler: ConcurrentFlushScheduler<{ _version: bigint; run: TaskRun }>;
+  private logger: Logger;
+  private _lastReplicationLagMs: number | null = null;
+  private _transactionCounter?: Counter;
+  private _insertStrategy: "streaming" | "batching";
+
+  constructor(private readonly options: RunsReplicationServiceOptions) {
+    this.logger = new Logger("RunsReplicationService", "debug");
+
+    this._insertStrategy = options.insertStrategy ?? "streaming";
+
+    this._replicationClient = new LogicalReplicationClient({
+      pgConfig: {
+        connectionString: options.pgConnectionUrl,
+      },
+      name: options.serviceName,
+      slotName: options.slotName,
+      publicationName: options.publicationName,
+      table: "TaskRun",
+      redisOptions: options.redisOptions,
+      autoAcknowledge: false,
+      publicationActions: ["insert", "update"],
+      logger: new Logger("RunsReplicationService", "debug"),
+      leaderLockTimeoutMs: 30_000,
+      leaderLockExtendIntervalMs: 10_000,
+      ackIntervalSeconds: 10,
+    });
+
+    this._concurrentFlushScheduler = new ConcurrentFlushScheduler<{
+      _version: bigint;
+      run: TaskRun;
+    }>({
+      batchSize: options.flushBatchSize ?? 50,
+      flushInterval: options.flushIntervalMs ?? 100,
+      maxConcurrency: options.maxFlushConcurrency ?? 100,
+      callback: this.#flushBatch.bind(this),
+      metricsRegister: options.metricsRegister,
+    });
+
+    this._replicationClient.events.on("data", async ({ lsn, log }) => {
+      this._lastLsn = lsn;
+
+      await this.#handleData(lsn, log);
+    });
+
+    this._replicationClient.events.on("heartbeat", async ({ lsn, shouldRespond }) => {
+      if (shouldRespond) {
+        await this._replicationClient.acknowledge(lsn);
+      }
+    });
+
+    this._replicationClient.events.on("error", (error) => {
+      this.logger.error("Replication client error", {
+        error,
+      });
+    });
+
+    this._replicationClient.events.on("start", () => {
+      this.logger.debug("Replication client started");
+    });
+
+    this._replicationClient.events.on("acknowledge", ({ lsn }) => {
+      this.logger.debug("Acknowledged", { lsn });
+    });
+
+    this._replicationClient.events.on("leaderElection", (isLeader) => {
+      this.logger.debug("Leader election", { isLeader });
+    });
+
+    if (options.metricsRegister) {
+      const replicationService = this;
+      new Gauge({
+        name: "runs_replication_service_replication_lag_ms",
+        help: "The replication lag in milliseconds",
+        collect() {
+          if (!replicationService._lastReplicationLagMs) {
+            return;
+          }
+
+          this.set(replicationService._lastReplicationLagMs);
+        },
+        registers: [options.metricsRegister],
+      });
+
+      replicationService._transactionCounter = new Counter({
+        name: "runs_replication_service_transactions",
+        help: "The number of transactions",
+        registers: [options.metricsRegister],
+      });
+    }
+  }
+
+  async start() {
+    this.logger.info("Starting replication client", {
+      lastLsn: this._lastLsn,
+    });
+
+    await this._replicationClient.subscribe(this._lastLsn ?? undefined);
+  }
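// Passing `undefined` to subscribe() defers the resume point to the replication
// slot itself, which is the durable cursor on the Postgres side (a hedged
// reading of the LogicalReplicationClient API; this process only caches the
// last LSN it saw in memory). An LSN such as "16/B374D848" is two hex halves
// of a 64-bit log position; the `lsnToUInt64` helper at the bottom of this
// file packs them as (high << 32) | low, so later commits always compare
// greater and can double as the ReplacingMergeTree `_version`. Worked example:
//
//   lsnToUInt64("16/B374D848")
//   // === (0x16n << 32n) | 0xB374D848n
//   // === 94489280512n + 3010779208n
//   // === 97500059720n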
+
+  async stop() {
+    this.logger.info("Stopping replication client");
+
+    await this._replicationClient.stop();
+  }
+
+  async teardown() {
+    this.logger.info("Teardown replication client");
+
+    await this._replicationClient.teardown();
+  }
+
+  async #handleData(lsn: string, message: PgoutputMessage) {
+    switch (message.tag) {
+      case "begin": {
+        this._currentTransaction = {
+          commitLsn: message.commitLsn,
+          xid: message.xid,
+          events: [],
+        };
+        break;
+      }
+      case "insert": {
+        if (!this._currentTransaction) {
+          return;
+        }
+
+        this._currentTransaction.events.push({
+          tag: message.tag,
+          data: message.new as TaskRun,
+          raw: message,
+        });
+        break;
+      }
+      case "update": {
+        if (!this._currentTransaction) {
+          return;
+        }
+
+        this._currentTransaction.events.push({
+          tag: message.tag,
+          data: message.new as TaskRun,
+          raw: message,
+        });
+        break;
+      }
+      case "commit": {
+        if (!this._currentTransaction) {
+          return;
+        }
+        const replicationLagMs = Date.now() - Number(message.commitTime / 1000n);
+        this._currentTransaction.commitEndLsn = message.commitEndLsn;
+        this._currentTransaction.replicationLagMs = replicationLagMs;
+        await this.#handleTransaction(this._currentTransaction as Transaction<TaskRun>);
+        this._currentTransaction = null;
+        break;
+      }
+    }
+  }
+
+  async #handleTransaction(transaction: Transaction<TaskRun>) {
+    this._lastReplicationLagMs = transaction.replicationLagMs;
+
+    // If there are no events, do nothing
+    if (transaction.events.length === 0) {
+      if (transaction.commitEndLsn) {
+        await this._replicationClient.acknowledge(transaction.commitEndLsn);
+      }
+
+      return;
+    }
+
+    if (!transaction.commitEndLsn) {
+      this.logger.error("Transaction has no commit end lsn", {
+        transaction,
+      });
+
+      return;
+    }
+
+    this.logger.debug("Handling transaction", {
+      transaction,
+    });
+
+    // If there are events, we need to handle them
+    const _version = lsnToUInt64(transaction.commitEndLsn);
+
+    this._transactionCounter?.inc();
+
+    if (this._insertStrategy === "streaming") {
+      await this._concurrentFlushScheduler.addToBatch(
+        transaction.events.map((event) => ({ _version, run: event.data }))
+      );
+    } else {
+      const [flushError] = await tryCatch(
+        this.#flushBatch(
+          nanoid(),
+          transaction.events.map((event) => ({ _version, run: event.data }))
+        )
+      );
+
+      if (flushError) {
+        this.logger.error("Error flushing batch", {
+          error: flushError,
+        });
+      }
+    }
+
+    await this._replicationClient.acknowledge(transaction.commitEndLsn);
+  }
+
+  async #flushBatch(flushId: string, batch: Array<{ _version: bigint; run: TaskRun }>) {
+    if (batch.length === 0) {
+      this.logger.debug("No runs to flush", {
+        flushId,
+      });
+      return;
+    }
+
+    this.logger.info("Flushing batch", {
+      flushId,
+      batchSize: batch.length,
+    });
+
+    const preparedRuns = await Promise.all(batch.map(this.#prepareRun.bind(this)));
+    const runsToInsert = preparedRuns.filter(Boolean);
+
+    if (runsToInsert.length === 0) {
+      this.logger.debug("No runs to insert", {
+        flushId,
+        batchSize: batch.length,
+      });
+      return;
+    }
+
+    const [insertError, insertResult] = await this.options.clickhouse.taskRuns.insert(
+      runsToInsert,
+      {
+        params: {
+          clickhouse_settings: {
+            wait_for_async_insert: this._insertStrategy === "batching" ? 1 : 0,
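//          (With ClickHouse async inserts, wait_for_async_insert: 1 makes the
//          INSERT acknowledge only once the buffered data has been flushed to
//          the table, which suits the batching strategy; 0 acknowledges as soon
//          as the rows are buffered, trading durability of the ack for latency
//          in streaming mode.)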
+          },
+        },
+      }
+    );
+
+    if (insertError) {
+      this.logger.error("Error inserting runs", {
+        error: insertError,
+        flushId,
+        batchSize: batch.length,
+      });
+    } else {
+      this.logger.info("Flushed batch", {
+        flushId,
+        insertResult,
+      });
+    }
+  }
+
+  async #prepareRun(batchedRun: {
+    run: TaskRun;
+    _version: bigint;
+  }): Promise<TaskRunV1 | undefined> {
+    this.logger.debug("Preparing run", {
+      batchedRun,
+    });
+
+    const { run, _version } = batchedRun;
+
+    if (!run.environmentType) {
+      return undefined;
+    }
+
+    if (!run.organizationId) {
+      return undefined;
+    }
+
+    const [payload, output] = await Promise.all([
+      this.#prepareJson(run.payload, run.payloadType),
+      this.#prepareJson(run.output, run.outputType),
+    ]);
+
+    return {
+      environment_id: run.runtimeEnvironmentId,
+      organization_id: run.organizationId,
+      project_id: run.projectId,
+      run_id: run.id,
+      updated_at: run.updatedAt.getTime(),
+      created_at: run.createdAt.getTime(),
+      status: run.status,
+      environment_type: run.environmentType,
+      friendly_id: run.friendlyId,
+      engine: run.engine,
+      task_identifier: run.taskIdentifier,
+      queue: run.queue,
+      span_id: run.spanId,
+      trace_id: run.traceId,
+      error: run.error ? (run.error as TaskRunError) : undefined,
+      attempt: run.attemptNumber ?? 1,
+      schedule_id: run.scheduleId,
+      batch_id: run.batchId,
+      completed_at: run.completedAt?.getTime(),
+      started_at: run.startedAt?.getTime(),
+      executed_at: run.executedAt?.getTime(),
+      delay_until: run.delayUntil?.getTime(),
+      queued_at: run.queuedAt?.getTime(),
+      expired_at: run.expiredAt?.getTime(),
+      usage_duration_ms: run.usageDurationMs,
+      cost_in_cents: run.costInCents,
+      base_cost_in_cents: run.baseCostInCents,
+      tags: run.runTags,
+      task_version: run.taskVersion,
+      sdk_version: run.sdkVersion,
+      cli_version: run.cliVersion,
+      machine_preset: run.machinePreset,
+      root_run_id: run.rootTaskRunId,
+      parent_run_id: run.parentTaskRunId,
+      depth: run.depth,
+      is_test: run.isTest,
+      idempotency_key: run.idempotencyKey,
+      expiration_ttl: run.ttl,
+      payload,
+      output,
+      _version: _version.toString(),
+    };
+  }
+
+  async #prepareJson(
+    data: string | undefined | null,
+    dataType: string
+  ): Promise<{ data: unknown } | undefined> {
+    if (!data) {
+      return undefined;
+    }
+
+    if (dataType !== "application/json" && dataType !== "application/super+json") {
+      return undefined;
+    }
+
+    const packet = {
+      data,
+      dataType,
+    };
+
+    const parsedData = await parsePacket(packet);
+
+    if (!parsedData) {
+      return undefined;
+    }
+
+    return { data: parsedData };
+  }
+}
+
+export type ConcurrentFlushSchedulerConfig<T> = {
+  batchSize: number;
+  flushInterval: number;
+  maxConcurrency?: number;
+  callback: (flushId: string, batch: T[]) => Promise<void>;
+  metricsRegister?: MetricsRegister;
+};
+
+export class ConcurrentFlushScheduler<T> {
+  private currentBatch: T[];
+  private readonly BATCH_SIZE: number;
+  private readonly FLUSH_INTERVAL: number;
+  private readonly MAX_CONCURRENCY: number;
+  private readonly concurrencyLimiter: ReturnType<typeof pLimit>;
+  private flushTimer: NodeJS.Timeout | null;
+  private isShuttingDown;
+  private failedBatchCount;
+  private metricsRegister?: MetricsRegister;
+  private logger: Logger;
+
+  constructor(private readonly config: ConcurrentFlushSchedulerConfig<T>) {
+    this.logger = new Logger("ConcurrentFlushScheduler", "info");
+    this.currentBatch = [];
+    this.BATCH_SIZE = config.batchSize;
+    this.FLUSH_INTERVAL = config.flushInterval;
+    this.MAX_CONCURRENCY = config.maxConcurrency || 1;
+    this.concurrencyLimiter = pLimit(this.MAX_CONCURRENCY);
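//     pLimit(n) returns a wrapper that lets at most n of the wrapped async
//     callbacks run at once and queues the rest, so MAX_CONCURRENCY caps how
//     many #flushBatch calls are in flight. Standalone sketch (hypothetical
//     flush() and batches, outside this class):
//
//       const limit = pLimit(2);
//       await Promise.all(batches.map((b) => limit(() => flush(b))));
//       // at most two flush() calls execute concurrently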
+    this.flushTimer = null;
+    this.isShuttingDown = false;
+    this.failedBatchCount = 0;
+
+    this.logger.info("Initializing ConcurrentFlushScheduler", {
+      batchSize: this.BATCH_SIZE,
+      flushInterval: this.FLUSH_INTERVAL,
+      maxConcurrency: this.MAX_CONCURRENCY,
+    });
+
+    this.startFlushTimer();
+    this.setupShutdownHandlers();
+
+    if (!process.env.VITEST && config.metricsRegister) {
+      this.metricsRegister = config.metricsRegister;
+
+      const scheduler = this;
+
+      new Gauge({
+        name: "concurrent_flush_scheduler_batch_size",
+        help: "Number of items in the current concurrent flush scheduler batch",
+        collect() {
+          this.set(scheduler.currentBatch.length);
+        },
+        registers: [this.metricsRegister],
+      });
+
+      new Gauge({
+        name: "concurrent_flush_scheduler_failed_batches",
+        help: "Number of failed batches",
+        collect() {
+          this.set(scheduler.failedBatchCount);
+        },
+        registers: [this.metricsRegister],
+      });
+    }
+  }
+
+  /**
+   * If you want to fire and forget, don't await this method.
+   */
+  async addToBatch(items: T[]): Promise<void> {
+    // TODO: consider using concat. spread is not performant
+    this.currentBatch.push(...items);
+    this.logger.debug("Adding items to batch", {
+      currentBatchSize: this.currentBatch.length,
+      itemsAdded: items.length,
+    });
+
+    if (this.currentBatch.length >= this.BATCH_SIZE) {
+      this.logger.debug("Batch size threshold reached, initiating flush", {
+        batchSize: this.BATCH_SIZE,
+        currentSize: this.currentBatch.length,
+      });
+      await this.flushNextBatch();
+      this.resetFlushTimer();
+    }
+  }
+
+  private startFlushTimer(): void {
+    this.flushTimer = setInterval(() => this.checkAndFlush(), this.FLUSH_INTERVAL);
+    this.logger.debug("Started flush timer", { interval: this.FLUSH_INTERVAL });
+  }
+
+  private setupShutdownHandlers() {
+    process.on("SIGTERM", this.shutdown.bind(this));
+    process.on("SIGINT", this.shutdown.bind(this));
+    this.logger.debug("Shutdown handlers configured");
+  }
+
+  private async shutdown(): Promise<void> {
+    if (this.isShuttingDown) return;
+    this.isShuttingDown = true;
+    this.logger.info("Initiating shutdown of concurrent flush scheduler", {
+      remainingItems: this.currentBatch.length,
+    });
+
+    await this.checkAndFlush();
+    this.clearTimer();
+
+    this.logger.info("Concurrent flush scheduler shutdown complete", {
+      totalFailedBatches: this.failedBatchCount,
+    });
+  }
+
+  private clearTimer(): void {
+    if (this.flushTimer) {
+      clearInterval(this.flushTimer);
+      this.logger.debug("Flush timer cleared");
+    }
+  }
+
+  private resetFlushTimer(): void {
+    this.clearTimer();
+    this.startFlushTimer();
+    this.logger.debug("Flush timer reset");
+  }
+
+  private async checkAndFlush(): Promise<void> {
+    if (this.currentBatch.length > 0) {
+      this.logger.debug("Periodic flush check triggered", {
+        currentBatchSize: this.currentBatch.length,
+      });
+      await this.flushNextBatch();
+    }
+  }
+
+  private async flushNextBatch(): Promise<void> {
+    if (this.currentBatch.length === 0) return;
+
+    const batches: T[][] = [];
+    while (this.currentBatch.length > 0) {
+      batches.push(this.currentBatch.splice(0, this.BATCH_SIZE));
+    }
+
+    this.logger.info("Starting batch flush", {
+      numberOfBatches: batches.length,
+      totalItems: batches.reduce((sum, batch) => sum + batch.length, 0),
+    });
+
+    const callback = this.config.callback;
+
+    // TODO: report plimit.activeCount and pLimit.pendingCount and pLimit.concurrency to /metrics
+    const promises = batches.map((batch) =>
+      this.concurrencyLimiter(async () => {
+        const batchId = nanoid();
+        try {
+          await callback(batchId, batch);
+        } catch (error) {
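//           Rethrowing after the log below is what lets Promise.allSettled
//           further down count this batch as rejected; swallowing the error
//           here would under-report failedBatchCount.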
this.logger.error("Error processing batch", { + batchId, + error, + batchSize: batch.length, + errorMessage: error instanceof Error ? error.message : "Unknown error", + }); + throw error; + } + }) + ); + + const results = await Promise.allSettled(promises); + + const failedBatches = results.filter((result) => result.status === "rejected").length; + this.failedBatchCount += failedBatches; + + this.logger.info("Batch flush complete", { + totalBatches: batches.length, + successfulBatches: batches.length - failedBatches, + failedBatches, + totalFailedBatches: this.failedBatchCount, + }); + } +} + +function lsnToUInt64(lsn: string): bigint { + const [seg, off] = lsn.split("/"); + return (BigInt("0x" + seg) << 32n) | BigInt("0x" + off); +} diff --git a/apps/webapp/package.json b/apps/webapp/package.json index ab30ad5739..0e9063e5a1 100644 --- a/apps/webapp/package.json +++ b/apps/webapp/package.json @@ -146,6 +146,7 @@ "ohash": "^1.1.3", "openai": "^4.33.1", "parse-duration": "^1.1.0", + "p-limit": "^6.2.0", "posthog-js": "^1.93.3", "posthog-node": "^3.1.3", "prism-react-renderer": "^2.3.1", diff --git a/apps/webapp/remix.config.js b/apps/webapp/remix.config.js index eb290765e8..9e582d89f2 100644 --- a/apps/webapp/remix.config.js +++ b/apps/webapp/remix.config.js @@ -23,6 +23,7 @@ module.exports = { "superjson", "prismjs/components/prism-json", "prismjs/components/prism-typescript", + "redlock", ], browserNodeBuiltinsPolyfill: { modules: { path: true, os: true, crypto: true } }, }; diff --git a/apps/webapp/tsconfig.json b/apps/webapp/tsconfig.json index 5c80d471ea..a10eda99cf 100644 --- a/apps/webapp/tsconfig.json +++ b/apps/webapp/tsconfig.json @@ -3,14 +3,14 @@ "include": ["remix.env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx"], "compilerOptions": { "types": ["vitest/globals"], - "lib": ["DOM", "DOM.Iterable", "DOM.AsyncIterable", "ES2019"], + "lib": ["DOM", "DOM.Iterable", "DOM.AsyncIterable", "ES2020"], "isolatedModules": true, "esModuleInterop": true, "jsx": "react-jsx", "module": "esnext", "moduleResolution": "Bundler", "resolveJsonModule": true, - "target": "ES2019", + "target": "ES2020", "strict": true, "allowJs": true, "forceConsistentCasingInFileNames": true, diff --git a/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql index 442d947d3f..9a1fdebdd3 100644 --- a/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql @@ -52,7 +52,6 @@ CREATE TABLE trigger_dev.task_runs_v1 idempotency_key Nullable(String), /* ─── timing ─────────────────────────────────────────────── */ - event_time DateTime64(3), -- when this row created created_at DateTime64(3), updated_at DateTime64(3), started_at Nullable(DateTime64(3)), diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index 90e59166cd..602ace450c 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -5,6 +5,8 @@ import { NoopClient } from "./client/noop.js"; import { insertTaskRuns } from "./taskRuns.js"; import { Logger } from "@trigger.dev/core/logger"; +export type * from "./taskRuns.js"; + export type ClickHouseConfig = | { name?: string; diff --git a/internal-packages/clickhouse/src/taskRuns.ts b/internal-packages/clickhouse/src/taskRuns.ts index d2a0d6efcc..5f2ad8bf71 100644 --- a/internal-packages/clickhouse/src/taskRuns.ts +++ b/internal-packages/clickhouse/src/taskRuns.ts @@ -48,10 
+48,7 @@ export const TaskRunV1 = z.object({ payload: z.unknown().nullish(), output: z.unknown().nullish(), error: TaskRunError.nullish(), - tags: z - .array(z.string()) - .transform((arr) => arr.sort()) - .default([]), + tags: z.array(z.string()).nullish(), task_version: z.string().nullish(), sdk_version: z.string().nullish(), cli_version: z.string().nullish(), diff --git a/internal-packages/replication/src/index.ts b/internal-packages/replication/src/index.ts index cb0ff5c3b5..d8dc8e725a 100644 --- a/internal-packages/replication/src/index.ts +++ b/internal-packages/replication/src/index.ts @@ -1 +1,4 @@ -export {}; +export * from "./client.js"; +export * from "./errors.js"; +export * from "./stream.js"; +export type * from "./pgoutput.js"; diff --git a/internal-packages/replication/src/pgoutput.ts b/internal-packages/replication/src/pgoutput.ts index 1bcab2c656..90b88dab87 100644 --- a/internal-packages/replication/src/pgoutput.ts +++ b/internal-packages/replication/src/pgoutput.ts @@ -129,11 +129,20 @@ class BinaryReader { array(n: number, fn: () => T): T[] { return Array.from({ length: n }, fn); } - readLsn(): string { - const upper = this.readInt32(); - const lower = this.readInt32(); - return upper.toString(16).toUpperCase() + "/" + lower.toString(16).toUpperCase(); + + readLsn(): string | null { + const upper = this.readUint32(); + const lower = this.readUint32(); + if (upper === 0 && lower === 0) { + return null; + } + return ( + upper.toString(16).padStart(8, "0").toUpperCase() + + "/" + + lower.toString(16).padStart(8, "0").toUpperCase() + ); } + readUint32(): number { // >>> 0 ensures unsigned return this.readInt32() >>> 0; diff --git a/internal-packages/replication/src/stream.ts b/internal-packages/replication/src/stream.ts index 9d3d547e88..4ec32a92ab 100644 --- a/internal-packages/replication/src/stream.ts +++ b/internal-packages/replication/src/stream.ts @@ -1,6 +1,4 @@ import { createAsyncIterableStreamFromAsyncIterable } from "@trigger.dev/core/v3"; -import { Readable } from "node:stream"; -import type { ClientConfig } from "pg"; import { LogicalReplicationClient, LogicalReplicationClientOptions } from "./client.js"; import type { MessageDelete, MessageInsert, MessageUpdate, PgoutputMessage } from "./pgoutput.js"; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ef230b4ca8..c3eeb65281 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -534,6 +534,9 @@ importers: openai: specifier: ^4.33.1 version: 4.33.1 + p-limit: + specifier: ^6.2.0 + version: 6.2.0 parse-duration: specifier: ^1.1.0 version: 1.1.0 diff --git a/scripts/start-prometheus.sh b/scripts/start-prometheus.sh new file mode 100755 index 0000000000..04e43d4317 --- /dev/null +++ b/scripts/start-prometheus.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +set -e + +prometheus --config.file=./.configs/prometheus.yml --storage.tsdb.path=/tmp/prom-data \ No newline at end of file From 2131b661f03b85ba3b067d8c77ad550dbfbfa258 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 8 May 2025 11:22:31 +0100 Subject: [PATCH 19/33] insert payloads into their own table only on insert and then join --- .../admin.api.v1.runs-replication.start.ts | 10 +- .../services/runsReplicationService.server.ts | 207 ++++++++++++++---- .../schema/003_create_task_runs_v1.sql | 22 +- internal-packages/clickhouse/src/index.ts | 3 +- .../clickhouse/src/taskRuns.test.ts | 42 +++- internal-packages/clickhouse/src/taskRuns.ts | 30 ++- 6 files changed, 255 insertions(+), 59 deletions(-) diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts 
b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts index 108ae7db91..8e50006158 100644 --- a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts @@ -2,6 +2,11 @@ import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; import { prisma } from "~/db.server"; import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; import { runsReplicationInstance } from "~/services/runsReplicationInstance.server"; +import { z } from "zod"; + +const schema = z.object({ + insertStrategy: z.enum(["streaming", "batching"]).optional(), +}); export async function action({ request }: ActionFunctionArgs) { // Next authenticate the request @@ -26,7 +31,10 @@ export async function action({ request }: ActionFunctionArgs) { } try { - await runsReplicationInstance.start(); + const body = await request.json(); + const { insertStrategy } = schema.parse(body); + + await runsReplicationInstance.start(insertStrategy); return json({ success: true, diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index 91d74b4e41..486e3b544a 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -1,4 +1,4 @@ -import type { ClickHouse, TaskRunV1 } from "@internal/clickhouse"; +import type { ClickHouse, TaskRunV1, RawTaskRunPayloadV1 } from "@internal/clickhouse"; import { RedisOptions } from "@internal/redis"; import { LogicalReplicationClient, Transaction, type PgoutputMessage } from "@internal/replication"; import { Logger } from "@trigger.dev/core/logger"; @@ -25,6 +25,8 @@ export type RunsReplicationServiceOptions = { flushBatchSize?: number; }; +type TaskRunInsert = { _version: bigint; run: TaskRun; event: "insert" | "update" }; + export class RunsReplicationService { private _lastLsn: string | null = null; private _isSubscribed = false; @@ -36,7 +38,7 @@ export class RunsReplicationService { | null = null; private _replicationClient: LogicalReplicationClient; - private _concurrentFlushScheduler: ConcurrentFlushScheduler<{ _version: bigint; run: TaskRun }>; + private _concurrentFlushScheduler: ConcurrentFlushScheduler; private logger: Logger; private _lastReplicationLagMs: number | null = null; private _transactionCounter?: Counter; @@ -64,10 +66,7 @@ export class RunsReplicationService { ackIntervalSeconds: 10, }); - this._concurrentFlushScheduler = new ConcurrentFlushScheduler<{ - _version: bigint; - run: TaskRun; - }>({ + this._concurrentFlushScheduler = new ConcurrentFlushScheduler({ batchSize: options.flushBatchSize ?? 50, flushInterval: options.flushIntervalMs ?? 100, maxConcurrency: options.maxFlushConcurrency ?? 100, @@ -128,7 +127,9 @@ export class RunsReplicationService { } } - async start() { + async start(insertStrategy?: "streaming" | "batching") { + this._insertStrategy = insertStrategy ?? 
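// (The admin route above forwards `insertStrategy` from the request body, so an
// operator can pick the mode at start time. A hedged usage sketch; the path is
// inferred from the Remix flat-route filename and the token variable is
// hypothetical:
//
//   await fetch("/admin/api/v1/runs-replication/start", {
//     method: "POST",
//     headers: {
//       Authorization: `Bearer ${personalAccessToken}`,
//       "Content-Type": "application/json",
//     },
//     body: JSON.stringify({ insertStrategy: "batching" }),
//   });
// )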
this._insertStrategy; + this.logger.info("Starting replication client", { lastLsn: this._lastLsn, }); @@ -216,6 +217,20 @@ export class RunsReplicationService { return; } + const relevantEvents = transaction.events.filter( + (event) => event.tag === "insert" || event.tag === "update" + ); + + if (relevantEvents.length === 0) { + this.logger.debug("No relevant events", { + transaction, + }); + + await this._replicationClient.acknowledge(transaction.commitEndLsn); + + return; + } + this.logger.debug("Handling transaction", { transaction, }); @@ -227,13 +242,21 @@ export class RunsReplicationService { if (this._insertStrategy === "streaming") { await this._concurrentFlushScheduler.addToBatch( - transaction.events.map((event) => ({ _version, run: event.data })) + relevantEvents.map((event) => ({ + _version, + run: event.data, + event: event.tag as "insert" | "update", + })) ); } else { const [flushError] = await tryCatch( this.#flushBatch( nanoid(), - transaction.events.map((event) => ({ _version, run: event.data })) + relevantEvents.map((event) => ({ + _version, + run: event.data, + event: event.tag as "insert" | "update", + })) ) ); @@ -247,7 +270,7 @@ export class RunsReplicationService { await this._replicationClient.acknowledge(transaction.commitEndLsn); } - async #flushBatch(flushId: string, batch: Array<{ _version: bigint; run: TaskRun }>) { + async #flushBatch(flushId: string, batch: Array) { if (batch.length === 0) { this.logger.debug("No runs to flush", { flushId, @@ -260,19 +283,37 @@ export class RunsReplicationService { batchSize: batch.length, }); - const preparedRuns = await Promise.all(batch.map(this.#prepareRun.bind(this))); - const runsToInsert = preparedRuns.filter(Boolean); + const preparedInserts = await Promise.all(batch.map(this.#prepareRunInserts.bind(this))); - if (runsToInsert.length === 0) { - this.logger.debug("No runs to insert", { - flushId, - batchSize: batch.length, - }); - return; - } + const taskRunInserts = preparedInserts + .map(({ taskRunInsert }) => taskRunInsert) + .filter(Boolean); + + const payloadInserts = preparedInserts + .map(({ payloadInsert }) => payloadInsert) + .filter(Boolean); + + this.logger.info("Flushing inserts", { + flushId, + taskRunInserts: taskRunInserts.length, + payloadInserts: payloadInserts.length, + }); + await Promise.all([ + this.#insertTaskRunInserts(taskRunInserts), + this.#insertPayloadInserts(payloadInserts), + ]); + + this.logger.info("Flushed inserts", { + flushId, + taskRunInserts: taskRunInserts.length, + payloadInserts: payloadInserts.length, + }); + } + + async #insertTaskRunInserts(taskRunInserts: TaskRunV1[]) { const [insertError, insertResult] = await this.options.clickhouse.taskRuns.insert( - runsToInsert, + taskRunInserts, { params: { clickhouse_settings: { @@ -283,51 +324,100 @@ export class RunsReplicationService { ); if (insertError) { - this.logger.error("Error inserting runs", { + this.logger.error("Error inserting task run inserts", { error: insertError, - flushId, - batchSize: batch.length, }); - } else { - this.logger.info("Flushed batch", { - flushId, - insertResult, + } + + return insertResult; + } + + async #insertPayloadInserts(payloadInserts: RawTaskRunPayloadV1[]) { + const [insertError, insertResult] = await this.options.clickhouse.taskRuns.insertPayloads( + payloadInserts, + { + params: { + clickhouse_settings: { + wait_for_async_insert: this._insertStrategy === "batching" ? 
1 : 0, + }, + }, + } + ); + + if (insertError) { + this.logger.error("Error inserting payload inserts", { + error: insertError, }); } + + return insertResult; } - async #prepareRun(batchedRun: { - run: TaskRun; - _version: bigint; - }): Promise { + async #prepareRunInserts( + batchedRun: TaskRunInsert + ): Promise<{ taskRunInsert?: TaskRunV1; payloadInsert?: RawTaskRunPayloadV1 }> { this.logger.debug("Preparing run", { batchedRun, }); - const { run, _version } = batchedRun; + const { run, _version, event } = batchedRun; if (!run.environmentType) { - return undefined; + return { + taskRunInsert: undefined, + payloadInsert: undefined, + }; } if (!run.organizationId) { - return undefined; + return { + taskRunInsert: undefined, + payloadInsert: undefined, + }; } - const [payload, output] = await Promise.all([ - this.#prepareJson(run.payload, run.payloadType), - this.#prepareJson(run.output, run.outputType), + if (event === "update") { + const taskRunInsert = await this.#prepareTaskRunInsert( + run, + run.organizationId, + run.environmentType, + _version + ); + + return { + taskRunInsert, + payloadInsert: undefined, + }; + } + + const [taskRunInsert, payloadInsert] = await Promise.all([ + this.#prepareTaskRunInsert(run, run.organizationId, run.environmentType, _version), + this.#preparePayloadInsert(run, _version), ]); + return { + taskRunInsert, + payloadInsert, + }; + } + + async #prepareTaskRunInsert( + run: TaskRun, + organizationId: string, + environmentType: string, + _version: bigint + ): Promise { + const output = await this.#prepareJson(run.output, run.outputType); + return { environment_id: run.runtimeEnvironmentId, - organization_id: run.organizationId, + organization_id: organizationId, project_id: run.projectId, run_id: run.id, updated_at: run.updatedAt.getTime(), created_at: run.createdAt.getTime(), status: run.status, - environment_type: run.environmentType, + environment_type: environmentType, friendly_id: run.friendlyId, engine: run.engine, task_identifier: run.taskIdentifier, @@ -347,7 +437,7 @@ export class RunsReplicationService { usage_duration_ms: run.usageDurationMs, cost_in_cents: run.costInCents, base_cost_in_cents: run.baseCostInCents, - tags: run.runTags, + tags: run.runTags ?? 
[], task_version: run.taskVersion, sdk_version: run.sdkVersion, cli_version: run.cliVersion, @@ -358,22 +448,31 @@ export class RunsReplicationService { is_test: run.isTest, idempotency_key: run.idempotencyKey, expiration_ttl: run.ttl, - payload, output, _version: _version.toString(), }; } + async #preparePayloadInsert(run: TaskRun, _version: bigint): Promise { + const payload = await this.#prepareJson(run.payload, run.payloadType); + + return { + run_id: run.id, + created_at: run.createdAt.getTime(), + payload, + }; + } + async #prepareJson( data: string | undefined | null, dataType: string - ): Promise { + ): Promise<{ data: unknown }> { if (!data) { - return undefined; + return { data: undefined }; } if (dataType !== "application/json" && dataType !== "application/super+json") { - return undefined; + return { data: undefined }; } const packet = { @@ -384,7 +483,7 @@ export class RunsReplicationService { const parsedData = await parsePacket(packet); if (!parsedData) { - return undefined; + return { data: undefined }; } return { data: parsedData }; @@ -453,6 +552,24 @@ export class ConcurrentFlushScheduler { }, registers: [this.metricsRegister], }); + + new Gauge({ + name: "concurrent_flush_scheduler_active_concurrency", + help: "Number of active concurrency", + collect() { + this.set(scheduler.concurrencyLimiter.activeCount); + }, + registers: [this.metricsRegister], + }); + + new Gauge({ + name: "concurrent_flush_scheduler_pending_concurrency", + help: "Number of pending concurrency", + collect() { + this.set(scheduler.concurrencyLimiter.pendingCount); + }, + registers: [this.metricsRegister], + }); } } diff --git a/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql index 9a1fdebdd3..dbab50101c 100644 --- a/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql @@ -68,8 +68,7 @@ CREATE TABLE trigger_dev.task_runs_v1 base_cost_in_cents Float64 DEFAULT 0, /* ─── payload & context ──────────────────────────────────── */ - payload Nullable(JSON(max_dynamic_paths = 2048)), - output Nullable(JSON(max_dynamic_paths = 2048)), + output JSON(max_dynamic_paths = 2048), error Nullable(JSON(max_dynamic_paths = 64)), /* ─── tagging / versions ─────────────────────────────────── */ @@ -92,7 +91,26 @@ ORDER BY (toDate(created_at), environment_id, task_identifier, created_at, run_i ALTER TABLE trigger_dev.task_runs_v1 ADD INDEX idx_tags tags TYPE tokenbf_v1(32768, 3, 0) GRANULARITY 4; +CREATE TABLE trigger_dev.raw_task_runs_payload_v1 +( + run_id String, + created_at DateTime64(3), + payload JSON(max_dynamic_paths = 2048) +) +ENGINE = MergeTree +PARTITION BY toYYYYMMDD(created_at) +ORDER BY (run_id); + +CREATE VIEW trigger_dev.tmp_eric_task_runs_full_v1 AS +SELECT + s.*, + p.payload as payload +FROM trigger_dev.task_runs_v1 AS s FINAL +LEFT JOIN trigger_dev.raw_task_runs_payload_v1 AS p ON s.run_id = p.run_id; + -- +goose Down SET enable_json_type = 0; DROP TABLE IF EXISTS trigger_dev.task_runs_v1; +DROP TABLE IF EXISTS trigger_dev.raw_task_runs_payload_v1; +DROP VIEW IF EXISTS trigger_dev.tmp_eric_task_runs_full_v1; \ No newline at end of file diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index 602ace450c..5c76955dfa 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -2,7 +2,7 @@ import { ClickHouseSettings } from "@clickhouse/client"; 
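// With this commit, payloads live in trigger_dev.raw_task_runs_payload_v1
// (written once per run, on insert) while task_runs_v1 rows are re-written on
// every update, so payload bytes are not churned through the ReplacingMergeTree.
// Readers that need payloads go through the join view created in the migration
// above. A hedged sketch of such a read, assuming the zod-based query helper
// shown in the tests:
//
//   const fullRuns = client.query({
//     name: "query-task-runs-full",
//     query: "SELECT run_id, payload FROM trigger_dev.tmp_eric_task_runs_full_v1",
//     schema: z.object({ run_id: z.string(), payload: z.unknown() }),
//   });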
import { ClickhouseClient } from "./client/client.js"; import { ClickhouseReader, ClickhouseWriter } from "./client/types.js"; import { NoopClient } from "./client/noop.js"; -import { insertTaskRuns } from "./taskRuns.js"; +import { insertTaskRuns, insertRawTaskRunPayloads } from "./taskRuns.js"; import { Logger } from "@trigger.dev/core/logger"; export type * from "./taskRuns.js"; @@ -90,6 +90,7 @@ export class ClickHouse { get taskRuns() { return { insert: insertTaskRuns(this.writer), + insertPayloads: insertRawTaskRunPayloads(this.writer), }; } } diff --git a/internal-packages/clickhouse/src/taskRuns.test.ts b/internal-packages/clickhouse/src/taskRuns.test.ts index a1488062e5..c34586970d 100644 --- a/internal-packages/clickhouse/src/taskRuns.test.ts +++ b/internal-packages/clickhouse/src/taskRuns.test.ts @@ -1,7 +1,7 @@ import { clickhouseTest } from "@internal/testcontainers"; import { z } from "zod"; import { ClickhouseClient } from "./client/client.js"; -import { insertTaskRuns } from "./taskRuns.js"; +import { insertRawTaskRunPayloads, insertTaskRuns } from "./taskRuns.js"; describe("Task Runs V1", () => { clickhouseTest("should be able to insert task runs", async ({ clickhouseContainer }) => { @@ -14,6 +14,10 @@ describe("Task Runs V1", () => { async_insert: 0, // turn off async insert for this test }); + const insertPayloads = insertRawTaskRunPayloads(client, { + async_insert: 0, // turn off async insert for this test + }); + const [insertError, insertResult] = await insert([ { environment_id: "env_1234", @@ -33,9 +37,6 @@ describe("Task Runs V1", () => { updated_at: Date.now(), completed_at: undefined, tags: ["tag1", "tag2"], - payload: { - key: "value", - }, output: { key: "value", }, @@ -90,6 +91,37 @@ describe("Task Runs V1", () => { }), ]) ); + + const [insertPayloadsError, insertPayloadsResult] = await insertPayloads([ + { + run_id: "run_1234", + created_at: Date.now(), + payload: { + key: "value", + }, + }, + ]); + + expect(insertPayloadsError).toBeNull(); + expect(insertPayloadsResult).toEqual(expect.objectContaining({ executed: true })); + expect(insertPayloadsResult?.summary?.written_rows).toEqual("1"); + + const queryPayloads = client.query({ + name: "query-raw-task-run-payloads", + query: "SELECT * FROM trigger_dev.raw_task_runs_payload_v1", + schema: z.object({ + run_id: z.string(), + created_at: z.coerce.date(), + payload: z.unknown(), + }), + }); + + const [queryPayloadsError, resultPayloads] = await queryPayloads({ run_id: "run_1234" }); + + expect(queryPayloadsError).toBeNull(); + expect(resultPayloads).toEqual( + expect.arrayContaining([expect.objectContaining({ run_id: "run_1234" })]) + ); }); clickhouseTest("should deduplicate on the _version column", async ({ clickhouseContainer }) => { @@ -135,7 +167,6 @@ describe("Task Runs V1", () => { usage_duration_ms: 0, cost_in_cents: 0, base_cost_in_cents: 0, - payload: { failCount: "3" }, output: null, error: null, tags: [], @@ -178,7 +209,6 @@ describe("Task Runs V1", () => { usage_duration_ms: 0, cost_in_cents: 0, base_cost_in_cents: 0, - payload: { failCount: "3" }, output: null, error: null, tags: [], diff --git a/internal-packages/clickhouse/src/taskRuns.ts b/internal-packages/clickhouse/src/taskRuns.ts index 5f2ad8bf71..fc9572f9c3 100644 --- a/internal-packages/clickhouse/src/taskRuns.ts +++ b/internal-packages/clickhouse/src/taskRuns.ts @@ -45,10 +45,9 @@ export const TaskRunV1 = z.object({ usage_duration_ms: z.number().int().default(0), cost_in_cents: z.number().default(0), base_cost_in_cents: 
z.number().default(0),
-  payload: z.unknown().nullish(),
-  output: z.unknown().nullish(),
+  output: z.unknown(),
   error: TaskRunError.nullish(),
-  tags: z.array(z.string()).nullish(),
+  tags: z.array(z.string()).default([]),
   task_version: z.string().nullish(),
   sdk_version: z.string().nullish(),
   cli_version: z.string().nullish(),
@@ -64,7 +63,7 @@ export const TaskRunV1 = z.object({
   _version: z.string(),
 });
 
-export type TaskRunV1 = z.infer<typeof TaskRunV1>;
+export type TaskRunV1 = z.input<typeof TaskRunV1>;
 
 export function insertTaskRuns(ch: ClickhouseWriter, settings?: ClickHouseSettings) {
   return ch.insert({
@@ -80,3 +79,26 @@ export function insertTaskRuns(ch: ClickhouseWriter, settings?: ClickHouseSettin
     },
   });
 }
+
+export const RawTaskRunPayloadV1 = z.object({
+  run_id: z.string(),
+  created_at: z.number().int(),
+  payload: z.unknown(),
+});
+
+export type RawTaskRunPayloadV1 = z.infer<typeof RawTaskRunPayloadV1>;
+
+export function insertRawTaskRunPayloads(ch: ClickhouseWriter, settings?: ClickHouseSettings) {
+  return ch.insert({
+    name: "insertRawTaskRunPayloads",
+    table: "trigger_dev.raw_task_runs_payload_v1",
+    schema: RawTaskRunPayloadV1,
+    settings: {
+      async_insert: 1,
+      wait_for_async_insert: 0,
+      async_insert_max_data_size: "1000000",
+      async_insert_busy_timeout_ms: 1000,
+      ...settings,
+    },
+  });
+}

From f3e90412165f530b464c8bb6f5f39add1183e77c Mon Sep 17 00:00:00 2001
From: Eric Allam
Date: Thu, 8 May 2025 16:54:24 +0100
Subject: [PATCH 20/33] Prepare for ClickHouse Cloud and run ClickHouse
 migrations during boot in entrypoint.sh

---
 apps/webapp/app/env.server.ts                 |  12 +++
 .../admin.api.v1.runs-replication.start.ts    |   2 +-
 .../admin.api.v1.runs-replication.stop.ts     |   2 +-
 .../admin.api.v1.runs-replication.teardown.ts |   2 +-
 .../runsReplicationInstance.server.ts         |  35 +++++-
 .../services/runsReplicationService.server.ts |  62 +++++------
 docker/Dockerfile                             |  12 +++
 docker/dev-compose.yml                        | 102 +++++++++++++++---
 docker/scripts/entrypoint.sh                  |  14 +++
 .../schema/003_create_task_runs_v1.sql        |  68 +++++-------
 .../clickhouse/src/taskRuns.test.ts           |  42 ++++----
 internal-packages/clickhouse/src/taskRuns.ts  |  49 +++------
 package.json                                  |   4 +-
 scripts/build-dockerfile.sh                   |  18 ++++
 14 files changed, 270 insertions(+), 154 deletions(-)
 create mode 100755 scripts/build-dockerfile.sh

diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts
index f79036bf43..5fc4f1a1a3 100644
--- a/apps/webapp/app/env.server.ts
+++ b/apps/webapp/app/env.server.ts
@@ -754,6 +754,18 @@ const EnvironmentSchema = z.object({
     .optional()
     .transform((v) => v ?? process.env.REDIS_PASSWORD),
   RUN_REPLICATION_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ??
"false"), + + RUN_REPLICATION_CLICKHOUSE_URL: z.string().optional(), + RUN_REPLICATION_ENABLED: z.string().default("0"), + RUN_REPLICATION_SLOT_NAME: z.string().default("task_runs_to_clickhouse_v1"), + RUN_REPLICATION_PUBLICATION_NAME: z.string().default("task_runs_to_clickhouse_v1_publication"), + RUN_REPLICATION_MAX_FLUSH_CONCURRENCY: z.coerce.number().int().default(100), + RUN_REPLICATION_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), + RUN_REPLICATION_FLUSH_BATCH_SIZE: z.coerce.number().int().default(100), + RUN_REPLICATION_INSERT_STRATEGY: z.enum(["streaming", "batching"]).default("batching"), + RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS: z.coerce.number().int().default(30_000), + RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS: z.coerce.number().int().default(10_000), + RUN_REPLICATION_ACK_INTERVAL_SECONDS: z.coerce.number().int().default(10), }); export type Environment = z.infer; diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts index 8e50006158..c7af85e208 100644 --- a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts @@ -34,7 +34,7 @@ export async function action({ request }: ActionFunctionArgs) { const body = await request.json(); const { insertStrategy } = schema.parse(body); - await runsReplicationInstance.start(insertStrategy); + await runsReplicationInstance?.start(insertStrategy); return json({ success: true, diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts index ae198fe883..6163ff5f70 100644 --- a/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.stop.ts @@ -26,7 +26,7 @@ export async function action({ request }: ActionFunctionArgs) { } try { - await runsReplicationInstance.stop(); + await runsReplicationInstance?.stop(); return json({ success: true, diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts index 9d17d72742..f32b76383d 100644 --- a/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.teardown.ts @@ -26,7 +26,7 @@ export async function action({ request }: ActionFunctionArgs) { } try { - await runsReplicationInstance.teardown(); + await runsReplicationInstance?.teardown(); return json({ success: true, diff --git a/apps/webapp/app/services/runsReplicationInstance.server.ts b/apps/webapp/app/services/runsReplicationInstance.server.ts index 5a0c110e51..5c76616364 100644 --- a/apps/webapp/app/services/runsReplicationInstance.server.ts +++ b/apps/webapp/app/services/runsReplicationInstance.server.ts @@ -4,6 +4,7 @@ import { singleton } from "~/utils/singleton"; import invariant from "tiny-invariant"; import { env } from "~/env.server"; import { metricsRegister } from "~/metrics.server"; +import { logger } from "./logger.server"; export const runsReplicationInstance = singleton( "runsReplicationInstance", @@ -14,14 +15,22 @@ function initializeRunsReplicationInstance() { const { DATABASE_URL } = process.env; invariant(typeof DATABASE_URL === "string", "DATABASE_URL env var not set"); - const clickhouse = ClickHouse.fromEnv(); + if (!env.RUN_REPLICATION_CLICKHOUSE_URL) { + logger.info("🗃️ Runs replication service not enabled"); + return; + } + + const clickhouse = new ClickHouse({ + 
url: env.RUN_REPLICATION_CLICKHOUSE_URL, + name: "runs-replication", + }); const service = new RunsReplicationService({ clickhouse: clickhouse, pgConnectionUrl: DATABASE_URL, serviceName: "runs-replication", - slotName: "task_runs_to_clickhouse_v1", - publicationName: "task_runs_to_clickhouse_v1_publication", + slotName: env.RUN_REPLICATION_SLOT_NAME, + publicationName: env.RUN_REPLICATION_PUBLICATION_NAME, redisOptions: { keyPrefix: "runs-replication:", port: env.RUN_REPLICATION_REDIS_PORT ?? undefined, @@ -32,7 +41,27 @@ function initializeRunsReplicationInstance() { ...(env.RUN_REPLICATION_REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }), }, metricsRegister: metricsRegister, + maxFlushConcurrency: env.RUN_REPLICATION_MAX_FLUSH_CONCURRENCY, + flushIntervalMs: env.RUN_REPLICATION_FLUSH_INTERVAL_MS, + flushBatchSize: env.RUN_REPLICATION_FLUSH_BATCH_SIZE, + insertStrategy: env.RUN_REPLICATION_INSERT_STRATEGY, + leaderLockTimeoutMs: env.RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS, + leaderLockExtendIntervalMs: env.RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS, + ackIntervalSeconds: env.RUN_REPLICATION_ACK_INTERVAL_SECONDS, }); + if (env.RUN_REPLICATION_ENABLED === "1") { + service + .start() + .then(() => { + logger.info("🗃️ Runs replication service started"); + }) + .catch((error) => { + logger.error("🗃️ Runs replication service failed to start", { + error, + }); + }); + } + return service; } diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index 486e3b544a..f2f4541df2 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -23,9 +23,12 @@ export type RunsReplicationServiceOptions = { maxFlushConcurrency?: number; flushIntervalMs?: number; flushBatchSize?: number; + leaderLockTimeoutMs?: number; + leaderLockExtendIntervalMs?: number; + ackIntervalSeconds?: number; }; -type TaskRunInsert = { _version: bigint; run: TaskRun; event: "insert" | "update" }; +type TaskRunInsert = { _version: bigint; run: TaskRun; event: "insert" | "update" | "delete" }; export class RunsReplicationService { private _lastLsn: string | null = null; @@ -61,9 +64,9 @@ export class RunsReplicationService { autoAcknowledge: false, publicationActions: ["insert", "update"], logger: new Logger("RunsReplicationService", "debug"), - leaderLockTimeoutMs: 30_000, - leaderLockExtendIntervalMs: 10_000, - ackIntervalSeconds: 10, + leaderLockTimeoutMs: options.leaderLockTimeoutMs ?? 30_000, + leaderLockExtendIntervalMs: options.leaderLockExtendIntervalMs ?? 10_000, + ackIntervalSeconds: options.ackIntervalSeconds ?? 
10, }); this._concurrentFlushScheduler = new ConcurrentFlushScheduler({ @@ -217,20 +220,6 @@ export class RunsReplicationService { return; } - const relevantEvents = transaction.events.filter( - (event) => event.tag === "insert" || event.tag === "update" - ); - - if (relevantEvents.length === 0) { - this.logger.debug("No relevant events", { - transaction, - }); - - await this._replicationClient.acknowledge(transaction.commitEndLsn); - - return; - } - this.logger.debug("Handling transaction", { transaction, }); @@ -242,20 +231,20 @@ export class RunsReplicationService { if (this._insertStrategy === "streaming") { await this._concurrentFlushScheduler.addToBatch( - relevantEvents.map((event) => ({ + transaction.events.map((event) => ({ _version, run: event.data, - event: event.tag as "insert" | "update", + event: event.tag, })) ); } else { const [flushError] = await tryCatch( this.#flushBatch( nanoid(), - relevantEvents.map((event) => ({ + transaction.events.map((event) => ({ _version, run: event.data, - event: event.tag as "insert" | "update", + event: event.tag, })) ) ); @@ -376,11 +365,12 @@ export class RunsReplicationService { }; } - if (event === "update") { + if (event === "update" || event === "delete") { const taskRunInsert = await this.#prepareTaskRunInsert( run, run.organizationId, run.environmentType, + event, _version ); @@ -391,7 +381,7 @@ export class RunsReplicationService { } const [taskRunInsert, payloadInsert] = await Promise.all([ - this.#prepareTaskRunInsert(run, run.organizationId, run.environmentType, _version), + this.#prepareTaskRunInsert(run, run.organizationId, run.environmentType, event, _version), this.#preparePayloadInsert(run, _version), ]); @@ -405,6 +395,7 @@ export class RunsReplicationService { run: TaskRun, organizationId: string, environmentType: string, + event: "insert" | "update" | "delete", _version: bigint ): Promise { const output = await this.#prepareJson(run.output, run.outputType); @@ -424,10 +415,10 @@ export class RunsReplicationService { queue: run.queue, span_id: run.spanId, trace_id: run.traceId, - error: run.error ? (run.error as TaskRunError) : undefined, + error: { data: run.error }, attempt: run.attemptNumber ?? 1, - schedule_id: run.scheduleId, - batch_id: run.batchId, + schedule_id: run.scheduleId ?? "", + batch_id: run.batchId ?? "", completed_at: run.completedAt?.getTime(), started_at: run.startedAt?.getTime(), executed_at: run.executedAt?.getTime(), @@ -438,18 +429,19 @@ export class RunsReplicationService { cost_in_cents: run.costInCents, base_cost_in_cents: run.baseCostInCents, tags: run.runTags ?? [], - task_version: run.taskVersion, - sdk_version: run.sdkVersion, - cli_version: run.cliVersion, - machine_preset: run.machinePreset, - root_run_id: run.rootTaskRunId, - parent_run_id: run.parentTaskRunId, + task_version: run.taskVersion ?? "", + sdk_version: run.sdkVersion ?? "", + cli_version: run.cliVersion ?? "", + machine_preset: run.machinePreset ?? "", + root_run_id: run.rootTaskRunId ?? "", + parent_run_id: run.parentTaskRunId ?? "", depth: run.depth, is_test: run.isTest, - idempotency_key: run.idempotencyKey, - expiration_ttl: run.ttl, + idempotency_key: run.idempotencyKey ?? "", + expiration_ttl: run.ttl ?? "", output, _version: _version.toString(), + _is_deleted: event === "delete" ? 
1 : 0, }; } diff --git a/docker/Dockerfile b/docker/Dockerfile index 9f054ba8e2..210de7f309 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,5 +1,8 @@ ARG NODE_IMAGE=node:20.11.1-bullseye-slim@sha256:5a5a92b3a8d392691c983719dbdc65d9f30085d6dcd65376e7a32e6fe9bf4cbe +FROM golang:1.23-alpine AS goose_builder +RUN go install github.com/pressly/goose/v3/cmd/goose@latest + FROM ${NODE_IMAGE} AS pruner WORKDIR /triggerdotdev @@ -43,6 +46,11 @@ WORKDIR /triggerdotdev # Corepack is used to install pnpm RUN corepack enable +# Goose and schemas +COPY --from=goose_builder /go/bin/goose /usr/local/bin/goose +RUN chmod +x /usr/local/bin/goose +COPY --chown=node:node internal-packages/clickhouse/schema /triggerdotdev/internal-packages/clickhouse/schema + COPY --from=pruner --chown=node:node /triggerdotdev/out/full/ . COPY --from=dev-deps --chown=node:node /triggerdotdev/ . COPY --chown=node:node turbo.json turbo.json @@ -70,6 +78,10 @@ COPY --from=builder --chown=node:node /triggerdotdev/apps/webapp/public ./apps/w COPY --from=builder --chown=node:node /triggerdotdev/apps/webapp/prisma/seed.js ./apps/webapp/prisma/seed.js COPY --from=builder --chown=node:node /triggerdotdev/scripts ./scripts +# Goose and schemas +COPY --from=builder /usr/local/bin/goose /usr/local/bin/goose +COPY --from=builder --chown=node:node /triggerdotdev/internal-packages/clickhouse/schema /triggerdotdev/internal-packages/clickhouse/schema + EXPOSE 3000 USER node diff --git a/docker/dev-compose.yml b/docker/dev-compose.yml index 642510e286..d3b02b3bf3 100644 --- a/docker/dev-compose.yml +++ b/docker/dev-compose.yml @@ -3,6 +3,7 @@ version: "3" volumes: database-data: redis-data: + clickhouse: networks: app_network: @@ -10,8 +11,10 @@ networks: services: db: - container_name: devdb - image: postgres:14 + container_name: db-dev + build: + context: . 
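      # Dockerfile.postgres is not part of this diff; it is assumed to extend
      # postgres:14 with pg_partman, since the command below preloads
      # pg_partman_bgw (and sets wal_level=logical, which the runs replication
      # service's logical replication slot depends on).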
+ dockerfile: Dockerfile.postgres restart: always volumes: - database-data:/var/lib/postgresql/data/ @@ -23,6 +26,72 @@ services: - app_network ports: - 5432:5432 + command: + - -c + - listen_addresses=* + - -c + - wal_level=logical + - -c + - shared_preload_libraries=pg_partman_bgw + + electric: + container_name: electric-dev + image: electricsql/electric:1.0.0-beta.15@sha256:4ae0f895753b82684aa31ea1c708e9e86d0a9bca355acb7270dcb24062520810 + restart: always + environment: + DATABASE_URL: postgresql://postgres:postgres@db:5432/postgres?sslmode=disable + networks: + - app_network + ports: + - "3060:3000" + depends_on: + - db + + clickhouse: + image: bitnami/clickhouse:latest + container_name: clickhouse-dev + environment: + CLICKHOUSE_ADMIN_USER: default + CLICKHOUSE_ADMIN_PASSWORD: password + ports: + - "8123:8123" + - "9000:9000" + volumes: + - clickhouse:/bitnami/clickhouse + networks: + - app_network + healthcheck: + test: + [ + "CMD", + "clickhouse-client", + "--host", + "localhost", + "--port", + "9000", + "--user", + "default", + "--password", + "password", + "--query", + "SELECT 1", + ] + interval: 3s + timeout: 5s + retries: 5 + start_period: 10s + + redis: + container_name: redis-dev + image: redis:7 + restart: always + volumes: + - redis-data:/data + networks: + - app_network + ports: + - 6379:6379 + app: build: context: ../ @@ -31,11 +100,15 @@ services: - 3030:3030 depends_on: - db + - electric + - clickhouse + - redis env_file: - ../.env environment: DATABASE_URL: postgres://postgres:postgres@db:5432/postgres?schema=public DIRECT_URL: postgres://postgres:postgres@db:5432/postgres?schema=public + CLICKHOUSE_URL: http://default:password@clickhouse:8123 SESSION_SECRET: secret123 MAGIC_LINK_SECRET: secret123 ENCRYPTION_KEY: secret123 @@ -44,20 +117,17 @@ services: networks: - app_network - redis: - container_name: redis - image: redis:7 + ch-ui: + image: ghcr.io/caioricciuti/ch-ui:latest + container_name: ch-ui-dev restart: always - volumes: - - redis-data:/data + ports: + - "5521:5521" + environment: + VITE_CLICKHOUSE_URL: "http://clickhouse:8123" + VITE_CLICKHOUSE_USER: "default" + VITE_CLICKHOUSE_PASS: "password" + depends_on: + - clickhouse networks: - app_network - ports: - - 6379:6379 - - redisinsight: - image: redislabs/redisinsight:latest - ports: - - "8001:8001" - volumes: - - redis-data:/redisinsight diff --git a/docker/scripts/entrypoint.sh b/docker/scripts/entrypoint.sh index 79186330b2..3c7b3165ab 100755 --- a/docker/scripts/entrypoint.sh +++ b/docker/scripts/entrypoint.sh @@ -6,7 +6,21 @@ if [ -n "$DATABASE_HOST" ]; then fi # Run migrations +echo "Running prisma migrations" pnpm --filter @trigger.dev/database db:migrate:deploy +echo "Prisma migrations done" + +if [ -n "$CLICKHOUSE_URL" ]; then + # Run ClickHouse migrations + echo "Running ClickHouse migrations..." + export GOOSE_DRIVER=clickhouse + export GOOSE_DBSTRING="$CLICKHOUSE_URL" # Use the full URL provided by the env var + export GOOSE_MIGRATION_DIR=/triggerdotdev/internal-packages/clickhouse/schema + /usr/local/bin/goose up + echo "ClickHouse migrations complete." +else + echo "CLICKHOUSE_URL not set, skipping ClickHouse migrations." 
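# goose needs no CLI flags in the branch above because it reads GOOSE_DRIVER,
# GOOSE_DBSTRING and GOOSE_MIGRATION_DIR from the environment; when no
# ClickHouse URL is configured the migrations are skipped on purpose.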
+fi # Copy over required prisma files cp internal-packages/database/prisma/schema.prisma apps/webapp/prisma/ diff --git a/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql index dbab50101c..2b0b0b2a24 100644 --- a/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql +++ b/internal-packages/clickhouse/schema/003_create_task_runs_v1.sql @@ -1,6 +1,4 @@ -- +goose Up -SET enable_json_type = 1; - CREATE TABLE trigger_dev.task_runs_v1 ( /* ─── ids & hierarchy ─────────────────────────────────────── */ @@ -14,42 +12,25 @@ CREATE TABLE trigger_dev.task_runs_v1 attempt UInt8 DEFAULT 1, /* ─── enums / status ──────────────────────────────────────── */ - engine Enum8('V1'=1,'V2'=2) CODEC(T64, LZ4), - status Enum8( - 'DELAYED'=1, - 'PENDING'=2, - 'PENDING_VERSION'=3, - 'WAITING_FOR_DEPLOY'=4, - 'EXECUTING'=5, - 'WAITING_TO_RESUME'=6, - 'RETRYING_AFTER_FAILURE'=7, - 'PAUSED'=8, - 'CANCELED'=9, - 'INTERRUPTED'=10, - 'COMPLETED_SUCCESSFULLY'=11, - 'COMPLETED_WITH_ERRORS'=12, - 'SYSTEM_FAILURE'=13, - 'CRASHED'=14, - 'EXPIRED'=15, - 'TIMED_OUT'=16 - ), + engine LowCardinality(String), + status LowCardinality(String), /* ─── queue / concurrency / schedule ─────────────────────── */ task_identifier String, queue String, - schedule_id Nullable(String), - batch_id Nullable(String), + schedule_id String, + batch_id String, /* ─── related runs ─────────────────────────────────────────────── */ - root_run_id Nullable(String), - parent_run_id Nullable(String), + root_run_id String, + parent_run_id String, depth UInt8 DEFAULT 0, /* ─── telemetry ─────────────────────────────────────────────── */ span_id String, trace_id String, - idempotency_key Nullable(String), + idempotency_key String, /* ─── timing ─────────────────────────────────────────────── */ created_at DateTime64(3), @@ -60,7 +41,7 @@ CREATE TABLE trigger_dev.task_runs_v1 delay_until Nullable(DateTime64(3)), queued_at Nullable(DateTime64(3)), expired_at Nullable(DateTime64(3)), - expiration_ttl Nullable(String), + expiration_ttl String, /* ─── cost / usage ───────────────────────────────────────── */ usage_duration_ms UInt32 DEFAULT 0, @@ -68,24 +49,26 @@ CREATE TABLE trigger_dev.task_runs_v1 base_cost_in_cents Float64 DEFAULT 0, /* ─── payload & context ──────────────────────────────────── */ - output JSON(max_dynamic_paths = 2048), - error Nullable(JSON(max_dynamic_paths = 64)), + output JSON(max_dynamic_paths = 1024), + error JSON(max_dynamic_paths = 64), /* ─── tagging / versions ─────────────────────────────────── */ tags Array(String) CODEC(ZSTD(1)), - task_version Nullable(String) CODEC(LZ4), - sdk_version Nullable(String) CODEC(LZ4), - cli_version Nullable(String) CODEC(LZ4), - machine_preset LowCardinality(Nullable(String)) CODEC(LZ4), + task_version String CODEC(LZ4), + sdk_version String CODEC(LZ4), + cli_version String CODEC(LZ4), + machine_preset LowCardinality(String) CODEC(LZ4), is_test UInt8 DEFAULT 0, /* ─── commit lsn ─────────────────────────────────────────────── */ - _version UInt64 + _version UInt64, + _is_deleted UInt8 DEFAULT 0 ) -ENGINE = ReplacingMergeTree(_version) -PARTITION BY toYYYYMMDD(created_at) -ORDER BY (toDate(created_at), environment_id, task_identifier, created_at, run_id); +ENGINE = ReplacingMergeTree(_version, _is_deleted) +PARTITION BY toYYYYMM(created_at) +ORDER BY (toDate(created_at), environment_id, task_identifier, created_at, run_id) +SETTINGS enable_json_type = 1; /* Fast tag filtering */ ALTER TABLE 
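-- The two-argument ReplacingMergeTree(_version, _is_deleted) form drops rows
-- whose _is_deleted flag is 1 when parts merge (and under FINAL), so a
-- replicated Postgres DELETE eventually disappears from reads here as well.
-- Monthly partitions (toYYYYMM) replace the previous daily ones to keep the
-- part count down.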
trigger_dev.task_runs_v1 @@ -95,22 +78,23 @@ CREATE TABLE trigger_dev.raw_task_runs_payload_v1 ( run_id String, created_at DateTime64(3), - payload JSON(max_dynamic_paths = 2048) + payload JSON(max_dynamic_paths = 1024) ) ENGINE = MergeTree -PARTITION BY toYYYYMMDD(created_at) -ORDER BY (run_id); +PARTITION BY toYYYYMM(created_at) +ORDER BY (run_id) +SETTINGS enable_json_type = 1; CREATE VIEW trigger_dev.tmp_eric_task_runs_full_v1 AS SELECT s.*, p.payload as payload FROM trigger_dev.task_runs_v1 AS s FINAL -LEFT JOIN trigger_dev.raw_task_runs_payload_v1 AS p ON s.run_id = p.run_id; +LEFT JOIN trigger_dev.raw_task_runs_payload_v1 AS p ON s.run_id = p.run_id +SETTINGS enable_json_type = 1; -- +goose Down -SET enable_json_type = 0; DROP TABLE IF EXISTS trigger_dev.task_runs_v1; DROP TABLE IF EXISTS trigger_dev.raw_task_runs_payload_v1; DROP VIEW IF EXISTS trigger_dev.tmp_eric_task_runs_full_v1; \ No newline at end of file diff --git a/internal-packages/clickhouse/src/taskRuns.test.ts b/internal-packages/clickhouse/src/taskRuns.test.ts index c34586970d..2374d5de19 100644 --- a/internal-packages/clickhouse/src/taskRuns.test.ts +++ b/internal-packages/clickhouse/src/taskRuns.test.ts @@ -147,14 +147,14 @@ describe("Task Runs V1", () => { status: "PENDING", task_identifier: "retry-task", queue: "task/retry-task", - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, + schedule_id: "", + batch_id: "", + root_run_id: "", + parent_run_id: "", depth: 0, span_id: "538677637f937f54", trace_id: "20a28486b0b9f50c647b35e8863e36a5", - idempotency_key: null, + idempotency_key: "", created_at: new Date("2025-04-30 16:34:04.312").getTime(), updated_at: new Date("2025-04-30 16:34:04.312").getTime(), started_at: null, @@ -163,17 +163,17 @@ describe("Task Runs V1", () => { delay_until: null, queued_at: new Date("2025-04-30 16:34:04.311").getTime(), expired_at: null, - expiration_ttl: null, + expiration_ttl: "", usage_duration_ms: 0, cost_in_cents: 0, base_cost_in_cents: 0, output: null, error: null, tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, + task_version: "", + sdk_version: "", + cli_version: "", + machine_preset: "", is_test: true, _version: "1", }, @@ -189,14 +189,14 @@ describe("Task Runs V1", () => { status: "COMPLETED_SUCCESSFULLY", task_identifier: "retry-task", queue: "task/retry-task", - schedule_id: null, - batch_id: null, - root_run_id: null, - parent_run_id: null, + schedule_id: "", + batch_id: "", + root_run_id: "", + parent_run_id: "", depth: 0, span_id: "538677637f937f54", trace_id: "20a28486b0b9f50c647b35e8863e36a5", - idempotency_key: null, + idempotency_key: "", created_at: new Date("2025-04-30 16:34:04.312").getTime(), updated_at: new Date("2025-04-30 16:34:04.312").getTime(), started_at: null, @@ -205,17 +205,17 @@ describe("Task Runs V1", () => { delay_until: null, queued_at: new Date("2025-04-30 16:34:04.311").getTime(), expired_at: null, - expiration_ttl: null, + expiration_ttl: "", usage_duration_ms: 0, cost_in_cents: 0, base_cost_in_cents: 0, output: null, error: null, tags: [], - task_version: null, - sdk_version: null, - cli_version: null, - machine_preset: null, + task_version: "", + sdk_version: "", + cli_version: "", + machine_preset: "", is_test: true, _version: "2", }, @@ -225,7 +225,7 @@ describe("Task Runs V1", () => { expect(insertResult).toEqual(expect.objectContaining({ executed: true })); const query = client.query({ - name: "query-run-events", + name: "query-task-runs", query: "SELECT * FROM 
trigger_dev.task_runs_v1 FINAL", schema: z.object({ environment_id: z.string(), diff --git a/internal-packages/clickhouse/src/taskRuns.ts b/internal-packages/clickhouse/src/taskRuns.ts index fc9572f9c3..9a478f523d 100644 --- a/internal-packages/clickhouse/src/taskRuns.ts +++ b/internal-packages/clickhouse/src/taskRuns.ts @@ -1,7 +1,6 @@ +import { ClickHouseSettings } from "@clickhouse/client"; import { z } from "zod"; import { ClickhouseWriter } from "./client/types.js"; -import { ClickHouseSettings } from "@clickhouse/client"; -import { TaskRunError } from "@trigger.dev/core/v3/schemas"; export const TaskRunV1 = z.object({ environment_id: z.string(), @@ -10,32 +9,15 @@ export const TaskRunV1 = z.object({ run_id: z.string(), updated_at: z.number().int(), created_at: z.number().int(), - status: z.enum([ - "DELAYED", - "PENDING", - "PENDING_VERSION", - "WAITING_FOR_DEPLOY", - "EXECUTING", - "WAITING_TO_RESUME", - "RETRYING_AFTER_FAILURE", - "PAUSED", - "CANCELED", - "INTERRUPTED", - "COMPLETED_SUCCESSFULLY", - "COMPLETED_WITH_ERRORS", - "SYSTEM_FAILURE", - "CRASHED", - "EXPIRED", - "TIMED_OUT", - ]), + status: z.string(), environment_type: z.string(), friendly_id: z.string(), attempt: z.number().int().default(1), - engine: z.enum(["V1", "V2"]), + engine: z.string(), task_identifier: z.string(), queue: z.string(), - schedule_id: z.string().nullish(), - batch_id: z.string().nullish(), + schedule_id: z.string(), + batch_id: z.string(), completed_at: z.number().int().nullish(), started_at: z.number().int().nullish(), executed_at: z.number().int().nullish(), @@ -46,21 +28,22 @@ export const TaskRunV1 = z.object({ cost_in_cents: z.number().default(0), base_cost_in_cents: z.number().default(0), output: z.unknown(), - error: TaskRunError.nullish(), + error: z.unknown(), tags: z.array(z.string()).default([]), - task_version: z.string().nullish(), - sdk_version: z.string().nullish(), - cli_version: z.string().nullish(), - machine_preset: z.string().nullish(), - root_run_id: z.string().nullish(), - parent_run_id: z.string().nullish(), + task_version: z.string(), + sdk_version: z.string(), + cli_version: z.string(), + machine_preset: z.string(), + root_run_id: z.string(), + parent_run_id: z.string(), depth: z.number().int().default(0), span_id: z.string(), trace_id: z.string(), - idempotency_key: z.string().nullish(), - expiration_ttl: z.string().nullish(), + idempotency_key: z.string(), + expiration_ttl: z.string(), is_test: z.boolean().default(false), _version: z.string(), + _is_deleted: z.number().int().default(0), }); export type TaskRunV1 = z.input; @@ -75,6 +58,7 @@ export function insertTaskRuns(ch: ClickhouseWriter, settings?: ClickHouseSettin wait_for_async_insert: 0, async_insert_max_data_size: "1000000", async_insert_busy_timeout_ms: 1000, + enable_json_type: 1, ...settings, }, }); @@ -98,6 +82,7 @@ export function insertRawTaskRunPayloads(ch: ClickhouseWriter, settings?: ClickH wait_for_async_insert: 0, async_insert_max_data_size: "1000000", async_insert_busy_timeout_ms: 1000, + enable_json_type: 1, ...settings, }, }); diff --git a/package.json b/package.json index 6afb03eb1f..ab216b8ec0 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "lint": "turbo run lint", "docker": "docker compose -p triggerdotdev-docker -f docker/docker-compose.yml up -d --build --remove-orphans", "docker:stop": "docker compose -p triggerdotdev-docker -f docker/docker-compose.yml stop", - "dev:docker": "docker compose -p triggerdotdev-dev-docker -f docker/dev-compose.yml up -d", + "dev:docker": "docker 
compose -p triggerdotdev-dev-docker -f docker/dev-compose.yml up -d --build --remove-orphans", "dev:docker:build": "docker compose -p triggerdotdev-dev-docker -f docker/dev-compose.yml up -d --build", "dev:docker:stop": "docker compose -p triggerdotdev-dev-docker -f docker/dev-compose.yml stop", "test": "turbo run test --concurrency=1 -- --run", @@ -81,4 +81,4 @@ "@kubernetes/client-node@1.0.0": "patches/@kubernetes__client-node@1.0.0.patch" } } -} +} \ No newline at end of file diff --git a/scripts/build-dockerfile.sh b/scripts/build-dockerfile.sh new file mode 100755 index 0000000000..68baacb4e9 --- /dev/null +++ b/scripts/build-dockerfile.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -e + +docker build -t local-triggerdotdev:latest -f docker/Dockerfile . +image=local-triggerdotdev:latest +src=/triggerdotdev +dst=$(mktemp -d) + +mkdir -p $dst + +echo -e "Extracting image into $dst..." + +container=$(docker create "$image") +docker cp "$container:$src" "$dst" +docker rm "$container" +/Applications/Visual\ Studio\ Code.app/Contents/Resources/app/bin/code "$dst/triggerdotdev" + From da0565e76b02ab76010c633bf61f23c47a165365 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 9 May 2025 11:26:34 +0100 Subject: [PATCH 21/33] Handover WIP and tests --- .../runsReplicationInstance.server.ts | 3 + .../services/runsReplicationService.server.ts | 82 +++++++----- apps/webapp/test/engine/triggerTask.test.ts | 2 - .../test/runsReplicationService.test.ts | 123 ++++++++++++++++++ internal-packages/replication/src/client.ts | 20 ++- 5 files changed, 189 insertions(+), 41 deletions(-) create mode 100644 apps/webapp/test/runsReplicationService.test.ts diff --git a/apps/webapp/app/services/runsReplicationInstance.server.ts b/apps/webapp/app/services/runsReplicationInstance.server.ts index 5c76616364..3eb92aab81 100644 --- a/apps/webapp/app/services/runsReplicationInstance.server.ts +++ b/apps/webapp/app/services/runsReplicationInstance.server.ts @@ -61,6 +61,9 @@ function initializeRunsReplicationInstance() { error, }); }); + + process.on("SIGTERM", service.shutdown.bind(service)); + process.on("SIGINT", service.shutdown.bind(service)); } return service; diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index f2f4541df2..2b93e3fdc8 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -1,9 +1,8 @@ -import type { ClickHouse, TaskRunV1, RawTaskRunPayloadV1 } from "@internal/clickhouse"; +import type { ClickHouse, RawTaskRunPayloadV1, TaskRunV1 } from "@internal/clickhouse"; import { RedisOptions } from "@internal/redis"; import { LogicalReplicationClient, Transaction, type PgoutputMessage } from "@internal/replication"; import { Logger } from "@trigger.dev/core/logger"; import { tryCatch } from "@trigger.dev/core/utils"; -import { TaskRunError } from "@trigger.dev/core/v3/schemas"; import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; import { TaskRun } from "@trigger.dev/database"; import { nanoid } from "nanoid"; @@ -46,6 +45,8 @@ export class RunsReplicationService { private _lastReplicationLagMs: number | null = null; private _transactionCounter?: Counter; private _insertStrategy: "streaming" | "batching"; + private _isShuttingDown = false; + private _isShutDownComplete = false; constructor(private readonly options: RunsReplicationServiceOptions) { this.logger = new Logger("RunsReplicationService", "debug"); @@ -62,7 +63,7 @@ 
export class RunsReplicationService {
       table: "TaskRun",
       redisOptions: options.redisOptions,
       autoAcknowledge: false,
-      publicationActions: ["insert", "update"],
+      publicationActions: ["insert", "update", "delete"],
       logger: new Logger("RunsReplicationService", "debug"),
       leaderLockTimeoutMs: options.leaderLockTimeoutMs ?? 30_000,
       leaderLockExtendIntervalMs: options.leaderLockExtendIntervalMs ?? 10_000,
@@ -84,6 +85,9 @@ export class RunsReplicationService {
     });
 
     this._replicationClient.events.on("heartbeat", async ({ lsn, shouldRespond }) => {
+      if (this._isShuttingDown) return;
+      if (this._isShutDownComplete) return;
+
       if (shouldRespond) {
         await this._replicationClient.acknowledge(lsn);
       }
@@ -130,6 +134,11 @@ export class RunsReplicationService {
     }
   }
 
+  public shutdown() {
+    this.logger.info("Initiating shutdown of runs replication service");
+    this._isShuttingDown = true;
+  }
+
   async start(insertStrategy?: "streaming" | "batching") {
     this._insertStrategy = insertStrategy ?? this._insertStrategy;
 
@@ -201,11 +210,27 @@ export class RunsReplicationService {
   }
 
   async #handleTransaction(transaction: Transaction) {
+    if (this._isShutDownComplete) return;
+
+    let alreadyAcknowledged = false;
+
+    if (this._isShuttingDown) {
+      // We need to immediately acknowledge the transaction
+      // and then try to handle this transaction
+      if (transaction.commitEndLsn) {
+        await this._replicationClient.acknowledge(transaction.commitEndLsn);
+        alreadyAcknowledged = true;
+      }
+
+      await this._replicationClient.stop();
+      this._isShutDownComplete = true;
+    }
+
     this._lastReplicationLagMs = transaction.replicationLagMs;
 
     // If there are no events, do nothing
     if (transaction.events.length === 0) {
-      if (transaction.commitEndLsn) {
+      if (transaction.commitEndLsn && !alreadyAcknowledged) {
         await this._replicationClient.acknowledge(transaction.commitEndLsn);
       }
 
@@ -222,6 +247,7 @@ export class RunsReplicationService {
     this.logger.debug("Handling transaction", {
       transaction,
+      alreadyAcknowledged,
     });
 
     // If there are events, we need to handle them
@@ -230,13 +256,19 @@ export class RunsReplicationService {
     this._transactionCounter?.inc();
 
     if (this._insertStrategy === "streaming") {
-      await this._concurrentFlushScheduler.addToBatch(
-        transaction.events.map((event) => ({
-          _version,
-          run: event.data,
-          event: event.tag,
-        }))
-      );
+      this._concurrentFlushScheduler
+        .addToBatch(
+          transaction.events.map((event) => ({
+            _version,
+            run: event.data,
+            event: event.tag,
+          }))
+        )
+        .catch((error) => {
+          this.logger.error("Error adding to batch", {
+            error,
+          });
+        });
     } else {
       const [flushError] = await tryCatch(
         this.#flushBatch(
@@ -256,7 +288,9 @@ export class RunsReplicationService {
       }
     }
 
-    await this._replicationClient.acknowledge(transaction.commitEndLsn);
+    if (!alreadyAcknowledged) {
+      await this._replicationClient.acknowledge(transaction.commitEndLsn);
+    }
   }
 
   async #flushBatch(flushId: string, batch: Array<TaskRunInsert>) {
@@ -497,7 +531,6 @@ export class ConcurrentFlushScheduler {
   private readonly MAX_CONCURRENCY: number;
   private readonly concurrencyLimiter: ReturnType<typeof pLimit>;
   private flushTimer: NodeJS.Timeout | null;
-  private isShuttingDown;
   private failedBatchCount;
   private metricsRegister?: MetricsRegister;
   private logger: Logger;
@@ -510,7 +543,6 @@ export class ConcurrentFlushScheduler {
     this.MAX_CONCURRENCY = config.maxConcurrency || 1;
     this.concurrencyLimiter = pLimit(this.MAX_CONCURRENCY);
     this.flushTimer = null;
-    this.isShuttingDown = false;
     this.failedBatchCount = 0;
 
     this.logger.info("Initializing ConcurrentFlushScheduler", {
@@
-520,7 +552,6 @@ export class ConcurrentFlushScheduler { }); this.startFlushTimer(); - this.setupShutdownHandlers(); if (!process.env.VITEST && config.metricsRegister) { this.metricsRegister = config.metricsRegister; @@ -592,27 +623,6 @@ export class ConcurrentFlushScheduler { this.logger.debug("Started flush timer", { interval: this.FLUSH_INTERVAL }); } - private setupShutdownHandlers() { - process.on("SIGTERM", this.shutdown.bind(this)); - process.on("SIGINT", this.shutdown.bind(this)); - this.logger.debug("Shutdown handlers configured"); - } - - private async shutdown(): Promise { - if (this.isShuttingDown) return; - this.isShuttingDown = true; - this.logger.info("Initiating shutdown of dynamic flush scheduler", { - remainingItems: this.currentBatch.length, - }); - - await this.checkAndFlush(); - this.clearTimer(); - - this.logger.info("Dynamic flush scheduler shutdown complete", { - totalFailedBatches: this.failedBatchCount, - }); - } - private clearTimer(): void { if (this.flushTimer) { clearInterval(this.flushTimer); diff --git a/apps/webapp/test/engine/triggerTask.test.ts b/apps/webapp/test/engine/triggerTask.test.ts index adde07a435..7b1804578d 100644 --- a/apps/webapp/test/engine/triggerTask.test.ts +++ b/apps/webapp/test/engine/triggerTask.test.ts @@ -32,8 +32,6 @@ import { ValidationResult, } from "~/runEngine/types"; import { RunEngineTriggerTaskService } from "../../app/runEngine/services/triggerTask.server"; -import { ClickHouse } from "@internal/clickhouse"; -import { RunsDashboardService } from "~/services/runsDashboardService.server"; vi.setConfig({ testTimeout: 30_000 }); // 30 seconds timeout diff --git a/apps/webapp/test/runsReplicationService.test.ts b/apps/webapp/test/runsReplicationService.test.ts new file mode 100644 index 0000000000..563052f97a --- /dev/null +++ b/apps/webapp/test/runsReplicationService.test.ts @@ -0,0 +1,123 @@ +import { describe, vi } from "vitest"; + +// Mock the db prisma client +vi.mock("~/db.server", () => ({ + prisma: {}, +})); + +vi.mock("~/services/platform.v3.server", () => ({ + getEntitlement: vi.fn(), +})); + +import { ClickHouse } from "@internal/clickhouse"; +import { containerTest } from "@internal/testcontainers"; +import { setTimeout } from "node:timers/promises"; +import { RunsReplicationService } from "~/services/runsReplicationService.server"; +import { z } from "zod"; + +vi.setConfig({ testTimeout: 60_000 }); + +describe("RunsReplicationService", () => { + containerTest( + "should replicate runs to clickhouse", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 1000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start(); + + const organization = await prisma.organization.create({ + data: { + title: "test", + slug: "test", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test", + slug: "test", + 
organizationId: organization.id, + externalRef: "test", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test", + pkApiKey: "test", + shortcode: "test", + }, + }); + + // Now we insert a row into the table + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: "run_1234", + taskIdentifier: "my-task", + payload: JSON.stringify({ foo: "bar" }), + traceId: "1234", + spanId: "1234", + queue: "test", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + }, + }); + + await setTimeout(1000); + + // Check that the row was replicated to clickhouse + const queryRuns = clickhouse.reader.query({ + name: "runs-replication", + query: "SELECT * FROM trigger_dev.task_runs_v1", + schema: z.any(), + }); + + const [queryError, result] = await queryRuns({}); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual( + expect.objectContaining({ + run_id: taskRun.id, + friendly_id: taskRun.friendlyId, + task_identifier: taskRun.taskIdentifier, + environment_id: runtimeEnvironment.id, + project_id: project.id, + organization_id: organization.id, + environment_type: "DEVELOPMENT", + engine: "V2", + }) + ); + + await runsReplicationService.stop(); + } + ); +}); diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts index c4e73daac3..227517ebe1 100644 --- a/internal-packages/replication/src/client.ts +++ b/internal-packages/replication/src/client.ts @@ -50,6 +50,17 @@ export interface LogicalReplicationClientOptions { * The interval in ms to extend the leader lock (default: 10000) */ leaderLockExtendIntervalMs?: number; + + /** + * The number of times to retry acquiring the leader lock (default: 120) + */ + leaderLockRetryCount?: number; + + /** + * The interval in ms to retry acquiring the leader lock (default: 500) + */ + leaderLockRetryIntervalMs?: number; + /** * The interval in seconds to automatically acknowledge the last LSN if no ack has been sent (default: 10) */ @@ -83,6 +94,8 @@ export class LogicalReplicationClient { private lastAcknowledgedLsn: string | null = null; private leaderLockTimeoutMs: number; private leaderLockExtendIntervalMs: number; + private leaderLockRetryCount: number; + private leaderLockRetryIntervalMs: number; private leaderLockHeartbeatTimer: NodeJS.Timeout | null = null; private ackIntervalSeconds: number; private lastAckTimestamp: number = 0; @@ -106,6 +119,8 @@ export class LogicalReplicationClient { this.leaderLockTimeoutMs = options.leaderLockTimeoutMs ?? 30000; this.leaderLockExtendIntervalMs = options.leaderLockExtendIntervalMs ?? 10000; + this.leaderLockRetryCount = options.leaderLockRetryCount ?? 120; + this.leaderLockRetryIntervalMs = options.leaderLockRetryIntervalMs ?? 500; this.ackIntervalSeconds = options.ackIntervalSeconds ?? 
10; this.redis = createRedisClient( @@ -544,9 +559,8 @@ export class LogicalReplicationClient { [`logical-replication-client:${this.options.name}`], this.leaderLockTimeoutMs, { - retryCount: 60, - retryDelay: 1000, - retryJitter: 100, + retryCount: this.leaderLockRetryCount, + retryDelay: this.leaderLockRetryIntervalMs, } ); } catch (err) { From fc6b69bc356fc665defdab702dc3266d654bb84f Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 9 May 2025 15:51:46 +0100 Subject: [PATCH 22/33] Testing the replication service --- .../services/runsReplicationService.server.ts | 61 +- .../test/runsReplicationService.test.ts | 1227 ++++++++++++++++- internal-packages/testcontainers/src/index.ts | 14 +- 3 files changed, 1277 insertions(+), 25 deletions(-) diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index 2b93e3fdc8..faae02ea08 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -6,6 +6,7 @@ import { tryCatch } from "@trigger.dev/core/utils"; import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; import { TaskRun } from "@trigger.dev/database"; import { nanoid } from "nanoid"; +import EventEmitter from "node:events"; import pLimit from "p-limit"; import { Counter, Gauge } from "prom-client"; import type { MetricsRegister } from "~/metrics.server"; @@ -25,10 +26,15 @@ export type RunsReplicationServiceOptions = { leaderLockTimeoutMs?: number; leaderLockExtendIntervalMs?: number; ackIntervalSeconds?: number; + logger?: Logger; }; type TaskRunInsert = { _version: bigint; run: TaskRun; event: "insert" | "update" | "delete" }; +export type RunsReplicationServiceEvents = { + message: [{ lsn: string; message: PgoutputMessage; service: RunsReplicationService }]; +}; + export class RunsReplicationService { private _lastLsn: string | null = null; private _isSubscribed = false; @@ -44,12 +50,16 @@ export class RunsReplicationService { private logger: Logger; private _lastReplicationLagMs: number | null = null; private _transactionCounter?: Counter; + private _lagGauge?: Gauge; private _insertStrategy: "streaming" | "batching"; private _isShuttingDown = false; private _isShutDownComplete = false; + public readonly events: EventEmitter; + constructor(private readonly options: RunsReplicationServiceOptions) { - this.logger = new Logger("RunsReplicationService", "debug"); + this.logger = options.logger ?? new Logger("RunsReplicationService", "debug"); + this.events = new EventEmitter(); this._insertStrategy = options.insertStrategy ?? 
"streaming"; @@ -113,7 +123,7 @@ export class RunsReplicationService { if (options.metricsRegister) { const replicationService = this; - new Gauge({ + this._lagGauge = new Gauge({ name: "runs_replication_service_replication_lag_ms", help: "The replication lag in milliseconds", collect() { @@ -134,9 +144,25 @@ export class RunsReplicationService { } } - public shutdown() { - this.logger.info("Initiating shutdown of runs replication service"); + public async getTransactionCountMetric() { + return this._transactionCounter?.get(); + } + + public async getLagGaugeMetric() { + return this._lagGauge?.get(); + } + + public async shutdown() { this._isShuttingDown = true; + + this.logger.info("Initiating shutdown of runs replication service"); + + if (!this._currentTransaction) { + this.logger.info("No transaction to commit, shutting down immediately"); + await this._replicationClient.stop(); + this._isShutDownComplete = true; + return; + } } async start(insertStrategy?: "streaming" | "batching") { @@ -162,8 +188,19 @@ export class RunsReplicationService { } async #handleData(lsn: string, message: PgoutputMessage) { + this.logger.debug("Handling data", { + lsn, + tag: message.tag, + }); + + this.events.emit("message", { lsn, message, service: this }); + switch (message.tag) { case "begin": { + if (this._isShuttingDown || this._isShutDownComplete) { + return; + } + this._currentTransaction = { commitLsn: message.commitLsn, xid: message.xid, @@ -195,6 +232,19 @@ export class RunsReplicationService { }); break; } + case "delete": { + if (!this._currentTransaction) { + return; + } + + this._currentTransaction.events.push({ + tag: message.tag, + data: message.old as TaskRun, + raw: message, + }); + + break; + } case "commit": { if (!this._currentTransaction) { return; @@ -202,8 +252,9 @@ export class RunsReplicationService { const replicationLagMs = Date.now() - Number(message.commitTime / 1000n); this._currentTransaction.commitEndLsn = message.commitEndLsn; this._currentTransaction.replicationLagMs = replicationLagMs; - await this.#handleTransaction(this._currentTransaction as Transaction); + const transaction = this._currentTransaction as Transaction; this._currentTransaction = null; + await this.#handleTransaction(transaction); break; } } diff --git a/apps/webapp/test/runsReplicationService.test.ts b/apps/webapp/test/runsReplicationService.test.ts index 563052f97a..98695209df 100644 --- a/apps/webapp/test/runsReplicationService.test.ts +++ b/apps/webapp/test/runsReplicationService.test.ts @@ -1,19 +1,11 @@ -import { describe, vi } from "vitest"; - -// Mock the db prisma client -vi.mock("~/db.server", () => ({ - prisma: {}, -})); - -vi.mock("~/services/platform.v3.server", () => ({ - getEntitlement: vi.fn(), -})); - import { ClickHouse } from "@internal/clickhouse"; import { containerTest } from "@internal/testcontainers"; +import { Logger } from "@trigger.dev/core/logger"; import { setTimeout } from "node:timers/promises"; -import { RunsReplicationService } from "~/services/runsReplicationService.server"; +import { OpenMetricsContentType, Registry } from "prom-client"; import { z } from "zod"; +import { TaskRunStatus } from "~/database-types"; +import { RunsReplicationService } from "~/services/runsReplicationService.server"; vi.setConfig({ testTimeout: 60_000 }); @@ -39,7 +31,7 @@ describe("RunsReplicationService", () => { flushIntervalMs: 100, flushBatchSize: 1, insertStrategy: "batching", - leaderLockTimeoutMs: 1000, + leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 
5, }); @@ -120,4 +112,1213 @@ describe("RunsReplicationService", () => { await runsReplicationService.stop(); } ); + + containerTest( + "should replicate a new TaskRun to ClickHouse using batching insert strategy", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-batching", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-batching", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-batching", + slug: "test-batching", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-batching", + slug: "test-batching", + organizationId: organization.id, + externalRef: "test-batching", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-batching", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-batching", + pkApiKey: "test-batching", + shortcode: "test-batching", + }, + }); + + // Insert a row into the table with a unique friendlyId + const uniqueFriendlyId = `run_batching_${Date.now()}`; + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: uniqueFriendlyId, + taskIdentifier: "my-task-batching", + payload: JSON.stringify({ foo: "bar-batching" }), + traceId: "batching-1234", + spanId: "batching-1234", + queue: "test-batching", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + }, + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for the replicated run + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-batching", + query: "SELECT * FROM trigger_dev.task_runs_v1 WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryError, result] = await queryRuns({ run_id: taskRun.id }); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual( + expect.objectContaining({ + run_id: taskRun.id, + friendly_id: taskRun.friendlyId, + task_identifier: taskRun.taskIdentifier, + environment_id: runtimeEnvironment.id, + project_id: project.id, + organization_id: organization.id, + environment_type: "DEVELOPMENT", + engine: "V2", + }) + ); + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should replicate a new TaskRun to ClickHouse using streaming insert strategy", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-streaming", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + 
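        // Unlike the batching test above, "streaming" hands each transaction's
        // events straight to the ConcurrentFlushScheduler (see
        // #handleTransaction), so these flush settings govern how quickly the
        // scheduler drains rather than how transactions are grouped.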
pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-streaming", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "streaming", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("streaming"); + + const organization = await prisma.organization.create({ + data: { + title: "test-streaming", + slug: "test-streaming", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-streaming", + slug: "test-streaming", + organizationId: organization.id, + externalRef: "test-streaming", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-streaming", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-streaming", + pkApiKey: "test-streaming", + shortcode: "test-streaming", + }, + }); + + // Insert a row into the table with a unique friendlyId + const uniqueFriendlyId = `run_streaming_${Date.now()}`; + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: uniqueFriendlyId, + taskIdentifier: "my-task-streaming", + payload: JSON.stringify({ foo: "bar-streaming" }), + traceId: "streaming-1234", + spanId: "streaming-1234", + queue: "test-streaming", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + }, + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for the replicated run + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-streaming", + query: "SELECT * FROM trigger_dev.task_runs_v1 WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryError, result] = await queryRuns({ run_id: taskRun.id }); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual( + expect.objectContaining({ + run_id: taskRun.id, + friendly_id: taskRun.friendlyId, + task_identifier: taskRun.taskIdentifier, + environment_id: runtimeEnvironment.id, + project_id: project.id, + organization_id: organization.id, + environment_type: "DEVELOPMENT", + engine: "V2", + }) + ); + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should insert the payload into ClickHouse when a TaskRun is created", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-payload", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-payload", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-payload", + slug: "test-payload", + }, + 
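      // The assertion this test builds towards is further down: the replicated
      // payload row is expected wrapped as { data: ... }, because #prepareJson
      // returns { data: parsedData } rather than the raw serialized packet.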
}); + + const project = await prisma.project.create({ + data: { + name: "test-payload", + slug: "test-payload", + organizationId: organization.id, + externalRef: "test-payload", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-payload", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-payload", + pkApiKey: "test-payload", + shortcode: "test-payload", + }, + }); + + // Insert a row into the table with a unique payload + const uniquePayload = { foo: "payload-test", bar: Date.now() }; + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: `run_payload_${Date.now()}`, + taskIdentifier: "my-task-payload", + payload: JSON.stringify(uniquePayload), + payloadType: "application/json", + traceId: "payload-1234", + spanId: "payload-1234", + queue: "test-payload", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + }, + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for the replicated payload + const queryPayloads = clickhouse.reader.query({ + name: "runs-replication-payload", + query: "SELECT * FROM trigger_dev.raw_task_runs_payload_v1 WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryError, result] = await queryPayloads({ run_id: taskRun.id }); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual( + expect.objectContaining({ + run_id: taskRun.id, + payload: expect.objectContaining({ + data: uniquePayload, + }), + }) + ); + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should replicate updates to an existing TaskRun to ClickHouse", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-update", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-update", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-update", + slug: "test-update", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-update", + slug: "test-update", + organizationId: organization.id, + externalRef: "test-update", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-update", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-update", + pkApiKey: "test-update", + shortcode: "test-update", + }, + }); + + // Insert a row into the table + const uniqueFriendlyId = `run_update_${Date.now()}`; + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: uniqueFriendlyId, + taskIdentifier: "my-task-update", + payload: JSON.stringify({ foo: "update-test" }), + 
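+          // NOTE: payload is stored as a string column and payloadType records
+          // its encoding; the payload test above shows application/json
+          // payloads being shipped to trigger_dev.raw_task_runs_payload_v1
+          // alongside the run row itself.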
payloadType: "application/json", + traceId: "update-1234", + spanId: "update-1234", + queue: "test-update", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + }, + }); + + // Wait for initial replication + await setTimeout(1000); + + // Update the status field + await prisma.taskRun.update({ + where: { id: taskRun.id }, + data: { status: TaskRunStatus.COMPLETED_SUCCESSFULLY }, + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for the replicated run + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-update", + query: "SELECT * FROM trigger_dev.task_runs_v1 FINAL WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryError, result] = await queryRuns({ run_id: taskRun.id }); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual( + expect.objectContaining({ + run_id: taskRun.id, + status: TaskRunStatus.COMPLETED_SUCCESSFULLY, + }) + ); + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should replicate deletions of a TaskRun to ClickHouse and mark as deleted", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-delete", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-delete", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-delete", + slug: "test-delete", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-delete", + slug: "test-delete", + organizationId: organization.id, + externalRef: "test-delete", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-delete", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-delete", + pkApiKey: "test-delete", + shortcode: "test-delete", + }, + }); + + // Insert a row into the table + const uniqueFriendlyId = `run_delete_${Date.now()}`; + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: uniqueFriendlyId, + taskIdentifier: "my-task-delete", + payload: JSON.stringify({ foo: "delete-test" }), + payloadType: "application/json", + traceId: "delete-1234", + spanId: "delete-1234", + queue: "test-delete", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + }, + }); + + // Wait for initial replication + await setTimeout(1000); + + // Delete the TaskRun + await prisma.taskRun.delete({ + where: { id: taskRun.id }, + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for the replicated run using FINAL + 
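+      // NOTE (schema assumption, inferred from the FINAL queries in these
+      // tests): task_runs_v1 behaves like a ReplacingMergeTree table, where
+      // updates and deletes arrive as new row versions and FINAL collapses
+      // them to the latest version at query time, e.g.
+      //
+      //   SELECT * FROM trigger_dev.task_runs_v1 FINAL WHERE run_id = {run_id:String}
+      //
+      // so a replicated delete should leave no visible row here.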
const queryRuns = clickhouse.reader.query({ + name: "runs-replication-delete", + query: "SELECT * FROM trigger_dev.task_runs_v1 FINAL WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryError, result] = await queryRuns({ run_id: taskRun.id }); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(0); + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should gracefully shutdown and allow a new service to pick up from the correct LSN (handover)", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-shutdown-handover", + }); + + // Service A + const runsReplicationServiceA = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-shutdown-handover", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationServiceA.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-shutdown-handover", + slug: "test-shutdown-handover", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-shutdown-handover", + slug: "test-shutdown-handover", + organizationId: organization.id, + externalRef: "test-shutdown-handover", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-shutdown-handover", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-shutdown-handover", + pkApiKey: "test-shutdown-handover", + shortcode: "test-shutdown-handover", + }, + }); + + // Insert Run 1 + const run1Id = `run_shutdown_handover_1_${Date.now()}`; + + // Initiate shutdown when the first insert message is received + runsReplicationServiceA.events.on("message", async ({ message, service }) => { + if (message.tag === "insert") { + // Initiate shutdown + await service.shutdown(); + } + }); + + const taskRun1 = await prisma.taskRun.create({ + data: { + friendlyId: run1Id, + taskIdentifier: "my-task-shutdown-handover-1", + payload: JSON.stringify({ foo: "handover-1" }), + payloadType: "application/json", + traceId: "handover-1-1234", + spanId: "handover-1-1234", + queue: "test-shutdown-handover", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + }, + }); + + // Insert Run 2 after shutdown is initiated + const run2Id = `run_shutdown_handover_2_${Date.now()}`; + const taskRun2 = await prisma.taskRun.create({ + data: { + friendlyId: run2Id, + taskIdentifier: "my-task-shutdown-handover-2", + payload: JSON.stringify({ foo: "handover-2" }), + payloadType: "application/json", + traceId: "handover-2-1234", + spanId: "handover-2-1234", + queue: "test-shutdown-handover", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + }, + }); + + // Wait 
for flush to complete + await setTimeout(1000); + + // Query ClickHouse for both runs using FINAL + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-shutdown-handover", + query: "SELECT * FROM trigger_dev.task_runs_v1 FINAL ORDER BY created_at ASC", + schema: z.any(), + }); + + // Make sure only the first run is in ClickHouse + const [queryError, result] = await queryRuns({}); + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual(expect.objectContaining({ run_id: taskRun1.id })); + + // Service B + const runsReplicationServiceB = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-shutdown-handover", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationServiceB.start("batching"); + + // Wait for replication + await setTimeout(1000); + + const [queryErrorB, resultB] = await queryRuns({}); + + expect(queryErrorB).toBeNull(); + expect(resultB?.length).toBe(2); + expect(resultB).toEqual( + expect.arrayContaining([ + expect.objectContaining({ run_id: taskRun1.id }), + expect.objectContaining({ run_id: taskRun2.id }), + ]) + ); + + await runsReplicationServiceB.stop(); + } + ); + + containerTest( + "should not re-process already handled data if shutdown is called after all transactions are processed", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-shutdown-after-processed", + }); + + // Service A + const runsReplicationServiceA = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-shutdown-after-processed", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationServiceA.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-shutdown-after-processed", + slug: "test-shutdown-after-processed", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-shutdown-after-processed", + slug: "test-shutdown-after-processed", + organizationId: organization.id, + externalRef: "test-shutdown-after-processed", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-shutdown-after-processed", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-shutdown-after-processed", + pkApiKey: "test-shutdown-after-processed", + shortcode: "test-shutdown-after-processed", + }, + }); + + // Insert Run 1 + const run1Id = `run_shutdown_after_processed_${Date.now()}`; + const taskRun1 = await prisma.taskRun.create({ + data: { + friendlyId: run1Id, + taskIdentifier: "my-task-shutdown-after-processed", + payload: JSON.stringify({ foo: 
"after-processed" }), + payloadType: "application/json", + traceId: "after-processed-1234", + spanId: "after-processed-1234", + queue: "test-shutdown-after-processed", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + }, + }); + + // Wait for replication to ensure transaction is processed + await setTimeout(1000); + + // Query ClickHouse for the run using FINAL + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-shutdown-after-processed", + query: "SELECT * FROM trigger_dev.task_runs_v1 FINAL WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryErrorA, resultA] = await queryRuns({ run_id: taskRun1.id }); + expect(queryErrorA).toBeNull(); + expect(resultA?.length).toBe(1); + expect(resultA?.[0]).toEqual(expect.objectContaining({ run_id: taskRun1.id })); + + // Shutdown after all transactions are processed + await runsReplicationServiceA.shutdown(); + + await setTimeout(500); // Give a moment for shutdown + + // Insert another run + const taskRun2 = await prisma.taskRun.create({ + data: { + friendlyId: `run_shutdown_after_processed_${Date.now()}`, + taskIdentifier: "my-task-shutdown-after-processed", + payload: JSON.stringify({ foo: "after-processed-2" }), + payloadType: "application/json", + traceId: "after-processed-2-1234", + spanId: "after-processed-2-1234", + queue: "test-shutdown-after-processed", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + }, + }); + + // Service B + const runsReplicationServiceB = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-shutdown-after-processed", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationServiceB.start("batching"); + + await setTimeout(1000); + + // Query ClickHouse for the second run + const [queryErrorB, resultB] = await queryRuns({ run_id: taskRun2.id }); + expect(queryErrorB).toBeNull(); + expect(resultB?.length).toBe(1); + expect(resultB?.[0]).toEqual(expect.objectContaining({ run_id: taskRun2.id })); + + await runsReplicationServiceB.stop(); + } + ); + + containerTest( + "should replicate all 1,000 TaskRuns inserted in bulk to ClickHouse", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-stress-bulk-insert", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-stress-bulk-insert", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 10, + flushIntervalMs: 100, + flushBatchSize: 50, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 
5, + logger: new Logger("runs-replication-stress-bulk-insert", "info"), + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-stress-bulk-insert", + slug: "test-stress-bulk-insert", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-stress-bulk-insert", + slug: "test-stress-bulk-insert", + organizationId: organization.id, + externalRef: "test-stress-bulk-insert", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-stress-bulk-insert", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-stress-bulk-insert", + pkApiKey: "test-stress-bulk-insert", + shortcode: "test-stress-bulk-insert", + }, + }); + + // Prepare 1,000 unique TaskRuns + const now = Date.now(); + const runsData = Array.from({ length: 1000 }, (_, i) => ({ + friendlyId: `run_bulk_${now}_${i}`, + taskIdentifier: `my-task-bulk`, + payload: JSON.stringify({ bulk: i }), + payloadType: "application/json", + traceId: `bulk-${i}`, + spanId: `bulk-${i}`, + queue: "test-stress-bulk-insert", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT" as const, + engine: "V2" as const, + status: "PENDING" as const, + attemptNumber: 1, + createdAt: new Date(now + i), + updatedAt: new Date(now + i), + })); + + // Bulk insert + const created = await prisma.taskRun.createMany({ data: runsData }); + expect(created.count).toBe(1000); + + // Wait for replication + await setTimeout(5000); + + // Query ClickHouse for all runs using FINAL + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-stress-bulk-insert", + query: `SELECT run_id, friendly_id, trace_id, task_identifier FROM trigger_dev.task_runs_v1 FINAL`, + schema: z.any(), + }); + + const [queryError, result] = await queryRuns({}); + expect(queryError).toBeNull(); + expect(result?.length).toBe(1000); + + // Check a few random runs for correctness + for (let i = 0; i < 10; i++) { + const idx = Math.floor(Math.random() * 1000); + const expected = runsData[idx]; + const found = result?.find((r: any) => r.friendly_id === expected.friendlyId); + expect(found).toBeDefined(); + expect(found).toEqual( + expect.objectContaining({ + friendly_id: expected.friendlyId, + trace_id: expected.traceId, + task_identifier: expected.taskIdentifier, + }) + ); + } + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should replicate all 1,000 TaskRuns inserted in bulk to ClickHouse with updates", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-stress-bulk-insert", + }); + + const registry = new Registry(); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-stress-bulk-insert", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 10, + flushIntervalMs: 100, + flushBatchSize: 50, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + logger: new 
Logger("runs-replication-stress-bulk-insert", "info"), + metricsRegister: registry, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-stress-bulk-insert", + slug: "test-stress-bulk-insert", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-stress-bulk-insert", + slug: "test-stress-bulk-insert", + organizationId: organization.id, + externalRef: "test-stress-bulk-insert", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-stress-bulk-insert", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-stress-bulk-insert", + pkApiKey: "test-stress-bulk-insert", + shortcode: "test-stress-bulk-insert", + }, + }); + + // Prepare 1,000 unique TaskRuns + const now = Date.now(); + const runsData = Array.from({ length: 1000 }, (_, i) => ({ + friendlyId: `run_bulk_${now}_${i}`, + taskIdentifier: `my-task-bulk`, + payload: JSON.stringify({ bulk: i }), + payloadType: "application/json", + traceId: `bulk-${i}`, + spanId: `bulk-${i}`, + queue: "test-stress-bulk-insert", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT" as const, + engine: "V2" as const, + status: "PENDING" as const, + attemptNumber: 1, + createdAt: new Date(now + i), + updatedAt: new Date(now + i), + })); + + // Bulk insert + const created = await prisma.taskRun.createMany({ data: runsData }); + expect(created.count).toBe(1000); + + // Update all the runs + await prisma.taskRun.updateMany({ + data: { status: "COMPLETED_SUCCESSFULLY" }, + }); + + // Wait for replication + await setTimeout(5000); + + // Query ClickHouse for all runs using FINAL + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-stress-bulk-insert", + query: `SELECT * FROM trigger_dev.task_runs_v1 FINAL`, + schema: z.any(), + }); + + const [queryError, result] = await queryRuns({}); + expect(queryError).toBeNull(); + expect(result?.length).toBe(1000); + + // Check a few random runs for correctness + for (let i = 0; i < 10; i++) { + const idx = Math.floor(Math.random() * 1000); + const expected = runsData[idx]; + const found = result?.find((r: any) => r.friendly_id === expected.friendlyId); + expect(found).toBeDefined(); + expect(found).toEqual( + expect.objectContaining({ + friendly_id: expected.friendlyId, + trace_id: expected.traceId, + task_identifier: expected.taskIdentifier, + status: "COMPLETED_SUCCESSFULLY", + }) + ); + } + + await runsReplicationService.stop(); + } + ); + + containerTest( + "should replicate all events in a single transaction (insert, update)", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-multi-event-tx", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-multi-event-tx", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 10, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + 
ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-multi-event-tx", + slug: "test-multi-event-tx", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-multi-event-tx", + slug: "test-multi-event-tx", + organizationId: organization.id, + externalRef: "test-multi-event-tx", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-multi-event-tx", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-multi-event-tx", + pkApiKey: "test-multi-event-tx", + shortcode: "test-multi-event-tx", + }, + }); + + // Start a transaction + const [run1, run2] = await prisma.$transaction(async (tx) => { + const run1 = await tx.taskRun.create({ + data: { + friendlyId: `run_multi_event_1_${Date.now()}`, + taskIdentifier: "my-task-multi-event-1", + payload: JSON.stringify({ multi: 1 }), + payloadType: "application/json", + traceId: `multi-1-${Date.now()}`, + spanId: `multi-1-${Date.now()}`, + queue: "test-multi-event-tx", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + attemptNumber: 1, + createdAt: new Date(), + updatedAt: new Date(), + }, + }); + const run2 = await tx.taskRun.create({ + data: { + friendlyId: `run_multi_event_2_${Date.now()}`, + taskIdentifier: "my-task-multi-event-2", + payload: JSON.stringify({ multi: 2 }), + payloadType: "application/json", + traceId: `multi-2-${Date.now()}`, + spanId: `multi-2-${Date.now()}`, + queue: "test-multi-event-tx", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + attemptNumber: 1, + createdAt: new Date(), + updatedAt: new Date(), + }, + }); + await tx.taskRun.update({ + where: { id: run1.id }, + data: { status: "COMPLETED_SUCCESSFULLY" }, + }); + + return [run1, run2]; + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for both runs using FINAL + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-multi-event-tx", + query: `SELECT * FROM trigger_dev.task_runs_v1 FINAL WHERE run_id IN ({run_id_1:String}, {run_id_2:String})`, + schema: z.any(), + params: z.object({ run_id_1: z.string(), run_id_2: z.string() }), + }); + + const [queryError, result] = await queryRuns({ run_id_1: run1.id, run_id_2: run2.id }); + expect(queryError).toBeNull(); + expect(result?.length).toBe(2); + const run1Result = result?.find((r: any) => r.run_id === run1.id); + const run2Result = result?.find((r: any) => r.run_id === run2.id); + expect(run1Result).toBeDefined(); + expect(run1Result).toEqual( + expect.objectContaining({ run_id: run1.id, status: "COMPLETED_SUCCESSFULLY" }) + ); + expect(run2Result).toBeDefined(); + expect(run2Result).toEqual(expect.objectContaining({ run_id: run2.id })); + + await runsReplicationService.stop(); + } + ); }); diff --git a/internal-packages/testcontainers/src/index.ts b/internal-packages/testcontainers/src/index.ts index 9892e0962d..f136f48fc4 100644 --- a/internal-packages/testcontainers/src/index.ts +++ b/internal-packages/testcontainers/src/index.ts @@ -175,16 +175,16 @@ const electricOrigin = async ( }; const clickhouseContainer = async ( - { network }: { network: StartedNetwork }, + { network, task }: { 
network: StartedNetwork } & TaskContext, use: Use ) => { - const { container } = await createClickHouseContainer(network); + const { container, metadata } = await withContainerSetup({ + name: "clickhouseContainer", + task, + setup: createClickHouseContainer(network), + }); - try { - await use(container); - } finally { - await container.stop(); - } + await useContainer("clickhouseContainer", { container, task, use: () => use(container) }); }; const clickhouseClient = async ( From 651d51a0071b1c3488d1df64228f037796b89027 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 9 May 2025 15:55:51 +0100 Subject: [PATCH 23/33] Remove the runs dashboard stuff that we aren't using anymore --- .../app/routes/api.v1.runs.$runId.tags.ts | 23 +- .../services/runsDashboardInstance.server.ts | 253 ------------------ .../services/runsDashboardService.server.ts | 160 ----------- .../app/v3/marqs/devQueueConsumer.server.ts | 31 --- .../v3/marqs/sharedQueueConsumer.server.ts | 54 ---- .../app/v3/services/completeAttempt.server.ts | 27 -- .../services/createTaskRunAttempt.server.ts | 23 -- .../v3/services/enqueueDelayedRun.server.ts | 23 -- .../services/executeTasksWaitingForDeploy.ts | 22 -- .../app/v3/services/finalizeTaskRun.server.ts | 58 ---- .../v3/services/rescheduleTaskRun.server.ts | 23 -- .../app/v3/services/triggerTaskV1.server.ts | 3 - 12 files changed, 1 insertion(+), 699 deletions(-) delete mode 100644 apps/webapp/app/services/runsDashboardInstance.server.ts delete mode 100644 apps/webapp/app/services/runsDashboardService.server.ts diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts index 8660a2178d..a02e3ecefa 100644 --- a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts +++ b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts @@ -4,7 +4,6 @@ import { z } from "zod"; import { prisma } from "~/db.server"; import { createTag, getTagsForRunId, MAX_TAGS_PER_RUN } from "~/models/taskRunTag.server"; import { authenticateApiRequest } from "~/services/apiAuth.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; const ParamsSchema = z.object({ runId: z.string(), @@ -80,7 +79,7 @@ export async function action({ request, params }: ActionFunctionArgs) { } } - const taskRun = await prisma.taskRun.update({ + await prisma.taskRun.update({ where: { friendlyId: parsedParams.data.runId, runtimeEnvironmentId: authenticationResult.environment.id, @@ -95,26 +94,6 @@ export async function action({ request, params }: ActionFunctionArgs) { }, }); - runsDashboard.emit.runTagsUpdated({ - time: new Date(), - run: { - id: taskRun.id, - tags: taskRun.runTags, - status: taskRun.status, - updatedAt: taskRun.updatedAt, - createdAt: taskRun.createdAt, - }, - organization: { - id: authenticationResult.environment.organizationId, - }, - project: { - id: authenticationResult.environment.projectId, - }, - environment: { - id: authenticationResult.environment.id, - }, - }); - return json({ message: `Successfully set ${newTags.length} new tags.` }, { status: 200 }); } catch (error) { return json( diff --git a/apps/webapp/app/services/runsDashboardInstance.server.ts b/apps/webapp/app/services/runsDashboardInstance.server.ts deleted file mode 100644 index 725c07bf31..0000000000 --- a/apps/webapp/app/services/runsDashboardInstance.server.ts +++ /dev/null @@ -1,253 +0,0 @@ -import { ClickHouse } from "@internal/clickhouse"; -import { EventEmitter } from "node:events"; -import { singleton } from "~/utils/singleton"; -import { engine } from 
"~/v3/runEngine.server"; -import { logger } from "./logger.server"; -import { - RunDashboardEventBus, - RunDashboardEventRunAttemptStarted, - RunDashboardEventRunCancelled, - RunDashboardEventRunDelayRescheduled, - RunDashboardEventRunEnqueuedAfterDelay, - RunDashboardEventRunExpired, - RunDashboardEventRunFailed, - RunDashboardEventRunLocked, - RunDashboardEventRunRetryScheduled, - RunDashboardEventRunStatusChanged, - RunDashboardEventRunSucceeded, - RunDashboardEventRunTagsUpdated, - RunDashboardEvents, - RunsDashboardService, -} from "./runsDashboardService.server"; -import { tryCatch } from "@trigger.dev/core/utils"; - -const runDashboardEventBus: RunDashboardEventBus = new EventEmitter(); - -function emitRunStatusChanged(event: RunDashboardEventRunStatusChanged) { - runDashboardEventBus.emit("runStatusChanged", event); -} - -function emitRunCreated(time: Date, runId: string) { - runDashboardEventBus.emit("runCreated", { - time, - runId, - }); -} - -function emitRunAttemptStarted(event: RunDashboardEventRunAttemptStarted) { - runDashboardEventBus.emit("runAttemptStarted", event); -} - -function emitRunFailed(event: RunDashboardEventRunFailed) { - runDashboardEventBus.emit("runFailed", event); -} - -function emitRunSucceeded(event: RunDashboardEventRunSucceeded) { - runDashboardEventBus.emit("runSucceeded", event); -} - -function emitRunCancelled(event: RunDashboardEventRunCancelled) { - runDashboardEventBus.emit("runCancelled", event); -} - -function emitRunRetryScheduled(event: RunDashboardEventRunRetryScheduled) { - runDashboardEventBus.emit("runRetryScheduled", event); -} - -function emitRunDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { - runDashboardEventBus.emit("runDelayRescheduled", event); -} - -function emitRunLocked(event: RunDashboardEventRunLocked) { - runDashboardEventBus.emit("runLocked", event); -} - -function emitRunExpired(event: RunDashboardEventRunExpired) { - runDashboardEventBus.emit("runExpired", event); -} - -function emitRunTagsUpdated(event: RunDashboardEventRunTagsUpdated) { - runDashboardEventBus.emit("runTagsUpdated", event); -} - -function emitRunEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { - runDashboardEventBus.emit("runEnqueuedAfterDelay", event); -} - -export const runsDashboard = singleton("runsDashboard", () => { - const clickhouse = ClickHouse.fromEnv(); - - const service = new RunsDashboardService(clickhouse); - - runDashboardEventBus.on("runCreated", async (event) => { - const [runCreatedError] = await tryCatch(runCreated(event.time, event.runId, service)); - - if (runCreatedError) { - logger.error("RunDashboard: runCreated: runCreated error", { - runId: event.runId, - error: runCreatedError, - }); - } - }); - - runDashboardEventBus.on("runAttemptStarted", async (event) => { - const [runAttemptStartedError] = await tryCatch(service.runAttemptStarted(event)); - - if (runAttemptStartedError) { - logger.error("RunDashboard: runAttemptStarted: runAttemptStarted error", { - runId: event.run.id, - error: runAttemptStartedError, - }); - } - }); - - runDashboardEventBus.on("runStatusChanged", async (event) => { - const [runStatusChangedError] = await tryCatch(service.runStatusChanged(event)); - - if (runStatusChangedError) { - logger.error("RunDashboard: runStatusChanged: runStatusChanged error", { - runId: event.run.id, - error: runStatusChangedError, - }); - } - }); - - runDashboardEventBus.on("runFailed", async (event) => { - const [runFailedError] = await tryCatch(service.runFailed(event)); - - if 
(runFailedError) { - logger.error("RunDashboard: runFailed: runFailed error", { - runId: event.run.id, - error: runFailedError, - }); - } - }); - - runDashboardEventBus.on("runSucceeded", async (event) => { - const [runSucceededError] = await tryCatch(service.runSucceeded(event)); - - if (runSucceededError) { - logger.error("RunDashboard: runSucceeded: runSucceeded error", { - runId: event.run.id, - error: runSucceededError, - }); - } - }); - - runDashboardEventBus.on("runCancelled", async (event) => { - const [runCancelledError] = await tryCatch(service.runCancelled(event)); - - if (runCancelledError) { - logger.error("RunDashboard: runCancelled: runCancelled error", { - runId: event.run.id, - error: runCancelledError, - }); - } - }); - - runDashboardEventBus.on("runRetryScheduled", async (event) => { - const [runRetryScheduledError] = await tryCatch(service.runRetryScheduled(event)); - - if (runRetryScheduledError) { - logger.error("RunDashboard: runRetryScheduled: runRetryScheduled error", { - runId: event.run.id, - error: runRetryScheduledError, - }); - } - }); - - runDashboardEventBus.on("runDelayRescheduled", async (event) => { - const [runDelayRescheduledError] = await tryCatch(service.runDelayRescheduled(event)); - - if (runDelayRescheduledError) { - logger.error("RunDashboard: runDelayRescheduled: runDelayRescheduled error", { - runId: event.run.id, - error: runDelayRescheduledError, - }); - } - }); - - runDashboardEventBus.on("runLocked", async (event) => { - const [runLockedError] = await tryCatch(service.runLocked(event)); - - if (runLockedError) { - logger.error("RunDashboard: runLocked: runLocked error", { - runId: event.run.id, - error: runLockedError, - }); - } - }); - - runDashboardEventBus.on("runExpired", async (event) => { - const [runExpiredError] = await tryCatch(service.runExpired(event)); - - if (runExpiredError) { - logger.error("RunDashboard: runExpired: runExpired error", { - runId: event.run.id, - error: runExpiredError, - }); - } - }); - - engine.eventBus.on("runCreated", async (event) => { - runDashboardEventBus.emit("runCreated", event); - }); - - engine.eventBus.on("runAttemptStarted", async (event) => { - runDashboardEventBus.emit("runAttemptStarted", event); - }); - - engine.eventBus.on("runStatusChanged", async (event) => { - runDashboardEventBus.emit("runStatusChanged", event); - }); - - engine.eventBus.on("runFailed", async (event) => { - runDashboardEventBus.emit("runFailed", event); - }); - - engine.eventBus.on("runSucceeded", async (event) => { - runDashboardEventBus.emit("runSucceeded", event); - }); - - engine.eventBus.on("runCancelled", async (event) => { - runDashboardEventBus.emit("runCancelled", event); - }); - - engine.eventBus.on("runRetryScheduled", async (event) => { - runDashboardEventBus.emit("runRetryScheduled", event); - }); - - engine.eventBus.on("runDelayRescheduled", async (event) => { - runDashboardEventBus.emit("runDelayRescheduled", event); - }); - - engine.eventBus.on("runLocked", async (event) => { - runDashboardEventBus.emit("runLocked", event); - }); - - engine.eventBus.on("runExpired", async (event) => { - runDashboardEventBus.emit("runExpired", event); - }); - - return { - service, - emit: { - runStatusChanged: emitRunStatusChanged, - runCreated: emitRunCreated, - runAttemptStarted: emitRunAttemptStarted, - runFailed: emitRunFailed, - runSucceeded: emitRunSucceeded, - runCancelled: emitRunCancelled, - runRetryScheduled: emitRunRetryScheduled, - runDelayRescheduled: emitRunDelayRescheduled, - runLocked: emitRunLocked, - 
runExpired: emitRunExpired, - runTagsUpdated: emitRunTagsUpdated, - runEnqueuedAfterDelay: emitRunEnqueuedAfterDelay, - }, - }; -}); - -async function runCreated(time: Date, runId: string, service: RunsDashboardService) { - // Noop for now -} diff --git a/apps/webapp/app/services/runsDashboardService.server.ts b/apps/webapp/app/services/runsDashboardService.server.ts deleted file mode 100644 index b1eebd6fc6..0000000000 --- a/apps/webapp/app/services/runsDashboardService.server.ts +++ /dev/null @@ -1,160 +0,0 @@ -import type { ClickHouse } from "@internal/clickhouse"; -import { EventBusEvents } from "@internal/run-engine"; -import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; -import { RuntimeEnvironmentType, TaskRun, TaskRunStatus } from "@trigger.dev/database"; -import { EventEmitter } from "node:events"; -import { logger } from "./logger.server"; - -export class RunsDashboardService { - constructor(private readonly clickhouse: ClickHouse) {} - - private readonly logger = logger.child({ - service: "RunsDashboardService", - }); - - async runAttemptStarted(event: RunDashboardEventRunAttemptStarted) { - // Noop for now - } - - async runEnqueuedAfterDelay(event: RunDashboardEventRunEnqueuedAfterDelay) { - // Noop for now - } - - async runDelayRescheduled(event: RunDashboardEventRunDelayRescheduled) { - // Noop for now - } - - async runLocked(event: RunDashboardEventRunLocked) { - // Noop for now - } - - async runStatusChanged(event: RunDashboardEventRunStatusChanged) { - // Noop for now - } - - async runExpired(event: RunDashboardEventRunExpired) { - // Noop for now - } - - async runSucceeded(event: RunDashboardEventRunSucceeded) { - // Noop for now - } - - async runFailed(event: RunDashboardEventRunFailed) { - // Noop for now - } - - async runRetryScheduled(event: RunDashboardEventRunRetryScheduled) { - // Noop for now - } - - async runCancelled(event: RunDashboardEventRunCancelled) { - // Noop for now - } - - async runTagsUpdated(event: RunDashboardEventRunTagsUpdated) { - // Noop for now - } - - async runCreated( - eventTime: Date, - taskRun: TaskRun, - environmentType: RuntimeEnvironmentType, - organizationId: string - ) { - // Noop for now - } - - async #preparePayload(run: TaskRun): Promise { - if (run.status !== "PENDING" && run.status !== "DELAYED") { - return undefined; - } - - if (run.payloadType !== "application/json" && run.payloadType !== "application/super+json") { - return undefined; - } - - const packet = { - data: run.payload, - dataType: run.payloadType, - }; - - return await parsePacket(packet); - } - - async #prepareOutput(run: { - output: string | undefined; - outputType: string; - }): Promise { - if (!run.output) { - return undefined; - } - - if (run.outputType !== "application/json" && run.outputType !== "application/super+json") { - return undefined; - } - - const packet = { - data: run.output, - dataType: run.outputType, - }; - - return await parsePacket(packet); - } -} - -export type RunDashboardEvents = { - runCreated: [ - { - time: Date; - runId: string; - } - ]; - runEnqueuedAfterDelay: EventBusEvents["runEnqueuedAfterDelay"]; - runDelayRescheduled: EventBusEvents["runDelayRescheduled"]; - runLocked: EventBusEvents["runLocked"]; - runStatusChanged: EventBusEvents["runStatusChanged"]; - runAttemptStarted: EventBusEvents["runAttemptStarted"]; - runExpired: EventBusEvents["runExpired"]; - runSucceeded: EventBusEvents["runSucceeded"]; - runFailed: EventBusEvents["runFailed"]; - runRetryScheduled: EventBusEvents["runRetryScheduled"]; - 
runCancelled: EventBusEvents["runCancelled"]; - runTagsUpdated: [ - { - time: Date; - run: { - id: string; - tags: string[]; - status: TaskRunStatus; - updatedAt: Date; - createdAt: Date; - }; - organization: { - id: string; - }; - project: { - id: string; - }; - environment: { - id: string; - }; - } - ]; -}; - -export type RunDashboardEventArgs = RunDashboardEvents[T]; -export type RunDashboardEventBus = EventEmitter; -export type RunDashboardEventRunAttemptStarted = RunDashboardEventArgs<"runAttemptStarted">[0]; -export type RunDashboardEventRunCreated = RunDashboardEventArgs<"runCreated">[0]; -export type RunDashboardEventRunEnqueuedAfterDelay = - RunDashboardEventArgs<"runEnqueuedAfterDelay">[0]; -export type RunDashboardEventRunDelayRescheduled = RunDashboardEventArgs<"runDelayRescheduled">[0]; -export type RunDashboardEventRunLocked = RunDashboardEventArgs<"runLocked">[0]; -export type RunDashboardEventRunStatusChanged = RunDashboardEventArgs<"runStatusChanged">[0]; -export type RunDashboardEventRunExpired = RunDashboardEventArgs<"runExpired">[0]; -export type RunDashboardEventRunSucceeded = RunDashboardEventArgs<"runSucceeded">[0]; -export type RunDashboardEventRunFailed = RunDashboardEventArgs<"runFailed">[0]; -export type RunDashboardEventRunRetryScheduled = RunDashboardEventArgs<"runRetryScheduled">[0]; -export type RunDashboardEventRunCancelled = RunDashboardEventArgs<"runCancelled">[0]; -export type RunDashboardEventRunTagsUpdated = RunDashboardEventArgs<"runTagsUpdated">[0]; diff --git a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts index d5f0036fd6..307a6be5d4 100644 --- a/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/devQueueConsumer.server.ts @@ -16,7 +16,6 @@ import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.se import { RedisClient, createRedisClient } from "~/redis.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { marqs } from "~/v3/marqs/index.server"; import { resolveVariablesForEnvironment } from "../environmentVariables/environmentVariablesRepository.server"; import { FailedTaskRunService } from "../failedTaskRun.server"; @@ -543,36 +542,6 @@ export class DevQueueConsumer { messageId: message.messageId, }); - runsDashboard.emit.runLocked({ - time: new Date(), - run: { - id: lockedTaskRun.id, - updatedAt: lockedTaskRun.updatedAt, - createdAt: lockedTaskRun.createdAt, - status: lockedTaskRun.status, - lockedAt, - lockedById: backgroundTask.id, - lockedToVersionId: backgroundWorker.id, - lockedQueueId: queue.id, - startedAt, - maxDurationInSeconds: lockedTaskRun.maxDurationInSeconds ?? undefined, - taskVersion: backgroundWorker.version, - sdkVersion: backgroundWorker.sdkVersion, - cliVersion: backgroundWorker.cliVersion, - baseCostInCents: lockedTaskRun.baseCostInCents, - machinePreset: lockedTaskRun.machinePreset ?? 
"small-1x", - }, - organization: { - id: this.env.organizationId, - }, - project: { - id: this.env.projectId, - }, - environment: { - id: this.env.id, - }, - }); - this._inProgressRuns.set(lockedTaskRun.friendlyId, message.messageId); } catch (e) { if (e instanceof Error) { diff --git a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts index d1925f8bc6..21544cc756 100644 --- a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts @@ -37,7 +37,6 @@ import { findEnvironmentById } from "~/models/runtimeEnvironment.server"; import { findQueueInEnvironment, sanitizeQueueName } from "~/models/taskQueue.server"; import { generateJWTTokenForEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { singleton } from "~/utils/singleton"; import { marqs } from "~/v3/marqs/index.server"; import { @@ -929,38 +928,6 @@ export class SharedQueueConsumer { }, }); - if (lockedTaskRun.organizationId) { - runsDashboard.emit.runLocked({ - time: new Date(), - run: { - id: lockedTaskRun.id, - status: lockedTaskRun.status, - updatedAt: lockedTaskRun.updatedAt, - createdAt: lockedTaskRun.createdAt, - lockedAt, - lockedById: backgroundTask.id, - lockedToVersionId: worker.id, - lockedQueueId: queue.id, - startedAt, - baseCostInCents, - machinePreset, - maxDurationInSeconds, - taskVersion: worker.version, - sdkVersion: worker.sdkVersion, - cliVersion: worker.cliVersion, - }, - organization: { - id: lockedTaskRun.organizationId, - }, - project: { - id: lockedTaskRun.projectId, - }, - environment: { - id: lockedTaskRun.runtimeEnvironmentId, - }, - }); - } - return { action: "noop", reason: "scheduled_attempt", @@ -1477,27 +1444,6 @@ export class SharedQueueConsumer { status: "WAITING_FOR_DEPLOY", }, }); - - if (run.organizationId) { - runsDashboard.emit.runStatusChanged({ - time: new Date(), - run: { - id: runId, - status: "WAITING_FOR_DEPLOY", - updatedAt: run.updatedAt, - createdAt: run.createdAt, - }, - organization: { - id: run.organizationId, - }, - project: { - id: run.projectId, - }, - environment: { - id: run.runtimeEnvironmentId, - }, - }); - } } async #resolveCompletedAttemptsForResumeMessage( diff --git a/apps/webapp/app/v3/services/completeAttempt.server.ts b/apps/webapp/app/v3/services/completeAttempt.server.ts index 576b1dbcf5..0296937593 100644 --- a/apps/webapp/app/v3/services/completeAttempt.server.ts +++ b/apps/webapp/app/v3/services/completeAttempt.server.ts @@ -21,7 +21,6 @@ import { PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { safeJsonParse } from "~/utils/json"; import { marqs } from "~/v3/marqs/index.server"; import { createExceptionPropertiesFromError, eventRepository } from "../eventRepository.server"; @@ -617,32 +616,6 @@ export class CompleteAttemptService extends BaseService { }, }); - runsDashboard.emit.runRetryScheduled({ - time: new Date(), - run: { - id: taskRunAttempt.taskRunId, - status: "RETRYING_AFTER_FAILURE", - friendlyId: taskRunAttempt.taskRun.friendlyId, - spanId: taskRunAttempt.taskRun.spanId, - attemptNumber: execution.attempt.number, - queue: taskRunAttempt.taskRun.queue, - traceContext: 
taskRunAttempt.taskRun.traceContext as Record, - taskIdentifier: taskRunAttempt.taskRun.taskIdentifier, - baseCostInCents: taskRunAttempt.taskRun.baseCostInCents, - updatedAt: taskRunAttempt.taskRun.updatedAt, - createdAt: taskRunAttempt.taskRun.createdAt, - error, - }, - organization: { - id: environment.organizationId, - }, - environment: { - ...environment, - orgMember: environment.orgMember ?? null, - }, - retryAt, - }); - if (environment.type === "DEVELOPMENT") { await marqs.requeueMessage(taskRunAttempt.taskRunId, {}, executionRetry.timestamp, "retry"); diff --git a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts index 242cf343bd..60d7448b2e 100644 --- a/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts +++ b/apps/webapp/app/v3/services/createTaskRunAttempt.server.ts @@ -6,7 +6,6 @@ import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { reportInvocationUsage } from "~/services/platform.v3.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { machinePresetFromConfig, machinePresetFromRun } from "../machinePresets.server"; import { FINAL_RUN_STATUSES } from "../taskStatus"; @@ -182,28 +181,6 @@ export class CreateTaskRunAttemptService extends BaseService { }); } - runsDashboard.emit.runAttemptStarted({ - time: new Date(), - run: { - id: taskRun.id, - status: taskRun.status, - createdAt: taskRun.createdAt, - updatedAt: taskRun.updatedAt, - attemptNumber: taskRunAttempt.number, - baseCostInCents: taskRun.baseCostInCents, - executedAt: taskRun.executedAt ?? undefined, - }, - organization: { - id: environment.organizationId, - }, - project: { - id: environment.projectId, - }, - environment: { - id: environment.id, - }, - }); - const machinePreset = machinePresetFromRun(taskRun) ?? machinePresetFromConfig(lockedBy.machineConfig ?? {}); diff --git a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts index 4ee4db562e..0b6149dfae 100644 --- a/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts +++ b/apps/webapp/app/v3/services/enqueueDelayedRun.server.ts @@ -1,6 +1,5 @@ import { parseNaturalLanguageDuration } from "@trigger.dev/core/v3/isomorphic"; import { logger } from "~/services/logger.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { workerQueue } from "~/services/worker.server"; import { commonWorker } from "../commonWorker.server"; import { BaseService } from "./baseService.server"; @@ -99,28 +98,6 @@ export class EnqueueDelayedRunService extends BaseService { } } - if (run.organizationId) { - runsDashboard.emit.runEnqueuedAfterDelay({ - time: new Date(), - run: { - id: run.id, - status: run.status, - queuedAt: run.queuedAt ?? 
new Date(), - updatedAt: run.updatedAt, - createdAt: run.createdAt, - }, - organization: { - id: run.organizationId, - }, - project: { - id: run.projectId, - }, - environment: { - id: run.runtimeEnvironmentId, - }, - }); - } - await enqueueRun({ env: run.runtimeEnvironment, run: run, diff --git a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts index 68c3a34c65..b3e2e4f724 100644 --- a/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts +++ b/apps/webapp/app/v3/services/executeTasksWaitingForDeploy.ts @@ -1,7 +1,6 @@ import { PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { logger } from "~/services/logger.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { workerQueue } from "~/services/worker.server"; import { marqs } from "~/v3/marqs/index.server"; import { BaseService } from "./baseService.server"; @@ -81,27 +80,6 @@ export class ExecuteTasksWaitingForDeployService extends BaseService { }); } - for (const run of runsWaitingForDeploy) { - runsDashboard.emit.runStatusChanged({ - time: new Date(), - run: { - id: run.id, - status: run.status, - updatedAt: run.updatedAt, - createdAt: run.createdAt, - }, - organization: { - id: backgroundWorker.runtimeEnvironment.organizationId, - }, - project: { - id: backgroundWorker.runtimeEnvironment.projectId, - }, - environment: { - id: backgroundWorker.runtimeEnvironment.id, - }, - }); - } - for (const run of runsWaitingForDeploy) { await marqs?.enqueueMessage( backgroundWorker.runtimeEnvironment, diff --git a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts index 8781a454b2..1fe21eed47 100644 --- a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts +++ b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts @@ -4,7 +4,6 @@ import { findQueueInEnvironment } from "~/models/taskQueue.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { updateMetadataService } from "~/services/metadata/updateMetadata.server"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { marqs } from "~/v3/marqs/index.server"; import { generateFriendlyId } from "../friendlyIdentifiers"; import { socketIo } from "../handleSocketIo.server"; @@ -103,63 +102,6 @@ export class FinalizeTaskRunService extends BaseService { ...(include ? { include } : {}), }); - if (run.organizationId) { - if (status === "COMPLETED_SUCCESSFULLY") { - runsDashboard.emit.runSucceeded({ - time: new Date(), - run: { - id: run.id, - status: run.status, - spanId: run.spanId, - output: run.output ?? undefined, - outputType: run.outputType, - taskEventStore: run.taskEventStore, - createdAt: run.createdAt, - completedAt: run.completedAt, - updatedAt: run.updatedAt, - attemptNumber: run.attemptNumber ?? 1, - usageDurationMs: run.usageDurationMs, - costInCents: run.costInCents, - }, - organization: { - id: run.organizationId, - }, - project: { - id: run.projectId, - }, - environment: { - id: run.runtimeEnvironmentId, - }, - }); - } else if (taskRunError) { - runsDashboard.emit.runFailed({ - time: new Date(), - run: { - id: run.id, - status: run.status, - spanId: run.spanId, - error: taskRunError, - taskEventStore: run.taskEventStore, - createdAt: run.createdAt, - completedAt: run.completedAt, - updatedAt: run.updatedAt, - attemptNumber: run.attemptNumber ?? 
1, - usageDurationMs: run.usageDurationMs, - costInCents: run.costInCents, - }, - organization: { - id: run.organizationId, - }, - project: { - id: run.projectId, - }, - environment: { - id: run.runtimeEnvironmentId, - }, - }); - } - } - if (run.ttl) { await ExpireEnqueuedRunService.ack(run.id); } diff --git a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts index 00c51d5000..122fcc2c59 100644 --- a/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts +++ b/apps/webapp/app/v3/services/rescheduleTaskRun.server.ts @@ -1,6 +1,5 @@ import { RescheduleRunRequestBody } from "@trigger.dev/core/v3"; import { TaskRun } from "@trigger.dev/database"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; import { parseDelay } from "~/utils/delays"; import { BaseService, ServiceValidationError } from "./baseService.server"; import { EnqueueDelayedRunService } from "./enqueueDelayedRun.server"; @@ -26,28 +25,6 @@ export class RescheduleTaskRunService extends BaseService { }, }); - if (taskRun.organizationId) { - runsDashboard.emit.runDelayRescheduled({ - time: new Date(), - run: { - id: taskRun.id, - status: taskRun.status, - delayUntil: delay, - updatedAt: updatedRun.updatedAt, - createdAt: updatedRun.createdAt, - }, - organization: { - id: taskRun.organizationId, - }, - project: { - id: taskRun.projectId, - }, - environment: { - id: taskRun.runtimeEnvironmentId, - }, - }); - } - await EnqueueDelayedRunService.reschedule(taskRun.id, delay); return updatedRun; diff --git a/apps/webapp/app/v3/services/triggerTaskV1.server.ts b/apps/webapp/app/v3/services/triggerTaskV1.server.ts index b5f847af87..ff33cd8011 100644 --- a/apps/webapp/app/v3/services/triggerTaskV1.server.ts +++ b/apps/webapp/app/v3/services/triggerTaskV1.server.ts @@ -43,7 +43,6 @@ import { import { getTaskEventStore } from "../taskEventStore.server"; import { enqueueRun } from "./enqueueRun.server"; import { z } from "zod"; -import { runsDashboard } from "~/services/runsDashboardInstance.server"; // This is here for backwords compatibility for v3 users const QueueOptions = z.object({ @@ -606,8 +605,6 @@ export class TriggerTaskServiceV1 extends BaseService { return; } - runsDashboard.emit.runCreated(run.createdAt, run.id); - return { run, isCached: result?.isCached, From b8dc32d330e5b15bcd1f9d68b3000a092a663e55 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 9 May 2025 16:09:11 +0100 Subject: [PATCH 24/33] Added a test for large payloads --- .../test/runsReplicationService.test.ts | 108 ++++++++++++++++++ 1 file changed, 108 insertions(+) diff --git a/apps/webapp/test/runsReplicationService.test.ts b/apps/webapp/test/runsReplicationService.test.ts index 98695209df..1c3d145438 100644 --- a/apps/webapp/test/runsReplicationService.test.ts +++ b/apps/webapp/test/runsReplicationService.test.ts @@ -428,6 +428,114 @@ describe("RunsReplicationService", () => { } ); + containerTest( + "should insert the payload even if it's very large into ClickHouse when a TaskRun is created", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-payload", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-payload", + slotName: 
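+        // NOTE: "very large" is relative here; the payload built below is
+        // three 300-character strings (roughly 1 KB of JSON), enough to
+        // exercise the raw-payload path without slowing the suite down.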
"task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + insertStrategy: "batching", + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + }); + + await runsReplicationService.start("batching"); + + const organization = await prisma.organization.create({ + data: { + title: "test-payload", + slug: "test-payload", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-payload", + slug: "test-payload", + organizationId: organization.id, + externalRef: "test-payload", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-payload", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-payload", + pkApiKey: "test-payload", + shortcode: "test-payload", + }, + }); + + // Insert a row into the table with a unique payload + const largePayload = { + foo: Array.from({ length: 100 }, () => "foo").join(""), + bar: Array.from({ length: 100 }, () => "bar").join(""), + baz: Array.from({ length: 100 }, () => "baz").join(""), + }; + + const taskRun = await prisma.taskRun.create({ + data: { + friendlyId: `run_payload_${Date.now()}`, + taskIdentifier: "my-task-payload", + payload: JSON.stringify(largePayload), + payloadType: "application/json", + traceId: "payload-1234", + spanId: "payload-1234", + queue: "test-payload", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + }, + }); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for the replicated payload + const queryPayloads = clickhouse.reader.query({ + name: "runs-replication-payload", + query: "SELECT * FROM trigger_dev.raw_task_runs_payload_v1 WHERE run_id = {run_id:String}", + schema: z.any(), + params: z.object({ run_id: z.string() }), + }); + + const [queryError, result] = await queryPayloads({ run_id: taskRun.id }); + + expect(queryError).toBeNull(); + expect(result?.length).toBe(1); + expect(result?.[0]).toEqual( + expect.objectContaining({ + run_id: taskRun.id, + payload: expect.objectContaining({ + data: largePayload, + }), + }) + ); + + await runsReplicationService.stop(); + } + ); + containerTest( "should replicate updates to an existing TaskRun to ClickHouse", async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { From ae14fa2be47720c9766c1c65cda45ceaeb421806 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 9 May 2025 16:28:26 +0100 Subject: [PATCH 25/33] hacky typecheck fix --- internal-packages/testcontainers/src/clickhouse.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal-packages/testcontainers/src/clickhouse.ts b/internal-packages/testcontainers/src/clickhouse.ts index ee59521156..577111af3d 100644 --- a/internal-packages/testcontainers/src/clickhouse.ts +++ b/internal-packages/testcontainers/src/clickhouse.ts @@ -171,7 +171,7 @@ async function getAllClickhouseMigrationQueries(migrationsPath: string) { // The array will be: ["", "Up", "up queries", "Down", "down queries"] // We want the "up queries" part which is at index 2 if (parts.length >= 3) { - const upQueries = parts[2].trim(); + const upQueries = parts[2]!.trim(); queries.push( ...upQueries .split(";") From 1d7c2ada29ca8414b1b2d609b26cfc42b04ed2d6 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 9 May 2025 
19:08:00 +0100 Subject: [PATCH 26/33] Fix new internal package typecheck issues and start adding telemetry to the replication service --- .../services/runsReplicationService.server.ts | 47 +++++- apps/webapp/package.json | 1 + internal-packages/clickhouse/package.json | 2 +- internal-packages/replication/package.json | 2 +- internal-packages/replication/src/client.ts | 158 ++++++++++-------- pnpm-lock.yaml | 3 + 6 files changed, 142 insertions(+), 71 deletions(-) diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index faae02ea08..9a94149818 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -10,6 +10,7 @@ import EventEmitter from "node:events"; import pLimit from "p-limit"; import { Counter, Gauge } from "prom-client"; import type { MetricsRegister } from "~/metrics.server"; +import { Span, type Tracer, recordSpanError, trace } from "@internal/tracing"; export type RunsReplicationServiceOptions = { clickhouse: ClickHouse; @@ -27,6 +28,7 @@ export type RunsReplicationServiceOptions = { leaderLockExtendIntervalMs?: number; ackIntervalSeconds?: number; logger?: Logger; + tracer?: Tracer; }; type TaskRunInsert = { _version: bigint; run: TaskRun; event: "insert" | "update" | "delete" }; @@ -54,12 +56,15 @@ export class RunsReplicationService { private _insertStrategy: "streaming" | "batching"; private _isShuttingDown = false; private _isShutDownComplete = false; + private _tracer: Tracer; + private _currentSpan: Span | null = null; public readonly events: EventEmitter; constructor(private readonly options: RunsReplicationServiceOptions) { this.logger = options.logger ?? new Logger("RunsReplicationService", "debug"); this.events = new EventEmitter(); + this._tracer = options.tracer ?? trace.getTracer("runs-replication-service"); this._insertStrategy = options.insertStrategy ?? 
"streaming"; @@ -206,6 +211,13 @@ export class RunsReplicationService { xid: message.xid, events: [], }; + + this._currentSpan = this._tracer.startSpan("handle_transaction", { + attributes: { + "transaction.xid": message.xid, + }, + }); + break; } case "insert": { @@ -269,6 +281,8 @@ export class RunsReplicationService { // We need to immediately acknowledge the transaction // And then try and handle this transaction if (transaction.commitEndLsn) { + this._currentSpan?.setAttribute("transaction.shutdown", true); + await this._replicationClient.acknowledge(transaction.commitEndLsn); alreadyAcknowledged = true; } @@ -279,12 +293,23 @@ export class RunsReplicationService { this._lastReplicationLagMs = transaction.replicationLagMs; + this._currentSpan?.setAttribute("transaction.replication_lag_ms", transaction.replicationLagMs); + this._currentSpan?.setAttribute("transaction.xid", transaction.xid); + + if (transaction.commitEndLsn) { + this._currentSpan?.setAttribute("transaction.commit_end_lsn", transaction.commitEndLsn); + } + + this._currentSpan?.setAttribute("transaction.events", transaction.events.length); + // If there are no events, do nothing if (transaction.events.length === 0) { if (transaction.commitEndLsn && !alreadyAcknowledged) { await this._replicationClient.acknowledge(transaction.commitEndLsn); } + this._currentSpan?.end(); + return; } @@ -293,6 +318,8 @@ export class RunsReplicationService { transaction, }); + this._currentSpan?.end(); + return; } @@ -301,12 +328,19 @@ export class RunsReplicationService { alreadyAcknowledged, }); + const lsnToUInt64Start = process.hrtime.bigint(); + // If there are events, we need to handle them const _version = lsnToUInt64(transaction.commitEndLsn); + this._currentSpan?.setAttribute( + "transaction.lsn_to_uint64_ms", + Number(process.hrtime.bigint() - lsnToUInt64Start) / 1_000_000 + ); + this._transactionCounter?.inc(); - if (this._insertStrategy === "streaming") { + if (this._insertStrategy === "batching") { this._concurrentFlushScheduler .addToBatch( transaction.events.map((event) => ({ @@ -336,12 +370,23 @@ export class RunsReplicationService { this.logger.error("Error flushing batch", { error: flushError, }); + + if (this._currentSpan) { + recordSpanError(this._currentSpan, flushError); + } } } if (!alreadyAcknowledged) { + const acknowledgeStart = process.hrtime.bigint(); await this._replicationClient.acknowledge(transaction.commitEndLsn); + this._currentSpan?.setAttribute( + "transaction.acknowledge_ms", + Number(process.hrtime.bigint() - acknowledgeStart) / 1_000_000 + ); } + + this._currentSpan?.end(); } async #flushBatch(flushId: string, batch: Array) { diff --git a/apps/webapp/package.json b/apps/webapp/package.json index 0e9063e5a1..a4ace5f946 100644 --- a/apps/webapp/package.json +++ b/apps/webapp/package.json @@ -105,6 +105,7 @@ "@trigger.dev/otlp-importer": "workspace:*", "@trigger.dev/platform": "1.0.14", "@trigger.dev/sdk": "workspace:*", + "@internal/tracing": "workspace:*", "@types/pg": "8.6.6", "@uiw/react-codemirror": "^4.19.5", "@unkey/cache": "^1.5.0", diff --git a/internal-packages/clickhouse/package.json b/internal-packages/clickhouse/package.json index a8109eec5b..3bfe84f762 100644 --- a/internal-packages/clickhouse/package.json +++ b/internal-packages/clickhouse/package.json @@ -20,7 +20,7 @@ }, "scripts": { "clean": "rimraf dist", - "typecheck": "tsc --noEmit", + "typecheck": "tsc --noEmit -p tsconfig.build.json", "build": "pnpm run clean && tsc -p tsconfig.build.json", "dev": "tsc --watch -p tsconfig.build.json", 
"db:migrate": "docker compose -p triggerdotdev-docker -f ../../docker/docker-compose.yml up clickhouse_migrator --build", diff --git a/internal-packages/replication/package.json b/internal-packages/replication/package.json index adfc1bfe50..6f5206d7fc 100644 --- a/internal-packages/replication/package.json +++ b/internal-packages/replication/package.json @@ -21,7 +21,7 @@ }, "scripts": { "clean": "rimraf dist", - "typecheck": "tsc --noEmit", + "typecheck": "tsc --noEmit -p tsconfig.build.json", "build": "pnpm run clean && tsc -p tsconfig.build.json", "dev": "tsc --watch -p tsconfig.build.json", "test": "vitest --sequence.concurrent=false --no-file-parallelism", diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts index 227517ebe1..db9044c1a0 100644 --- a/internal-packages/replication/src/client.ts +++ b/internal-packages/replication/src/client.ts @@ -7,6 +7,7 @@ import { createRedisClient } from "@internal/redis"; import { Logger } from "@trigger.dev/core/logger"; import { LogicalReplicationClientError } from "./errors.js"; import { PgoutputMessage, PgoutputParser, getPgoutputStartReplicationSQL } from "./pgoutput.js"; +import { startSpan, trace, Tracer } from "@internal/tracing"; export interface LogicalReplicationClientOptions { /** @@ -70,6 +71,8 @@ export interface LogicalReplicationClientOptions { * The actions to publish to the publication. */ publicationActions?: Array<"insert" | "update" | "delete" | "truncate">; + + tracer?: Tracer; } export type LogicalReplicationClientEvents = { @@ -101,6 +104,7 @@ export class LogicalReplicationClient { private lastAckTimestamp: number = 0; private ackIntervalTimer: NodeJS.Timeout | null = null; private _isStopped: boolean = false; + private _tracer: Tracer; public get lastLsn(): string { return this.lastAcknowledgedLsn ?? "0/00000000"; @@ -113,6 +117,7 @@ export class LogicalReplicationClient { constructor(options: LogicalReplicationClientOptions) { this.options = options; this.logger = options.logger ?? new Logger("LogicalReplicationClient", "info"); + this._tracer = options.tracer ?? trace.getTracer("logical-replication-client"); this.autoAcknowledge = typeof options.autoAcknowledge === "boolean" ? 
options.autoAcknowledge : true; @@ -145,54 +150,62 @@ export class LogicalReplicationClient { } public async stop(): Promise { - if (this._isStopped) return this; - this._isStopped = true; - // Clean up leader lock heartbeat - if (this.leaderLockHeartbeatTimer) { - clearInterval(this.leaderLockHeartbeatTimer); - this.leaderLockHeartbeatTimer = null; - } - // Clean up ack interval - if (this.ackIntervalTimer) { - clearInterval(this.ackIntervalTimer); - this.ackIntervalTimer = null; - } - // Release leader lock if held - await this.#releaseLeaderLock(); + return await startSpan(this._tracer, "logical_replication_client.stop", async (span) => { + if (this._isStopped) return this; + + span.setAttribute("replication_client.name", this.options.name); + span.setAttribute("replication_client.table", this.options.table); + span.setAttribute("replication_client.slot_name", this.options.slotName); + span.setAttribute("replication_client.publication_name", this.options.publicationName); + + this._isStopped = true; + // Clean up leader lock heartbeat + if (this.leaderLockHeartbeatTimer) { + clearInterval(this.leaderLockHeartbeatTimer); + this.leaderLockHeartbeatTimer = null; + } + // Clean up ack interval + if (this.ackIntervalTimer) { + clearInterval(this.ackIntervalTimer); + this.ackIntervalTimer = null; + } + // Release leader lock if held + await this.#releaseLeaderLock(); - this.connection?.removeAllListeners(); - this.connection = null; + this.connection?.removeAllListeners(); + this.connection = null; - if (this.client) { - this.client.removeAllListeners(); + if (this.client) { + this.client.removeAllListeners(); - const [endError] = await tryCatch(this.client.end()); + const [endError] = await tryCatch(this.client.end()); - if (endError) { - this.logger.error("Failed to end client", { - name: this.options.name, - error: endError, - }); - } else { - this.logger.info("Ended client", { - name: this.options.name, - }); + if (endError) { + this.logger.error("Failed to end client", { + name: this.options.name, + error: endError, + }); + } else { + this.logger.info("Ended client", { + name: this.options.name, + }); + } + this.client = null; } - this.client = null; - } - // clear any intervals - if (this.leaderLockHeartbeatTimer) { - clearInterval(this.leaderLockHeartbeatTimer); - this.leaderLockHeartbeatTimer = null; - } + // clear any intervals + if (this.leaderLockHeartbeatTimer) { + clearInterval(this.leaderLockHeartbeatTimer); + this.leaderLockHeartbeatTimer = null; + } - if (this.ackIntervalTimer) { - clearInterval(this.ackIntervalTimer); - this.ackIntervalTimer = null; - } + if (this.ackIntervalTimer) { + clearInterval(this.ackIntervalTimer); + this.ackIntervalTimer = null; + } - return this; + return this; + }); } public async teardown(): Promise { @@ -523,34 +536,43 @@ export class LogicalReplicationClient { public async acknowledge(lsn: string): Promise { if (this._isStopped) return false; if (!this.connection) return false; - // WAL LSN split - const slice = lsn.split("/"); - let [upperWAL, lowerWAL]: [number, number] = [parseInt(slice[0], 16), parseInt(slice[1], 16)]; - // Timestamp as microseconds since midnight 2000-01-01 - const now = Date.now() - 946080000000; - const upperTimestamp = Math.floor(now / 4294967.296); - const lowerTimestamp = Math.floor(now - upperTimestamp * 4294967.296); - if (lowerWAL === 4294967295) { - upperWAL = upperWAL + 1; - lowerWAL = 0; - } else { - lowerWAL = lowerWAL + 1; - } - const response = Buffer.alloc(34); - response.fill(0x72); // 'r' - 
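// The 34-byte buffer assembled here (unchanged by this patch apart from the
// new tracing span) is Postgres's Standby Status Update message: byte 0 is
// 0x72 (ASCII 'r'), bytes 1-8 carry the last written WAL LSN, bytes 9-16 the
// last flushed LSN, bytes 17-24 the last applied LSN (this client reports the
// same LSN for all three), bytes 25-32 a timestamp in microseconds since
// 2000-01-01, and byte 33 a reply-request flag (0 = no reply needed).
// Dividing milliseconds by 4294967.296 (2^32 / 1000) splits the microsecond
// timestamp into its upper and lower 32-bit words. Note the epoch constant
// 946080000000 is exactly seven days short of the true 2000-01-01 offset of
// 946684800000 ms; the field is informational, so the skew only shows up in
// server-side lag reporting.
//
// The same upper/lower split of the "hi/lo" LSN string underlies the
// lsnToUInt64 helper used elsewhere in this series to derive the ClickHouse
// row _version. A minimal sketch under a hypothetical name (the shipped
// helper lives outside this diff):
//
//   const lsnToUInt64Sketch = (lsn: string): bigint => {
//     const [hi, lo] = lsn.split("/");
//     return (BigInt(parseInt(hi ?? "0", 16)) << 32n) | BigInt(parseInt(lo ?? "0", 16));
//   };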
response.writeUInt32BE(upperWAL, 1); - response.writeUInt32BE(lowerWAL, 5); - response.writeUInt32BE(upperWAL, 9); - response.writeUInt32BE(lowerWAL, 13); - response.writeUInt32BE(upperWAL, 17); - response.writeUInt32BE(lowerWAL, 21); - response.writeUInt32BE(upperTimestamp, 25); - response.writeUInt32BE(lowerTimestamp, 29); - response.writeInt8(0, 33); - // @ts-ignore - this.connection.sendCopyFromChunk(response); - this.lastAckTimestamp = Date.now(); - return true; + + return await startSpan(this._tracer, "logical_replication_client.acknowledge", async (span) => { + span.setAttribute("replication_client.lsn", lsn); + span.setAttribute("replication_client.name", this.options.name); + span.setAttribute("replication_client.table", this.options.table); + span.setAttribute("replication_client.slot_name", this.options.slotName); + span.setAttribute("replication_client.publication_name", this.options.publicationName); + + // WAL LSN split + const slice = lsn.split("/"); + let [upperWAL, lowerWAL]: [number, number] = [parseInt(slice[0], 16), parseInt(slice[1], 16)]; + // Timestamp as microseconds since midnight 2000-01-01 + const now = Date.now() - 946080000000; + const upperTimestamp = Math.floor(now / 4294967.296); + const lowerTimestamp = Math.floor(now - upperTimestamp * 4294967.296); + if (lowerWAL === 4294967295) { + upperWAL = upperWAL + 1; + lowerWAL = 0; + } else { + lowerWAL = lowerWAL + 1; + } + const response = Buffer.alloc(34); + response.fill(0x72); // 'r' + response.writeUInt32BE(upperWAL, 1); + response.writeUInt32BE(lowerWAL, 5); + response.writeUInt32BE(upperWAL, 9); + response.writeUInt32BE(lowerWAL, 13); + response.writeUInt32BE(upperWAL, 17); + response.writeUInt32BE(lowerWAL, 21); + response.writeUInt32BE(upperTimestamp, 25); + response.writeUInt32BE(lowerTimestamp, 29); + response.writeInt8(0, 33); + // @ts-ignore + this.connection.sendCopyFromChunk(response); + this.lastAckTimestamp = Date.now(); + return true; + }); } async #acquireLeaderLock(): Promise { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c3eeb65281..e9a3af4e48 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -255,6 +255,9 @@ importers: '@internal/run-engine': specifier: workspace:* version: link:../../internal-packages/run-engine + '@internal/tracing': + specifier: workspace:* + version: link:../../internal-packages/tracing '@internal/zod-worker': specifier: workspace:* version: link:../../internal-packages/zod-worker From 955bc252e0c69bb726414f59c140bafe5506a79e Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Sun, 11 May 2025 11:59:40 +0100 Subject: [PATCH 27/33] tracing over spans, some other improvements --- apps/webapp/app/env.server.ts | 1 - .../runsReplicationInstance.server.ts | 2 - .../services/runsReplicationService.server.ts | 306 ++++++++---------- .../test/runsReplicationService.test.ts | 274 ++++++++-------- apps/webapp/test/utils/tracing.ts | 19 ++ internal-packages/replication/src/client.ts | 6 +- 6 files changed, 295 insertions(+), 313 deletions(-) create mode 100644 apps/webapp/test/utils/tracing.ts diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index 5fc4f1a1a3..1000e6d798 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -762,7 +762,6 @@ const EnvironmentSchema = z.object({ RUN_REPLICATION_MAX_FLUSH_CONCURRENCY: z.coerce.number().int().default(100), RUN_REPLICATION_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), RUN_REPLICATION_FLUSH_BATCH_SIZE: z.coerce.number().int().default(100), - 
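// The removal below retires the streaming/batching switch: from this patch on
// the service always batches (the _insertStrategy field and the streaming
// branch in the replication service are deleted), so the
// RUN_REPLICATION_INSERT_STRATEGY env var no longer has a reader.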
RUN_REPLICATION_INSERT_STRATEGY: z.enum(["streaming", "batching"]).default("batching"), RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS: z.coerce.number().int().default(30_000), RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS: z.coerce.number().int().default(10_000), RUN_REPLICATION_ACK_INTERVAL_SECONDS: z.coerce.number().int().default(10), diff --git a/apps/webapp/app/services/runsReplicationInstance.server.ts b/apps/webapp/app/services/runsReplicationInstance.server.ts index 3eb92aab81..1b40f2849a 100644 --- a/apps/webapp/app/services/runsReplicationInstance.server.ts +++ b/apps/webapp/app/services/runsReplicationInstance.server.ts @@ -40,11 +40,9 @@ function initializeRunsReplicationInstance() { enableAutoPipelining: true, ...(env.RUN_REPLICATION_REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }), }, - metricsRegister: metricsRegister, maxFlushConcurrency: env.RUN_REPLICATION_MAX_FLUSH_CONCURRENCY, flushIntervalMs: env.RUN_REPLICATION_FLUSH_INTERVAL_MS, flushBatchSize: env.RUN_REPLICATION_FLUSH_BATCH_SIZE, - insertStrategy: env.RUN_REPLICATION_INSERT_STRATEGY, leaderLockTimeoutMs: env.RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS, leaderLockExtendIntervalMs: env.RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS, ackIntervalSeconds: env.RUN_REPLICATION_ACK_INTERVAL_SECONDS, diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index 9a94149818..287314af63 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -1,6 +1,7 @@ import type { ClickHouse, RawTaskRunPayloadV1, TaskRunV1 } from "@internal/clickhouse"; import { RedisOptions } from "@internal/redis"; import { LogicalReplicationClient, Transaction, type PgoutputMessage } from "@internal/replication"; +import { Span, startSpan, trace, type Tracer } from "@internal/tracing"; import { Logger } from "@trigger.dev/core/logger"; import { tryCatch } from "@trigger.dev/core/utils"; import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; @@ -9,8 +10,6 @@ import { nanoid } from "nanoid"; import EventEmitter from "node:events"; import pLimit from "p-limit"; import { Counter, Gauge } from "prom-client"; -import type { MetricsRegister } from "~/metrics.server"; -import { Span, type Tracer, recordSpanError, trace } from "@internal/tracing"; export type RunsReplicationServiceOptions = { clickhouse: ClickHouse; @@ -19,14 +18,13 @@ export type RunsReplicationServiceOptions = { slotName: string; publicationName: string; redisOptions: RedisOptions; - metricsRegister?: MetricsRegister; - insertStrategy?: "streaming" | "batching"; maxFlushConcurrency?: number; flushIntervalMs?: number; flushBatchSize?: number; leaderLockTimeoutMs?: number; leaderLockExtendIntervalMs?: number; ackIntervalSeconds?: number; + acknowledgeTimeoutMs?: number; logger?: Logger; tracer?: Tracer; }; @@ -53,11 +51,13 @@ export class RunsReplicationService { private _lastReplicationLagMs: number | null = null; private _transactionCounter?: Counter; private _lagGauge?: Gauge; - private _insertStrategy: "streaming" | "batching"; private _isShuttingDown = false; private _isShutDownComplete = false; private _tracer: Tracer; private _currentSpan: Span | null = null; + private _currentParseDurationMs: number | null = null; + private _lastAcknowledgedAt: number | null = null; + private _acknowledgeTimeoutMs: number; public readonly events: EventEmitter; @@ -66,7 +66,7 @@ export class RunsReplicationService { this.events = new 
EventEmitter(); this._tracer = options.tracer ?? trace.getTracer("runs-replication-service"); - this._insertStrategy = options.insertStrategy ?? "streaming"; + this._acknowledgeTimeoutMs = options.acknowledgeTimeoutMs ?? 1_000; this._replicationClient = new LogicalReplicationClient({ pgConfig: { @@ -79,7 +79,7 @@ export class RunsReplicationService { redisOptions: options.redisOptions, autoAcknowledge: false, publicationActions: ["insert", "update", "delete"], - logger: new Logger("RunsReplicationService", "debug"), + logger: this.logger, leaderLockTimeoutMs: options.leaderLockTimeoutMs ?? 30_000, leaderLockExtendIntervalMs: options.leaderLockExtendIntervalMs ?? 10_000, ackIntervalSeconds: options.ackIntervalSeconds ?? 10, @@ -90,13 +90,12 @@ export class RunsReplicationService { flushInterval: options.flushIntervalMs ?? 100, maxConcurrency: options.maxFlushConcurrency ?? 100, callback: this.#flushBatch.bind(this), - metricsRegister: options.metricsRegister, }); - this._replicationClient.events.on("data", async ({ lsn, log }) => { + this._replicationClient.events.on("data", async ({ lsn, log, parseDuration }) => { this._lastLsn = lsn; - await this.#handleData(lsn, log); + await this.#handleData(lsn, log, parseDuration); }); this._replicationClient.events.on("heartbeat", async ({ lsn, shouldRespond }) => { @@ -125,28 +124,6 @@ export class RunsReplicationService { this._replicationClient.events.on("leaderElection", (isLeader) => { this.logger.debug("Leader election", { isLeader }); }); - - if (options.metricsRegister) { - const replicationService = this; - this._lagGauge = new Gauge({ - name: "runs_replication_service_replication_lag_ms", - help: "The replication lag in milliseconds", - collect() { - if (!replicationService._lastReplicationLagMs) { - return; - } - - this.set(replicationService._lastReplicationLagMs); - }, - registers: [options.metricsRegister], - }); - - replicationService._transactionCounter = new Counter({ - name: "runs_replication_service_transactions", - help: "The number of transactions", - registers: [options.metricsRegister], - }); - } } public async getTransactionCountMetric() { @@ -170,9 +147,7 @@ export class RunsReplicationService { } } - async start(insertStrategy?: "streaming" | "batching") { - this._insertStrategy = insertStrategy ?? 
this._insertStrategy; - + async start() { this.logger.info("Starting replication client", { lastLsn: this._lastLsn, }); @@ -192,7 +167,7 @@ export class RunsReplicationService { await this._replicationClient.teardown(); } - async #handleData(lsn: string, message: PgoutputMessage) { + async #handleData(lsn: string, message: PgoutputMessage, parseDuration: bigint) { this.logger.debug("Handling data", { lsn, tag: message.tag, @@ -218,6 +193,8 @@ export class RunsReplicationService { }, }); + this._currentParseDurationMs = Number(parseDuration) / 1_000_000; + break; } case "insert": { @@ -225,6 +202,11 @@ export class RunsReplicationService { return; } + if (this._currentParseDurationMs) { + this._currentParseDurationMs = + this._currentParseDurationMs + Number(parseDuration) / 1_000_000; + } + this._currentTransaction.events.push({ tag: message.tag, data: message.new as TaskRun, @@ -237,6 +219,11 @@ export class RunsReplicationService { return; } + if (this._currentParseDurationMs) { + this._currentParseDurationMs = + this._currentParseDurationMs + Number(parseDuration) / 1_000_000; + } + this._currentTransaction.events.push({ tag: message.tag, data: message.new as TaskRun, @@ -249,6 +236,11 @@ export class RunsReplicationService { return; } + if (this._currentParseDurationMs) { + this._currentParseDurationMs = + this._currentParseDurationMs + Number(parseDuration) / 1_000_000; + } + this._currentTransaction.events.push({ tag: message.tag, data: message.old as TaskRun, @@ -261,6 +253,17 @@ export class RunsReplicationService { if (!this._currentTransaction) { return; } + + if (this._currentParseDurationMs) { + this._currentParseDurationMs = + this._currentParseDurationMs + Number(parseDuration) / 1_000_000; + + this._currentSpan?.setAttribute( + "transaction.parse_duration_ms", + this._currentParseDurationMs + ); + } + const replicationLagMs = Date.now() - Number(message.commitTime / 1000n); this._currentTransaction.commitEndLsn = message.commitEndLsn; this._currentTransaction.replicationLagMs = replicationLagMs; @@ -283,7 +286,7 @@ export class RunsReplicationService { if (transaction.commitEndLsn) { this._currentSpan?.setAttribute("transaction.shutdown", true); - await this._replicationClient.acknowledge(transaction.commitEndLsn); + await this.#maybeAcknowledge(transaction.commitEndLsn); alreadyAcknowledged = true; } @@ -305,7 +308,7 @@ export class RunsReplicationService { // If there are no events, do nothing if (transaction.events.length === 0) { if (transaction.commitEndLsn && !alreadyAcknowledged) { - await this._replicationClient.acknowledge(transaction.commitEndLsn); + await this.#maybeAcknowledge(transaction.commitEndLsn); } this._currentSpan?.end(); @@ -340,46 +343,23 @@ export class RunsReplicationService { this._transactionCounter?.inc(); - if (this._insertStrategy === "batching") { - this._concurrentFlushScheduler - .addToBatch( - transaction.events.map((event) => ({ - _version, - run: event.data, - event: event.tag, - })) - ) - .catch((error) => { - this.logger.error("Error adding to batch", { - error, - }); + this._concurrentFlushScheduler + .addToBatch( + transaction.events.map((event) => ({ + _version, + run: event.data, + event: event.tag, + })) + ) + .catch((error) => { + this.logger.error("Error adding to batch", { + error, }); - } else { - const [flushError] = await tryCatch( - this.#flushBatch( - nanoid(), - transaction.events.map((event) => ({ - _version, - run: event.data, - event: event.tag, - })) - ) - ); - - if (flushError) { - this.logger.error("Error flushing 
batch", { - error: flushError, - }); - - if (this._currentSpan) { - recordSpanError(this._currentSpan, flushError); - } - } - } + }); if (!alreadyAcknowledged) { const acknowledgeStart = process.hrtime.bigint(); - await this._replicationClient.acknowledge(transaction.commitEndLsn); + await this.#maybeAcknowledge(transaction.commitEndLsn); this._currentSpan?.setAttribute( "transaction.acknowledge_ms", Number(process.hrtime.bigint() - acknowledgeStart) / 1_000_000 @@ -389,6 +369,27 @@ export class RunsReplicationService { this._currentSpan?.end(); } + async #maybeAcknowledge(commitEndLsn: string) { + const now = Date.now(); + + if (this._lastAcknowledgedAt) { + const timeSinceLastAcknowledged = now - this._lastAcknowledgedAt; + // If we've already acknowledged within the last second, don't acknowledge again + if (timeSinceLastAcknowledged < this._acknowledgeTimeoutMs) { + return; + } + } + + this._lastAcknowledgedAt = now; + + this.logger.info("Acknowledging transaction", { + commitEndLsn, + lastAcknowledgedAt: this._lastAcknowledgedAt, + }); + + await this._replicationClient.acknowledge(commitEndLsn); + } + async #flushBatch(flushId: string, batch: Array) { if (batch.length === 0) { this.logger.debug("No runs to flush", { @@ -402,31 +403,38 @@ export class RunsReplicationService { batchSize: batch.length, }); - const preparedInserts = await Promise.all(batch.map(this.#prepareRunInserts.bind(this))); + await startSpan(this._tracer, "flushBatch", async (span) => { + const preparedInserts = await startSpan(this._tracer, "prepare_inserts", async (span) => { + return await Promise.all(batch.map(this.#prepareRunInserts.bind(this))); + }); - const taskRunInserts = preparedInserts - .map(({ taskRunInsert }) => taskRunInsert) - .filter(Boolean); + const taskRunInserts = preparedInserts + .map(({ taskRunInsert }) => taskRunInsert) + .filter(Boolean); - const payloadInserts = preparedInserts - .map(({ payloadInsert }) => payloadInsert) - .filter(Boolean); + const payloadInserts = preparedInserts + .map(({ payloadInsert }) => payloadInsert) + .filter(Boolean); - this.logger.info("Flushing inserts", { - flushId, - taskRunInserts: taskRunInserts.length, - payloadInserts: payloadInserts.length, - }); + span.setAttribute("task_run_inserts", taskRunInserts.length); + span.setAttribute("payload_inserts", payloadInserts.length); - await Promise.all([ - this.#insertTaskRunInserts(taskRunInserts), - this.#insertPayloadInserts(payloadInserts), - ]); + this.logger.info("Flushing inserts", { + flushId, + taskRunInserts: taskRunInserts.length, + payloadInserts: payloadInserts.length, + }); - this.logger.info("Flushed inserts", { - flushId, - taskRunInserts: taskRunInserts.length, - payloadInserts: payloadInserts.length, + await Promise.all([ + this.#insertTaskRunInserts(taskRunInserts), + this.#insertPayloadInserts(payloadInserts), + ]); + + this.logger.info("Flushed inserts", { + flushId, + taskRunInserts: taskRunInserts.length, + payloadInserts: payloadInserts.length, + }); }); } @@ -436,7 +444,7 @@ export class RunsReplicationService { { params: { clickhouse_settings: { - wait_for_async_insert: this._insertStrategy === "batching" ? 1 : 0, + wait_for_async_insert: 1, }, }, } @@ -457,7 +465,7 @@ export class RunsReplicationService { { params: { clickhouse_settings: { - wait_for_async_insert: this._insertStrategy === "batching" ? 
1 : 0, + wait_for_async_insert: 1, }, }, } @@ -617,7 +625,7 @@ export type ConcurrentFlushSchedulerConfig = { flushInterval: number; maxConcurrency?: number; callback: (flushId: string, batch: T[]) => Promise; - metricsRegister?: MetricsRegister; + tracer?: Tracer; }; export class ConcurrentFlushScheduler { @@ -628,11 +636,13 @@ export class ConcurrentFlushScheduler { private readonly concurrencyLimiter: ReturnType; private flushTimer: NodeJS.Timeout | null; private failedBatchCount; - private metricsRegister?: MetricsRegister; private logger: Logger; + private _tracer: Tracer; constructor(private readonly config: ConcurrentFlushSchedulerConfig) { this.logger = new Logger("ConcurrentFlushScheduler", "info"); + this._tracer = config.tracer ?? trace.getTracer("concurrent-flush-scheduler"); + this.currentBatch = []; this.BATCH_SIZE = config.batchSize; this.FLUSH_INTERVAL = config.flushInterval; @@ -648,48 +658,6 @@ export class ConcurrentFlushScheduler { }); this.startFlushTimer(); - - if (!process.env.VITEST && config.metricsRegister) { - this.metricsRegister = config.metricsRegister; - - const scheduler = this; - - new Gauge({ - name: "concurrent_flush_scheduler_batch_size", - help: "Number of items in the current concurrent flush scheduler batch", - collect() { - this.set(scheduler.currentBatch.length); - }, - registers: [this.metricsRegister], - }); - - new Gauge({ - name: "concurrent_flush_scheduler_failed_batches", - help: "Number of failed batches", - collect() { - this.set(scheduler.failedBatchCount); - }, - registers: [this.metricsRegister], - }); - - new Gauge({ - name: "concurrent_flush_scheduler_active_concurrency", - help: "Number of active concurrency", - collect() { - this.set(scheduler.concurrencyLimiter.activeCount); - }, - registers: [this.metricsRegister], - }); - - new Gauge({ - name: "concurrent_flush_scheduler_pending_concurrency", - help: "Number of pending concurrency", - collect() { - this.set(scheduler.concurrencyLimiter.pendingCount); - }, - registers: [this.metricsRegister], - }); - } } /** @@ -697,8 +665,8 @@ export class ConcurrentFlushScheduler { * If you want to fire and forget, don't await this method. */ async addToBatch(items: T[]): Promise { - // TODO: consider using concat. spread is not performant - this.currentBatch.push(...items); + this.currentBatch = this.currentBatch.concat(items); + this.logger.debug("Adding items to batch", { currentBatchSize: this.currentBatch.length, itemsAdded: items.length, @@ -744,45 +712,37 @@ export class ConcurrentFlushScheduler { private async flushNextBatch(): Promise { if (this.currentBatch.length === 0) return; - const batches: T[][] = []; - while (this.currentBatch.length > 0) { - batches.push(this.currentBatch.splice(0, this.BATCH_SIZE)); - } - - this.logger.info("Starting batch flush", { - numberOfBatches: batches.length, - totalItems: batches.reduce((sum, batch) => sum + batch.length, 0), - }); + const batch = [...this.currentBatch]; + this.currentBatch = []; const callback = this.config.callback; - // TODO: report plimit.activeCount and pLimit.pendingCount and pLimit.concurrency to /metrics - const promises = batches.map((batch) => - this.concurrencyLimiter(async () => { + const promise = this.concurrencyLimiter(async () => { + await startSpan(this._tracer, "flushNextBatch", async (span) => { const batchId = nanoid(); - try { - await callback(batchId, batch); - } catch (error) { - this.logger.error("Error processing batch", { - batchId, - error, - batchSize: batch.length, - errorMessage: error instanceof Error ? 
error.message : "Unknown error", - }); - throw error; - } - }) - ); - const results = await Promise.allSettled(promises); + span.setAttribute("batch_id", batchId); + span.setAttribute("batch_size", batch.length); + span.setAttribute("concurrency_active_count", this.concurrencyLimiter.activeCount); + span.setAttribute("concurrency_pending_count", this.concurrencyLimiter.pendingCount); + span.setAttribute("concurrency_concurrency", this.concurrencyLimiter.concurrency); + + await callback(batchId, batch); + }); + }); - const failedBatches = results.filter((result) => result.status === "rejected").length; - this.failedBatchCount += failedBatches; + const [error] = await tryCatch(promise); + + if (error) { + this.logger.error("Error processing batch", { + error, + }); + } this.logger.info("Batch flush complete", { - totalBatches: batches.length, - successfulBatches: batches.length - failedBatches, - failedBatches, + totalBatches: 1, + successfulBatches: 1, + failedBatches: 0, totalFailedBatches: this.failedBatchCount, }); } diff --git a/apps/webapp/test/runsReplicationService.test.ts b/apps/webapp/test/runsReplicationService.test.ts index 1c3d145438..3afbf6dcb3 100644 --- a/apps/webapp/test/runsReplicationService.test.ts +++ b/apps/webapp/test/runsReplicationService.test.ts @@ -2,10 +2,10 @@ import { ClickHouse } from "@internal/clickhouse"; import { containerTest } from "@internal/testcontainers"; import { Logger } from "@trigger.dev/core/logger"; import { setTimeout } from "node:timers/promises"; -import { OpenMetricsContentType, Registry } from "prom-client"; import { z } from "zod"; import { TaskRunStatus } from "~/database-types"; import { RunsReplicationService } from "~/services/runsReplicationService.server"; +import { createInMemoryTracing } from "./utils/tracing"; vi.setConfig({ testTimeout: 60_000 }); @@ -20,6 +20,8 @@ describe("RunsReplicationService", () => { name: "runs-replication", }); + const { tracer, exporter } = createInMemoryTracing(); + const runsReplicationService = new RunsReplicationService({ clickhouse, pgConnectionUrl: postgresContainer.getConnectionUri(), @@ -30,10 +32,10 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, + tracer, }); await runsReplicationService.start(); @@ -109,6 +111,21 @@ describe("RunsReplicationService", () => { }) ); + const spans = exporter.getFinishedSpans(); + + expect(spans.length).toBeGreaterThan(0); + + const transactionSpan = spans.find( + (span) => + span.name === "handle_transaction" && + typeof span.attributes["transaction.events"] === "number" && + span.attributes["transaction.events"] > 0 + ); + + expect(transactionSpan).not.toBeNull(); + expect(transactionSpan?.attributes["transaction.parse_duration_ms"]).toBeGreaterThan(0); + expect(transactionSpan?.attributes["transaction.parse_duration_ms"]).toBeLessThan(1); + await runsReplicationService.stop(); } ); @@ -133,13 +150,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -219,112 +235,6 @@ describe("RunsReplicationService", () => { } ); - containerTest( - "should replicate a new 
TaskRun to ClickHouse using streaming insert strategy", - async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { - await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); - - const clickhouse = new ClickHouse({ - url: clickhouseContainer.getConnectionUrl(), - name: "runs-replication-streaming", - }); - - const runsReplicationService = new RunsReplicationService({ - clickhouse, - pgConnectionUrl: postgresContainer.getConnectionUri(), - serviceName: "runs-replication-streaming", - slotName: "task_runs_to_clickhouse_v1", - publicationName: "task_runs_to_clickhouse_v1_publication", - redisOptions, - maxFlushConcurrency: 1, - flushIntervalMs: 100, - flushBatchSize: 1, - insertStrategy: "streaming", - leaderLockTimeoutMs: 5000, - leaderLockExtendIntervalMs: 1000, - ackIntervalSeconds: 5, - }); - - await runsReplicationService.start("streaming"); - - const organization = await prisma.organization.create({ - data: { - title: "test-streaming", - slug: "test-streaming", - }, - }); - - const project = await prisma.project.create({ - data: { - name: "test-streaming", - slug: "test-streaming", - organizationId: organization.id, - externalRef: "test-streaming", - }, - }); - - const runtimeEnvironment = await prisma.runtimeEnvironment.create({ - data: { - slug: "test-streaming", - type: "DEVELOPMENT", - projectId: project.id, - organizationId: organization.id, - apiKey: "test-streaming", - pkApiKey: "test-streaming", - shortcode: "test-streaming", - }, - }); - - // Insert a row into the table with a unique friendlyId - const uniqueFriendlyId = `run_streaming_${Date.now()}`; - const taskRun = await prisma.taskRun.create({ - data: { - friendlyId: uniqueFriendlyId, - taskIdentifier: "my-task-streaming", - payload: JSON.stringify({ foo: "bar-streaming" }), - traceId: "streaming-1234", - spanId: "streaming-1234", - queue: "test-streaming", - runtimeEnvironmentId: runtimeEnvironment.id, - projectId: project.id, - organizationId: organization.id, - environmentType: "DEVELOPMENT", - engine: "V2", - }, - }); - - // Wait for replication - await setTimeout(1000); - - // Query ClickHouse for the replicated run - const queryRuns = clickhouse.reader.query({ - name: "runs-replication-streaming", - query: "SELECT * FROM trigger_dev.task_runs_v1 WHERE run_id = {run_id:String}", - schema: z.any(), - params: z.object({ run_id: z.string() }), - }); - - const [queryError, result] = await queryRuns({ run_id: taskRun.id }); - - expect(queryError).toBeNull(); - expect(result?.length).toBe(1); - expect(result?.[0]).toEqual( - expect.objectContaining({ - run_id: taskRun.id, - friendly_id: taskRun.friendlyId, - task_identifier: taskRun.taskIdentifier, - environment_id: runtimeEnvironment.id, - project_id: project.id, - organization_id: organization.id, - environment_type: "DEVELOPMENT", - engine: "V2", - }) - ); - - await runsReplicationService.stop(); - } - ); - containerTest( "should insert the payload into ClickHouse when a TaskRun is created", async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { @@ -345,13 +255,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -448,13 +357,12 @@ describe("RunsReplicationService", () 
=> { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -556,13 +464,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -667,13 +574,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -772,13 +678,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationServiceA.start("batching"); + await runsReplicationServiceA.start(); const organization = await prisma.organization.create({ data: { @@ -884,13 +789,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationServiceB.start("batching"); + await runsReplicationServiceB.start(); // Wait for replication await setTimeout(1000); @@ -931,13 +835,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationServiceA.start("batching"); + await runsReplicationServiceA.start(); const organization = await prisma.organization.create({ data: { @@ -1038,13 +941,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 1, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationServiceB.start("batching"); + await runsReplicationServiceB.start(); await setTimeout(1000); @@ -1078,14 +980,13 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 10, flushIntervalMs: 100, flushBatchSize: 50, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, logger: new Logger("runs-replication-stress-bulk-insert", "info"), }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -1183,8 +1084,6 @@ describe("RunsReplicationService", () => { name: "runs-replication-stress-bulk-insert", }); - const registry = new Registry(); - const runsReplicationService = new RunsReplicationService({ clickhouse, pgConnectionUrl: postgresContainer.getConnectionUri(), @@ -1195,15 +1094,13 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 10, 
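// Stress-test tuning: the flush scheduler may run up to 10 ClickHouse inserts
// concurrently, while the two settings below control cadence; timed flushes
// fire every 100ms, and flushBatchSize sets a per-flush target of 50 events,
// trading insert latency against batch size.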
flushIntervalMs: 100, flushBatchSize: 50, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, logger: new Logger("runs-replication-stress-bulk-insert", "info"), - metricsRegister: registry, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -1317,13 +1214,12 @@ describe("RunsReplicationService", () => { maxFlushConcurrency: 1, flushIntervalMs: 100, flushBatchSize: 10, - insertStrategy: "batching", leaderLockTimeoutMs: 5000, leaderLockExtendIntervalMs: 1000, ackIntervalSeconds: 5, }); - await runsReplicationService.start("batching"); + await runsReplicationService.start(); const organization = await prisma.organization.create({ data: { @@ -1429,4 +1325,112 @@ describe("RunsReplicationService", () => { await runsReplicationService.stop(); } ); + + containerTest( + "should be able to handle processing transactions for a long period of time", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication-long-tx", + }); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication-long-tx", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 10, + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + logger: new Logger("runs-replication-long-tx", "info"), + }); + + await runsReplicationService.start(); + + const organization = await prisma.organization.create({ + data: { + title: "test-long-tx", + slug: "test-long-tx", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-long-tx", + slug: "test-long-tx", + organizationId: organization.id, + externalRef: "test-long-tx", + }, + }); + + const runtimeEnvironment = await prisma.runtimeEnvironment.create({ + data: { + slug: "test-long-tx", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test-long-tx", + pkApiKey: "test-long-tx", + shortcode: "test-long-tx", + }, + }); + + // Start an interval that will create a new run every 500ms for 4 minutes + const interval = setInterval(async () => { + await prisma.taskRun.create({ + data: { + friendlyId: `run_long_tx_${Date.now()}`, + taskIdentifier: "my-task-long-tx", + payload: JSON.stringify({ long: 1 }), + payloadType: "application/json", + traceId: `long-${Date.now()}`, + spanId: `long-${Date.now()}`, + queue: "test-long-tx", + runtimeEnvironmentId: runtimeEnvironment.id, + projectId: project.id, + organizationId: organization.id, + environmentType: "DEVELOPMENT", + engine: "V2", + status: "PENDING", + attemptNumber: 1, + createdAt: new Date(), + updatedAt: new Date(), + }, + }); + }, 500); + + // Wait for 4 minutes + await setTimeout(4 * 60 * 1000); + + // Stop the interval + clearInterval(interval); + + // Wait for replication + await setTimeout(1000); + + // Query ClickHouse for all runs using FINAL + const queryRuns = clickhouse.reader.query({ + name: "runs-replication-long-tx", + query: `SELECT * FROM trigger_dev.task_runs_v1 FINAL`, + schema: z.any(), + }); + + const 
[queryError, result] = await queryRuns({}); + expect(queryError).toBeNull(); + + // Check that there are between 200 and 480 runs in ClickHouse + expect(result?.length).toBeGreaterThanOrEqual(200); + expect(result?.length).toBeLessThanOrEqual(480); + + await runsReplicationService.stop(); + }, + { timeout: 60_000 * 5 } + ); }); diff --git a/apps/webapp/test/utils/tracing.ts b/apps/webapp/test/utils/tracing.ts new file mode 100644 index 0000000000..591e27120a --- /dev/null +++ b/apps/webapp/test/utils/tracing.ts @@ -0,0 +1,19 @@ +import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node"; +import { InMemorySpanExporter, SimpleSpanProcessor } from "@opentelemetry/sdk-trace-base"; +import { context, trace } from "@opentelemetry/api"; + +export function createInMemoryTracing() { + // Initialize the tracer provider and exporter + const provider = new NodeTracerProvider(); + const exporter = new InMemorySpanExporter(); + provider.addSpanProcessor(new SimpleSpanProcessor(exporter)); + provider.register(); + + // Retrieve the tracer + const tracer = trace.getTracer("test-tracer"); + + return { + exporter, + tracer, + }; +} diff --git a/internal-packages/replication/src/client.ts b/internal-packages/replication/src/client.ts index db9044c1a0..c522c38231 100644 --- a/internal-packages/replication/src/client.ts +++ b/internal-packages/replication/src/client.ts @@ -78,7 +78,7 @@ export interface LogicalReplicationClientOptions { export type LogicalReplicationClientEvents = { leaderElection: [boolean]; error: [Error]; - data: [{ lsn: string; log: PgoutputMessage }]; + data: [{ lsn: string; log: PgoutputMessage; parseDuration: bigint }]; start: []; acknowledge: [{ lsn: string }]; heartbeat: [{ lsn: string; timestamp: number; shouldRespond: boolean }]; @@ -351,8 +351,10 @@ export class LogicalReplicationClient { if (buffer[0] === 0x77) { // XLogData try { + const start = process.hrtime.bigint(); const log = parser.parse(buffer.subarray(25)); - this.events.emit("data", { lsn, log }); + const duration = process.hrtime.bigint() - start; + this.events.emit("data", { lsn, log, parseDuration: duration }); await this.#acknowledge(lsn); } catch (err) { this.logger.error("Failed to parse XLogData", { error: err }); From 3c50bfeb6e62183be9ab0d750a8f311741bb553e Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 12 May 2025 13:58:33 +0100 Subject: [PATCH 28/33] Improvements to the runs replication service, now ready for testing --- apps/webapp/app/env.server.ts | 1 + .../admin.api.v1.runs-replication.start.ts | 10 +- .../runsReplicationInstance.server.ts | 1 + .../services/runsReplicationService.server.ts | 227 +++++++++--------- 4 files changed, 118 insertions(+), 121 deletions(-) diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index 1000e6d798..3baf691f84 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -765,6 +765,7 @@ const EnvironmentSchema = z.object({ RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS: z.coerce.number().int().default(30_000), RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS: z.coerce.number().int().default(10_000), RUN_REPLICATION_ACK_INTERVAL_SECONDS: z.coerce.number().int().default(10), + RUN_REPLICATION_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), }); export type Environment = z.infer; diff --git a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts index c7af85e208..182b264000 100644 --- 
a/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts +++ b/apps/webapp/app/routes/admin.api.v1.runs-replication.start.ts @@ -2,11 +2,6 @@ import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; import { prisma } from "~/db.server"; import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; import { runsReplicationInstance } from "~/services/runsReplicationInstance.server"; -import { z } from "zod"; - -const schema = z.object({ - insertStrategy: z.enum(["streaming", "batching"]).optional(), -}); export async function action({ request }: ActionFunctionArgs) { // Next authenticate the request @@ -31,10 +26,7 @@ export async function action({ request }: ActionFunctionArgs) { } try { - const body = await request.json(); - const { insertStrategy } = schema.parse(body); - - await runsReplicationInstance?.start(insertStrategy); + await runsReplicationInstance?.start(); return json({ success: true, diff --git a/apps/webapp/app/services/runsReplicationInstance.server.ts b/apps/webapp/app/services/runsReplicationInstance.server.ts index 1b40f2849a..6a4c9c4ee5 100644 --- a/apps/webapp/app/services/runsReplicationInstance.server.ts +++ b/apps/webapp/app/services/runsReplicationInstance.server.ts @@ -46,6 +46,7 @@ function initializeRunsReplicationInstance() { leaderLockTimeoutMs: env.RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS, leaderLockExtendIntervalMs: env.RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS, ackIntervalSeconds: env.RUN_REPLICATION_ACK_INTERVAL_SECONDS, + logLevel: env.RUN_REPLICATION_LOG_LEVEL, }); if (env.RUN_REPLICATION_ENABLED === "1") { diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index 287314af63..bfa2887a11 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -2,14 +2,13 @@ import type { ClickHouse, RawTaskRunPayloadV1, TaskRunV1 } from "@internal/click import { RedisOptions } from "@internal/redis"; import { LogicalReplicationClient, Transaction, type PgoutputMessage } from "@internal/replication"; import { Span, startSpan, trace, type Tracer } from "@internal/tracing"; -import { Logger } from "@trigger.dev/core/logger"; +import { Logger, LogLevel } from "@trigger.dev/core/logger"; import { tryCatch } from "@trigger.dev/core/utils"; import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; import { TaskRun } from "@trigger.dev/database"; import { nanoid } from "nanoid"; import EventEmitter from "node:events"; import pLimit from "p-limit"; -import { Counter, Gauge } from "prom-client"; export type RunsReplicationServiceOptions = { clickhouse: ClickHouse; @@ -26,6 +25,7 @@ export type RunsReplicationServiceOptions = { ackIntervalSeconds?: number; acknowledgeTimeoutMs?: number; logger?: Logger; + logLevel?: LogLevel; tracer?: Tracer; }; @@ -36,7 +36,6 @@ export type RunsReplicationServiceEvents = { }; export class RunsReplicationService { - private _lastLsn: string | null = null; private _isSubscribed = false; private _currentTransaction: | (Omit, "commitEndLsn" | "replicationLagMs"> & { @@ -48,9 +47,6 @@ export class RunsReplicationService { private _replicationClient: LogicalReplicationClient; private _concurrentFlushScheduler: ConcurrentFlushScheduler; private logger: Logger; - private _lastReplicationLagMs: number | null = null; - private _transactionCounter?: Counter; - private _lagGauge?: Gauge; private _isShuttingDown = false; 
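// The fields added just below replace per-transaction acknowledgement with
// debounced bookkeeping: _latestCommitEndLsn records the newest commit LSN
// seen on the replication stream, _lastAcknowledgedLsn the LSN most recently
// reported back to Postgres, and _acknowledgeInterval drives the periodic
// acknowledge loop that start() schedules.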
private _isShutDownComplete = false; private _tracer: Tracer; @@ -58,11 +54,15 @@ export class RunsReplicationService { private _currentParseDurationMs: number | null = null; private _lastAcknowledgedAt: number | null = null; private _acknowledgeTimeoutMs: number; + private _latestCommitEndLsn: string | null = null; + private _lastAcknowledgedLsn: string | null = null; + private _acknowledgeInterval: NodeJS.Timeout | null = null; public readonly events: EventEmitter; constructor(private readonly options: RunsReplicationServiceOptions) { - this.logger = options.logger ?? new Logger("RunsReplicationService", "debug"); + this.logger = + options.logger ?? new Logger("RunsReplicationService", options.logLevel ?? "info"); this.events = new EventEmitter(); this._tracer = options.tracer ?? trace.getTracer("runs-replication-service"); @@ -79,7 +79,7 @@ export class RunsReplicationService { redisOptions: options.redisOptions, autoAcknowledge: false, publicationActions: ["insert", "update", "delete"], - logger: this.logger, + logger: new Logger("LogicalReplicationClient", options.logLevel ?? "info"), leaderLockTimeoutMs: options.leaderLockTimeoutMs ?? 30_000, leaderLockExtendIntervalMs: options.leaderLockExtendIntervalMs ?? 10_000, ackIntervalSeconds: options.ackIntervalSeconds ?? 10, @@ -90,12 +90,11 @@ export class RunsReplicationService { flushInterval: options.flushIntervalMs ?? 100, maxConcurrency: options.maxFlushConcurrency ?? 100, callback: this.#flushBatch.bind(this), + logger: new Logger("ConcurrentFlushScheduler", options.logLevel ?? "info"), }); this._replicationClient.events.on("data", async ({ lsn, log, parseDuration }) => { - this._lastLsn = lsn; - - await this.#handleData(lsn, log, parseDuration); + this.#handleData(lsn, log, parseDuration); }); this._replicationClient.events.on("heartbeat", async ({ lsn, shouldRespond }) => { @@ -103,6 +102,7 @@ export class RunsReplicationService { if (this._isShutDownComplete) return; if (shouldRespond) { + this._lastAcknowledgedLsn = lsn; await this._replicationClient.acknowledge(lsn); } }); @@ -126,14 +126,6 @@ export class RunsReplicationService { }); } - public async getTransactionCountMetric() { - return this._transactionCounter?.get(); - } - - public async getLagGaugeMetric() { - return this._lagGauge?.get(); - } - public async shutdown() { this._isShuttingDown = true; @@ -145,32 +137,46 @@ export class RunsReplicationService { this._isShutDownComplete = true; return; } + + this._concurrentFlushScheduler.shutdown(); } async start() { this.logger.info("Starting replication client", { - lastLsn: this._lastLsn, + lastLsn: this._latestCommitEndLsn, }); - await this._replicationClient.subscribe(this._lastLsn ?? undefined); + await this._replicationClient.subscribe(this._latestCommitEndLsn ?? 
undefined); + + this._acknowledgeInterval = setInterval(this.#acknowledgeLatestTransaction.bind(this), 1000); + this._concurrentFlushScheduler.start(); } async stop() { this.logger.info("Stopping replication client"); await this._replicationClient.stop(); + + if (this._acknowledgeInterval) { + clearInterval(this._acknowledgeInterval); + } } async teardown() { this.logger.info("Teardown replication client"); await this._replicationClient.teardown(); + + if (this._acknowledgeInterval) { + clearInterval(this._acknowledgeInterval); + } } - async #handleData(lsn: string, message: PgoutputMessage, parseDuration: bigint) { + #handleData(lsn: string, message: PgoutputMessage, parseDuration: bigint) { this.logger.debug("Handling data", { lsn, tag: message.tag, + parseDuration, }); this.events.emit("message", { lsn, message, service: this }); @@ -269,33 +275,26 @@ export class RunsReplicationService { this._currentTransaction.replicationLagMs = replicationLagMs; const transaction = this._currentTransaction as Transaction; this._currentTransaction = null; - await this.#handleTransaction(transaction); + + if (transaction.commitEndLsn) { + this._latestCommitEndLsn = transaction.commitEndLsn; + } + + this.#handleTransaction(transaction); break; } } } - async #handleTransaction(transaction: Transaction) { + #handleTransaction(transaction: Transaction) { if (this._isShutDownComplete) return; - let alreadyAcknowledged = false; - if (this._isShuttingDown) { - // We need to immediately acknowledge the transaction - // And then try and handle this transaction - if (transaction.commitEndLsn) { - this._currentSpan?.setAttribute("transaction.shutdown", true); - - await this.#maybeAcknowledge(transaction.commitEndLsn); - alreadyAcknowledged = true; - } - - await this._replicationClient.stop(); - this._isShutDownComplete = true; + this._replicationClient.stop().finally(() => { + this._isShutDownComplete = true; + }); } - this._lastReplicationLagMs = transaction.replicationLagMs; - this._currentSpan?.setAttribute("transaction.replication_lag_ms", transaction.replicationLagMs); this._currentSpan?.setAttribute("transaction.xid", transaction.xid); @@ -307,10 +306,6 @@ export class RunsReplicationService { // If there are no events, do nothing if (transaction.events.length === 0) { - if (transaction.commitEndLsn && !alreadyAcknowledged) { - await this.#maybeAcknowledge(transaction.commitEndLsn); - } - this._currentSpan?.end(); return; @@ -328,7 +323,6 @@ export class RunsReplicationService { this.logger.debug("Handling transaction", { transaction, - alreadyAcknowledged, }); const lsnToUInt64Start = process.hrtime.bigint(); @@ -341,35 +335,26 @@ export class RunsReplicationService { Number(process.hrtime.bigint() - lsnToUInt64Start) / 1_000_000 ); - this._transactionCounter?.inc(); - - this._concurrentFlushScheduler - .addToBatch( - transaction.events.map((event) => ({ - _version, - run: event.data, - event: event.tag, - })) - ) - .catch((error) => { - this.logger.error("Error adding to batch", { - error, - }); - }); - - if (!alreadyAcknowledged) { - const acknowledgeStart = process.hrtime.bigint(); - await this.#maybeAcknowledge(transaction.commitEndLsn); - this._currentSpan?.setAttribute( - "transaction.acknowledge_ms", - Number(process.hrtime.bigint() - acknowledgeStart) / 1_000_000 - ); - } + this._concurrentFlushScheduler.addToBatch( + transaction.events.map((event) => ({ + _version, + run: event.data, + event: event.tag, + })) + ); this._currentSpan?.end(); } - async #maybeAcknowledge(commitEndLsn: string) { + 
async #acknowledgeLatestTransaction() { + if (!this._latestCommitEndLsn) { + return; + } + + if (this._lastAcknowledgedLsn === this._latestCommitEndLsn) { + return; + } + const now = Date.now(); if (this._lastAcknowledgedAt) { @@ -381,13 +366,18 @@ export class RunsReplicationService { } this._lastAcknowledgedAt = now; + this._lastAcknowledgedLsn = this._latestCommitEndLsn; - this.logger.info("Acknowledging transaction", { - commitEndLsn, + this.logger.debug("Acknowledging transaction", { + commitEndLsn: this._latestCommitEndLsn, lastAcknowledgedAt: this._lastAcknowledgedAt, }); - await this._replicationClient.acknowledge(commitEndLsn); + await this._replicationClient.acknowledge(this._latestCommitEndLsn); + + if (this._isShutDownComplete && this._acknowledgeInterval) { + clearInterval(this._acknowledgeInterval); + } } async #flushBatch(flushId: string, batch: Array) { @@ -398,7 +388,7 @@ export class RunsReplicationService { return; } - this.logger.info("Flushing batch", { + this.logger.debug("Flushing batch", { flushId, batchSize: batch.length, }); @@ -419,7 +409,7 @@ export class RunsReplicationService { span.setAttribute("task_run_inserts", taskRunInserts.length); span.setAttribute("payload_inserts", payloadInserts.length); - this.logger.info("Flushing inserts", { + this.logger.debug("Flushing inserts", { flushId, taskRunInserts: taskRunInserts.length, payloadInserts: payloadInserts.length, @@ -430,7 +420,7 @@ export class RunsReplicationService { this.#insertPayloadInserts(payloadInserts), ]); - this.logger.info("Flushed inserts", { + this.logger.debug("Flushed inserts", { flushId, taskRunInserts: taskRunInserts.length, payloadInserts: payloadInserts.length, @@ -610,9 +600,14 @@ export class RunsReplicationService { dataType, }; - const parsedData = await parsePacket(packet); + const [parseError, parsedData] = await tryCatch(parsePacket(packet)); + + if (parseError) { + this.logger.error("Error parsing packet", { + error: parseError, + packet, + }); - if (!parsedData) { return { data: undefined }; } @@ -626,93 +621,99 @@ export type ConcurrentFlushSchedulerConfig = { maxConcurrency?: number; callback: (flushId: string, batch: T[]) => Promise; tracer?: Tracer; + logger?: Logger; }; export class ConcurrentFlushScheduler { private currentBatch: T[]; // Adjust the type according to your data structure private readonly BATCH_SIZE: number; - private readonly FLUSH_INTERVAL: number; + private readonly flushInterval: number; private readonly MAX_CONCURRENCY: number; private readonly concurrencyLimiter: ReturnType; private flushTimer: NodeJS.Timeout | null; private failedBatchCount; private logger: Logger; private _tracer: Tracer; + private _isShutDown = false; constructor(private readonly config: ConcurrentFlushSchedulerConfig) { - this.logger = new Logger("ConcurrentFlushScheduler", "info"); + this.logger = config.logger ?? new Logger("ConcurrentFlushScheduler", "info"); this._tracer = config.tracer ?? 
trace.getTracer("concurrent-flush-scheduler"); this.currentBatch = []; this.BATCH_SIZE = config.batchSize; - this.FLUSH_INTERVAL = config.flushInterval; + this.flushInterval = config.flushInterval; this.MAX_CONCURRENCY = config.maxConcurrency || 1; this.concurrencyLimiter = pLimit(this.MAX_CONCURRENCY); this.flushTimer = null; this.failedBatchCount = 0; + } - this.logger.info("Initializing ConcurrentFlushScheduler", { + addToBatch(items: T[]): void { + this.currentBatch = this.currentBatch.concat(items); + this.#flushNextBatchIfNeeded(); + } + + start(): void { + this.logger.info("Starting ConcurrentFlushScheduler", { batchSize: this.BATCH_SIZE, - flushInterval: this.FLUSH_INTERVAL, + flushInterval: this.flushInterval, maxConcurrency: this.MAX_CONCURRENCY, }); - this.startFlushTimer(); + this.#startFlushTimer(); } - /** - * - * If you want to fire and forget, don't await this method. - */ - async addToBatch(items: T[]): Promise { - this.currentBatch = this.currentBatch.concat(items); + shutdown(): void { + this.logger.info("Shutting down ConcurrentFlushScheduler"); - this.logger.debug("Adding items to batch", { - currentBatchSize: this.currentBatch.length, - itemsAdded: items.length, - }); + this._isShutDown = true; + + this.#clearTimer(); + this.#flushNextBatchIfNeeded(); + } - if (this.currentBatch.length >= this.BATCH_SIZE) { + #flushNextBatchIfNeeded(): void { + if (this.currentBatch.length >= this.BATCH_SIZE || this._isShutDown) { this.logger.debug("Batch size threshold reached, initiating flush", { batchSize: this.BATCH_SIZE, currentSize: this.currentBatch.length, + isShutDown: this._isShutDown, + }); + + this.#flushNextBatch().catch((error) => { + this.logger.error("Error flushing next batch", { + error, + }); }); - await this.flushNextBatch(); - this.resetFlushTimer(); } } - private startFlushTimer(): void { - this.flushTimer = setInterval(() => this.checkAndFlush(), this.FLUSH_INTERVAL); - this.logger.debug("Started flush timer", { interval: this.FLUSH_INTERVAL }); + #startFlushTimer(): void { + this.flushTimer = setInterval(() => this.#checkAndFlush().catch(() => {}), this.flushInterval); + this.logger.debug("Started flush timer", { interval: this.flushInterval }); } - private clearTimer(): void { + #clearTimer(): void { if (this.flushTimer) { clearInterval(this.flushTimer); this.logger.debug("Flush timer cleared"); } } - private resetFlushTimer(): void { - this.clearTimer(); - this.startFlushTimer(); - this.logger.debug("Flush timer reset"); - } - - private async checkAndFlush(): Promise { + async #checkAndFlush(): Promise { if (this.currentBatch.length > 0) { this.logger.debug("Periodic flush check triggered", { currentBatchSize: this.currentBatch.length, }); - await this.flushNextBatch(); + await this.#flushNextBatch(); } } - private async flushNextBatch(): Promise { + async #flushNextBatch(): Promise { if (this.currentBatch.length === 0) return; - const batch = [...this.currentBatch]; + const batch = this.currentBatch; this.currentBatch = []; const callback = this.config.callback; @@ -734,12 +735,14 @@ export class ConcurrentFlushScheduler { const [error] = await tryCatch(promise); if (error) { - this.logger.error("Error processing batch", { + this.logger.error("Error flushing batch", { error, }); + + this.failedBatchCount++; } - this.logger.info("Batch flush complete", { + this.logger.debug("Batch flush complete", { totalBatches: 1, successfulBatches: 1, failedBatches: 0, From 14a41834d49deb51ad49a41e83c062d291165593 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 12 
From 14a41834d49deb51ad49a41e83c062d291165593 Mon Sep 17 00:00:00 2001
From: Eric Allam
Date: Mon, 12 May 2025 14:23:58 +0100
Subject: [PATCH 29/33] Some fixes and cleanups

---
 .configs/prometheus.yml                       |   7 -
 .../clickhouse/src/client/client.ts           |   4 +
 .../clickhouse/src/client/noop.ts             |   4 +
 .../clickhouse/src/client/types.ts            |   4 +
 internal-packages/clickhouse/src/index.ts     |  15 ++
 internal-packages/replication/src/index.ts    |   1 -
 .../replication/src/stream.test.ts            | 203 ------------------
 internal-packages/replication/src/stream.ts   | 177 ---------------
 internal-packages/testcontainers/src/index.ts |  10 +-
 9 files changed, 35 insertions(+), 390 deletions(-)
 delete mode 100644 .configs/prometheus.yml
 delete mode 100644 internal-packages/replication/src/stream.test.ts
 delete mode 100644 internal-packages/replication/src/stream.ts

diff --git a/.configs/prometheus.yml b/.configs/prometheus.yml
deleted file mode 100644
index bfc97660cc..0000000000
--- a/.configs/prometheus.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-global:
-  scrape_interval: 15s # how often to scrape targets
-
-scrape_configs:
-  - job_name: "trigger-dev"
-    static_configs:
-      - targets: ["localhost:3030"]
diff --git a/internal-packages/clickhouse/src/client/client.ts b/internal-packages/clickhouse/src/client/client.ts
index dc1554e52d..eab28fec16 100644
--- a/internal-packages/clickhouse/src/client/client.ts
+++ b/internal-packages/clickhouse/src/client/client.ts
@@ -48,6 +48,10 @@ export class ClickhouseClient implements ClickhouseReader, ClickhouseWriter {
     this.tracer = config.tracer ?? trace.getTracer("@internal/clickhouse");
   }
 
+  public async close() {
+    await this.client.close();
+  }
+
   public query<TIn extends z.ZodSchema, TOut extends z.ZodSchema>(req: {
     /**
      * The name of the operation.
diff --git a/internal-packages/clickhouse/src/client/noop.ts b/internal-packages/clickhouse/src/client/noop.ts
index ca8f647b47..1c81cd3d3a 100644
--- a/internal-packages/clickhouse/src/client/noop.ts
+++ b/internal-packages/clickhouse/src/client/noop.ts
@@ -6,6 +6,10 @@ import { z } from "zod";
 import { ClickHouseSettings, InsertResult } from "@clickhouse/client";
 
 export class NoopClient implements ClickhouseReader, ClickhouseWriter {
+  public async close() {
+    return;
+  }
+
   public query<TIn extends z.ZodSchema, TOut extends z.ZodSchema>(req: {
     query: string;
     params?: TIn;
diff --git a/internal-packages/clickhouse/src/client/types.ts b/internal-packages/clickhouse/src/client/types.ts
index ea2df95a78..dfddc9c3f1 100644
--- a/internal-packages/clickhouse/src/client/types.ts
+++ b/internal-packages/clickhouse/src/client/types.ts
@@ -41,6 +41,8 @@ export interface ClickhouseReader {
      */
     settings?: ClickHouseSettings;
   }): ClickhouseQueryFunction<z.input<TIn>, z.output<TOut>>;
+
+  close(): Promise<void>;
 }
 
 export type ClickhouseInsertFunction<TIn> = (
@@ -58,4 +60,6 @@ export interface ClickhouseWriter {
     schema: TSchema;
     settings?: ClickHouseSettings;
   }): ClickhouseInsertFunction<z.input<TSchema>>;
+
+  close(): Promise<void>;
 }
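With `close()` on both interfaces (and on the `ClickHouse` facade in the next hunk), callers can release the underlying `@clickhouse/client` sockets during shutdown. A sketch of a plausible call site, assuming the `url`/`name` config shape used elsewhere in this series:

// Sketch: closing the ClickHouse wrapper on process shutdown.
import { ClickHouse } from "@internal/clickhouse";

const clickhouse = new ClickHouse({
  url: process.env.CLICKHOUSE_URL, // when unset, the facade falls back to NoopClient
  name: "example-close",
});

// NoopClient.close() resolves immediately, so the hook is safe to install
// unconditionally, whether or not a real ClickHouse URL was configured.
process.on("SIGTERM", () => {
  clickhouse.close().catch(console.error);
});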
new Logger("ClickHouse", "debug"); @@ -49,6 +50,8 @@ export class ClickHouse { }); this.reader = client; this.writer = client; + + this._splitClients = false; } else if (config.writerUrl && config.readerUrl) { this.reader = new ClickhouseClient({ name: config.readerName ?? "clickhouse-reader", @@ -62,9 +65,13 @@ export class ClickHouse { clickhouseSettings: config.clickhouseSettings, logger: this.logger, }); + + this._splitClients = true; } else { this.reader = new NoopClient(); this.writer = new NoopClient(); + + this._splitClients = true; } } @@ -87,6 +94,14 @@ export class ClickHouse { }); } + async close() { + if (this._splitClients) { + await Promise.all([this.reader.close(), this.writer.close()]); + } else { + await this.reader.close(); + } + } + get taskRuns() { return { insert: insertTaskRuns(this.writer), diff --git a/internal-packages/replication/src/index.ts b/internal-packages/replication/src/index.ts index d8dc8e725a..b889c12f7c 100644 --- a/internal-packages/replication/src/index.ts +++ b/internal-packages/replication/src/index.ts @@ -1,4 +1,3 @@ export * from "./client.js"; export * from "./errors.js"; -export * from "./stream.js"; export type * from "./pgoutput.js"; diff --git a/internal-packages/replication/src/stream.test.ts b/internal-packages/replication/src/stream.test.ts deleted file mode 100644 index 8eb3be57fa..0000000000 --- a/internal-packages/replication/src/stream.test.ts +++ /dev/null @@ -1,203 +0,0 @@ -import { postgresAndRedisTest } from "@internal/testcontainers"; -import { createSubscription, Transaction } from "./stream.js"; -import { setTimeout } from "timers/promises"; - -describe("LogicalReplicationStream", () => { - postgresAndRedisTest( - "should group changes by transaction and filter relevant events", - async ({ postgresContainer, prisma, redisOptions }) => { - await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); - - type TaskRunData = { - friendlyId: string; - taskIdentifier: string; - payload: string; - traceId: string; - spanId: string; - queue: string; - runtimeEnvironmentId: string; - projectId: string; - }; - - const received: Transaction[] = []; - - const subscription = createSubscription({ - name: "test_stream", - publicationName: "test_publication_stream", - slotName: "test_slot_stream", - pgConfig: { - connectionString: postgresContainer.getConnectionUri(), - }, - table: "TaskRun", - redisOptions, - filterTags: ["insert"], - abortSignal: AbortSignal.timeout(10000), - }); - - const organization = await prisma.organization.create({ - data: { - title: "test", - slug: "test", - }, - }); - - const project = await prisma.project.create({ - data: { - name: "test", - slug: "test", - organizationId: organization.id, - externalRef: "test", - }, - }); - - const runtimeEnvironment = await prisma.runtimeEnvironment.create({ - data: { - slug: "test", - type: "DEVELOPMENT", - projectId: project.id, - organizationId: organization.id, - apiKey: "test", - pkApiKey: "test", - shortcode: "test", - }, - }); - - // Insert a row into the table - new Promise(async (resolve) => { - await setTimeout(2000); - - await prisma.taskRun.create({ - data: { - friendlyId: "run_5678", - taskIdentifier: "my-task", - payload: JSON.stringify({ foo: "bar" }), - traceId: "5678", - spanId: "5678", - queue: "test", - runtimeEnvironmentId: runtimeEnvironment.id, - projectId: project.id, - }, - }); - - resolve(undefined); - }).then(() => {}); - - // Now we want to read from the stream - for await (const transaction of subscription.stream) { - 
received.push(transaction); - } - - console.log(received); - - expect(received.length).toBeGreaterThan(0); - const transaction = received[0]; - expect(transaction.events.length).toBeGreaterThan(0); - expect(transaction.events[0].data.friendlyId).toBe("run_5678"); - - // Clean up - await subscription.client.stop(); - } - ); - - postgresAndRedisTest( - "should respect highWaterMark and not pull more data than allowed", - async ({ postgresContainer, prisma, redisOptions }) => { - await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); - - type TaskRunData = { - friendlyId: string; - taskIdentifier: string; - payload: string; - traceId: string; - spanId: string; - queue: string; - runtimeEnvironmentId: string; - projectId: string; - }; - - const subscription = createSubscription({ - name: "test_stream", - publicationName: "test_publication_stream", - slotName: "test_slot_stream", - pgConfig: { - connectionString: postgresContainer.getConnectionUri(), - }, - table: "TaskRun", - redisOptions, - filterTags: ["insert"], - abortSignal: AbortSignal.timeout(10000), - }); - - const organization = await prisma.organization.create({ - data: { - title: "test", - slug: "test", - }, - }); - - const project = await prisma.project.create({ - data: { - name: "test", - slug: "test", - organizationId: organization.id, - externalRef: "test", - }, - }); - - const runtimeEnvironment = await prisma.runtimeEnvironment.create({ - data: { - slug: "test", - type: "DEVELOPMENT", - projectId: project.id, - organizationId: organization.id, - apiKey: "test", - pkApiKey: "test", - shortcode: "test", - }, - }); - - // Insert a row into the table - new Promise(async (resolve) => { - await setTimeout(2000); - - for (let i = 0; i < 5; i++) { - await prisma.taskRun.create({ - data: { - friendlyId: `run_${i}`, - taskIdentifier: "my-task", - payload: JSON.stringify({ foo: "bar" }), - traceId: `${i}`, - spanId: `${i}`, - queue: "test", - runtimeEnvironmentId: runtimeEnvironment.id, - projectId: project.id, - }, - }); - } - - resolve(undefined); - }).then(() => {}); - - const received: Transaction[] = []; - const iterator = subscription.stream[Symbol.asyncIterator](); - - // Pull the first item, then wait before pulling the next - const first = await iterator.next(); - received.push(first.value); - - // Wait to simulate slow consumer - await setTimeout(2000); - - // Pull the next item - const second = await iterator.next(); - received.push(second.value); - - // Optionally, check internal state or spy on client.subscribe/data to ensure only 1 item was buffered at a time - - expect(received.length).toBe(2); - - // Clean up - await subscription.client.stop(); - } - ); -}); diff --git a/internal-packages/replication/src/stream.ts b/internal-packages/replication/src/stream.ts deleted file mode 100644 index 4ec32a92ab..0000000000 --- a/internal-packages/replication/src/stream.ts +++ /dev/null @@ -1,177 +0,0 @@ -import { createAsyncIterableStreamFromAsyncIterable } from "@trigger.dev/core/v3"; -import { LogicalReplicationClient, LogicalReplicationClientOptions } from "./client.js"; -import type { MessageDelete, MessageInsert, MessageUpdate, PgoutputMessage } from "./pgoutput.js"; - -export interface LogicalReplicationStreamOptions extends LogicalReplicationClientOptions { - onError?: (err: Error) => void; - filterTags?: Array<"insert" | "update" | "delete">; - abortSignal?: AbortSignal; - highWaterMark?: number; -} - -export interface TransactionEvent { - tag: "insert" | "update" | "delete"; - data: T; - raw: 
MessageInsert | MessageUpdate | MessageDelete; -} - -export interface Transaction { - commitLsn: string | null; - commitEndLsn: string | null; - xid: number; - events: TransactionEvent[]; - replicationLagMs: number; -} - -export function createLogicalReplicationStream( - client: LogicalReplicationClient, - highWaterMark?: number, - signal?: AbortSignal -) { - let lastLsn: string | null = null; - let isSubscribed = false; - - const source = new ReadableStream<{ lsn: string; message: PgoutputMessage }>( - { - async start(controller) { - console.log("ReadableStream.start"); - - if (signal) { - signal.addEventListener("abort", () => { - controller.close(); - }); - } - - client.events.on("data", async ({ lsn, log }) => { - console.log("ReadableStream.data"); - lastLsn = lsn; - - if (signal?.aborted) { - return; - } - - if (isRelevantTag(log.tag)) { - controller.enqueue({ lsn, message: log }); - } - - if (typeof controller.desiredSize === "number" && controller.desiredSize <= 0) { - await client.stop(); - } - }); - }, - async cancel() { - console.log("ReadableStream.cancel"); - await client.stop(); - }, - async pull() { - if (!isSubscribed) { - isSubscribed = true; - console.log("ReadableStream.pull"); - await client.subscribe(lastLsn ?? undefined); - } - }, - }, - new CountQueuingStrategy({ highWaterMark: highWaterMark ?? 1 }) - ); - - return createAsyncIterableStreamFromAsyncIterable>(groupByTransaction(source)); -} - -export async function* groupByTransaction( - stream: ReadableStream<{ - lsn: string; - message: PgoutputMessage; - }> -) { - let currentTransaction: Omit, "commitEndLsn" | "replicationLagMs"> & { - commitEndLsn?: string | null; - replicationLagMs?: number; - } = { - commitLsn: null, - xid: 0, - events: [], - }; - for await (const { lsn, message } of stream as AsyncIterable<{ - lsn: string; - message: PgoutputMessage; - }>) { - console.log("groupByTransaction.for await"); - console.log(message); - switch (message.tag) { - case "begin": { - currentTransaction = { - commitLsn: message.commitLsn, - xid: message.xid, - events: [], - }; - break; - } - case "insert": { - currentTransaction.events.push({ - tag: message.tag, - data: message.new as T, - raw: message, - }); - break; - } - case "update": { - currentTransaction.events.push({ - tag: message.tag, - data: message.new as T, - raw: message, - }); - break; - } - case "delete": { - currentTransaction.events.push({ - tag: message.tag, - data: message.old as T, - raw: message, - }); - break; - } - case "commit": { - const replicationLagMs = Date.now() - Number(message.commitTime / 1000n); - currentTransaction.commitEndLsn = message.commitEndLsn; - currentTransaction.replicationLagMs = replicationLagMs; - yield currentTransaction as Transaction; - break; - } - } - } -} - -export function createSubscription(opts: LogicalReplicationStreamOptions) { - const client = new LogicalReplicationClient({ - name: opts.name, - publicationName: opts.publicationName, - slotName: opts.slotName, - pgConfig: opts.pgConfig, - table: opts.table, - redisOptions: opts.redisOptions, - publicationActions: opts.filterTags, - }); - - client.events.on("error", (err) => { - if (opts.onError) opts.onError(err); - }); - - client.events.on("heartbeat", async ({ lsn, shouldRespond }) => { - if (shouldRespond) { - await client.acknowledge(lsn); - } - }); - - const stream = createLogicalReplicationStream(client, opts.highWaterMark, opts.abortSignal); - - return { - stream, - client, - }; -} - -function isRelevantTag(tag: string): tag is "insert" | "update" | "delete" 
| "begin" | "commit" { - return ( - tag === "insert" || tag === "update" || tag === "delete" || tag === "begin" || tag === "commit" - ); -} diff --git a/internal-packages/testcontainers/src/index.ts b/internal-packages/testcontainers/src/index.ts index f136f48fc4..4816d3ba9d 100644 --- a/internal-packages/testcontainers/src/index.ts +++ b/internal-packages/testcontainers/src/index.ts @@ -188,11 +188,17 @@ const clickhouseContainer = async ( }; const clickhouseClient = async ( - { clickhouseContainer }: { clickhouseContainer: StartedClickHouseContainer }, + { clickhouseContainer, task }: { clickhouseContainer: StartedClickHouseContainer } & TaskContext, use: Use ) => { + const testName = task.name; const client = createClient({ url: clickhouseContainer.getConnectionUrl() }); - await use(client); + + try { + await use(client); + } finally { + await logCleanup("clickhouseClient", client.close(), { testName }); + } }; type ClickhouseContext = { From 024c30a748d62f0b3ef302e8e7897ebc7ef7c557 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 12 May 2025 14:34:09 +0100 Subject: [PATCH 30/33] Don't need this code anymore --- apps/webapp/app/v3/services/completeAttempt.server.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/apps/webapp/app/v3/services/completeAttempt.server.ts b/apps/webapp/app/v3/services/completeAttempt.server.ts index 0296937593..618e823630 100644 --- a/apps/webapp/app/v3/services/completeAttempt.server.ts +++ b/apps/webapp/app/v3/services/completeAttempt.server.ts @@ -312,7 +312,6 @@ export class CompleteAttemptService extends BaseService { checkpoint, forceRequeue: isOOMRetry, oomMachine, - error: sanitizedError, }); } @@ -559,7 +558,6 @@ export class CompleteAttemptService extends BaseService { checkpoint, forceRequeue = false, oomMachine, - error, }: { execution: TaskRunExecution; executionRetry: TaskRunExecutionRetry; @@ -570,7 +568,6 @@ export class CompleteAttemptService extends BaseService { forceRequeue?: boolean; /** Setting this will also alter the retry span message */ oomMachine?: MachinePresetName; - error: TaskRunError; }) { const retryAt = new Date(executionRetry.timestamp); From aaf65ccac9724e6272967f82e1946fc8efd2b710 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 12 May 2025 14:52:58 +0100 Subject: [PATCH 31/33] move transaction types into the runs replication service --- .../services/runsReplicationService.server.ts | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index bfa2887a11..7269f76749 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -1,6 +1,12 @@ import type { ClickHouse, RawTaskRunPayloadV1, TaskRunV1 } from "@internal/clickhouse"; import { RedisOptions } from "@internal/redis"; -import { LogicalReplicationClient, Transaction, type PgoutputMessage } from "@internal/replication"; +import { + LogicalReplicationClient, + type MessageDelete, + type MessageInsert, + type MessageUpdate, + type PgoutputMessage, +} from "@internal/replication"; import { Span, startSpan, trace, type Tracer } from "@internal/tracing"; import { Logger, LogLevel } from "@trigger.dev/core/logger"; import { tryCatch } from "@trigger.dev/core/utils"; @@ -10,6 +16,20 @@ import { nanoid } from "nanoid"; import EventEmitter from "node:events"; import pLimit from "p-limit"; +interface TransactionEvent { + tag: "insert" | "update" | 
"delete"; + data: T; + raw: MessageInsert | MessageUpdate | MessageDelete; +} + +interface Transaction { + commitLsn: string | null; + commitEndLsn: string | null; + xid: number; + events: TransactionEvent[]; + replicationLagMs: number; +} + export type RunsReplicationServiceOptions = { clickhouse: ClickHouse; pgConnectionUrl: string; From beabd266c52c88fb306e882cb9d1a115b0819a84 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 12 May 2025 15:35:16 +0100 Subject: [PATCH 32/33] only send spans where there are transaction events --- .../services/runsReplicationService.server.ts | 54 ++++++-------- .../test/runsReplicationService.test.ts | 70 +++++++++++++++++++ 2 files changed, 93 insertions(+), 31 deletions(-) diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts index 7269f76749..b063862bd9 100644 --- a/apps/webapp/app/services/runsReplicationService.server.ts +++ b/apps/webapp/app/services/runsReplicationService.server.ts @@ -7,7 +7,7 @@ import { type MessageUpdate, type PgoutputMessage, } from "@internal/replication"; -import { Span, startSpan, trace, type Tracer } from "@internal/tracing"; +import { startSpan, trace, type Tracer } from "@internal/tracing"; import { Logger, LogLevel } from "@trigger.dev/core/logger"; import { tryCatch } from "@trigger.dev/core/utils"; import { parsePacket } from "@trigger.dev/core/v3/utils/ioSerialization"; @@ -23,6 +23,7 @@ interface TransactionEvent { } interface Transaction { + beginStartTimestamp: number; commitLsn: string | null; commitEndLsn: string | null; xid: number; @@ -70,7 +71,6 @@ export class RunsReplicationService { private _isShuttingDown = false; private _isShutDownComplete = false; private _tracer: Tracer; - private _currentSpan: Span | null = null; private _currentParseDurationMs: number | null = null; private _lastAcknowledgedAt: number | null = null; private _acknowledgeTimeoutMs: number; @@ -208,17 +208,12 @@ export class RunsReplicationService { } this._currentTransaction = { + beginStartTimestamp: Date.now(), commitLsn: message.commitLsn, xid: message.xid, events: [], }; - this._currentSpan = this._tracer.startSpan("handle_transaction", { - attributes: { - "transaction.xid": message.xid, - }, - }); - this._currentParseDurationMs = Number(parseDuration) / 1_000_000; break; @@ -283,11 +278,6 @@ export class RunsReplicationService { if (this._currentParseDurationMs) { this._currentParseDurationMs = this._currentParseDurationMs + Number(parseDuration) / 1_000_000; - - this._currentSpan?.setAttribute( - "transaction.parse_duration_ms", - this._currentParseDurationMs - ); } const replicationLagMs = Date.now() - Number(message.commitTime / 1000n); @@ -303,6 +293,11 @@ export class RunsReplicationService { this.#handleTransaction(transaction); break; } + default: { + this.logger.debug("Unknown message tag", { + pgMessage: message, + }); + } } } @@ -315,19 +310,8 @@ export class RunsReplicationService { }); } - this._currentSpan?.setAttribute("transaction.replication_lag_ms", transaction.replicationLagMs); - this._currentSpan?.setAttribute("transaction.xid", transaction.xid); - - if (transaction.commitEndLsn) { - this._currentSpan?.setAttribute("transaction.commit_end_lsn", transaction.commitEndLsn); - } - - this._currentSpan?.setAttribute("transaction.events", transaction.events.length); - // If there are no events, do nothing if (transaction.events.length === 0) { - this._currentSpan?.end(); - return; } @@ -336,8 +320,6 @@ export class RunsReplicationService 
{ transaction, }); - this._currentSpan?.end(); - return; } @@ -350,10 +332,7 @@ export class RunsReplicationService { // If there are events, we need to handle them const _version = lsnToUInt64(transaction.commitEndLsn); - this._currentSpan?.setAttribute( - "transaction.lsn_to_uint64_ms", - Number(process.hrtime.bigint() - lsnToUInt64Start) / 1_000_000 - ); + const lsnToUInt64DurationMs = Number(process.hrtime.bigint() - lsnToUInt64Start) / 1_000_000; this._concurrentFlushScheduler.addToBatch( transaction.events.map((event) => ({ @@ -363,7 +342,20 @@ export class RunsReplicationService { })) ); - this._currentSpan?.end(); + const currentSpan = this._tracer.startSpan("handle_transaction", { + attributes: { + "transaction.xid": transaction.xid, + "transaction.replication_lag_ms": transaction.replicationLagMs, + "transaction.events": transaction.events.length, + "transaction.commit_end_lsn": transaction.commitEndLsn, + "transaction.parse_duration_ms": this._currentParseDurationMs ?? undefined, + "transaction.lsn_to_uint64_ms": lsnToUInt64DurationMs, + "transaction.version": _version.toString(), + }, + startTime: transaction.beginStartTimestamp, + }); + + currentSpan.end(); } async #acknowledgeLatestTransaction() { diff --git a/apps/webapp/test/runsReplicationService.test.ts b/apps/webapp/test/runsReplicationService.test.ts index 3afbf6dcb3..90802da273 100644 --- a/apps/webapp/test/runsReplicationService.test.ts +++ b/apps/webapp/test/runsReplicationService.test.ts @@ -130,6 +130,76 @@ describe("RunsReplicationService", () => { } ); + containerTest( + "should not produce any handle_transaction spans when no TaskRun events are produced", + async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { + await prisma.$executeRawUnsafe(`ALTER TABLE public."TaskRun" REPLICA IDENTITY FULL;`); + + const clickhouse = new ClickHouse({ + url: clickhouseContainer.getConnectionUrl(), + name: "runs-replication", + }); + + const { tracer, exporter } = createInMemoryTracing(); + + const runsReplicationService = new RunsReplicationService({ + clickhouse, + pgConnectionUrl: postgresContainer.getConnectionUri(), + serviceName: "runs-replication", + slotName: "task_runs_to_clickhouse_v1", + publicationName: "task_runs_to_clickhouse_v1_publication", + redisOptions, + maxFlushConcurrency: 1, + flushIntervalMs: 100, + flushBatchSize: 1, + leaderLockTimeoutMs: 5000, + leaderLockExtendIntervalMs: 1000, + ackIntervalSeconds: 5, + tracer, + }); + + await runsReplicationService.start(); + + const organization = await prisma.organization.create({ + data: { + title: "test", + slug: "test", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test", + slug: "test", + organizationId: organization.id, + externalRef: "test", + }, + }); + + await prisma.runtimeEnvironment.create({ + data: { + slug: "test", + type: "DEVELOPMENT", + projectId: project.id, + organizationId: organization.id, + apiKey: "test", + pkApiKey: "test", + shortcode: "test", + }, + }); + + await setTimeout(1000); + + const spans = exporter.getFinishedSpans(); + + const handleTransactionSpans = spans.filter((span) => span.name === "handle_transaction"); + + expect(handleTransactionSpans.length).toBe(0); + + await runsReplicationService.stop(); + } + ); + containerTest( "should replicate a new TaskRun to ClickHouse using batching insert strategy", async ({ clickhouseContainer, redisOptions, postgresContainer, prisma }) => { From 54665993e5f24732accee39cc29bf6037e232fe2 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: 
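A note on the tracing change in the patch above: the span is now created only once a transaction is known to contain events, and is backdated with an explicit `startTime` so its duration still covers the whole transaction. A minimal OpenTelemetry sketch of a retroactive span (the tracer name and attributes are illustrative):

import { trace } from "@opentelemetry/api";

const tracer = trace.getTracer("example");

const beginStartTimestamp = Date.now(); // captured when the transaction began
// ... parse and buffer the transaction's events ...

// Only now do we know the transaction is worth a span; backdate it.
const span = tracer.startSpan("handle_transaction", {
  startTime: beginStartTimestamp, // OTel accepts epoch milliseconds here
  attributes: { "transaction.events": 3 },
});
span.end(); // ends "now", so duration = now - beginStartTimestamp

This is what lets the new test assert that heartbeat-only activity produces no `handle_transaction` spans at all, while event-carrying transactions keep accurate durations.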
From 54665993e5f24732accee39cc29bf6037e232fe2 Mon Sep 17 00:00:00 2001
From: Eric Allam
Date: Mon, 12 May 2025 21:31:55 +0100
Subject: [PATCH 33/33] A couple of suggested tweaks

---
 apps/webapp/app/services/runsReplicationService.server.ts | 8 +++++++-
 internal-packages/replication/src/pgoutput.ts             | 1 -
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/apps/webapp/app/services/runsReplicationService.server.ts b/apps/webapp/app/services/runsReplicationService.server.ts
index b063862bd9..9299e545f9 100644
--- a/apps/webapp/app/services/runsReplicationService.server.ts
+++ b/apps/webapp/app/services/runsReplicationService.server.ts
@@ -385,7 +385,13 @@ export class RunsReplicationService {
       lastAcknowledgedAt: this._lastAcknowledgedAt,
     });
 
-    await this._replicationClient.acknowledge(this._latestCommitEndLsn);
+    const [ackError] = await tryCatch(
+      this._replicationClient.acknowledge(this._latestCommitEndLsn)
+    );
+
+    if (ackError) {
+      this.logger.error("Error acknowledging transaction", { ackError });
+    }
 
     if (this._isShutDownComplete && this._acknowledgeInterval) {
       clearInterval(this._acknowledgeInterval);
diff --git a/internal-packages/replication/src/pgoutput.ts b/internal-packages/replication/src/pgoutput.ts
index 90b88dab87..0e75a697f4 100644
--- a/internal-packages/replication/src/pgoutput.ts
+++ b/internal-packages/replication/src/pgoutput.ts
@@ -156,7 +156,6 @@ class BinaryReader {
   readTime(): bigint {
     // (POSTGRES_EPOCH_JDATE - UNIX_EPOCH_JDATE) * USECS_PER_DAY == 946684800000000
     const microsSinceUnixEpoch = this.readUint64() + 946684800000000n;
-    // Convert to milliseconds for JS Date compatibility
     return microsSinceUnixEpoch;
   }
 }
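A closing note on the pgoutput tweak: the deleted comment was misleading — `readTime` returns microseconds since the Unix epoch (946684800000000n is the 1970-to-2000 offset in microseconds), and the conversion to milliseconds happens at the call site. For reference, the arithmetic the replication-lag calculation relies on, with a made-up sample value:

// commitTime from pgoutput is microseconds since the Unix epoch, as a bigint.
const commitTime = 1_747_000_000_000_000n;       // example value, in µs
const commitTimeMs = Number(commitTime / 1000n); // 1_747_000_000_000 ms
const replicationLagMs = Date.now() - commitTimeMs;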