diff --git a/.changeset/cuddly-penguins-cross.md b/.changeset/cuddly-penguins-cross.md new file mode 100644 index 0000000000..c4db002564 --- /dev/null +++ b/.changeset/cuddly-penguins-cross.md @@ -0,0 +1,8 @@ +--- +"@trigger.dev/sdk": major +"trigger.dev": major +"@trigger.dev/build": major +"@trigger.dev/core": major +--- + +Release 3.0.0 diff --git a/.changeset/itchy-jars-pay.md b/.changeset/itchy-jars-pay.md new file mode 100644 index 0000000000..ed54cc8972 --- /dev/null +++ b/.changeset/itchy-jars-pay.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/build": patch +--- + +Add ffmpeg build extension diff --git a/.changeset/ninety-countries-swim.md b/.changeset/ninety-countries-swim.md new file mode 100644 index 0000000000..417a6a6e08 --- /dev/null +++ b/.changeset/ninety-countries-swim.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +Add --runtime option to the init CLI command diff --git a/.changeset/old-feet-brush.md b/.changeset/old-feet-brush.md new file mode 100644 index 0000000000..caaaa6a5ec --- /dev/null +++ b/.changeset/old-feet-brush.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +trigger.dev init now adds @trigger.dev/build to devDependencies diff --git a/.changeset/pre.json b/.changeset/pre.json index 92f3c36af5..a9c2724417 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -1,5 +1,5 @@ { - "mode": "pre", + "mode": "exit", "tag": "beta", "initialVersions": { "coordinator": "0.0.1", diff --git a/.changeset/stale-actors-camp.md b/.changeset/stale-actors-camp.md new file mode 100644 index 0000000000..ac5462046a --- /dev/null +++ b/.changeset/stale-actors-camp.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/sdk": patch +--- + +Add triggerAndWait().unwrap() to more easily get at the output or throw the subtask error diff --git a/.changeset/thick-trains-work.md b/.changeset/thick-trains-work.md new file mode 100644 index 0000000000..5d70a22780 --- /dev/null +++ b/.changeset/thick-trains-work.md @@ -0,0 +1,5 @@ +--- +"@trigger.dev/build": patch +--- + 
+Add aptGet build extension to easily add system packages to install diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 1a6a2352a0..535f94f9b5 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -16,9 +16,7 @@ jobs: fail-fast: false matrix: os: [buildjet-8vcpu-ubuntu-2204, windows-latest] - package-manager: ["npm", "pnpm", "yarn"] - env: - YARN_ENABLE_IMMUTABLE_INSTALLS: false + package-manager: ["npm", "pnpm"] steps: - name: ⬇️ Checkout repo uses: actions/checkout@v3 @@ -34,7 +32,6 @@ jobs: uses: buildjet/setup-node@v3 with: node-version: 20.11.1 - cache: "pnpm" - name: 📥 Download deps run: pnpm install --frozen-lockfile --filter trigger.dev... diff --git a/apps/webapp/app/components/SetupCommands.tsx b/apps/webapp/app/components/SetupCommands.tsx index 510e58d777..4071444ee3 100644 --- a/apps/webapp/app/components/SetupCommands.tsx +++ b/apps/webapp/app/components/SetupCommands.tsx @@ -132,7 +132,7 @@ export function TriggerDevStep({ extra }: { extra?: string }) { } // Trigger.dev version 3 setup commands -const v3PackageTag = "beta"; +const v3PackageTag = "latest"; function getApiUrlArg() { const appOrigin = useAppOrigin(); diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.runs.$runParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.runs.$runParam/route.tsx index 11524ce988..349bad884c 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.runs.$runParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.v3.$projectParam.runs.$runParam/route.tsx @@ -1147,7 +1147,7 @@ function ConnectedDevWarning() { Runs usually start within 1 second in{" "} . 
Check you're running the - CLI: npx trigger.dev@beta dev + CLI: npx trigger.dev@latest dev diff --git a/docs/cli-init-commands.mdx b/docs/cli-init-commands.mdx index f83b071390..5329404ae0 100644 --- a/docs/cli-init-commands.mdx +++ b/docs/cli-init-commands.mdx @@ -9,27 +9,32 @@ Run the command like this: ```bash npm -npx trigger.dev@beta init +npx trigger.dev@latest init ``` ```bash pnpm -pnpm dlx trigger.dev@beta init +pnpm dlx trigger.dev@latest init ``` ```bash yarn -yarn dlx trigger.dev@beta init +yarn dlx trigger.dev@latest init ``` ## Options + + By default, the init command assumes you are using TypeScript. Use this flag to initialize a + project that uses JavaScript. + + The project ref to use when initializing the project. - The version of the `@trigger.dev/sdk` package to install. Defaults to `3.0.0-beta.56`. + The version of the `@trigger.dev/sdk` package to install. Defaults to `latest`. @@ -53,7 +58,8 @@ yarn dlx trigger.dev@beta init - The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This does not affect the log level of your trigger.dev tasks. Defaults to "log". + The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This + does not affect the log level of your trigger.dev tasks. Defaults to "log". @@ -64,4 +70,4 @@ yarn dlx trigger.dev@beta init Shows the help information for the command. 
- \ No newline at end of file + diff --git a/docs/cli-list-profiles-commands.mdx b/docs/cli-list-profiles-commands.mdx index 416347dd75..4687811ae5 100644 --- a/docs/cli-list-profiles-commands.mdx +++ b/docs/cli-list-profiles-commands.mdx @@ -9,15 +9,15 @@ Run the command like this: ```bash npm -npx trigger.dev@beta list-profiles +npx trigger.dev@latest list-profiles ``` ```bash pnpm -pnpm dlx trigger.dev@beta list-profiles +pnpm dlx trigger.dev@latest list-profiles ``` ```bash yarn -yarn dlx trigger.dev@beta list-profiles +yarn dlx trigger.dev@latest list-profiles ``` @@ -25,16 +25,16 @@ yarn dlx trigger.dev@beta list-profiles ## Options - The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This does not affect the log level of your trigger.dev tasks. Defaults to `log`. + The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This + does not affect the log level of your trigger.dev tasks. Defaults to `log`. Opt-out of sending telemetry data. - ## Standard options Shows the help information for the command. - \ No newline at end of file + diff --git a/docs/cli-login-commands.mdx b/docs/cli-login-commands.mdx index 0850557443..d20f9338d7 100644 --- a/docs/cli-login-commands.mdx +++ b/docs/cli-login-commands.mdx @@ -9,15 +9,15 @@ Run the command like this: ```bash npm -npx trigger.dev@beta login +npx trigger.dev@latest login ``` ```bash pnpm -pnpm dlx trigger.dev@beta login +pnpm dlx trigger.dev@latest login ``` ```bash yarn -yarn dlx trigger.dev@beta login +yarn dlx trigger.dev@latest login ``` @@ -33,7 +33,8 @@ yarn dlx trigger.dev@beta login - Sets the CLI log level. Available options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This setting doesn't affect the log level of your trigger.dev tasks. The default is `log`. + Sets the CLI log level. Available options are `debug`, `info`, `log`, `warn`, `error`, and `none`. 
+ This setting doesn't affect the log level of your trigger.dev tasks. The default is `log`. @@ -48,4 +49,4 @@ yarn dlx trigger.dev@beta login Shows the help information for the command. - \ No newline at end of file + diff --git a/docs/cli-logout-commands.mdx b/docs/cli-logout-commands.mdx index 5031ce33fc..5b28eb2607 100644 --- a/docs/cli-logout-commands.mdx +++ b/docs/cli-logout-commands.mdx @@ -9,15 +9,15 @@ Run the command like this: ```bash npm -npx trigger.dev@beta logout +npx trigger.dev@latest logout ``` ```bash pnpm -pnpm dlx trigger.dev@beta logout +pnpm dlx trigger.dev@latest logout ``` ```bash yarn -yarn dlx trigger.dev@beta logout +yarn dlx trigger.dev@latest logout ``` @@ -33,16 +33,16 @@ yarn dlx trigger.dev@beta logout - The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This does not affect the log level of your trigger.dev tasks. Defaults to `log`. + The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This + does not affect the log level of your trigger.dev tasks. Defaults to `log`. Opt-out of sending telemetry data. - ## Standard options Shows the help information for the command. - \ No newline at end of file + diff --git a/docs/cli-update-commands.mdx b/docs/cli-update-commands.mdx index e4b63145f6..7a07b9d9b2 100644 --- a/docs/cli-update-commands.mdx +++ b/docs/cli-update-commands.mdx @@ -9,15 +9,15 @@ Run the command like this: ```bash npm -npx trigger.dev@beta update +npx trigger.dev@latest update ``` ```bash pnpm -pnpm dlx trigger.dev@beta update +pnpm dlx trigger.dev@latest update ``` ```bash yarn -yarn dlx trigger.dev@beta update +yarn dlx trigger.dev@latest update ``` @@ -25,16 +25,16 @@ yarn dlx trigger.dev@beta update ## Options - The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This does not affect the log level of your trigger.dev tasks. Defaults to `log`. + The CLI log level to use. 
Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This + does not affect the log level of your trigger.dev tasks. Defaults to `log`. Opt-out of sending telemetry data. - ## Standard options Shows the help information for the command. - \ No newline at end of file + diff --git a/docs/cli-whoami-commands.mdx b/docs/cli-whoami-commands.mdx index 284b32ecde..1e51d3f733 100644 --- a/docs/cli-whoami-commands.mdx +++ b/docs/cli-whoami-commands.mdx @@ -9,15 +9,15 @@ Run the command like this: ```bash npm -npx trigger.dev@beta whoami +npx trigger.dev@latest whoami ``` ```bash pnpm -pnpm dlx trigger.dev@beta whoami +pnpm dlx trigger.dev@latest whoami ``` ```bash yarn -yarn dlx trigger.dev@beta whoami +yarn dlx trigger.dev@latest whoami ``` @@ -33,16 +33,16 @@ yarn dlx trigger.dev@beta whoami - The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This does not affect the log level of your trigger.dev tasks. Defaults to `log`. + The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This + does not affect the log level of your trigger.dev tasks. Defaults to `log`. Opt-out of sending telemetry data. - ## Standard options Shows the help information for the command. - \ No newline at end of file + diff --git a/docs/config/config-file.mdx b/docs/config/config-file.mdx new file mode 100644 index 0000000000..7c898e494b --- /dev/null +++ b/docs/config/config-file.mdx @@ -0,0 +1,631 @@ +--- +title: "The trigger.config.ts file" +sidebarTitle: "Configuration" +description: "This file is used to configure your project and how it's built." +--- + +import BundlePackages from "/snippets/bundle-packages.mdx"; + +The `trigger.config.ts` file is used to configure your Trigger.dev project. It is a TypeScript file at the root of your project that exports a default configuration object. 
Here's an example: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //Your project ref (you can see it on the Project settings page in the dashboard) + project: "proj_gtcwttqhhtlasxgfuhxs", + //The paths for your trigger folders + dirs: ["./trigger"], + retries: { + //If you want to retry a task in dev mode (when using the CLI) + enabledInDev: false, + //the default retry settings. Used if you don't specify on a task. + default: { + maxAttempts: 3, + minTimeoutInMs: 1000, + maxTimeoutInMs: 10000, + factor: 2, + randomize: true, + }, + }, +}); +``` + +The config file handles a lot of things, like: + +- Specifying where your trigger tasks are located using the `dirs` option. +- Setting the default retry settings. +- Configuring OpenTelemetry instrumentations. +- Customizing the build process. +- Adding global task lifecycle functions. + + + The config file is bundled with your project, so code imported in the config file is also bundled, + which can have an effect on build times and cold start duration. One important qualification is + anything defined in the `build` config is automatically stripped out of the config file, and + imports used inside build config will be tree-shaken out. 
+ + +## Lifecycle functions + +You can add lifecycle functions to get notified when any task starts, succeeds, or fails using `onStart`, `onSuccess` and `onFailure`: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + onSuccess: async (payload, output, { ctx }) => { + console.log("Task succeeded", ctx.task.id); + }, + onFailure: async (payload, error, { ctx }) => { + console.log("Task failed", ctx.task.id); + }, + onStart: async (payload, { ctx }) => { + console.log("Task started", ctx.task.id); + }, + init: async (payload, { ctx }) => { + console.log("I run before any task is run"); + }, +}); +``` + +Read more about task lifecycle functions in the [tasks overview](/tasks-overview). + +## Instrumentations + +We use OpenTelemetry (OTEL) for our run logs. This means you get a lot of information about your tasks with no effort. But you probably want to add more information to your logs. For example, here's all the Prisma calls automatically logged: + +![The run log](/images/auto-instrumentation.png) + +Here we add Prisma and OpenAI instrumentations to your `trigger.config.ts` file. + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { PrismaInstrumentation } from "@prisma/instrumentation"; +import { OpenAIInstrumentation } from "@traceloop/instrumentation-openai"; + +export default defineConfig({ + //..other stuff + instrumentations: [new PrismaInstrumentation(), new OpenAIInstrumentation()], +}); +``` + +There is a [huge library of instrumentations](https://opentelemetry.io/ecosystem/registry/?language=js) you can easily add to your project like this. + +Some ones we recommend: + +| Package | Description | +| --------------------------------------- | ------------------------------------------------------------------------------------------------------------------------ | +| `@opentelemetry/instrumentation-undici` | Logs all fetch calls (inc. 
Undici fetch) | +| `@opentelemetry/instrumentation-fs` | Logs all file system calls | +| `@opentelemetry/instrumentation-http` | Logs all HTTP calls | +| `@prisma/instrumentation` | Logs all Prisma calls, you need to [enable tracing](https://github.com/prisma/prisma/tree/main/packages/instrumentation) | +| `@traceloop/instrumentation-openai` | Logs all OpenAI calls | + +## Runtime + +We currently only officially support the `node` runtime, but you can try our experimental `bun` runtime by setting the `runtime` option in your config file: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + runtime: "bun", +}); +``` + +See our [Bun guide](/guides/bun) for more information. + +## Default machine + +You can specify the default machine for all tasks in your project: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + defaultMachine: "large-1x", +}); +``` + +See our [machines documentation](/machines) for more information. + +## Log level + +You can set the log level for your project: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + logLevel: "debug", +}); +``` + +The `logLevel` only determines which logs are sent to the Trigger.dev instance when using the `logger` API. All `console` based logs are always sent. + +## Build configuration + +You can customize the build process using the `build` option: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + // Don't bundle these packages + external: ["header-generator"], + }, +}); +``` + + + The `trigger.config.ts` file is included in the bundle, but with the `build` configuration + stripped out. 
This means any imports only used inside the `build` configuration are also removed + from the final bundle. + + +### External + +All code is bundled by default, but you can exclude some packages from the bundle using the `external` option: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + external: ["header-generator"], + }, +}); +``` + +When a package is excluded from the bundle, it will be added to a dynamically generated package.json file in the build directory. The version of the package will be the same as the version found in your `node_modules` directory. + +Each entry in the external should be a package name, not necessarily the import path. For example, if you want to exclude the `ai` package, but you are importing `ai/rsc`, you should just include `ai` in the `external` array: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + external: ["ai"], + }, +}); +``` + + + Any packages that install or build a native binary should be added to external, as native binaries + cannot be bundled. For example, `re2`, `sharp`, and `sqlite3` should be added to external. + + +### JSX + +You can customize the `jsx` options that are passed to `esbuild` using the `jsx` option: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + jsx: { + // Use the Fragment component instead of React.Fragment + fragment: "Fragment", + // Use the h function instead of React.createElement + factory: "h", + // Turn off automatic runtime + automatic: false, + }, + }, +}); +``` + +By default we enable [esbuild's automatic JSX runtime](https://esbuild.github.io/content-types/#auto-import-for-jsx) which means you don't need to import `React` in your JSX files. You can disable this by setting `automatic` to `false`. 
+ +See the [esbuild JSX documentation](https://esbuild.github.io/content-types/#jsx) for more information. + +### Conditions + +You can add custom [import conditions](https://esbuild.github.io/api/#conditions) to your build using the `conditions` option: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + conditions: ["react-server"], + }, +}); +``` + +These conditions affect how imports are resolved during the build process. For example, the `react-server` condition will resolve `ai/rsc` to the server version of the `ai/rsc` export. + +Custom conditions will also be passed to the `node` runtime when running your tasks. + +### Extensions + +Build extensions allow you to hook into the build system and customize the build process or the resulting bundle and container image (in the case of deploying). You can use pre-built extensions by installing the `@trigger.dev/build` package into your `devDependencies`, or you can create your own. + +#### additionalFiles + +Import the `additionalFiles` build extension and use it in your `trigger.config.ts` file: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { additionalFiles } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [ + additionalFiles({ files: ["wrangler/wrangler.toml", "./assets/**", "./fonts/**"] }), + ], + }, +}); +``` + +This will copy the files specified in the `files` array to the build directory. The `files` array can contain globs. The output paths will match the path of the file, relative to the root of the project. 
+ +The root of the project is the directory that contains the trigger.config.ts file + +#### `additionalPackages` + +Import the `additionalPackages` build extension and use it in your `trigger.config.ts` file: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { additionalPackages } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [additionalPackages({ packages: ["wrangler"] })], + }, +}); +``` + +This allows you to include additional packages in the build that are not automatically included via imports. This is useful if you want to install a package that includes a CLI tool that you want to invoke in your tasks via `exec`. We will try to automatically resolve the version of the package but you can specify the version by using the `@` symbol: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [additionalPackages({ packages: ["wrangler@1.19.0"] })], + }, +}); +``` + +#### `emitDecoratorMetadata` + +If you need support for the `emitDecoratorMetadata` typescript compiler option, import the `emitDecoratorMetadata` build extension and use it in your `trigger.config.ts` file: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { emitDecoratorMetadata } from "@trigger.dev/build/extensions/typescript"; + +export default defineConfig({ + project: "", + build: { + extensions: [emitDecoratorMetadata()], + }, +}); +``` + +This is usually required if you are using certain ORMs, like TypeORM, that require this option to be enabled. It's not enabled by default because there is a performance cost to enabling it. + + + emitDecoratorMetadata works by hooking into the esbuild bundle process and using the TypeScript + compiler API to compile files where we detect the use of decorators. 
This means you must have + `emitDecoratorMetadata` enabled in your `tsconfig.json` file, as well as `typescript` installed in + your `devDependencies`. + + +#### Prisma + +If you are using Prisma, you should use the prisma build extension. + +- Automatically handles copying prisma files to the build directory. +- Generates the prisma client during the deploy process +- Optionally will migrate the database during the deploy process +- Support for TypedSQL and multiple schema files. + +You can use it for a simple Prisma setup like this: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +export default defineConfig({ + build: { + extensions: [ + prismaExtension({ + version: "5.19.0", // optional, we'll automatically detect the version if not provided + schema: "prisma/schema.prisma", + }), + ], + }, +}); +``` + + + This does not have any effect when running the `dev` command, only when running the `deploy` + command. 
+ + +If you want to also run migrations during the build process, you can pass in the `migrate` option: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + migrate: true, + directUrlEnvVarName: "DATABASE_URL_UNPOOLED", // optional - the name of the environment variable that contains the direct database URL if you are using a direct database URL + }), + ], + }, +}); +``` + +If you have multiple `generator` statements defined in your schema file, you can pass in the `clientGenerator` option to specify the `prisma-client-js` generator, which will prevent other generators from being generated: + + + +```prisma schema.prisma +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + directUrl = env("DATABASE_URL_UNPOOLED") +} + +// We only want to generate the prisma-client-js generator +generator client { + provider = "prisma-client-js" +} + +generator kysely { + provider = "prisma-kysely" + output = "../../src/kysely" + enumFileName = "enums.ts" + fileName = "types.ts" +} +``` + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + clientGenerator: "client", + }), + ], + }, +}); +``` + + + +If you are using [TypedSQL](https://www.prisma.io/typedsql), you'll need to enable it via the `typedSql` option: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + typedSql: true, + }), + ], + }, +}); +``` + + + The `prismaExtension` will inject the `DATABASE_URL` environment variable into the 
build process + when running the `deploy` command. This means the CLI needs to have `process.env.DATABASE_URL` set + at the time of calling the `deploy` command. You can do this via a `.env` file and passing the + `--env-file .env` option to the deploy command or via shell environment variables. This goes for direct database URLs as well. + +These environment variables are only used during the build process and are not embedded in the final image. + + + +#### syncEnvVars + +The `syncEnvVars` build extension replaces the deprecated `resolveEnvVars` export. Check out our [syncEnvVars documentation](/deploy-environment-variables#sync-env-vars-from-another-service) for more information. + +#### audioWaveform + +Previously, we installed [Audio Waveform](https://github.com/bbc/audiowaveform) in the build image. That's been moved to a build extension: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { audioWaveform } from "@trigger.dev/build/extensions/audioWaveform"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [audioWaveform()], // uses verson 1.1.0 of audiowaveform by default + }, +}); +``` + +#### ffmpeg + +You can add the `ffmpeg` build extension to your build process: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { ffmpeg } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [ffmpeg()], + }, +}); +``` + +By default, this will install the version of `ffmpeg` that is available in the Debian package manager. 
If you need a specific version, you can pass in the version as an argument: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { ffmpeg } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [ffmpeg({ version: "6.0-4" })], + }, +}); +``` + +This extension will also add the `FFMPEG_PATH` and `FFPROBE_PATH` to your environment variables, making it easy to use popular ffmpeg libraries like `fluent-ffmpeg`. + +#### esbuild plugins + +You can easily add existing or custom esbuild plugins to your build process using the `esbuildPlugin` extension: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { esbuildPlugin } from "@trigger.dev/build/extensions"; +import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + esbuildPlugin( + sentryEsbuildPlugin({ + org: process.env.SENTRY_ORG, + project: process.env.SENTRY_PROJECT, + authToken: process.env.SENTRY_AUTH_TOKEN, + }), + // optional - only runs during the deploy command, and adds the plugin to the end of the list of plugins + { placement: "last", target: "deploy" } + ), + ], + }, +}); +``` + +#### aptGet + +You can install system packages into the deployed image using the `aptGet` extension: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { aptGet } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [aptGet({ packages: ["ffmpeg"] })], + }, +}); +``` + +If you want to install a specific version of a package, you can specify the version like this: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [aptGet({ packages: ["ffmpeg=6.0-4"] })], + }, +}); +``` + +#### Custom extensions + +You can create your own extensions to further customize the build process. 
Extensions are an object with a `name` and zero or more lifecycle hooks (`onBuildStart` and `onBuildComplete`) that allow you to modify the `BuildContext` object that is passed to the build process through adding layers. For example, this is how the `aptGet` extension is implemented: + +```ts +import { BuildExtension } from "@trigger.dev/core/v3/build"; + +export type AptGetOptions = { + packages: string[]; +}; + +export function aptGet(options: AptGetOptions): BuildExtension { + return { + name: "aptGet", + onBuildComplete(context) { + if (context.target === "dev") { + return; + } + + context.logger.debug("Adding apt-get layer", { + pkgs: options.packages, + }); + + context.addLayer({ + id: "apt-get", + image: { + pkgs: options.packages, + }, + }); + }, + }; +} +``` + +Instead of creating this function and worrying about types, you can define an extension inline in your `trigger.config.ts` file: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + //..other stuff + build: { + extensions: [ + { + name: "aptGet", + onBuildComplete(context) { + if (context.target === "dev") { + return; + } + + context.logger.debug("Adding apt-get layer", { + pkgs: ["ffmpeg"], + }); + + context.addLayer({ + id: "apt-get", + image: { + pkgs: ["ffmpeg"], + }, + }); + }, + }, + ], + }, +}); +``` + +We'll be expanding the documentation on how to create custom extensions in the future, but for now you are encouraged to look at the existing extensions in the `@trigger.dev/build` package for inspiration, which you can see in our repo [here](https://github.com/triggerdotdev/trigger.dev/tree/main/packages/build/src/extensions) diff --git a/docs/config/extensions/custom.mdx b/docs/config/extensions/custom.mdx new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/config/extensions/esbuild-plugins.mdx b/docs/config/extensions/esbuild-plugins.mdx new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/docs/config/extensions/overview.mdx b/docs/config/extensions/overview.mdx new file mode 100644 index 0000000000..4d5e406a5f --- /dev/null +++ b/docs/config/extensions/overview.mdx @@ -0,0 +1,5 @@ +--- +title: "Overview" +sidebarTitle: "Overview" +description: "This file is used to configure your project and how it's bundled." +--- diff --git a/docs/config/extensions/prisma.mdx b/docs/config/extensions/prisma.mdx new file mode 100644 index 0000000000..79f7cf9522 --- /dev/null +++ b/docs/config/extensions/prisma.mdx @@ -0,0 +1,5 @@ +--- +title: "Prisma" +sidebarTitle: "Prisma" +description: "This file is used to configure your project and how it's bundled." +--- diff --git a/docs/deploy-environment-variables.mdx b/docs/deploy-environment-variables.mdx index 9b18d5058b..d9d510bd8a 100644 --- a/docs/deploy-environment-variables.mdx +++ b/docs/deploy-environment-variables.mdx @@ -74,8 +74,8 @@ You can use our SDK to get and manipulate environment variables. You can also ea We have a complete set of SDK functions (and REST API) you can use to directly manipulate environment variables. -| Function | Description | -| ----------------------------------------------------- | ----------------------------------------------------------- | +| Function | Description | +| -------------------------------------------------- | ----------------------------------------------------------- | | [envvars.list()](/management/envvars/list) | List all environment variables | | [envvars.upload()](/management/envvars/import) | Upload multiple env vars. You can override existing values. | | [envvars.create()](/management/envvars/create) | Create a new environment variable | @@ -85,93 +85,91 @@ We have a complete set of SDK functions (and REST API) you can use to directly m ### Sync env vars from another service -You could use the SDK functions above but it's much easier to use our `resolveEnvVars` function in your `trigger.config` file. 
+You could use the SDK functions above but it's much easier to use our `syncEnvVars` build extension in your `trigger.config` file. -In this example we're using env vars from [Infisical](https://infisical.com). + + To use the `syncEnvVars` build extension, you should first install the `@trigger.dev/build` + package into your devDependencies. + -```ts /trigger.config.ts -import type { TriggerConfig, ResolveEnvironmentVariablesFunction } from "@trigger.dev/sdk/v3"; - -//This runs when you run the deploy command or the dev command -export const resolveEnvVars: ResolveEnvironmentVariablesFunction = async ({ - //the project ref (starting with "proj_") - projectRef, - //any existing env vars from a .env file or Trigger.dev - env, - //"dev", "staging", or "prod" - environment, -}) => { - //the existing environment variables from Trigger.dev (or your local .env file) - if (env.INFISICAL_CLIENT_ID === undefined || env.INFISICAL_CLIENT_SECRET === undefined) { - //returning undefined won't modify the existing env vars - return; - } - - const client = new InfisicalClient({ - clientId: env.INFISICAL_CLIENT_ID, - clientSecret: env.INFISICAL_CLIENT_SECRET, - }); - - const secrets = await client.listSecrets({ - environment, - projectId: env.INFISICAL_PROJECT_ID!, - }); - - return { - variables: secrets.map((secret) => ({ - name: secret.secretKey, - value: secret.secretValue, - })), - // this defaults to true - // override: true, - }; -}; +In this example we're using env vars from [Infisical](https://infisical.com). 
-//the rest of your config file -export const config: TriggerConfig = { - project: "proj_1234567890", - //etc -}; +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { syncEnvVars } from "@trigger.dev/build/extensions/core"; +import { InfisicalClient } from "@infisical/sdk"; + +export default defineConfig({ + build: { + extensions: [ + syncEnvVars(async (ctx) => { + const client = new InfisicalClient({ + clientId: process.env.INFISICAL_CLIENT_ID, + clientSecret: process.env.INFISICAL_CLIENT_SECRET, + }); + + const secrets = await client.listSecrets({ + environment: ctx.environment, + projectId: process.env.INFISICAL_PROJECT_ID!, + }); + + return secrets.map((secret) => ({ + name: secret.secretKey, + value: secret.secretValue, + })); + }), + ], + }, +}); ``` -#### Local development - -When you [develop locally](/cli-dev) `resolveEnvVars()` will inject the env vars from [Infisical](https://infisical.com) into your local `process.env`. - #### Deploy When you run the [CLI deploy command](/cli-deploy) directly or using [GitHub Actions](/github-actions) it will sync the environment variables from [Infisical](https://infisical.com) to Trigger.dev. This means they'll appear on the Environment Variables page so you can confirm that it's worked. This means that you need to redeploy your Trigger.dev tasks if you change the environment variables in [Infisical](https://infisical.com). -### The variables return type + + The `process.env.INFISICAL_CLIENT_ID`, `process.env.INFISICAL_CLIENT_SECRET` and + `process.env.INFISICAL_PROJECT_ID` will need to be supplied to the `deploy` CLI command. You can + do this via the `--env-file .env` flag or by setting them as environment variables in your + terminal. + + +#### Dev -You can return `variables` as an object with string keys and values, or an array of names + values. +`syncEnvVars` does not have any effect when running the `dev` command locally. 
If you want to inject environment variables from another service into your local environment you can do so via a `.env` file or just supplying them as environment variables in your terminal. Most services will have a CLI tool that allows you to run a command with environment variables set: + +```sh +infisical run -- npx trigger.dev@latest dev +``` + +Any environment variables set in the CLI command will be available to your local Trigger.dev tasks. + +### The syncEnvVars callback return type + +You can return env vars as an object with string keys and values, or an array of names + values. ```ts return { - variables: { - MY_ENV_VAR: "my value", - MY_OTHER_ENV_VAR: "my other value", - }, + MY_ENV_VAR: "my value", + MY_OTHER_ENV_VAR: "my other value", }; ``` or ```ts -return { - variables: [ - { - name: "MY_ENV_VAR", - value: "my value", - }, - { - name: "MY_OTHER_ENV_VAR", - value: "my other value", - }, - ], -}; +return [ + { + name: "MY_ENV_VAR", + value: "my value", + }, + { + name: "MY_OTHER_ENV_VAR", + value: "my other value", + }, +]; ``` This should mean that for most secret services you won't need to convert the data into a different format. 
@@ -184,11 +182,11 @@ Securely pass a Google credential JSON file to your Trigger.dev task using envir - In your terminal, run the following command and copy the resulting base64 string: +In your terminal, run the following command and copy the resulting base64 string: - ``` - base64 path/to/your/service-account-file.json - ``` +``` +base64 path/to/your/service-account-file.json +``` @@ -207,13 +205,15 @@ GOOGLE_CREDENTIALS_BASE64="" Add the following code to your Trigger.dev task: ```ts -import { google } from 'googleapis'; +import { google } from "googleapis"; -const credentials = JSON.parse(Buffer.from(process.env.GOOGLE_CREDENTIALS_BASE64, 'base64').toString('utf8')); +const credentials = JSON.parse( + Buffer.from(process.env.GOOGLE_CREDENTIALS_BASE64, "base64").toString("utf8") +); const auth = new google.auth.GoogleAuth({ credentials, - scopes: ['https://www.googleapis.com/auth/cloud-platform'], + scopes: ["https://www.googleapis.com/auth/cloud-platform"], }); const client = await auth.getClient(); @@ -227,4 +227,4 @@ You can now use the `client` object to make authenticated requests to Google API - \ No newline at end of file + diff --git a/docs/github-actions.mdx b/docs/github-actions.mdx index 1a9206ea39..385459d349 100644 --- a/docs/github-actions.mdx +++ b/docs/github-actions.mdx @@ -41,7 +41,7 @@ jobs: env: TRIGGER_ACCESS_TOKEN: ${{ secrets.TRIGGER_ACCESS_TOKEN }} run: | - npx trigger.dev@beta deploy + npx trigger.dev@latest deploy ``` ```yaml .github/workflows/release-trigger-staging.yml @@ -70,7 +70,7 @@ jobs: env: TRIGGER_ACCESS_TOKEN: ${{ secrets.TRIGGER_ACCESS_TOKEN }} run: | - npx trigger.dev@beta deploy --env staging + npx trigger.dev@latest deploy --env staging ``` @@ -81,29 +81,36 @@ If you already have a GitHub action file, you can just add the final step "🚀 -Go to your profile page and click on the ["Personal Access Tokens"](https://cloud.trigger.dev/account/tokens) tab. 
+ + Go to your profile page and click on the ["Personal Access + Tokens"](https://cloud.trigger.dev/account/tokens) tab. + -Click on 'Settings' -> 'Secrets and variables' -> 'Actions' -> 'New repository secret' + + Click on 'Settings' -> 'Secrets and variables' -> 'Actions' -> 'New repository secret' + -Add the name `TRIGGER_ACCESS_TOKEN` and the value of your access token. ![Add TRIGGER_ACCESS_TOKEN in GitHub](/images/github-access-token.png) + + Add the name `TRIGGER_ACCESS_TOKEN` and the value of your access token. ![Add TRIGGER_ACCESS_TOKEN + in GitHub](/images/github-access-token.png) + - - ## Version pinning -The CLI and `@trigger.dev/*` package versions need to be in sync, otherwise there will be errors and unpredictable behavior. Hence, the `deploy` command will automatically fail during CI on any version mismatches. +The CLI and `@trigger.dev/*` package versions need to be in sync with the `trigger.dev` CLI, otherwise there will be errors and unpredictable behavior. Hence, the `deploy` command will automatically fail during CI on any version mismatches. Tip: add the deploy command to your `package.json` file to keep versions managed in the same place. 
For example: ```json { "scripts": { - "deploy:trigger-prod": "npx trigger.dev@3.0.0-beta.34 deploy", - "deploy:trigger": "npx trigger.dev@3.0.0-beta.34 deploy --env staging" + "deploy:trigger-prod": "npx trigger.dev@3.0.0 deploy", + "deploy:trigger": "npx trigger.dev@3.0.0 deploy --env staging" } } ``` + Your workflow file will follow the version specified in the `package.json` script, like so: ```yaml .github/workflows/release-trigger.yml diff --git a/docs/guides/bun.mdx b/docs/guides/bun.mdx new file mode 100644 index 0000000000..8efea3f4d0 --- /dev/null +++ b/docs/guides/bun.mdx @@ -0,0 +1,113 @@ +--- +title: "Bun guide" +sidebarTitle: "Bun" +description: "This guide will show you how to set up Trigger.dev with Bun" +icon: "js" +--- + +import Prerequisites from "/snippets/framework-prerequisites.mdx"; +import CliRunTestStep from "/snippets/step-run-test.mdx"; +import CliViewRunStep from "/snippets/step-view-run.mdx"; + +We now have experimental support for Bun. This guide will show you how to set up Trigger.dev in your existing Bun project, test an example task, and view the run. + + + The trigger.dev CLI does not yet support Bun. So you will need to run the CLI using Node.js. But + Bun will still be used to execute your tasks, even in the `dev` environment. + + + + +## Initial setup + + + + +The easiest way to get started is to use the CLI. It will add Trigger.dev to your existing project, create a `/trigger` folder and give you an example task. + +Run this command in the root of your project to get started: + + + +```bash npm +npx trigger.dev@latest init --runtime bun +``` + +```bash pnpm +pnpm dlx trigger.dev@latest init --runtime bun +``` + +```bash yarn +yarn dlx trigger.dev@latest init --runtime bun +``` + + + +It will do a few things: + +1. Log you into the CLI if you're not already logged in. +2. Create a `trigger.config.ts` file in the root of your project. +3. Ask where you'd like to create the `/trigger` directory. +4.
Create the `/src/trigger` directory with an example task, `/src/trigger/example.[ts/js]`. + +Install the "Hello World" example task when prompted. We'll use this task to test the setup. + + + + + + Open the `/src/trigger/example.ts` file and replace the contents with the following: + + ```ts example.ts + import { Database } from "bun:sqlite"; + import { task } from "@trigger.dev/sdk/v3"; + + export const bunTask = task({ + id: "bun-task", + run: async (payload: { query: string }) => { + const db = new Database(":memory:"); + const query = db.query("select 'Hello world' as message;"); + console.log(query.get()); // => { message: "Hello world" } + + return { + message: "Query executed", + }; + }, + }); + + ``` + + + + + +The CLI `dev` command runs a server for your tasks. It watches for changes in your `/trigger` directory and communicates with the Trigger.dev platform to register your tasks, perform runs, and send data back and forth. + +It can also update your `@trigger.dev/*` packages to prevent version mismatches and failed deploys. You will always be prompted first. + + + +```bash npm +npx trigger.dev@latest dev +``` + +```bash pnpm +pnpm dlx trigger.dev@latest dev +``` + +```bash yarn +yarn dlx trigger.dev@latest dev +``` + + + + + + + + + + +## Known issues + +- Certain OpenTelemetry instrumentation will not work with Bun, because Bun does not support Node's `register` hook. This means that some libraries that rely on this hook will not work with Bun. 
diff --git a/docs/guides/frameworks/nextjs.mdx b/docs/guides/frameworks/nextjs.mdx index 7dea49af05..50c96fbce2 100644 --- a/docs/guides/frameworks/nextjs.mdx +++ b/docs/guides/frameworks/nextjs.mdx @@ -5,16 +5,16 @@ description: "This guide will show you how to setup Trigger.dev in your existing icon: "N" --- -import Prerequisites from '/snippets/framework-prerequisites.mdx'; -import CliInitStep from '/snippets/step-cli-init.mdx'; -import CliDevStep from '/snippets/step-cli-dev.mdx'; -import CliRunTestStep from '/snippets/step-run-test.mdx'; -import CliViewRunStep from '/snippets/step-view-run.mdx'; -import UsefulNextSteps from '/snippets/useful-next-steps.mdx'; -import TriggerTaskNextjs from '/snippets/trigger-tasks-nextjs.mdx'; -import NextjsTroubleshootingMissingApiKey from '/snippets/nextjs-missing-api-key.mdx'; -import NextjsTroubleshootingButtonSyntax from '/snippets/nextjs-button-syntax.mdx'; -import WorkerFailedToStartWhenRunningDevCommand from '/snippets/worker-failed-to-start.mdx'; +import Prerequisites from "/snippets/framework-prerequisites.mdx"; +import CliInitStep from "/snippets/step-cli-init.mdx"; +import CliDevStep from "/snippets/step-cli-dev.mdx"; +import CliRunTestStep from "/snippets/step-run-test.mdx"; +import CliViewRunStep from "/snippets/step-view-run.mdx"; +import UsefulNextSteps from "/snippets/useful-next-steps.mdx"; +import TriggerTaskNextjs from "/snippets/trigger-tasks-nextjs.mdx"; +import NextjsTroubleshootingMissingApiKey from "/snippets/nextjs-missing-api-key.mdx"; +import NextjsTroubleshootingButtonSyntax from "/snippets/nextjs-button-syntax.mdx"; +import WorkerFailedToStartWhenRunningDevCommand from "/snippets/worker-failed-to-start.mdx"; This guide can be followed for both App and Pages router as well as Server Actions. 
@@ -91,11 +91,11 @@ Here are the steps to trigger your task in the Next.js App and Pages router and - + Create an `actions.ts` file in the `app/api` directory and add this code which imports your `helloWorldTask()` task. Make sure to include `"use server";` at the top of the file. - + ```ts app/api/actions.ts "use server"; @@ -120,7 +120,7 @@ Here are the steps to trigger your task in the Next.js App and Pages router and ``` - + For the purposes of this guide, we'll create a button with an `onClick` event that triggers your task. We'll add this to the `page.tsx` file so we can trigger the task by clicking the button. Make sure to import your task and include `"use client";` at the top of your file. @@ -166,31 +166,31 @@ Here are the steps to trigger your task in the Next.js App and Pages router and - Open your app in a browser, making sure the port number is the same as the one you're running your Next.js app on. For example, if you're running your Next.js app on port 3000, visit: + Open your app in a browser, making sure the port number is the same as the one you're running your Next.js app on. For example, if you're running your Next.js app on port 3000, visit: ```bash http://localhost:3000 ``` - + Run the dev server from Step 2. of the [Initial Setup](/guides/frameworks/nextjs#initial-setup) section above if it's not already running: - + ```bash npm - npx trigger.dev@beta dev + npx trigger.dev@latest dev ``` ```bash pnpm - pnpm dlx trigger.dev@beta dev + pnpm dlx trigger.dev@latest dev ``` ```bash yarn - yarn dlx trigger.dev@beta dev + yarn dlx trigger.dev@latest dev ``` - - Then click the button we created in your app to trigger the task. You should see the CLI log the task run with a link to view the logs. + + Then click the button we created in your app to trigger the task. You should see the CLI log the task run with a link to view the logs. 
![Trigger.dev CLI showing a successful run](/images/trigger-cli-run-success.png) @@ -225,7 +225,7 @@ Here are the steps to trigger your task in the Next.js App and Pages router and "James" ); - res.status(200).json(handle); + res.status(200).json(handle); } ``` @@ -261,15 +261,15 @@ For this guide, we'll manually deploy your task by running the [CLI deploy comma ```bash npm -npx trigger.dev@beta deploy +npx trigger.dev@latest deploy ``` ```bash pnpm -pnpm dlx trigger.dev@beta deploy +pnpm dlx trigger.dev@latest deploy ``` ```bash yarn -yarn dlx trigger.dev@beta deploy +yarn dlx trigger.dev@latest deploy ``` diff --git a/docs/guides/frameworks/nodejs.mdx b/docs/guides/frameworks/nodejs.mdx index 504c801278..0924368d06 100644 --- a/docs/guides/frameworks/nodejs.mdx +++ b/docs/guides/frameworks/nodejs.mdx @@ -2,25 +2,25 @@ title: "Node.js setup guide" sidebarTitle: "Node.js" description: "This guide will show you how to setup Trigger.dev in your existing Node.js project, test an example task, and view the run." 
-icon: "JS" +icon: "node-js" --- -import Prerequisites from '/snippets/framework-prerequisites.mdx'; -import CliInitStep from '/snippets/step-cli-init.mdx'; -import CliDevStep from '/snippets/step-cli-dev.mdx'; -import CliRunTestStep from '/snippets/step-run-test.mdx'; -import CliViewRunStep from '/snippets/step-view-run.mdx'; -import UsefulNextSteps from '/snippets/useful-next-steps.mdx'; +import Prerequisites from "/snippets/framework-prerequisites.mdx"; +import CliInitStep from "/snippets/step-cli-init.mdx"; +import CliDevStep from "/snippets/step-cli-dev.mdx"; +import CliRunTestStep from "/snippets/step-run-test.mdx"; +import CliViewRunStep from "/snippets/step-view-run.mdx"; +import UsefulNextSteps from "/snippets/useful-next-steps.mdx"; ## Initial setup - - - - + + + + diff --git a/docs/guides/frameworks/supabase-edge-functions-basic.mdx b/docs/guides/frameworks/supabase-edge-functions-basic.mdx index b3b264d54f..86e6489be8 100644 --- a/docs/guides/frameworks/supabase-edge-functions-basic.mdx +++ b/docs/guides/frameworks/supabase-edge-functions-basic.mdx @@ -62,8 +62,8 @@ Replace the placeholder code in your `edge-function-trigger/index.ts` file with ```ts functions/edge-function-trigger/index.ts // Setup type definitions for built-in Supabase Runtime APIs import "jsr:@supabase/functions-js/edge-runtime.d.ts"; -// Import the Trigger.dev SDK - replace "" with the version of the SDK you are using, e.g. "3.0.0-beta.55". You can find this in your package.json file. -import { tasks } from "npm:@trigger.dev/sdk@/v3"; +// Import the Trigger.dev SDK - replace "" with the version of the SDK you are using, e.g. "3.0.0". You can find this in your package.json file. 
+import { tasks } from "npm:@trigger.dev/sdk@3.0.0/v3"; // Import your task type from your /trigger folder import type { helloWorldTask } from "../../../src/trigger/example.ts"; // 👆 **type-only** import @@ -139,15 +139,15 @@ Next, deploy your `hello-world` task to [Trigger.dev cloud](https://cloud.trigge ```bash npm -npx trigger.dev@beta deploy +npx trigger.dev@latest deploy ``` ```bash pnpm -pnpm dlx trigger.dev@beta deploy +pnpm dlx trigger.dev@latest deploy ``` ```bash yarn -yarn dlx trigger.dev@beta deploy +yarn dlx trigger.dev@latest deploy ``` diff --git a/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx b/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx index 4780966c3b..9ca50a50e3 100644 --- a/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx +++ b/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx @@ -73,8 +73,8 @@ Replace the `database-webhook` placeholder code with the following code: ```ts functions/database-webhook/index.ts import "jsr:@supabase/functions-js/edge-runtime.d.ts"; -// Import the Trigger.dev SDK - replace "" with the version of the SDK you are using, e.g. "3.0.0-beta.55". You can find this in your package.json file. -import { tasks } from "npm:@trigger.dev/sdk@/v3"; +// Import the Trigger.dev SDK - replace "" with the version of the SDK you are using, e.g. "3.0.0". You can find this in your package.json file. 
+import { tasks } from "npm:@trigger.dev/sdk@3.0.0/v3"; // Import your task type from your /trigger folder import type { helloWorldTask } from "../../../src/trigger/example.ts"; // 👆 **type-only** import @@ -190,15 +190,15 @@ To do this, run the following command in the terminal: ```bash npm -npx trigger.dev@beta deploy +npx trigger.dev@latest deploy ``` ```bash pnpm -pnpm dlx trigger.dev@beta deploy +pnpm dlx trigger.dev@latest deploy ``` ```bash yarn -yarn dlx trigger.dev@beta deploy +yarn dlx trigger.dev@latest deploy ``` diff --git a/docs/guides/new-build-system-preview.mdx b/docs/guides/new-build-system-preview.mdx index e785863842..984092cd27 100644 --- a/docs/guides/new-build-system-preview.mdx +++ b/docs/guides/new-build-system-preview.mdx @@ -25,7 +25,7 @@ You will also need to update your usage of the `trigger.dev` CLI to use the prev ```sh # old way -npx trigger.dev@beta dev +npx trigger.dev@latest dev # using the preview release npx trigger.dev@0.0.0-prerelease-20240911144933 dev diff --git a/docs/guides/use-cases/upgrading-from-v2.mdx b/docs/guides/use-cases/upgrading-from-v2.mdx index e0f4fccc12..f10a44f5f5 100644 --- a/docs/guides/use-cases/upgrading-from-v2.mdx +++ b/docs/guides/use-cases/upgrading-from-v2.mdx @@ -170,7 +170,7 @@ async function yourBackendFunction() { 1. Make sure to upgrade all of your trigger.dev packages to v3 first. ```bash -npx @trigger.dev/cli@beta update --to beta +npx @trigger.dev/cli@latest update --to 3.0.0 ``` 2. Follow the [v3 quick start](/quick-start) to get started with v3. Our new CLI will take care of the rest. diff --git a/docs/how-it-works.mdx b/docs/how-it-works.mdx new file mode 100644 index 0000000000..4f93693772 --- /dev/null +++ b/docs/how-it-works.mdx @@ -0,0 +1,452 @@ +--- +title: "How it works" +sidebarTitle: "How it works" +description: "Understand how Trigger.dev works and how it can help you." 
+--- + +## Introduction + +Trigger.dev v3 allows you to integrate long-running async tasks into your application and run them in the background. This allows you to offload tasks that take a long time to complete, such as sending multi-day email campaigns, processing videos, or running long chains of AI tasks. + +For example, the below task processes a video with `ffmpeg` and sends the results to an s3 bucket, then updates a database with the results and sends an email to the user. + +```ts /trigger/video.ts +import { logger, task } from "@trigger.dev/sdk/v3"; +import { updateVideoUrl } from "../db.js"; +import ffmpeg from "fluent-ffmpeg"; +import { Readable } from "node:stream"; +import type { ReadableStream } from "node:stream/web"; +import * as fs from "node:fs/promises"; +import * as path from "node:path"; +import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3"; +import { sendEmail } from "../email.js"; +import { getVideo } from "../db.js"; + +// Initialize S3 client +const s3Client = new S3Client({ + region: process.env.AWS_REGION, +}); + +export const convertVideo = task({ + id: "convert-video", + retry: { + maxAttempts: 5, + minTimeoutInMs: 1000, + maxTimeoutInMs: 10000, + factor: 2, + }, + run: async ({ videoId }: { videoId: string }) => { + const { url, userId } = await getVideo(videoId); + + const outputPath = path.join("/tmp", `output_${videoId}.mp4`); + + const response = await fetch(url); + + await new Promise((resolve, reject) => { + ffmpeg(Readable.fromWeb(response.body as ReadableStream)) + .videoFilters("scale=iw/2:ih/2") + .output(outputPath) + .on("end", resolve) + .on("error", reject) + .run(); + }); + + const processedContent = await fs.readFile(outputPath); + + // Upload to S3 + const s3Key = `processed-videos/output_${videoId}.mp4`; + + const uploadParams = { + Bucket: process.env.S3_BUCKET, + Key: s3Key, + Body: processedContent, + }; + + await s3Client.send(new PutObjectCommand(uploadParams)); + const s3Url = 
`https://${process.env.S3_BUCKET}.s3.amazonaws.com/${s3Key}`; + + logger.info("Video converted", { videoId, s3Url }); + + // Update database + await updateVideoUrl(videoId, s3Url); + + await sendEmail( + userId, + "Video Processing Complete", + `Your video has been processed and is available at: ${s3Url}` + ); + + return { success: true, s3Url }; + }, +}); +``` + +Now in your application, you can trigger this task by calling: + +```ts +import { NextResponse } from "next/server"; +import { tasks } from "@trigger.dev/sdk/v3"; +import type { convertVideo } from "./trigger/video"; +// 👆 **type-only** import + +export async function POST(request: Request) { + const body = await request.json(); + + // Trigger the task, this will return before the task is completed + const handle = await tasks.trigger("convert-video", body); + + return NextResponse.json(handle); +} +``` + +This will schedule the task to run in the background and return a handle that you can use to check the status of the task. This allows your backend application to respond quickly to the user and offload the long-running task to Trigger.dev. + +## The CLI + +Trigger.dev comes with a CLI that allows you to initialize Trigger.dev into your project, deploy your tasks, and run your tasks locally. You can run it via `npx` like so: + +```sh +npx trigger.dev@latest login # Log in to your Trigger.dev account +npx trigger.dev@latest init # Initialize Trigger.dev in your project +npx trigger.dev@latest dev # Run your tasks locally +npx trigger.dev@latest deploy # Deploy your tasks to the Trigger.dev instance +``` + +All these commands work with the Trigger.dev cloud and/or your self-hosted instance. It supports multiple profiles so you can easily switch between different accounts or instances. 
+ +```sh +npx trigger.dev@latest login --profile -a https://trigger.example.com # Log in to a specific profile into a self-hosted instance +npx trigger.dev@latest dev --profile # Run your tasks locally using a specific profile +npx trigger.dev@latest deploy --profile # Deploy your tasks to the Trigger.dev instance +``` + +## Trigger.dev architecture + +Trigger.dev implements a serverless architecture (without timeouts!) that allows you to run your tasks in a scalable and reliable way. When you run `npx trigger.dev@latest deploy`, we build and deploy your task code to your Trigger.dev instance. Then, when you trigger a task from your application, it's run in a secure, isolated environment with the resources you need to complete the task. A simplified diagram for a task execution looks like this: + +```mermaid +sequenceDiagram + participant App + participant Trigger.dev + participant Task Worker + + App->>Trigger.dev: Trigger task + Trigger.dev-->>App: Task handle + Trigger.dev->>Task Worker: Run task + Task Worker-->>Trigger.dev: Task completed +``` + +In reality there are many more components involved, such as the task queue, the task scheduler, and the task worker pool, logging (etc.), but this diagram gives you a high-level overview of how Trigger.dev works. + +## The Checkpoint-Resume System + +Trigger.dev implements a powerful Checkpoint-Resume System that enables efficient execution of long-running background tasks in a serverless-like environment. This system allows tasks to pause, checkpoint their state, and resume seamlessly, optimizing resource usage and enabling complex workflows. + +Here's how the Checkpoint-Resume System works: + +1. **Task Execution**: When a task is triggered, it runs in an isolated environment with all necessary resources. + +2. **Subtask Handling**: If a task needs to trigger a subtask, it can do so and wait for its completion using `triggerAndWait` + +3.
**State Checkpointing**: While waiting for a subtask or during a programmed pause (e.g., `wait.for({ seconds: 30 })`), the system uses CRIU (Checkpoint/Restore In Userspace) to create a checkpoint of the task's entire state, including memory, CPU registers, and open file descriptors. + +4. **Resource Release**: After checkpointing, the parent task's resources are released, freeing up the execution environment. + +5. **Efficient Storage**: The checkpoint is efficiently compressed and stored on disk, ready to be restored when needed. + +6. **Event-Driven Resumption**: When a subtask completes or a wait period ends, Trigger.dev's event system triggers the restoration process. + +7. **State Restoration**: The checkpoint is loaded back into a new execution environment, restoring the task to its exact state before suspension. + +8. **Seamless Continuation**: The task resumes execution from where it left off, with any subtask results or updated state seamlessly integrated. + +This approach allows Trigger.dev to manage resources efficiently, handle complex task dependencies, and provide a virtually limitless execution time for your tasks, all while maintaining the simplicity and scalability of a serverless architecture. 
+ +Example of a parent and child task using the Checkpoint-Resume System: + +```ts +import { task, wait } from "@trigger.dev/sdk/v3"; + +const parentTask = task({ + id: "parent-task", + run: async () => { + console.log("Starting parent task"); + + // This will cause the parent task to be checkpointed and suspended + const result = await childTask.triggerAndWait({ data: "some data" }); + + console.log("Child task result:", result); + + // This will also cause the task to be checkpointed and suspended + await wait.for({ seconds: 30 }); + + console.log("Resumed after 30 seconds"); + + return "Parent task completed"; + }, +}); + +const childTask = task({ + id: "child-task", + run: async (payload: { data: string }) => { + console.log("Starting child task with data:", payload.data); + + // Simulate some work + await sleep(5); + + return "Child task result"; + }, +}); +``` + +The diagram below illustrates the flow of the parent and child tasks using the Checkpoint-Resume System: + +```mermaid +sequenceDiagram + participant App + participant Trigger.dev + participant Parent Task + participant Child Task + participant CR System + participant Storage + + App->>Trigger.dev: Trigger parent task + Trigger.dev->>Parent Task: Start execution + Parent Task->>Child Task: Trigger child task + Parent Task->>CR System: Request snapshot + CR System->>Storage: Store snapshot + CR System-->>Parent Task: Confirm snapshot stored + Parent Task->>Trigger.dev: Release resources + + Child Task->>Trigger.dev: Complete execution + Trigger.dev->>CR System: Request parent task restoration + CR System->>Storage: Retrieve snapshot + CR System->>Parent Task: Restore state + Parent Task->>Trigger.dev: Resume execution + Parent Task->>Trigger.dev: Complete execution +``` + + + This is why, in the Trigger.dev Cloud, we don't charge for the time waiting for subtasks or the + time spent in a paused state. 
+ + +## Durable execution + +Trigger.dev's Checkpoint-Resume System, combined with idempotency keys, enables durable execution of complex workflows. This approach allows for efficient retries and caching of results, ensuring that work is not unnecessarily repeated in case of failures. + +### How it works + +1. **Task breakdown**: Complex workflows are broken down into smaller, independent subtasks. +2. **Idempotency keys**: Each subtask is assigned a unique idempotency key. +3. **Result caching**: The output of each subtask is cached based on its idempotency key. +4. **Intelligent retries**: If a failure occurs, only the failed subtask and subsequent tasks are retried. + +### Example: Video processing workflow + +Let's rewrite the `convert-video` task above to be more durable: + + + +```ts /trigger/video.ts +import { idempotencyKeys, logger, task } from "@trigger.dev/sdk/v3"; +import { processVideo, sendUserEmail, uploadToS3 } from "./tasks.js"; +import { updateVideoUrl } from "../db.js"; + +export const convertVideo = task({ + id: "convert-video", + retry: { + maxAttempts: 5, + minTimeoutInMs: 1000, + maxTimeoutInMs: 10000, + factor: 2, + }, + run: async ({ videoId }: { videoId: string }) => { + // Automatically scope the idempotency key to this run, across retries + const idempotencyKey = await idempotencyKeys.create(videoId); + + // Process video + const { processedContent } = await processVideo + .triggerAndWait({ videoId }, { idempotencyKey }) + .unwrap(); // Calling unwrap will return the output of the subtask, or throw an error if the subtask failed + + // Upload to S3 + const { s3Url } = await uploadToS3 + .triggerAndWait({ processedContent, videoId }, { idempotencyKey }) + .unwrap(); + + // Update database + await updateVideoUrl(videoId, s3Url); + + // Send email, we don't need to wait for this to finish + await sendUserEmail.trigger({ videoId, s3Url }, { idempotencyKey }); + + return { success: true, s3Url }; + }, +}); +``` + +```ts /trigger/tasks.ts 
+import { task, logger } from "@trigger.dev/sdk/v3"; +import ffmpeg from "fluent-ffmpeg"; +import { Readable } from "node:stream"; +import type { ReadableStream } from "node:stream/web"; +import * as fs from "node:fs/promises"; +import * as path from "node:path"; +import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3"; +import { sendEmail } from "../email.js"; +import { getVideo } from "../db.js"; + +// Initialize S3 client +const s3Client = new S3Client({ + region: process.env.AWS_REGION, +}); + +export const processVideo = task({ + id: "process-video", + run: async ({ videoId }: { videoId: string }) => { + const { url } = await getVideo(videoId); + + const outputPath = path.join("/tmp", `output_${videoId}.mp4`); + const response = await fetch(url); + + await logger.trace("ffmpeg", async (span) => { + await new Promise((resolve, reject) => { + ffmpeg(Readable.fromWeb(response.body as ReadableStream)) + .videoFilters("scale=iw/2:ih/2") + .output(outputPath) + .on("end", resolve) + .on("error", reject) + .run(); + }); + }); + + const processedContent = await fs.readFile(outputPath); + + await fs.unlink(outputPath); + + return { processedContent: processedContent.toString("base64") }; + }, +}); + +export const uploadToS3 = task({ + id: "upload-to-s3", + run: async (payload: { processedContent: string; videoId: string }) => { + const { processedContent, videoId } = payload; + + const s3Key = `processed-videos/output_${videoId}.mp4`; + + const uploadParams = { + Bucket: process.env.S3_BUCKET, + Key: s3Key, + Body: Buffer.from(processedContent, "base64"), + }; + + await s3Client.send(new PutObjectCommand(uploadParams)); + const s3Url = `https://${process.env.S3_BUCKET}.s3.amazonaws.com/${s3Key}`; + + return { s3Url }; + }, +}); + +export const sendUserEmail = task({ + id: "send-user-email", + run: async ({ videoId, s3Url }: { videoId: string; s3Url: string }) => { + const { userId } = await getVideo(videoId); + + return await sendEmail( + userId, + "Video 
Processing Complete", + `Your video has been processed and is available at: ${s3Url}` + ); + }, +}); +``` + + + +### How retries work + +Let's say the email sending fails in our video processing workflow. Here's how the retry process works: + +1. The main task throws an error and is scheduled for retry. +2. When retried, it starts from the beginning, but leverages cached results for completed subtasks. + +Here's a sequence diagram illustrating this process: + +```mermaid +sequenceDiagram + participant Main as Main Task + participant Process as Process Video + participant Upload as Upload to S3 + participant DB as Update Database + participant Email as Send Email + + Main->>Process: triggerAndWait (1st attempt) + Process-->>Main: Return result + Main->>Upload: triggerAndWait (1st attempt) + Upload-->>Main: Return result + Main->>DB: Update + Main->>Email: triggerAndWait (1st attempt) + Email--xMain: Fail + Main-->>Main: Schedule retry + + Main->>Process: triggerAndWait (2nd attempt) + Process-->>Main: Return cached result + Main->>Upload: triggerAndWait (2nd attempt) + Upload-->>Main: Return cached result + Main->>DB: Update (idempotent) + Main->>Email: triggerAndWait (2nd attempt) + Email-->>Main: Success +``` + +## The build system + +When you run `npx trigger.dev@latest deploy` or `npx trigger.dev@latest dev`, we build your task code using our build system, which is powered by [esbuild](https://esbuild.github.io/). When deploying, the code is packaged up into a Docker image and deployed to your Trigger.dev instance. When running in dev mode, the code is built and run locally on your machine. Some features of our build system include: + +- **Bundled by default**: Code + dependencies are bundled and tree-shaked by default. +- **Build extensions**: Use and write custom build extensions to transform your code or the resulting docker image. +- **ESM output**: We output to ESM, which allows tree-shaking and better performance. 
+ +You can review the build output by running deploy with the `--dry-run` flag, which will output the Containerfile and the build output. + +Learn more about working with our build system in the [configuration docs](/config/config-file). + +## Dev mode + +When you run `npx trigger.dev@latest dev`, we run your task code locally on your machine. All scheduling is still done in the Trigger.dev server instance, but the task code is run locally. This allows you to develop and test your tasks locally before deploying them to the cloud, and is especially useful for debugging and testing. + +- The same build system is used in dev mode, so you can be sure that your code will run the same locally as it does in the cloud. +- Changes are automatically detected and a new version is spun up when you save your code. +- Add debuggers and breakpoints to your code and debug it locally. +- Each task is run in a separate process, so you can run multiple tasks in parallel. +- Auto-cancels tasks when you stop the dev server. + + + Trigger.dev currently does not support "offline" dev mode, where you can run tasks without an + internet connection. [Please let us know](https://feedback.trigger.dev) if this is a feature you + want/need. + + +## Staging and production environments + +Trigger.dev supports deploying to multiple "deployed" environments, such as staging and production. This allows you to test your tasks in a staging environment before deploying them to production. You can deploy to a new environment by running `npx trigger.dev@latest deploy --env <env>`, where `<env>` is the name of the environment you want to deploy to. Each environment has its own API Key, which you can use to trigger tasks in that environment. + +## OpenTelemetry + +The Trigger.dev logging and task dashboard is powered by OpenTelemetry traces and logs, which allows you to trace your tasks and auto-instrument your code. 
We also auto-correlate logs from subtasks and parent tasks, making it easy to view the entire trace of a task execution. A single run of the video processing task above looks like this in the dashboard: + +![OpenTelemetry trace](/images/opentelemetry-trace.png) + +Because we use standard OpenTelemetry, you can instrument your code and OpenTelemetry compatible libraries to get detailed traces and logs of your tasks. The above trace instruments both Prisma and the AWS SDK: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { PrismaInstrumentation } from "@prisma/instrumentation"; +import { AwsInstrumentation } from "@opentelemetry/instrumentation-aws-sdk"; + +export default defineConfig({ + project: "<project ref>", + instrumentations: [new PrismaInstrumentation(), new AwsInstrumentation()], +}); +``` diff --git a/docs/images/opentelemetry-trace.png b/docs/images/opentelemetry-trace.png new file mode 100644 index 0000000000..7939e05dc5 Binary files /dev/null and b/docs/images/opentelemetry-trace.png differ diff --git a/docs/management/overview.mdx b/docs/management/overview.mdx index c3b5bff9ea..433463a362 100644 --- a/docs/management/overview.mdx +++ b/docs/management/overview.mdx @@ -8,20 +8,18 @@ description: Using the Trigger.dev v3 management API The management API is available through the same `@trigger.dev/sdk` package used in defining and triggering tasks. If you have already installed the package in your project, you can skip this step. -Make sure you use the `beta` tag when installing, as v3 is still in Developer Preview. 
- ```bash npm -npm i @trigger.dev/sdk@beta +npm i @trigger.dev/sdk@latest ``` ```bash pnpm -pnpm add @trigger.dev/sdk@beta +pnpm add @trigger.dev/sdk@latest ``` ```bash yarn -yarn add @trigger.dev/sdk@beta +yarn add @trigger.dev/sdk@latest ``` diff --git a/docs/mint.json b/docs/mint.json index 24083ce9f3..254c934c77 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -1,7 +1,10 @@ { "$schema": "https://mintlify.com/schema.json", "name": "Trigger.dev", - "openapi": ["/openapi.yml", "/v3-openapi.yaml"], + "openapi": [ + "/openapi.yml", + "/v3-openapi.yaml" + ], "api": { "playground": { "mode": "simple" @@ -71,9 +74,20 @@ { "source": "/reattempting-replaying", "destination": "/replaying" + }, + { + "source": "/tasks-overview", + "destination": "/tasks/overview" + }, + { + "source": "/tasks-scheduled", + "destination": "/tasks/scheduled" + }, + { + "source": "/trigger-folder", + "destination": "/config/config-file" } ], - "anchors": [ { "name": "Guides", @@ -89,23 +103,35 @@ "navigation": [ { "group": "Getting Started", - "pages": ["introduction", "quick-start", "limits", "changelog", "roadmap"] + "pages": [ + "introduction", + "quick-start", + "how-it-works", + "upgrading-beta", + "limits" + ] }, { "group": "Fundamentals", "pages": [ - "trigger-folder", - "tasks-overview", + { + "group": "Tasks", + "pages": [ + "tasks/overview", + "tasks/scheduled" + ] + }, "triggering", "apikeys", - "tasks-regular", - "tasks-scheduled", - "trigger-config" + "config/config-file" ] }, { "group": "Development", - "pages": ["cli-dev", "run-tests"] + "pages": [ + "cli-dev", + "run-tests" + ] }, { "group": "Deployment", @@ -115,7 +141,9 @@ "github-actions", { "group": "Deployment integrations", - "pages": ["vercel-integration"] + "pages": [ + "vercel-integration" + ] } ] }, @@ -127,7 +155,13 @@ "errors-retrying", { "group": "Wait", - "pages": ["wait", "wait-for", "wait-until", "wait-for-event", "wait-for-request"] + "pages": [ + "wait", + "wait-for", + "wait-until", + "wait-for-event", + 
"wait-for-request" + ] }, "queue-concurrency", "versioning", @@ -145,7 +179,10 @@ "management/overview", { "group": "Tasks API", - "pages": ["management/tasks/trigger", "management/tasks/batch-trigger"] + "pages": [ + "management/tasks/trigger", + "management/tasks/batch-trigger" + ] }, { "group": "Runs API", @@ -183,7 +220,9 @@ }, { "group": "Projects API", - "pages": ["management/projects/runs"] + "pages": [ + "management/projects/runs" + ] } ] }, @@ -208,7 +247,13 @@ }, { "group": "Open source", - "pages": ["open-source-self-hosting", "open-source-contributing", "github-repo"] + "pages": [ + "open-source-self-hosting", + "open-source-contributing", + "github-repo", + "changelog", + "roadmap" + ] }, { "group": "Troubleshooting", @@ -223,12 +268,17 @@ }, { "group": "Help", - "pages": ["community", "help-slack", "help-email"] + "pages": [ + "community", + "help-slack", + "help-email" + ] }, { "group": "Frameworks", "pages": [ "guides/frameworks/nodejs", + "guides/bun", "guides/frameworks/nextjs", "guides/frameworks/remix", { @@ -244,11 +294,15 @@ }, { "group": "Dashboard", - "pages": ["guides/dashboard/creating-a-project"] + "pages": [ + "guides/dashboard/creating-a-project" + ] }, { "group": "Migrations", - "pages": ["guides/use-cases/upgrading-from-v2"] + "pages": [ + "guides/use-cases/upgrading-from-v2" + ] }, { "group": "Examples", @@ -265,4 +319,4 @@ "github": "https://github.com/triggerdotdev", "linkedin": "https://www.linkedin.com/company/triggerdotdev" } -} +} \ No newline at end of file diff --git a/docs/open-source-self-hosting.mdx b/docs/open-source-self-hosting.mdx index 3422244789..2c005ac88a 100644 --- a/docs/open-source-self-hosting.mdx +++ b/docs/open-source-self-hosting.mdx @@ -24,11 +24,20 @@ As self-hosted deployments tend to have unique requirements and configurations, Should the burden ever get too much, we'd be happy to see you on [Trigger.dev cloud](https://trigger.dev/pricing) where we deal with these concerns for you. 
-- The Docker [checkpoint command](https://docs.docker.com/reference/cli/docker/checkpoint/) is an experimental feature which may not work as expected. It won't be enabled by default. Instead, the containers will stay up and their processes frozen. They won't consume CPU but they _will_ consume RAM. -- The Docker provider does not currently enforce any resource limits. This means your tasks can consume up to the total machine CPU and RAM. Having no limits may be preferable when self-hosting, but can impact the performance of other services. -- The worker components (not the tasks!) have direct access to the Docker socket. This means they can run any Docker command. To restrict access, you may want to consider using [Docker Socket Proxy](https://github.com/Tecnativa/docker-socket-proxy). -- The task containers are running with host networking. This means there is no network isolation between them and the host machine. They will be able to access any networked service on the host. -- There is currently no support for adding multiple worker machines. This would require a more elaborate provider, or possibly a switch to Docker Swarm. This is not currently planned, but you are welcome to [contribute](https://github.com/triggerdotdev/trigger.dev/blob/main/CONTRIBUTING.md). + - The Docker [checkpoint command](https://docs.docker.com/reference/cli/docker/checkpoint/) is an + experimental feature which may not work as expected. It won't be enabled by default. Instead, the + containers will stay up and their processes frozen. They won't consume CPU but they _will_ consume + RAM. - The Docker provider does not currently enforce any resource limits. This means your tasks + can consume up to the total machine CPU and RAM. Having no limits may be preferable when + self-hosting, but can impact the performance of other services. - The worker components (not the + tasks!) have direct access to the Docker socket. This means they can run any Docker command. 
To + restrict access, you may want to consider using [Docker Socket + Proxy](https://github.com/Tecnativa/docker-socket-proxy). - The task containers are running with + host networking. This means there is no network isolation between them and the host machine. They + will be able to access any networked service on the host. - There is currently no support for + adding multiple worker machines. This would require a more elaborate provider, or possibly a + switch to Docker Swarm. This is not currently planned, but you are welcome to + [contribute](https://github.com/triggerdotdev/trigger.dev/blob/main/CONTRIBUTING.md). ## Requirements @@ -70,6 +79,7 @@ sudo apt-get install -y \ ### Trigger.dev setup 1. Clone the [Trigger.dev docker repository](https://github.com/triggerdotdev/docker) and checkout the v3 branch + ```bash git clone https://github.com/triggerdotdev/docker cd docker @@ -77,6 +87,7 @@ git checkout v3 ``` 2. Run the start script and follow the prompts + ```bash ./start.sh # hint: you can append -d to run in detached mode ``` @@ -163,7 +174,7 @@ docker login -u 5. You can now deploy v3 projects using the CLI with these flags: ```bash -npx trigger.dev@beta deploy --self-hosted --push +npx trigger.dev@latest deploy --self-hosted --push ``` ## Part 2: Split services @@ -201,9 +212,12 @@ scp -3 root@:docker/.env root@:docker/.env ## Checkpoint support -This requires an _experimental Docker feature_. Successfully checkpointing a task today, does not mean you will be able to restore it tomorrow. Your data may be lost. You've been warned! + + This requires an _experimental Docker feature_. Successfully checkpointing a task today, does not + mean you will be able to restore it tomorrow. Your data may be lost. You've been warned! + -Checkpointing allows you to save the state of a running container to disk and restore it later. This can be useful for +Checkpointing allows you to save the state of a running container to disk and restore it later. 
This can be useful for long-running tasks that need to be paused and resumed without losing state. Think fan-out and fan-in, or long waits in email campaigns. The checkpoints will be pushed to the same registry as the deployed images. Please see the [Registry setup](#registry-setup) section for more information. @@ -283,5 +297,5 @@ TRIGGER_TELEMETRY_DISABLED=1 To avoid being redirected to the Cloud login page when using the CLI, you can specify the URL of your self-hosted instance with the `-a` flag. For example: ``` -npx trigger.dev@beta login -a http://example.com -``` \ No newline at end of file +npx trigger.dev@latest login -a http://example.com +``` diff --git a/docs/snippets/cli-commands-deploy.mdx b/docs/snippets/cli-commands-deploy.mdx index 241f22fb97..90c625369d 100644 --- a/docs/snippets/cli-commands-deploy.mdx +++ b/docs/snippets/cli-commands-deploy.mdx @@ -3,29 +3,30 @@ Run the command like this: ```bash npm -npx trigger.dev@beta deploy +npx trigger.dev@latest deploy ``` ```bash pnpm -pnpm dlx trigger.dev@beta deploy +pnpm dlx trigger.dev@latest deploy ``` ```bash yarn -yarn dlx trigger.dev@beta deploy +yarn dlx trigger.dev@latest deploy ``` -This will fail in CI if any version mismatches are detected. Ensure everything runs locally first using the [dev](/cli-dev) command and don't bypass the version checks! + + This will fail in CI if any version mismatches are detected. Ensure everything runs locally first + using the [dev](/cli-dev) command and don't bypass the version checks! + It performs a few steps to deploy: 1. Optionally updates packages when running locally. -2. Typechecks the code. -3. Compiles and bundles the code. -4. Checks that [environment variables](/deploy-environment-variables) are set. -5. Deploys the code to the cloud. -6. Registers the tasks as a new version in the environment (prod by default). +2. Compiles and bundles the code. +3. Deploys the code to the Trigger.dev instance. +4. 
Registers the tasks as a new version in the environment (prod by default). You can also setup [GitHub Actions](/github-actions) to deploy your tasks automatically. @@ -35,28 +36,33 @@ You can also setup [GitHub Actions](/github-actions) to deploy your tasks automa Defaults to `prod` but you can specify `staging`. - - Skips the pre-build typecheck step. + + The name of the config file, found where the command is run from. Defaults to `trigger.config.ts`. + + + + Load environment variables from a file. This will only hydrate the `process.env` of the CLI + process, not the tasks. + + + + Create a deployable build but don't deploy it. Prints out the build path so you can inspect it. Skip checking for `@trigger.dev` package updates. - - The platform to build the deployment image for. Defaults to `linux/amd64`. + + The project ref. Required if there is no config file. The log level to use (debug, info, log, warn, error, none). Defaults to `log`. - - The name of the config file, found where the command is run from. Defaults to `trigger.config.ts`. - - - - The project ref. Required if there is no config file. + + Turn off syncing environment variables with the Trigger.dev instance. ## Self-hosting @@ -68,17 +74,37 @@ These options are typically used when [self-hosting](/open-source-self-hosting) - Builds and loads the image using your local docker. Use the `--registry` option to specify the registry to push the image to when using `--self-hosted`, or just use `--push` to push to the default registry. + Builds and loads the image using your local docker. Use the `--registry` option to specify the + registry to push the image to when using `--self-hosted`, or just use `--push` to push to the + default registry. + + + + Loads the image into your local docker after building it. - **This option is coming soon.** The registry to push the image to when using --self-hosted. + Specify the registry to push the image to when using `--self-hosted`. 
- When using the --self-hosted flag, push the image to the default registry. (defaults to false when not using --registry) + When using the --self-hosted flag, push the image to the registry. - - **This option is coming soon.** Specify the tag to use when pushing the image to the registry. - \ No newline at end of file + + The namespace to use when pushing the image to the registry. For example, if pushing to Docker + Hub, the namespace is your Docker Hub username. + + +### Push to Docker Hub + +An example of deploying to Docker Hub when using a self-hosted setup: + +```bash +npx trigger.dev@latest deploy \ + --self-hosted \ + --load-image \ + --push \ + --registry docker.io \ + --namespace mydockerhubusername +``` diff --git a/docs/snippets/cli-commands-develop.mdx b/docs/snippets/cli-commands-develop.mdx index a29fddf54b..720526eff1 100644 --- a/docs/snippets/cli-commands-develop.mdx +++ b/docs/snippets/cli-commands-develop.mdx @@ -3,22 +3,22 @@ This runs a server on your machine that can execute Trigger.dev tasks: ```bash npm -npx trigger.dev@beta dev +npx trigger.dev@latest dev ``` ```bash pnpm -pnpm dlx trigger.dev@beta dev +pnpm dlx trigger.dev@latest dev ``` ```bash yarn -yarn dlx trigger.dev@beta dev +yarn dlx trigger.dev@latest dev ``` It will first perform an update check to prevent version mismatches, failed deploys, and other errors. You will always be prompted first. -You will see in the terminal that the server is running and listening for requests. When you run a task, you will see it in the terminal along with a link to view it in the dashboard. +You will see in the terminal that the server is running and listening for tasks. When you run a task, you will see it in the terminal along with a link to view it in the dashboard. It is worth noting that each task runs in a separate Node process. This means that if you have a long-running task, it will not block other tasks from running. 
@@ -32,28 +32,9 @@ It is worth noting that each task runs in a separate Node process. This means th The project ref. Required if there is no config file. - - You can use this flag to run the server in debug mode. This will allow you to attach a debugger to the server and debug your tasks. - - - -```bash npm -npx trigger.dev@beta dev --debugger -``` - -```bash pnpm -pnpm dlx trigger.dev@beta dev --debugger -``` - -```bash yarn -yarn dlx trigger.dev@beta dev --debugger -``` - - - - - - Enable OpenTelemetry debugging. + + Pass a custom path to an env file. We automatically detect `.env`, `.env.local`, + `.env.development`, and `.env.development.local` files. @@ -69,11 +50,8 @@ yarn dlx trigger.dev@beta dev --debugger - The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This does not affect the log level of your trigger.dev tasks. Defaults to `log`. - - - - Opt-out of sending telemetry data. + The CLI log level to use. Options are `debug`, `info`, `log`, `warn`, `error`, and `none`. This + does not affect the log level of your trigger.dev tasks. Defaults to `log`. ## Standard options @@ -95,10 +73,10 @@ Then add something like this in your package.json scripts: ```json "scripts": { "dev": "concurrently --raw --kill-others npm:dev:*", - "dev:trigger": "npx trigger.dev@beta dev", + "dev:trigger": "npx trigger.dev@latest dev", // Add your framework-specific dev script here, for example: // "dev:next": "next dev", // "dev:remix": "remix dev", //... 
} -``` \ No newline at end of file +``` diff --git a/docs/snippets/step-cli-dev.mdx b/docs/snippets/step-cli-dev.mdx index ec62aedc61..28305706d7 100644 --- a/docs/snippets/step-cli-dev.mdx +++ b/docs/snippets/step-cli-dev.mdx @@ -7,15 +7,15 @@ It can also update your `@trigger.dev/*` packages to prevent version mismatches ```bash npm -npx trigger.dev@beta dev +npx trigger.dev@latest dev ``` ```bash pnpm -pnpm dlx trigger.dev@beta dev +pnpm dlx trigger.dev@latest dev ``` ```bash yarn -yarn dlx trigger.dev@beta dev +yarn dlx trigger.dev@latest dev ``` diff --git a/docs/snippets/step-cli-init.mdx b/docs/snippets/step-cli-init.mdx index d75f2ade91..265f3d9c89 100644 --- a/docs/snippets/step-cli-init.mdx +++ b/docs/snippets/step-cli-init.mdx @@ -7,15 +7,15 @@ Run this command in the root of your project to get started: ```bash npm -npx trigger.dev@beta init +npx trigger.dev@latest init ``` ```bash pnpm -pnpm dlx trigger.dev@beta init +pnpm dlx trigger.dev@latest init ``` ```bash yarn -yarn dlx trigger.dev@beta init +yarn dlx trigger.dev@latest init ``` diff --git a/docs/snippets/trigger-tasks-nextjs.mdx b/docs/snippets/trigger-tasks-nextjs.mdx index 54fbdce84a..b670b94b87 100644 --- a/docs/snippets/trigger-tasks-nextjs.mdx +++ b/docs/snippets/trigger-tasks-nextjs.mdx @@ -15,25 +15,25 @@ Run your Next.js app: ``` - + Run the dev server from Step 2. of the [Initial Setup](/guides/frameworks/nextjs#initial-setup) section above if it's not already running: - + ```bash npm - npx trigger.dev@beta dev + npx trigger.dev@latest dev ``` ```bash pnpm - pnpm dlx trigger.dev@beta dev + pnpm dlx trigger.dev@latest dev ``` ```bash yarn - yarn dlx trigger.dev@beta dev + yarn dlx trigger.dev@latest dev ``` - + Now visit the URL in your browser to trigger the task. Ensure the port number is the same as the one you're running your Next.js app on. 
For example, if you're running your Next.js app on port 3000, visit: ```bash @@ -44,4 +44,4 @@ Run your Next.js app: ![Trigger.dev CLI showing a successful run](/images/trigger-cli-run-success.png) - Visit the [Trigger.dev dashboard](https://cloud.trigger.dev) to see your run. \ No newline at end of file + Visit the [Trigger.dev dashboard](https://cloud.trigger.dev) to see your run. diff --git a/docs/snippets/worker-failed-to-start.mdx b/docs/snippets/worker-failed-to-start.mdx deleted file mode 100644 index 04843eb508..0000000000 --- a/docs/snippets/worker-failed-to-start.mdx +++ /dev/null @@ -1,51 +0,0 @@ -### Worker failed to start when running Dev command - -An issue may occur when trying to run the development command for Trigger.dev when using certain packages like `@t3-oss/env-nextjs` or ORMs like Drizzle ORM. The error message typically indicates that there's a problem with importing ES modules in a CommonJS context. - -```bash Error message -X Error: Worker failed to start Error [ERR_REQUIRE_ESM]: require() of ES Module [...] not supported. -Instead change the require of index.js in [...] to a dynamic import() which is available in all CommonJS modules. -``` - -This issue is related to how Trigger.dev bundles code and interacts with certain ES module dependencies. - -To resolve this issue, follow these steps: - -1. In your `trigger.config.ts` file, add the problematic dependencies to the `dependenciesToBundle` array: - -```bash trigger.config.ts -export const config: TriggerConfig = { - // ... other config options - dependenciesToBundle: [ - /@t3-oss/, - "drizzle-orm", - /@neondatabase/, - // Add other problematic dependencies here - ], -}; -``` - -2. 
If you're using environment variables with `@t3-oss/env-nextjs`, implement a `resolveEnvVars` function in your config file: - -```bash trigger.config.ts -import { env } from "@/env"; -import type { ResolveEnvironmentVariablesFunction } from "@trigger.dev/sdk/v3"; - -export const resolveEnvVars: ResolveEnvironmentVariablesFunction = () => { - return { - variables: Object.keys(env).map((key) => ({ - name: key, - value: env[key as keyof typeof env]?.toString(), - })), - }; -}; -``` - -3. For users of packages that require WebSocket (like `@neondatabase/serverless`), you may need to set up a WebSocket polyfill if you're using Node.js versions earlier than 22. Add this to your code: - -```bash -import { neonConfig, Pool } from '@neondatabase/serverless'; -import ws from 'ws'; - -neonConfig.webSocketConstructor = ws; -``` \ No newline at end of file diff --git a/docs/tasks-overview.mdx b/docs/tasks/overview.mdx similarity index 98% rename from docs/tasks-overview.mdx rename to docs/tasks/overview.mdx index a1becca7c4..37edc23232 100644 --- a/docs/tasks-overview.mdx +++ b/docs/tasks/overview.mdx @@ -1,10 +1,10 @@ --- title: "Tasks: Overview" -sidebarTitle: "Tasks" +sidebarTitle: "Overview" description: "Tasks are functions that can run for a long time and provide strong resilience to failure." --- -There are different types of tasks including [regular tasks](/tasks-regular) and [scheduled tasks](/tasks-scheduled). +There are different types of tasks including regular tasks and [scheduled tasks](/tasks/scheduled). 
## Hello world task and how to trigger it diff --git a/docs/tasks-scheduled.mdx b/docs/tasks/scheduled.mdx similarity index 100% rename from docs/tasks-scheduled.mdx rename to docs/tasks/scheduled.mdx diff --git a/docs/trigger-config.mdx b/docs/trigger-config.mdx deleted file mode 100644 index 15e8cd40a6..0000000000 --- a/docs/trigger-config.mdx +++ /dev/null @@ -1,274 +0,0 @@ ---- -title: "The trigger.config.ts file" -sidebarTitle: "trigger.config file" -description: "This file is used to configure your project and how it's bundled." ---- - -import BundlePackages from '/snippets/bundle-packages.mdx'; - -Let's take a look at a basic `trigger.config.ts` file. This is generated for you when you follow [the quick start guide](/quick-start). This file is used to configure your project and how it's bundled. - -```ts trigger.config.ts -import type { TriggerConfig } from "@trigger.dev/sdk/v3"; - -export const config: TriggerConfig = { - //Your project ref (you can see it on the Project settings page in the dashboard) - project: "proj_gtcwttqhhtlasxgfuhxs", - retries: { - //If you want to retry a task in dev mode (when using the CLI) - enabledInDev: false, - //the default retry settings. Used if you don't specify on a task. - default: { - maxAttempts: 3, - minTimeoutInMs: 1000, - maxTimeoutInMs: 10000, - factor: 2, - randomize: true, - }, - }, - //The paths for your trigger folders - triggerDirectories: ["./trigger"], -}; -``` - -Most of the time you don't need to change anything in this file, or if you do then we will tell you when you the run the CLI command. - -## Global initialization - -You can run code before any task is run by adding a `init` function to your `trigger.config.ts` file. 
- -```ts trigger.config.ts -import type { TriggerConfig } from "@trigger.dev/sdk/v3"; - -export const config: TriggerConfig = { - //..other stuff - init: async (payload, { ctx }) => { - console.log("I run before any task is run"); - }, -}; -``` - -You'll have access to the run payload and the context object. Currently you cannot return anything from this function. - -## Lifecycle functions - -You can add lifecycle functions to get notified when any task starts, succeeds, or fails using `onStart`, `onSuccess` and `onFailure`: - -```ts trigger.config.ts -import type { TriggerConfig } from "@trigger.dev/sdk/v3"; - -export const config: TriggerConfig = { - //..other stuff - onSuccess: async (payload, output, { ctx }) => { - console.log("Task succeeded", ctx.task.id); - }, - onFailure: async (payload, error, { ctx }) => { - console.log("Task failed", ctx.task.id); - }, - onStart: async (payload, { ctx }) => { - console.log("Task started", ctx.task.id); - }, -}; -``` - -Read more about task lifecycle functions in the [tasks overview](/tasks-overview). - -## Instrumentations - -We use OpenTelemetry (OTEL) for our run logs. This means you get a lot of information about your tasks with no effort. But you probably want to add more information to your logs. For example, here's all the Prisma calls automatically logged: - -![The run log](/images/auto-instrumentation.png) - -Here we add Prisma and OpenAI instrumentations to your `trigger.config.ts` file. - -```ts trigger.config.ts -import type { TriggerConfig } from "@trigger.dev/sdk/v3"; -import { PrismaInstrumentation } from "@prisma/instrumentation"; -import { OpenAIInstrumentation } from "@traceloop/instrumentation-openai"; - -export const config: TriggerConfig = { - //..other stuff - instrumentations: [new PrismaInstrumentation(), new OpenAIInstrumentation()], -}; -``` - -There is a [huge library of instrumentations](https://opentelemetry.io/ecosystem/registry/?language=js) you can easily add to your project like this. 
- -Some ones we recommend: - -| Package | Description | -| --------------------------------------- | ------------------------------------------------------------------------------------------------------------------------ | -| `@opentelemetry/instrumentation-undici` | Logs all fetch calls (inc. Undici fetch) | -| `@opentelemetry/instrumentation-fs` | Logs all file system calls | -| `@opentelemetry/instrumentation-http` | Logs all HTTP calls | -| `@prisma/instrumentation` | Logs all Prisma calls, you need to [enable tracing](https://github.com/prisma/prisma/tree/main/packages/instrumentation) | -| `@traceloop/instrumentation-openai` | Logs all OpenAI calls | - -## Syncing environment variables - -You can sync environment variables from another service using the `resolveEnvVars` function. [Read the docs](/deploy-environment-variables#sync-env-vars-from-another-service) for more information. - -## ESM-only packages - -We'll let you know when running the CLI dev command if this is a problem. Some packages are ESM-only so they don't work directly from CJS when using Node.js. In that case you need to add them to the `dependenciesToBundle` array in your `trigger.config.ts` file. - - - -## Prisma (and other generators) - - - -```bash -✘ [ERROR] Error: @prisma/client did not initialize yet. Please run "prisma generate" and try to import it again. - -In case this error is unexpected for you, please report it in -https://pris.ly/prisma-prisma-bug-report -at new PrismaClient (/app/node_modules/.prisma/client/default.js:43:11) -at Object. 
(/lib/prisma.ts:7:33) -at Module.\_compile (node:internal/modules/cjs/loader:1356:14) -at Object.Module.\_extensions..js (node:internal/modules/cjs/loader:1414:10) -at Module.load (node:internal/modules/cjs/loader:1197:32) -at Function.Module.\_load (node:internal/modules/cjs/loader:1013:12) -at Function.executeUserEntryPoint [as runMain] (node:internal/modules/run_main:128:12) -at node:internal/main/run_main_module:28:49 -``` - - - -Prisma works by generating a client from your `prisma.schema` file. This means you need to do a couple of things to get it to work with Trigger: - - - - - - - - ```json default path - { - "scripts": { - "postinstall": "prisma generate" - } - } - ``` - - ```json custom path - { - "scripts": { - "postinstall": "prisma generate --schema=./custom/path/to/schema.prisma" - } - } - ``` - - - - Anything you put in `postinstall` will be run as part of the install step. This is how Next.js recommends you set up Prisma anyway. - - - - - - ```ts trigger.config.ts - import type { TriggerConfig } from "@trigger.dev/sdk/v3"; - - export const config: TriggerConfig = { - //..other stuff - - // using the default path - additionalFiles: ["./prisma/schema.prisma"], - // or a custom path, for example in a monorepo - additionalFiles: ["../../custom/path/to/schema.prisma"], - - additionalPackages: ["prisma@5.11.0"], - }; - ``` - - This tells Trigger to bundle the Prisma client and the schema file. - - - - - -## TypeORM support - -We support using TypeORM with Trigger. You can use decorators in your entities and then use them in your tasks. 
Here's an example: - -```ts orm/index.ts -import "reflect-metadata"; -import { DataSource } from "typeorm"; -import { Entity, Column, PrimaryColumn } from "typeorm"; - -@Entity() -export class Photo { - @PrimaryColumn() - id!: number; - - @Column() - name!: string; - - @Column() - description!: string; - - @Column() - filename!: string; - - @Column() - views!: number; - - @Column() - isPublished!: boolean; -} - -export const AppDataSource = new DataSource({ - type: "postgres", - host: "localhost", - port: 5432, - username: "postgres", - password: "postgres", - database: "my-database", - entities: [Photo], - synchronize: true, - logging: false, -}); -``` - -And then in your trigger.config.ts file you can initialize the datasource using the `onStart` lifecycle function option: - -```ts trigger.config.ts -import type { TriggerConfig } from "@trigger.dev/sdk/v3"; -import { AppDataSource } from "@/trigger/orm"; - -export const config: TriggerConfig = { - // ... other options here - onStart: async (payload, { ctx }) => { - await AppDataSource.initialize(); - }, -}; -``` - -Now you are ready to use this in your tasks: - -```ts -import { task } from "@trigger.dev/sdk/v3"; -import { AppDataSource, Photo } from "./orm"; - -export const taskThatUsesDecorators = task({ - id: "task-that-uses-decorators", - run: async (payload: { message: string }) => { - console.log("Creating a photo..."); - - const photo = new Photo(); - photo.id = 2; - photo.name = "Me and Bears"; - photo.description = "I am near polar bears"; - photo.filename = "photo-with-bears.jpg"; - photo.views = 1; - photo.isPublished = true; - - await AppDataSource.manager.save(photo); - }, -}); -``` - -## Troubleshooting - -If you have an issue with bundling checkout our [troubleshooting guide](/troubleshooting). 
diff --git a/docs/trigger-folder.mdx b/docs/trigger-folder.mdx deleted file mode 100644 index 1f6fd4bd62..0000000000 --- a/docs/trigger-folder.mdx +++ /dev/null @@ -1,25 +0,0 @@ ---- -title: "/trigger folders" -description: "Your tasks live inside /trigger folders. Code in these is bundled and deployed together." ---- - -## What gets bundled? - -We automatically bundle everything for your tasks. This includes: - -- Your tasks (they can be in any file inside a /trigger folder, they just need to be exported with a name). -- Imported npm packages. -- Other imports from your code. - -Mostly this means you shouldn't need to think about what gets bundled. Just write your tasks and we'll take care of the rest. If you need to alter the bundling you use the [trigger.config file](/trigger-config). - -## Multiple `/trigger` folders - -You can have multiple `/trigger` folders in your repository. - -- Each `/trigger` folder can have many tasks exported from it. -- Each file inside a `/trigger` folder can export many tasks. - -### (Optional) configuration - -It is possible to manually set one or more folders as `/trigger` folders in your [trigger.config file](/trigger-config). diff --git a/docs/triggering.mdx b/docs/triggering.mdx index 0765a51072..d69bfd5df5 100644 --- a/docs/triggering.mdx +++ b/docs/triggering.mdx @@ -5,20 +5,20 @@ description: "Tasks need to be triggered in order to run." Trigger tasks **from your backend**: -| Function | This works | What it does | -| -------------------------------- | --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `tasks.trigger()` | Anywhere | Triggers a task and gets a handle you can use to fetch and manage the run. [Read more](#tasks-trigger) | -| `tasks.batchTrigger()` | Anywhere | Triggers a task multiple times and gets a handle you can use to fetch and manage the runs. 
[Read more](#tasks-batchtrigger) | -| `tasks.triggerAndPoll()` | Anywhere | Triggers a task and then polls the run until it’s complete. [Read more](#tasks-triggerandpoll) | +| Function | This works | What it does | +| ------------------------ | ---------- | --------------------------------------------------------------------------------------------------------------------------- | +| `tasks.trigger()` | Anywhere | Triggers a task and gets a handle you can use to fetch and manage the run. [Read more](#tasks-trigger) | +| `tasks.batchTrigger()` | Anywhere | Triggers a task multiple times and gets a handle you can use to fetch and manage the runs. [Read more](#tasks-batchtrigger) | +| `tasks.triggerAndPoll()` | Anywhere | Triggers a task and then polls the run until it’s complete. [Read more](#tasks-triggerandpoll) | Trigger tasks **from inside a run**: -| Function | This works | What it does | -| -------------------------------- | --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `yourTask.trigger()` | Anywhere | Triggers a task and gets a handle you can use to monitor and manage the run. It does not wait for the result. [Read more](#task-trigger) | -| `yourTask.batchTrigger()` | Anywhere | Triggers a task multiple times and gets a handle you can use to monitor and manage the runs. It does not wait for the results. [Read more](#task-batchtrigger) | -| `yourTask.triggerAndWait()` | Inside task | Triggers a task and then waits until it's complete. You get the result data to continue with. [Read more](#task-triggerandwait) | -| `yourTask.batchTriggerAndWait()` | Inside task | Triggers a task multiple times in parallel and then waits until they're all complete. You get the resulting data to continue with. 
[Read more](#task-batchtriggerandwait) | +| Function | This works | What it does | +| -------------------------------- | ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `yourTask.trigger()` | Anywhere | Triggers a task and gets a handle you can use to monitor and manage the run. It does not wait for the result. [Read more](#task-trigger) | +| `yourTask.batchTrigger()` | Anywhere | Triggers a task multiple times and gets a handle you can use to monitor and manage the runs. It does not wait for the results. [Read more](#task-batchtrigger) | +| `yourTask.triggerAndWait()` | Inside task | Triggers a task and then waits until it's complete. You get the result data to continue with. [Read more](#task-triggerandwait) | +| `yourTask.batchTriggerAndWait()` | Inside task | Triggers a task multiple times in parallel and then waits until they're all complete. You get the resulting data to continue with. [Read more](#task-batchtriggerandwait) | Additionally, [scheduled tasks](/tasks-scheduled) get **automatically** triggered on their schedule and webhooks when receiving a webhook. @@ -46,7 +46,9 @@ You can use Next.js Server Actions but [you need to be careful with bundling](/g Triggers a single run of a task with the payload you pass in, and any options you specify, without needing to import the task. - By using `tasks.trigger()`, you can pass in the task type as a generic argument, giving you full type checking. Make sure you use a `type` import so that your task code is not imported into your application. + By using `tasks.trigger()`, you can pass in the task type as a generic argument, giving you full + type checking. Make sure you use a `type` import so that your task code is not imported into your + application. 
@@ -103,7 +105,9 @@ export async function action({ request, params }: ActionFunctionArgs) { Triggers multiples runs of a task with the payloads you pass in, and any options you specify, without needing to import the task. - By using `tasks.batchTrigger()`, you can pass in the task type as a generic argument, giving you full type checking. Make sure you use a `type` import so that your task code is not imported into your application. + By using `tasks.batchTrigger()`, you can pass in the task type as a generic argument, giving you + full type checking. Make sure you use a `type` import so that your task code is not imported into + your application. @@ -159,7 +163,9 @@ export async function action({ request, params }: ActionFunctionArgs) { Triggers a single run of a task with the payload you pass in, and any options you specify, and then polls the run until it's complete. - By using `tasks.triggerAndPoll()`, you can pass in the task type as a generic argument, giving you full type checking. Make sure you use a `type` import so that your task code is not imported into your application. + By using `tasks.triggerAndPoll()`, you can pass in the task type as a generic argument, giving you + full type checking. Make sure you use a `type` import so that your task code is not imported into + your application. 
@@ -313,7 +319,7 @@ export const loopTask = task({ export const parentTask = task({ id: "parent-task", run: async (payload: string) => { - const result = await batchChildTask.triggerAndWait("some-data"); + const result = await childTask.triggerAndWait("some-data"); console.log("Result", result); //...do stuff with the result @@ -321,6 +327,58 @@ export const parentTask = task({ }); ``` +The `result` object is a "Result" type that needs to be checked to see if the child task run was successful: + +```ts /trigger/parent.ts +export const parentTask = task({ + id: "parent-task", + run: async (payload: string) => { + const result = await childTask.triggerAndWait("some-data"); + + if (result.ok) { + console.log("Result", result.output); // result.output is the typed return value of the child task + } else { + console.error("Error", result.error); // result.error is the error that caused the run to fail + } + }, +}); +``` + +If instead you just want to get the output of the child task, and throw an error if the child task failed, you can use the `unwrap` method: + +```ts /trigger/parent.ts +export const parentTask = task({ + id: "parent-task", + run: async (payload: string) => { + const output = await childTask.triggerAndWait("some-data").unwrap(); + console.log("Output", output); + }, +}); +``` + +You can also catch the error if the child task fails and get more information about the error: + +```ts /trigger/parent.ts +import { task, SubtaskUnwrapError } from "@trigger.dev/sdk/v3"; +export const parentTask = task({ + id: "parent-task", + run: async (payload: string) => { + try { + const output = await childTask.triggerAndWait("some-data").unwrap(); + console.log("Output", output); + } catch (error) { + if (error instanceof SubtaskUnwrapError) { + console.error("Error in fetch-post-task", { + runId: error.runId, + taskId: error.taskId, + cause: error.cause, + }); + } + } + }, +}); +``` + This method should only be used inside a task. 
If you use it outside a task, it will throw an error. @@ -379,45 +437,45 @@ export const loopTask = task({ When using `batchTriggerAndWait`, you have full control over how to handle failures within the batch. The method returns an array of run results, allowing you to inspect each run's outcome individually and implement custom error handling. - Here's how you can manage run failures: +Here's how you can manage run failures: + +1. **Inspect individual run results**: Each run in the returned array has an `ok` property indicating success or failure. - 1. **Inspect individual run results**: Each run in the returned array has an `ok` property indicating success or failure. +2. **Access error information**: For failed runs, you can examine the `error` property to get details about the failure. - 2. **Access error information**: For failed runs, you can examine the `error` property to get details about the failure. +3. **Choose your failure strategy**: You have two main options: - 3. **Choose your failure strategy**: You have two main options: - - **Fail the entire batch**: Throw an error if any run fails, causing the parent task to reattempt. - - **Continue despite failures**: Process the results without throwing an error, allowing the parent task to continue. + - **Fail the entire batch**: Throw an error if any run fails, causing the parent task to reattempt. + - **Continue despite failures**: Process the results without throwing an error, allowing the parent task to continue. - 4. **Implement custom logic**: You can create sophisticated handling based on the number of failures, types of errors, or other criteria. +4. **Implement custom logic**: You can create sophisticated handling based on the number of failures, types of errors, or other criteria. 
- Here's an example of how you might handle run failures: +Here's an example of how you might handle run failures: ```ts /trigger/batchTriggerAndWait.ts - const result = await batchChildTask.batchTriggerAndWait([ - { payload: "item1" }, - { payload: "item2" }, - { payload: "item3" }, - ]); - - // Result will contain the finished runs. - // They're only finished if they have succeeded or failed. - // "Failed" means all attempts failed - - for (const run of result.runs) { - - // Check if the run succeeded - if (run.ok) { - logger.info("Batch task run succeeded", { output: run.output }); - } else { - logger.error("Batch task run error", { error: run.error }); +const result = await batchChildTask.batchTriggerAndWait([ + { payload: "item1" }, + { payload: "item2" }, + { payload: "item3" }, +]); + +// Result will contain the finished runs. +// They're only finished if they have succeeded or failed. +// "Failed" means all attempts failed + +for (const run of result.runs) { + // Check if the run succeeded + if (run.ok) { + logger.info("Batch task run succeeded", { output: run.output }); + } else { + logger.error("Batch task run error", { error: run.error }); - //You can choose if you want to throw an error and fail the entire run - throw new Error(`Fail the entire run because ${run.id} failed`); - } + //You can choose if you want to throw an error and fail the entire run + throw new Error(`Fail the entire run because ${run.id} failed`); } +} ``` @@ -472,7 +530,7 @@ await myTask.trigger({ some: "data" }, { delay: "2024-12-01T00:00:00" }); // Delay using a Date object await myTask.trigger({ some: "data" }, { delay: new Date(Date.now() + 1000 * 60 * 60) }); // Delay using a timezone -await myTask.trigger({ some: "data" }, { delay: new Date('2024-07-23T11:50:00+02:00') }); +await myTask.trigger({ some: "data" }, { delay: new Date("2024-07-23T11:50:00+02:00") }); ``` Runs that are delayed and have not been enqueued yet will display in the dashboard with a "Delayed" status: diff 
--git a/docs/troubleshooting.mdx b/docs/troubleshooting.mdx index bceaa8953d..4802a68b43 100644 --- a/docs/troubleshooting.mdx +++ b/docs/troubleshooting.mdx @@ -3,10 +3,9 @@ title: "Common problems" description: "Some common problems you might experience and their solutions" --- -import NextjsTroubleshootingMissingApiKey from '/snippets/nextjs-missing-api-key.mdx'; -import NextjsTroubleshootingButtonSyntax from '/snippets/nextjs-button-syntax.mdx'; -import RateLimitHitUseBatchTrigger from '/snippets/rate-limit-hit-use-batchtrigger.mdx'; -import WorkerFailedToStartWhenRunningDevCommand from '/snippets/worker-failed-to-start.mdx'; +import NextjsTroubleshootingMissingApiKey from "/snippets/nextjs-missing-api-key.mdx"; +import NextjsTroubleshootingButtonSyntax from "/snippets/nextjs-button-syntax.mdx"; +import RateLimitHitUseBatchTrigger from "/snippets/rate-limit-hit-use-batchtrigger.mdx"; ## Development @@ -30,37 +29,20 @@ Then change the permissions of the npm folder (if 1 doesn't work): sudo chown -R $(whoami) ~/.npm ``` - ## Deployment Running the [trigger.dev deploy] command builds and deploys your code. Sometimes there can be issues building your code. You can run the deploy command with `--log-level debug` at the end. This will spit out a lot of information about the deploy. If you can't figure out the problem from the information below please join [our Discord](https://trigger.dev/discord) and create a help forum post. Do NOT share the extended debug logs publicly as they might reveal private information about your project. -Here are some common problems and their solutions: - -### `Typecheck failed, aborting deployment` - -We typecheck your code before deploying. If the typecheck fails, the deployment is aborted. You should see logs with details about the typecheck failure. - -You can skip typechecking, by adding the `--skip-typecheck` flag when calling deploy. 
- -### `Error: Cannot find module 'X'` - -This errors occurs if we can't figure out how to automatically import some code. You can fix this by adding it to the `dependenciesToBundle` array in the [trigger.config file](/trigger-config). +You can also review the build by supplying the `--dry-run` flag. This will build your project but not deploy it. You can then inspect the build output on your machine. - +Here are some common problems and their solutions: ### `Failed to build project image: Error building image` There should be a link below the error message to the full build logs on your machine. Take a look at these to see what went wrong. Join [our Discord](https://trigger.dev/discord) and you share it privately with us if you can't figure out what's going wrong. Do NOT share these publicly as the verbose logs might reveal private information about your project. -### `Deployment timed out` - -The last stage of deployment is to run it on our servers – we register the new versions of your tasks with the dashboard during this step. We allow 3 mins for this to succeed or fail. If it fails then you'll see this error. - -The first thing to do is to try again. If that fails then join [our Discord](https://trigger.dev/discord) and create a Help forum post with a link to your deployment. - ### `Deployment encountered an error` Usually there will be some useful guidance below this message. If you can't figure out what's going wrong then join [our Discord](https://trigger.dev/discord) and create a Help forum post with a link to your deployment. @@ -68,6 +50,7 @@ Usually there will be some useful guidance below this message. 
If you can't figu ## Project setup issues ### `The requested module 'node:events' does not provide an export named 'addAbortListener'` + If you see this error it means you're not a supported version of Node: ``` @@ -81,10 +64,11 @@ Node.js v19.9.0 You need to be on at least these minor versions: | Version | Minimum | -| ----- | ------- | -| 18 | 18.16+ | -| 20 | 20.11+ | -| 21 | 21.0+ | +| ------- | ------- | +| 18 | 18.20+ | +| 20 | 20.5+ | +| 21 | 21.0+ | +| 22 | 22.0+ | ## Runtime issues @@ -94,7 +78,7 @@ Your code is deployed separately from the rest of your app(s) so you need to mak ### `Error: @prisma/client did not initialize yet.` -Prisma uses code generation to create the client from your schema file. This means you need to add a bit of config so we can generate this file before your tasks run: [read the guide](/trigger-config#prisma-and-other-generators). +Prisma uses code generation to create the client from your schema file. This means you need to add a bit of config so we can generate this file before your tasks run: [read the guide](/config/config-file#prisma). ### When triggering subtasks the parent task finishes too soon @@ -102,7 +86,7 @@ Make sure that you always use `await` when you call `trigger`, `triggerAndWait`, ### Rate limit exceeded - + View the [rate limits](/limits) page for more information. @@ -147,11 +131,10 @@ Or change the tsconfig jsx setting: { "compilerOptions": { //... - "jsx": "react-jsx" - }, + "jsx": "react-jsx" + } } ``` - - - + + diff --git a/docs/upgrading-beta.mdx b/docs/upgrading-beta.mdx new file mode 100644 index 0000000000..65af28bd0f --- /dev/null +++ b/docs/upgrading-beta.mdx @@ -0,0 +1,431 @@ +--- +title: "Upgrade to new build system" +sidebarTitle: "Beta upgrade" +description: "How to update to 3.0.0 from the beta" +--- + +The Trigger.dev packages are now at version `3.0.x` in the `latest` tag. 
This is our first official release of v3 under the latest tag, and we recommend anyone still using packages in the `beta` tag to upgrade to the latest version. This guide will help you upgrade your project to the latest version of Trigger.dev.
+
+The major changes in this release are a new build system, which is more flexible and powerful than the previous build system. We've also made some changes to the `trigger.dev` CLI to improve the developer experience.
+
+The main features of the new build system are:
+
+- **Bundling by default**: All dependencies are bundled by default, so you no longer need to specify which dependencies to bundle. This solves a whole bunch of issues related to monorepos.
+- **Build extensions**: A new way to extend the build process with custom logic. This is a more flexible and powerful way to extend the build process compared to the old system. (including custom esbuild plugin support)
+- **Improved configuration**: We've migrated to using [c12](https://github.com/unjs/c12) to power our configuration system.
+- **Improved error handling**: We now do a much better job of reporting any errors that happen during the indexing process by loading your trigger task files dynamically.
+- **Improved cold start times**: Previously, we would load all your trigger task files at once, which could lead to long cold start times. Now we load your trigger task files dynamically, which should improve cold start times.
+
+## Update packages
+
+To use the new build system, you have to update to use our latest packages. Update the `@trigger.dev/sdk` package in your package.json:
+
+```json
+"@trigger.dev/sdk": "^3.0.0",
+```
+
+You will also need to update your usage of the `trigger.dev` CLI to use the latest release.
If you run the CLI via `npx` you can update to the latest release like so:
+
+```sh
+# old way
+npx trigger.dev@3.0.0-beta.56 dev
+
+# using the latest release
+npx trigger.dev@latest dev
+```
+
+If you've added the `trigger.dev` CLI to your `devDependencies`, then you should update the version to point to the latest release:
+
+```json
+"trigger.dev": "^3.0.0",
+```
+
+Once you do that make sure you re-install your dependencies using `npm i` or the equivalent with your preferred package manager.
+
+If you deploy using GitHub actions, make sure you update the version there too.
+
+## Update your `trigger.config.ts`
+
+The new build system does not affect your trigger task files at all, so those can remain unchanged. However, you may need to make changes to your `trigger.config.ts` file.
+
+### `defineConfig`
+
+You should now import the `defineConfig` function from `@trigger.dev/sdk/v3` and export the config as the default export:
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk/v3";
+
+export default defineConfig({
+  project: "<project ref>",
+});
+```
+
+### Deprecated: `dependenciesToBundle`
+
+The new build system will bundle all dependencies by default, so `dependenciesToBundle` no longer makes any sense and can be removed.
+
+#### Externals
+
+Now that all dependencies are bundled, there are some situations where bundling a dependency doesn't work, and needs to be made external (e.g. when a dependency includes a native module). You can now specify these dependencies as build externals in the `defineConfig` function:
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk/v3";
+
+export default defineConfig({
+  project: "<project ref>",
+  build: {
+    external: ["native-module"],
+  },
+});
+```
+
+`external` is an array of strings, where each string is the name of a dependency that should be made external. Glob expressions are also supported and use the [minimatch](https://github.com/isaacs/minimatch) matcher.
+ +### additionalFiles + +The `additionalFiles` option has been moved to our new build extension system. + +To use build extensions, you'll need to add the `@trigger.dev/build` package to your `devDependencies`: + +```sh +npm add @trigger.dev/build@latest -D +``` + +Now you can import the `additionalFiles` build extension and use it in your `trigger.config.ts` file: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { additionalFiles } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + additionalFiles({ files: ["wrangler/wrangler.toml", "./assets/**", "./fonts/**"] }), + ], + }, +}); +``` + +### additionalPackages + +The `additionalPackages` option has been moved to our new build extension system. + +To use build extensions, you'll need to add the `@trigger.dev/build` package to your `devDependencies`: + +```sh +npm add @trigger.dev/build@latest -D +``` + +Now you can import the `additionalPackages` build extension and use it in your `trigger.config.ts` file: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { additionalPackages } from "@trigger.dev/build/extensions/core"; + +export default defineConfig({ + project: "", + build: { + extensions: [additionalPackages({ packages: ["wrangler"] })], + }, +}); +``` + +### resolveEnvVars + +The `resolveEnvVars` export has been moved to our new build extension system. 
+
+To use build extensions, you'll need to add the `@trigger.dev/build` package to your `devDependencies`:
+
+```sh
+npm add @trigger.dev/build@latest -D
+```
+
+Now you can import the `syncEnvVars` build extension and use it in your `trigger.config.ts` file:
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk/v3";
+import { syncEnvVars } from "@trigger.dev/build/extensions/core";
+
+export default defineConfig({
+  project: "<project ref>",
+  build: {
+    extensions: [
+      syncEnvVars(async (params) => {
+        return {
+          MY_ENV_VAR: "my-value",
+        };
+      }),
+    ],
+  },
+});
+```
+
+The `syncEnvVars` callback function works very similarly to the deprecated `resolveEnvVars` handler, but now instead of returning an object with a `variables` key that contains the environment variables, you return an object with the environment variables directly (see the example above).
+
+One other difference is now `params.env` only contains the environment variables that are set in the Trigger.dev environment variables, and not the environment variables from the process. If you want to access the environment variables from the process, you can use `process.env`.
+
+See the [syncEnvVars](/deploy-environment-variables#sync-env-vars-from-another-service) documentation for more information.
+
+### emitDecoratorMetadata
+
+If you make use of decorators in your code, and have enabled the `emitDecoratorMetadata` tsconfig compiler option, you'll need to enable this in the new build system using the `emitDecoratorMetadata` build extension:
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk/v3";
+import { emitDecoratorMetadata } from "@trigger.dev/build/extensions/typescript";
+
+export default defineConfig({
+  project: "<project ref>",
+  build: {
+    extensions: [emitDecoratorMetadata()],
+  },
+});
+```
+
+### Prisma
+
+We've created a build extension to support using Prisma in your Trigger.dev tasks.
To use this extension, you'll need to add the `@trigger.dev/build` package to your `devDependencies`: + +```sh +npm add @trigger.dev/build@latest -D +``` + +Then you can import the `prismaExtension` build extension and use it in your `trigger.config.ts` file, passing in the path to your Prisma schema file: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + }), + ], + }, +}); +``` + +This will make sure that your prisma client is generated during the build process when deploying to Trigger.dev. + + + This does not have any effect when running the `dev` command, so you'll need to make sure you + generate your client locally first. + + +If you want to also run migrations during the build process, you can pass in the `migrate` option: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + migrate: true, + directUrlEnvVarName: "DATABASE_URL_UNPOOLED", // optional - the name of the environment variable that contains the direct database URL if you are using a direct database URL + }), + ], + }, +}); +``` + +If you have multiple `generator` statements defined in your schema file, you can pass in the `clientGenerator` option to specify the `prisma-client-js` generator, which will prevent other generators from being generated: + + + +```prisma schema.prisma +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + directUrl = env("DATABASE_URL_UNPOOLED") +} + +// We only want to generate the prisma-client-js generator +generator client { + provider = "prisma-client-js" +} + +generator kysely { + provider = "prisma-kysely" + output = 
"../../src/kysely" + enumFileName = "enums.ts" + fileName = "types.ts" +} +``` + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + clientGenerator: "client", + }), + ], + }, +}); +``` + + + +### audioWaveform + +Previously, we installed [Audio Waveform](https://github.com/bbc/audiowaveform) in the build image. That's been moved to a build extension: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { audioWaveform } from "@trigger.dev/build/extensions/audioWaveform"; + +export default defineConfig({ + project: "", + build: { + extensions: [audioWaveform()], // uses verson 1.1.0 of audiowaveform by default + }, +}); +``` + +### esbuild plugins + +You can now add esbuild plugins to customize the build process using the `esbuildPlugin` build extension. The example below shows how to automatically upload sourcemaps to Sentry using their esbuild plugin: + +```ts +import { defineConfig } from "@trigger.dev/sdk/v3"; +import { esbuildPlugin } from "@trigger.dev/build/extensions"; +import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin"; + +export default defineConfig({ + project: "", + build: { + extensions: [ + esbuildPlugin( + sentryEsbuildPlugin({ + org: process.env.SENTRY_ORG, + project: process.env.SENTRY_PROJECT, + authToken: process.env.SENTRY_AUTH_TOKEN, + }), + // optional - only runs during the deploy command, and adds the plugin to the end of the list of plugins + { placement: "last", target: "deploy" } + ), + ], + }, +}); +``` + +## Changes to the `trigger.dev` CLI + +### No more typechecking during deploy + +We no longer run typechecking during the deploy command. This was causing issues with some projects, and we found that it wasn't necessary to run typechecking during the deploy command. 
If you want to run typechecking before deploying to Trigger.dev, you can run the `tsc` command before running the `deploy` command. + +```sh +tsc && npx trigger.dev@latest deploy +``` + +Or if you are using GitHub actions, you can add an additional step to run the `tsc` command before deploying to Trigger.dev. + +```yaml +- name: Install dependencies + run: npm install + +- name: Typecheck + run: npx tsc + +- name: 🚀 Deploy Trigger.dev + env: + TRIGGER_ACCESS_TOKEN: ${{ secrets.TRIGGER_ACCESS_TOKEN }} + run: | + npx trigger.dev@latest deploy +``` + +### deploy `--dry-run` + +You can now inspect the build output of your project without actually deploying it to Trigger.dev by using the `--dry-run` flag: + +```sh +npx trigger.dev@latest deploy --dry-run +``` + +This will save the build output and print the path to the build output directory. If you face any issues with deploying, please include the build output in your issue report. + +### `--env-file` + +You can now pass the path to your local `.env` file using the `--env-file` flag during `dev` and `deploy` commands: + +```sh +npx trigger.dev@latest dev --env-file ../../.env +npx trigger.dev@latest deploy --env-file ../../.env +``` + +The `.env` file works slightly differently in `dev` vs `deploy`: + +- In `dev`, the `.env` file is loaded into the CLI's `process.env` and also into the environment variables of the Trigger.dev environment. +- In `deploy`, the `.env` file is loaded into the CLI's `process.env` but not into the environment variables of the Trigger.dev environment. If you want to sync the environment variables from the `.env` file to the Trigger.dev environment variables, you can use the `syncEnvVars` build extension. + +### dev debugging in VS Code + +Debugging your tasks code in `dev` is now supported via VS Code, without having to pass in any additional flags. 
Create a launch configuration in `.vscode/launch.json`: + +```json launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Trigger.dev: Dev", + "type": "node", + "request": "launch", + "cwd": "${workspaceFolder}", + "runtimeExecutable": "npx", + "runtimeArgs": ["trigger.dev@latest", "dev"], + "skipFiles": ["/**"], + "sourceMaps": true + } + ] +} +``` + +Then you can start debugging your tasks code by selecting the `Trigger.dev: Dev` configuration in the debug panel, and set breakpoints in your tasks code. + +### TRIGGER_ACCESS_TOKEN in dev + +You can now authenticate the `dev` command using the `TRIGGER_ACCESS_TOKEN` environment variable. Previously this was only supported in the `deploy` command. + +```sh +TRIGGER_ACCESS_TOKEN= npx trigger.dev@latest dev +``` + +### Better deploy support for self-hosters + +You can now specify a custom registry and namespace when deploying via a self-hosted instance of Trigger.dev: + +```sh +npx trigger.dev@latest deploy \ + --self-hosted \ + --load-image \ + --push \ + --registry docker.io \ + --namespace mydockerhubusername +``` + +All you have to do is create a repository in dockerhub that matches the project ref of your Trigger.dev project (e.g. `proj_rrkpdguyagvsoktglnod`). + + + Docker Hub will automatically create a repository the first time you push, which is public by + default. If you want to keep these images private, make sure you create the repository before you + first run the `deploy` command. + + +## Known issues + +- Path aliases are not yet supported in your `trigger.config.ts` file. To work around this issue you'll need to rewrite path aliases to their relative paths. (See [this](https://github.com/unjs/jiti/issues/166) and [this](https://knip.dev/reference/known-issues#path-aliases-in-config-files) for more info.) +- `*.test.ts` and `*.spec.ts` files inside the trigger dirs will be bundled and could cause issues. You'll need to move these files outside of the trigger dirs to avoid this issue.
diff --git a/docs/upgrading-packages.mdx b/docs/upgrading-packages.mdx index 91e70f65e9..197c19f222 100644 --- a/docs/upgrading-packages.mdx +++ b/docs/upgrading-packages.mdx @@ -9,7 +9,7 @@ description: "When we release fixes and new features we recommend you upgrade yo Run this command in your project: ```sh -npx trigger.dev@beta update +npx trigger.dev@latest update ``` This will update all of the Trigger.dev packages in your project to the latest version. @@ -19,11 +19,11 @@ This will update all of the Trigger.dev packages in your project to the latest v When you run the CLI locally use the latest version for the `dev` and `deploy` commands: ```sh -npx trigger.dev@beta dev +npx trigger.dev@latest dev ``` ```sh -npx trigger.dev@beta deploy +npx trigger.dev@latest deploy ``` These commands will also give you the option to upgrade if you are behind on versions. @@ -44,7 +44,7 @@ You can deploy using [GitHub Actions](/github-actions). We recommend that you lo In the steps you'll see a `run` command. It will run the trigger.dev deploy CLI command. Make - sure to update this version to the latest version (e.g. `npx trigger.dev@3.0.0-beta.48 deploy`). + sure to update this version to the latest version (e.g. `npx trigger.dev@3.0.0 deploy`). 
@@ -57,7 +57,7 @@ For example: ```json { "devDependencies": { - "trigger.dev": "3.0.0-beta.48" + "trigger.dev": "3.0.0" } } ``` diff --git a/packages/build/src/extensions/core.ts b/packages/build/src/extensions/core.ts index 0abd6c10fb..2adddbaa06 100644 --- a/packages/build/src/extensions/core.ts +++ b/packages/build/src/extensions/core.ts @@ -1,3 +1,5 @@ export * from "./core/additionalFiles.js"; export * from "./core/additionalPackages.js"; export * from "./core/syncEnvVars.js"; +export * from "./core/aptGet.js"; +export * from "./core/ffmpeg.js"; diff --git a/packages/build/src/extensions/core/aptGet.ts b/packages/build/src/extensions/core/aptGet.ts new file mode 100644 index 0000000000..c6d0b51e65 --- /dev/null +++ b/packages/build/src/extensions/core/aptGet.ts @@ -0,0 +1,27 @@ +import { BuildExtension } from "@trigger.dev/core/v3/build"; + +export type AptGetOptions = { + packages: string[]; +}; + +export function aptGet(options: AptGetOptions): BuildExtension { + return { + name: "aptGet", + onBuildComplete(context) { + if (context.target === "dev") { + return; + } + + context.logger.debug("Adding apt-get layer", { + pkgs: options.packages, + }); + + context.addLayer({ + id: "apt-get", + image: { + pkgs: options.packages, + }, + }); + }, + }; +} diff --git a/packages/build/src/extensions/core/ffmpeg.ts b/packages/build/src/extensions/core/ffmpeg.ts new file mode 100644 index 0000000000..84a91d7061 --- /dev/null +++ b/packages/build/src/extensions/core/ffmpeg.ts @@ -0,0 +1,40 @@ +import { BuildExtension } from "@trigger.dev/core/v3/build"; + +export type FfmpegOptions = { + version?: string; +}; + +/** + * Add ffmpeg to the build, and automatically set the FFMPEG_PATH and FFPROBE_PATH environment variables. + * @param options.version The version of ffmpeg to install. If not provided, the latest version will be installed. + * + * @returns The build extension. 
+ */ +export function ffmpeg(options: FfmpegOptions = {}): BuildExtension { + return { + name: "ffmpeg", + onBuildComplete(context) { + if (context.target === "dev") { + return; + } + + context.logger.debug("Adding ffmpeg", { + options, + }); + + context.addLayer({ + id: "ffmpeg", + image: { + pkgs: options.version ? [`ffmpeg=${options.version}`] : ["ffmpeg"], + }, + deploy: { + env: { + FFMPEG_PATH: "/usr/bin/ffmpeg", + FFPROBE_PATH: "/usr/bin/ffprobe", + }, + override: true, + }, + }); + }, + }; +} diff --git a/packages/cli-v3/package.json b/packages/cli-v3/package.json index 4d648b28cd..91ca0cedaf 100644 --- a/packages/cli-v3/package.json +++ b/packages/cli-v3/package.json @@ -29,7 +29,8 @@ "dist" ], "bin": { - "triggerdev": "./dist/esm/index.js" + "triggerdev": "./dist/esm/index.js", + "trigger": "./dist/esm/index.js" }, "tshy": { "selfLink": false, diff --git a/packages/cli-v3/src/commands/init.ts b/packages/cli-v3/src/commands/init.ts index 370e8ae581..df60445cf1 100644 --- a/packages/cli-v3/src/commands/init.ts +++ b/packages/cli-v3/src/commands/init.ts @@ -7,7 +7,7 @@ import { Command, Option as CommandOption } from "commander"; import { applyEdits, findNodeAtLocation, getNodeValue, modify, parseTree } from "jsonc-parser"; import { writeFile } from "node:fs/promises"; import { join, relative, resolve } from "node:path"; -import { addDependency, detectPackageManager } from "nypm"; +import { addDependency, addDevDependency, detectPackageManager } from "nypm"; import { resolveTSConfig } from "pkg-types"; import { z } from "zod"; import { CliApiClient } from "../apiClient.js"; @@ -37,8 +37,9 @@ import { login } from "./login.js"; const InitCommandOptions = CommonCommandOptions.extend({ projectRef: z.string().optional(), overrideConfig: z.boolean().default(false), - tag: z.string().default("beta"), + tag: z.string().default("latest"), skipPackageInstall: z.boolean().default(false), + runtime: z.string().default("node"), pkgArgs: z.string().optional(), gitRef: 
z.string().default("main"), javascript: z.boolean().default(false), @@ -60,7 +61,12 @@ export function configureInitCommand(program: Command) { .option( "-t, --tag ", "The version of the @trigger.dev/sdk package to install", - "beta" + "latest" + ) + .option( + "-r, --runtime ", + "Which runtime to use for the project. Currently only supports node and bun", + "node" ) .option("--skip-package-install", "Skip installing the @trigger.dev/sdk package") .option("--override-config", "Override the existing config file if it exists") @@ -434,6 +440,15 @@ async function installPackages(dir: string, options: InitCommandOptions) { installSpinner.stop(`@trigger.dev/sdk@${options.tag} installed`); + installSpinner.start(`Adding @trigger.dev/build@${options.tag} to devDependencies`); + + await addDevDependency(`@trigger.dev/build@${options.tag}`, { + cwd: projectDir, + silent: true, + }); + + installSpinner.stop(`@trigger.dev/build@${options.tag} installed`); + span.end(); } catch (e) { if (options.logLevel === "debug") { @@ -481,15 +496,17 @@ async function writeConfigFile( "cli.projectDir": projectDir, "cli.templatePath": templateUrl, "cli.outputPath": outputPath, + "cli.runtime": options.runtime, }); const result = await createFileFromTemplate({ templateUrl, replacements: { projectRef: project.externalRef, + runtime: options.runtime, triggerDirectoriesOption: triggerDir.isCustomValue ? 
`\n dirs: ["${triggerDir.location}"],` - : `\n dirs: ["/src/trigger"],`, + : `\n dirs: ["./src/trigger"],`, }, outputPath, override: options.overrideConfig, diff --git a/packages/cli-v3/src/commands/update.ts b/packages/cli-v3/src/commands/update.ts index aa608c4ba3..f22fe9fd3e 100644 --- a/packages/cli-v3/src/commands/update.ts +++ b/packages/cli-v3/src/commands/update.ts @@ -77,7 +77,7 @@ export async function updateTriggerPackages( prettyWarning( "You're not running the latest CLI version, please consider updating ASAP", `Current: ${cliVersion}\nLatest: ${newCliVersion}`, - "Run latest: npx trigger.dev@beta" + "Run latest: npx trigger.dev@latest" ); hasOutput = true; @@ -108,25 +108,9 @@ export async function updateTriggerPackages( mismatches.push(dep); } - const extractRelease = (version: string) => { - const release = Number(version.split("3.0.0-beta.")[1]); - return release || undefined; - }; - - let isDowngrade = false; - const targetRelease = extractRelease(targetVersion); - - if (targetRelease) { - isDowngrade = mismatches.some((dep) => { - const depRelease = extractRelease(dep.version); - - if (!depRelease) { - return false; - } - - return depRelease > targetRelease; - }); - } + const isDowngrade = mismatches.some((dep) => { + return dep.version > targetVersion; + }); return { mismatches, diff --git a/packages/cli-v3/src/utilities/initialBanner.ts b/packages/cli-v3/src/utilities/initialBanner.ts index 45b4ef39b2..034f483291 100644 --- a/packages/cli-v3/src/utilities/initialBanner.ts +++ b/packages/cli-v3/src/utilities/initialBanner.ts @@ -72,10 +72,7 @@ export function printDevBanner(printTopBorder = true) { async function doUpdateCheck(): Promise { try { // default cache for update check is 1 day - const meta = await getLatestVersion( - `trigger.dev@${VERSION.startsWith("3.0.0-beta") ? 
"beta" : "latest"}`, - { force: true } - ); + const meta = await getLatestVersion("trigger.dev@latest", { force: true }); if (!meta.version) { return; diff --git a/packages/cli-v3/templates/trigger.config.mjs.template b/packages/cli-v3/templates/trigger.config.mjs.template index 6a440ed3ca..7b5c122ddd 100644 --- a/packages/cli-v3/templates/trigger.config.mjs.template +++ b/packages/cli-v3/templates/trigger.config.mjs.template @@ -2,6 +2,7 @@ import { defineConfig } from "@trigger.dev/sdk/v3"; export default defineConfig({ project: "${projectRef}", + runtime: "${runtime}", logLevel: "log", retries: { enabledInDev: true, diff --git a/packages/cli-v3/templates/trigger.config.ts.template b/packages/cli-v3/templates/trigger.config.ts.template index 6a440ed3ca..7b5c122ddd 100644 --- a/packages/cli-v3/templates/trigger.config.ts.template +++ b/packages/cli-v3/templates/trigger.config.ts.template @@ -2,6 +2,7 @@ import { defineConfig } from "@trigger.dev/sdk/v3"; export default defineConfig({ project: "${projectRef}", + runtime: "${runtime}", logLevel: "log", retries: { enabledInDev: true, diff --git a/packages/trigger-sdk/src/v3/shared.ts b/packages/trigger-sdk/src/v3/shared.ts index 62ca7e1285..0294366488 100644 --- a/packages/trigger-sdk/src/v3/shared.ts +++ b/packages/trigger-sdk/src/v3/shared.ts @@ -263,6 +263,47 @@ export type TaskRunResult = error: unknown; }; +export class SubtaskUnwrapError extends Error { + public readonly taskId: string; + public readonly runId: string; + public readonly cause?: unknown; + + constructor(taskId: string, runId: string, subtaskError: unknown) { + if (subtaskError instanceof Error) { + super(`Error in ${taskId}: ${subtaskError.message}`, { cause: subtaskError }); + this.name = "SubtaskUnwrapError"; + } else { + super(`Error in ${taskId}`, { cause: subtaskError }); + this.name = "SubtaskUnwrapError"; + } + + this.taskId = taskId; + this.runId = runId; + } +} + +export class TaskRunPromise extends Promise> { + constructor( + executor: 
( + resolve: (value: TaskRunResult | PromiseLike>) => void, + reject: (reason?: any) => void + ) => void, + private readonly taskId: string + ) { + super(executor); + } + + unwrap(): Promise { + return this.then((result) => { + if (result.ok) { + return result.output; + } else { + throw new SubtaskUnwrapError(this.taskId, result.id, result.error); + } + }); + } +} + export type BatchResult = { id: string; runs: TaskRunResult[]; @@ -311,7 +352,7 @@ export interface Task * } * ``` */ - triggerAndWait: (payload: TInput, options?: TaskRunOptions) => Promise>; + triggerAndWait: (payload: TInput, options?: TaskRunOptions) => TaskRunPromise; /** * Batch trigger multiple task runs with the given payloads, and wait for the results. Returns the results of the task runs. @@ -512,20 +553,28 @@ export function createTask< customQueue ); }, - triggerAndWait: async (payload, options) => { + triggerAndWait: (payload, options) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); - return await triggerAndWait_internal( - taskMetadata && taskMetadata.exportName - ? `${taskMetadata.exportName}.triggerAndWait()` - : `triggerAndWait()`, - params.id, - payload, - { - queue: customQueue, - ...options, - } - ); + return new TaskRunPromise((resolve, reject) => { + triggerAndWait_internal( + taskMetadata && taskMetadata.exportName + ? 
`${taskMetadata.exportName}.triggerAndWait()` + : `triggerAndWait()`, + params.id, + payload, + { + queue: customQueue, + ...options, + } + ) + .then((result) => { + resolve(result); + }) + .catch((error) => { + reject(error); + }); + }, params.id); }, batchTriggerAndWait: async (items) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); @@ -614,19 +663,27 @@ export async function trigger( * } * ``` */ -export async function triggerAndWait( +export function triggerAndWait( id: TaskIdentifier, payload: TaskPayload, options?: TaskRunOptions, requestOptions?: ApiRequestOptions -): Promise>> { - return await triggerAndWait_internal, TaskOutput>( - "tasks.triggerAndWait()", - id, - payload, - options, - requestOptions - ); +): TaskRunPromise> { + return new TaskRunPromise>((resolve, reject) => { + triggerAndWait_internal, TaskOutput>( + "tasks.triggerAndWait()", + id, + payload, + options, + requestOptions + ) + .then((result) => { + resolve(result); + }) + .catch((error) => { + reject(error); + }); + }, id); } /** diff --git a/packages/trigger-sdk/src/v3/tasks.ts b/packages/trigger-sdk/src/v3/tasks.ts index 0e84d586c5..53cddf8d94 100644 --- a/packages/trigger-sdk/src/v3/tasks.ts +++ b/packages/trigger-sdk/src/v3/tasks.ts @@ -6,8 +6,11 @@ import { trigger, triggerAndPoll, triggerAndWait, + SubtaskUnwrapError, } from "./shared.js"; +export { SubtaskUnwrapError }; + import type { TaskOptions, Task, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 54305948e0..950ed17cfe 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1362,12 +1362,6 @@ importers: references/v3-catalog: dependencies: - '@ffmpeg-installer/ffmpeg': - specifier: ^1.1.0 - version: 1.1.0 - '@ffprobe-installer/ffprobe': - specifier: ^2.1.2 - version: 2.1.2 '@infisical/sdk': specifier: ^2.1.9 version: 2.3.5 @@ -1413,6 +1407,9 @@ importers: execa: specifier: ^8.0.1 version: 8.0.1 + fluent-ffmpeg: + specifier: ^2.1.3 + version: 2.1.3 header-generator: specifier: ^2.1.55 version: 2.1.55 @@ 
-1504,6 +1501,9 @@ importers: '@types/email-reply-parser': specifier: ^1.4.2 version: 1.4.2 + '@types/fluent-ffmpeg': + specifier: ^2.1.26 + version: 2.1.26 '@types/node': specifier: 20.4.2 version: 20.4.2 @@ -5993,161 +5993,6 @@ packages: resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} engines: {node: '>=14'} - /@ffmpeg-installer/darwin-arm64@4.1.5: - resolution: {integrity: sha512-hYqTiP63mXz7wSQfuqfFwfLOfwwFChUedeCVKkBtl/cliaTM7/ePI9bVzfZ2c+dWu3TqCwLDRWNSJ5pqZl8otA==} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/darwin-x64@4.1.0: - resolution: {integrity: sha512-Z4EyG3cIFjdhlY8wI9aLUXuH8nVt7E9SlMVZtWvSPnm2sm37/yC2CwjUzyCQbJbySnef1tQwGG2Sx+uWhd9IAw==} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/ffmpeg@1.1.0: - resolution: {integrity: sha512-Uq4rmwkdGxIa9A6Bd/VqqYbT7zqh1GrT5/rFwCwKM70b42W5gIjWeVETq6SdcL0zXqDtY081Ws/iJWhr1+xvQg==} - optionalDependencies: - '@ffmpeg-installer/darwin-arm64': 4.1.5 - '@ffmpeg-installer/darwin-x64': 4.1.0 - '@ffmpeg-installer/linux-arm': 4.1.3 - '@ffmpeg-installer/linux-arm64': 4.1.4 - '@ffmpeg-installer/linux-ia32': 4.1.0 - '@ffmpeg-installer/linux-x64': 4.1.0 - '@ffmpeg-installer/win32-ia32': 4.1.0 - '@ffmpeg-installer/win32-x64': 4.1.0 - dev: false - - /@ffmpeg-installer/linux-arm64@4.1.4: - resolution: {integrity: sha512-dljEqAOD0oIM6O6DxBW9US/FkvqvQwgJ2lGHOwHDDwu/pX8+V0YsDL1xqHbj1DMX/+nP9rxw7G7gcUvGspSoKg==} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/linux-arm@4.1.3: - resolution: {integrity: sha512-NDf5V6l8AfzZ8WzUGZ5mV8O/xMzRag2ETR6+TlGIsMHp81agx51cqpPItXPib/nAZYmo55Bl2L6/WOMI3A5YRg==} - cpu: [arm] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/linux-ia32@4.1.0: - resolution: {integrity: 
sha512-0LWyFQnPf+Ij9GQGD034hS6A90URNu9HCtQ5cTqo5MxOEc7Rd8gLXrJvn++UmxhU0J5RyRE9KRYstdCVUjkNOQ==} - cpu: [ia32] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/linux-x64@4.1.0: - resolution: {integrity: sha512-Y5BWhGLU/WpQjOArNIgXD3z5mxxdV8c41C+U15nsE5yF8tVcdCGet5zPs5Zy3Ta6bU7haGpIzryutqCGQA/W8A==} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/win32-ia32@4.1.0: - resolution: {integrity: sha512-FV2D7RlaZv/lrtdhaQ4oETwoFUsUjlUiasiZLDxhEUPdNDWcH1OU9K1xTvqz+OXLdsmYelUDuBS/zkMOTtlUAw==} - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@ffmpeg-installer/win32-x64@4.1.0: - resolution: {integrity: sha512-Drt5u2vzDnIONf4ZEkKtFlbvwj6rI3kxw1Ck9fpudmtgaZIHD4ucsWB2lCZBXRxJgXR+2IMSti+4rtM4C4rXgg==} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/darwin-arm64@5.0.1: - resolution: {integrity: sha512-vwNCNjokH8hfkbl6m95zICHwkSzhEvDC3GVBcUp5HX8+4wsX10SP3B+bGur7XUzTIZ4cQpgJmEIAx6TUwRepMg==} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/darwin-x64@5.1.0: - resolution: {integrity: sha512-J+YGscZMpQclFg31O4cfVRGmDpkVsQ2fZujoUdMAAYcP0NtqpC49Hs3SWJpBdsGB4VeqOt5TTm1vSZQzs1NkhA==} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/ffprobe@2.1.2: - resolution: {integrity: sha512-ZNvwk4f2magF42Zji2Ese16SMj9BS7Fui4kRjg6gTYTxY3gWZNpg85n4MIfQyI9nimHg4x/gT6FVkp/bBDuBwg==} - engines: {node: '>=14.21.2'} - optionalDependencies: - '@ffprobe-installer/darwin-arm64': 5.0.1 - '@ffprobe-installer/darwin-x64': 5.1.0 - '@ffprobe-installer/linux-arm': 5.2.0 - '@ffprobe-installer/linux-arm64': 5.2.0 - '@ffprobe-installer/linux-ia32': 5.2.0 - '@ffprobe-installer/linux-x64': 5.2.0 - '@ffprobe-installer/win32-ia32': 5.1.0 - '@ffprobe-installer/win32-x64': 5.1.0 - dev: false - - /@ffprobe-installer/linux-arm64@5.2.0: 
- resolution: {integrity: sha512-X1VvWtlLs6ScP73biVLuHD5ohKJKsMTa0vafCESOen4mOoNeLAYbxOVxDWAdFz9cpZgRiloFj5QD6nDj8E28yQ==} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/linux-arm@5.2.0: - resolution: {integrity: sha512-PF5HqEhCY7WTWHtLDYbA/+rLS+rhslWvyBlAG1Fk8VzVlnRdl93o6hy7DE2kJgxWQbFaR3ZktPQGEzfkrmQHvQ==} - cpu: [arm] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/linux-ia32@5.2.0: - resolution: {integrity: sha512-TFVK5sasXyXhbIG7LtPRDmtkrkOsInwKcL43iEvEw+D9vCS2rc//mn9/0Q+BR0UoJEiMK4+ApYr/3LLVUBPOCQ==} - cpu: [ia32] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/linux-x64@5.2.0: - resolution: {integrity: sha512-D3UeqTLYPNs7pBWPLUYGehPdRVqU8eACox4OZy3pZUZatxye2YKlvBwEfaLdL1v2Z4FOAlLUhms0kY8m8kqSRA==} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/win32-ia32@5.1.0: - resolution: {integrity: sha512-5O3vOoNRxmut0/Nu9vSazTdSHasrr+zPT2B3Hm7kjmO3QVFcIfVImS6ReQnZeSy8JPJOqXts5kX5x/3KOX54XQ==} - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@ffprobe-installer/win32-x64@5.1.0: - resolution: {integrity: sha512-jMGYeAgkrdn4e2vvYt/qakgHRE3CPju4bn5TmdPfoAm1BlX1mY9cyMd8gf5vSzI8gH8Zq5WQAyAkmekX/8TSTg==} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: false - optional: true - /@floating-ui/core@0.7.3: resolution: {integrity: sha512-buc8BXHmG9l82+OQXOFU3Kr2XQx9ys01U/Q9HMIrZ300iLc8HLMgh7dcCqgYzAzf4BkoQvDcXf5Y+CuEZ5JBYg==} dev: false @@ -14550,6 +14395,12 @@ packages: '@types/serve-static': 1.15.0 dev: true + /@types/fluent-ffmpeg@2.1.26: + resolution: {integrity: sha512-0JVF3wdQG+pN0ImwWD0bNgJiKF2OHg/7CDBHw5UIbRTvlnkgGHK6V5doE54ltvhud4o31/dEiHm23CAlxFiUQg==} + dependencies: + '@types/node': 18.19.20 + dev: true + /@types/gradient-string@1.1.2: resolution: {integrity: 
sha512-zIet2KvHr2dkOCPI5ggQQ+WJVyfBSFaqK9sNelhgDjlE2K3Fu2muuPJwu5aKM3xoWuc3WXudVEMUwI1QWhykEQ==} dependencies: @@ -16129,6 +15980,10 @@ packages: hasBin: true dev: true + /async@0.2.10: + resolution: {integrity: sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ==} + dev: false + /async@3.2.6: resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} dev: true @@ -19707,6 +19562,14 @@ packages: /flatted@3.2.7: resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==} + /fluent-ffmpeg@2.1.3: + resolution: {integrity: sha512-Be3narBNt2s6bsaqP6Jzq91heDgOEaDCJAXcE3qcma/EJBSy5FB4cvO31XBInuAuKBx8Kptf8dkhjK0IOru39Q==} + engines: {node: '>=18'} + dependencies: + async: 0.2.10 + which: 1.3.1 + dev: false + /follow-redirects@1.15.2: resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} engines: {node: '>=4.0'} diff --git a/references/v3-catalog/package.json b/references/v3-catalog/package.json index 1525750df1..26f9872829 100644 --- a/references/v3-catalog/package.json +++ b/references/v3-catalog/package.json @@ -16,8 +16,7 @@ "generate:prisma": "prisma generate --sql" }, "dependencies": { - "@ffmpeg-installer/ffmpeg": "^1.1.0", - "@ffprobe-installer/ffprobe": "^2.1.2", + "fluent-ffmpeg": "^2.1.3", "@infisical/sdk": "^2.1.9", "@opentelemetry/api": "1.4.1", "@prisma/client": "5.19.0", @@ -68,6 +67,7 @@ "@types/email-reply-parser": "^1.4.2", "@types/node": "20.4.2", "@types/react": "^18.3.1", + "@types/fluent-ffmpeg": "^2.1.26", "esbuild": "^0.19.11", "prisma": "5.19.0", "prisma-kysely": "^1.8.0", diff --git a/references/v3-catalog/src/trigger/binaries.ts b/references/v3-catalog/src/trigger/binaries.ts index 2a5796cdd8..d7a6da8b51 100644 --- a/references/v3-catalog/src/trigger/binaries.ts +++ b/references/v3-catalog/src/trigger/binaries.ts @@ 
-1,159 +1,33 @@ -import { logger, task } from "@trigger.dev/sdk/v3"; -import { chmod, writeFile } from "node:fs/promises"; +import { task } from "@trigger.dev/sdk/v3"; +import ffmpeg from "fluent-ffmpeg"; +import * as path from "node:path"; import { Readable } from "node:stream"; -import { ReadableStream } from "stream/web"; -import { basename } from "node:path"; -import YTDlpWrap from "yt-dlp-wrap"; -import ffmpeg from "@ffmpeg-installer/ffmpeg"; - -export const ytDlp = task({ - id: "yt-dlp", - run: async () => { - const releaseArtifact = "yt-dlp_linux"; - const filePath = `./${releaseArtifact}`; - const fileURL = `https://github.com/yt-dlp/yt-dlp/releases/latest/download/${releaseArtifact}`; - - await YTDlpWrap.downloadFile(fileURL, filePath); - await chmod(filePath, "777"); - - logger.log("downloaded", { filePath, fileURL }); - - const ytDlpWrap = new YTDlpWrap(filePath); - const version = await ytDlpWrap.getVersion(); - - logger.log("version", { version }); - }, -}); - -async function getFfprobe() { - const ffprobe = await import("@ffprobe-installer/ffprobe"); - - logger.log("ffprobeInstaller", ffprobe); - - return ffprobe; -} - -async function ffprobeVersion() { - const ffprobe = await getFfprobe(); - const childProcess = await execute(ffprobe.path, ["-version"]); - - logger.log("ffprobe -version", { - output: childProcess.stdout.split("\n")[0], - }); -} - -async function ffmpegVersion() { - logger.log("ffmpegInstaller", ffmpeg); - - const childProcess = await execute(ffmpeg.path, ["-version"]); - - logger.log("ffmpeg -version", { - output: childProcess.stdout.split("\n")[0], - }); -} - -export const ffprobeInstaller = task({ - id: "ffprobe-installer", - run: async () => { - await ffprobeVersion(); - }, -}); - -export const ffmpegInstaller = task({ - id: "ffmpeg-installer", - run: async () => { - await ffmpegVersion(); +import type { ReadableStream } from "node:stream/web"; + +export const convertVideo = task({ + id: "convert-video", + retry: { + maxAttempts: 
5, + minTimeoutInMs: 1000, + maxTimeoutInMs: 10000, + factor: 2, }, -}); - -const videoUrl = - "https://upload.wikimedia.org/wikipedia/commons/0/07/Fractal-zoom-1-03-Mandelbrot_Buzzsaw.ogv"; -const videoPath = "./video.ogv"; - -async function downloadVideo() { - logger.log("downloading video", { url: videoUrl }); - - const response = await fetch(videoUrl); - - if (!response.body) { - throw new Error("No readable stream"); - } - - const readStream = Readable.fromWeb(response.body as ReadableStream); - await writeFile(videoPath, readStream); - - logger.log("finished downloading", { outputPath: videoPath }); -} - -async function execute(file: string, args?: readonly string[]) { - const { execa } = await import("execa"); - - logger.log(`execute: ${basename(file)}`, { args }); - const childProcess = await execa(file, args); - - if (childProcess.exitCode !== 0) { - logger.error("Non-zero exit code", { - stderr: childProcess.stderr, - stdout: childProcess.stdout, + run: async ({ url }: { url: string }) => { + const outputPath = path.join("/tmp", `output_${Date.now()}.mp4`); + + const response = await fetch(url); + + await new Promise((resolve, reject) => { + ffmpeg(Readable.fromWeb(response.body as ReadableStream)) + .videoFilters("scale=iw/2:ih/2") + .output(outputPath) + .on("end", resolve) + .on("error", reject) + .run(); }); - throw new Error("Non-zero exit code"); - } - - return childProcess; -} - -async function probeVideo() { - const ffprobe = await getFfprobe(); - const args = ["-hide_banner", "-print_format", "json", "-show_format", videoPath]; - - logger.log("probing video", { videoPath }); - const childProcess = await execute(ffprobe.path, args); - - logger.log("video info", { - output: JSON.parse(childProcess.stdout), - }); -} - -export const ffprobeInfo = task({ - id: "ffprobe-info", - run: async () => { - await ffprobeVersion(); - await downloadVideo(); - await probeVideo(); - }, -}); - -async function convertVideo() { - const outputPath = "./video.webm"; - 
logger.log("converting video", { input: videoPath, output: outputPath }); - - const childProcess = await execute(ffmpeg.path, [ - "-hide_banner", - "-y", // overwrite output, don't prompt - "-i", - videoPath, - // seek to 25s - "-ss", - "25", - // stop after 5s - "-t", - "5", - outputPath, - ]); - logger.log("video converted", { - input: videoPath, - output: outputPath, - stderr: childProcess.stderr, - stdout: childProcess.stdout, - }); -} + console.log(`Video converted to ${outputPath}`); -export const ffmpegConvert = task({ - id: "ffmpeg-convert", - run: async () => { - await ffmpegVersion(); - await downloadVideo(); - await convertVideo(); + return { success: true, outputPath }; }, }); diff --git a/references/v3-catalog/src/trigger/simple.ts b/references/v3-catalog/src/trigger/simple.ts index 1d6aa20711..4db91ac0f4 100644 --- a/references/v3-catalog/src/trigger/simple.ts +++ b/references/v3-catalog/src/trigger/simple.ts @@ -1,5 +1,5 @@ import "server-only"; -import { logger, task, tasks, wait } from "@trigger.dev/sdk/v3"; +import { logger, SubtaskUnwrapError, task, tasks, wait } from "@trigger.dev/sdk/v3"; import { traceAsync } from "@/telemetry.js"; import { HeaderGenerator } from "header-generator"; @@ -31,14 +31,22 @@ export const fetchPostTask = task({ export const anyPayloadTask = task({ id: "any-payload-task", run: async (payload: any) => { - const result = await tasks.triggerAndWait("fetch-post-task", { - url: "https://jsonplaceholder.typicode.com/posts/1", - }); - - if (result.ok) { - logger.info("Result from fetch-post-task 211111sss", { output: result.output }); - } else { - logger.error("Error from fetch-post-task", { error: result.error }); + try { + const { url, method } = await tasks + .triggerAndWait("fetch-post-task", { + url: "https://jsonplaceholder.typicode.comasdqdasd/posts/1", + }) + .unwrap(); + + console.log("Result from fetch-post-task 211111sss", { output: { url, method } }); + } catch (error) { + if (error instanceof SubtaskUnwrapError) 
{ + console.error("Error in fetch-post-task", { + runId: error.runId, + taskId: error.taskId, + cause: error.cause, + }); + } } return { @@ -126,10 +134,12 @@ export const parentTask = task({ await wait.for({ seconds: 5 }); - const childTaskResponse = await childTask.triggerAndWait({ - message: payload.message, - forceError: false, - }); + const childTaskResponse = await childTask + .triggerAndWait({ + message: payload.message, + forceError: false, + }) + .unwrap(); logger.info("Child task response", { childTaskResponse }); diff --git a/references/v3-catalog/trigger.config.ts b/references/v3-catalog/trigger.config.ts index 2f2c66d92d..3560609182 100644 --- a/references/v3-catalog/trigger.config.ts +++ b/references/v3-catalog/trigger.config.ts @@ -1,41 +1,15 @@ import { InfisicalClient } from "@infisical/sdk"; +import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin"; import { OpenAIInstrumentation } from "@traceloop/instrumentation-openai"; import { esbuildPlugin } from "@trigger.dev/build"; import { audioWaveform } from "@trigger.dev/build/extensions/audioWaveform"; +import { ffmpeg, syncEnvVars } from "@trigger.dev/build/extensions/core"; import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; import { emitDecoratorMetadata } from "@trigger.dev/build/extensions/typescript"; -import { defineConfig, ResolveEnvironmentVariablesFunction } from "@trigger.dev/sdk/v3"; -import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin"; +import { defineConfig } from "@trigger.dev/sdk/v3"; export { handleError } from "./src/handleError.js"; -export const resolveEnvVars: ResolveEnvironmentVariablesFunction = async (ctx) => { - if ( - process.env.INFISICAL_CLIENT_ID === undefined || - process.env.INFISICAL_CLIENT_SECRET === undefined || - process.env.INFISICAL_PROJECT_ID === undefined - ) { - return; - } - - const client = new InfisicalClient({ - clientId: process.env.INFISICAL_CLIENT_ID, - clientSecret: process.env.INFISICAL_CLIENT_SECRET, - }); - - const 
secrets = await client.listSecrets({ - environment: ctx.environment, - projectId: process.env.INFISICAL_PROJECT_ID, - }); - - return { - variables: secrets.map((secret) => ({ - name: secret.secretKey, - value: secret.secretValue, - })), - }; -}; - export default defineConfig({ runtime: "node", project: "yubjwjsfkxnylobaqvqz", @@ -43,7 +17,7 @@ export default defineConfig({ instrumentations: [new OpenAIInstrumentation()], additionalFiles: ["wrangler/wrangler.toml"], retries: { - enabledInDev: true, + enabledInDev: false, default: { maxAttempts: 10, minTimeoutInMs: 5_000, @@ -63,6 +37,7 @@ export default defineConfig({ build: { conditions: ["react-server"], extensions: [ + ffmpeg(), emitDecoratorMetadata(), audioWaveform(), prismaExtension({ @@ -80,7 +55,31 @@ export default defineConfig({ }), { placement: "last", target: "deploy" } ), + syncEnvVars(async (ctx) => { + if ( + !process.env.INFISICAL_CLIENT_ID || + !process.env.INFISICAL_CLIENT_SECRET || + !process.env.INFISICAL_PROJECT_ID + ) { + return; + } + + const client = new InfisicalClient({ + clientId: process.env.INFISICAL_CLIENT_ID, + clientSecret: process.env.INFISICAL_CLIENT_SECRET, + }); + + const secrets = await client.listSecrets({ + environment: ctx.environment, + projectId: process.env.INFISICAL_PROJECT_ID, + }); + + return secrets.map((secret) => ({ + name: secret.secretKey, + value: secret.secretValue, + })); + }), ], - external: ["@ffmpeg-installer/ffmpeg", "re2"], + external: ["re2"], }, });