diff --git a/.changeset/sharp-dolls-burn.md b/.changeset/sharp-dolls-burn.md
new file mode 100644
index 0000000000..6d51f36ecb
--- /dev/null
+++ b/.changeset/sharp-dolls-burn.md
@@ -0,0 +1,5 @@
+---
+"trigger.dev": patch
+---
+
+feat: Add official MCP server, install MCP and rules CLI commands and wizards
diff --git a/.cursor/mcp.json b/.cursor/mcp.json
index 9b3221784d..da39e4ffaf 100644
--- a/.cursor/mcp.json
+++ b/.cursor/mcp.json
@@ -1,7 +1,3 @@
{
- "mcpServers": {
- "trigger.dev": {
- "url": "http://localhost:3333/sse"
- }
- }
-}
\ No newline at end of file
+ "mcpServers": {}
+}
diff --git a/.gitignore b/.gitignore
index 9bee46fc27..6f435d0400 100644
--- a/.gitignore
+++ b/.gitignore
@@ -63,4 +63,5 @@ apps/**/public/build
/packages/core/src/package.json
/packages/trigger-sdk/src/package.json
/packages/python/src/package.json
-.claude
\ No newline at end of file
+.claude
+.mcp.log
\ No newline at end of file
diff --git a/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx b/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx
index f2c9479361..df75c25ff7 100644
--- a/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx
+++ b/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx
@@ -1,11 +1,8 @@
import { CheckCircleIcon } from "@heroicons/react/24/solid";
import { LoaderFunctionArgs } from "@remix-run/server-runtime";
-import { title } from "process";
import { typedjson, useTypedLoaderData } from "remix-typedjson";
import { z } from "zod";
-import { ErrorIcon } from "~/assets/icons/ErrorIcon";
import { AppContainer, MainCenteredContainer } from "~/components/layout/AppLayout";
-import { LinkButton } from "~/components/primitives/Buttons";
import { Callout } from "~/components/primitives/Callout";
import { Header1 } from "~/components/primitives/Headers";
import { Icon } from "~/components/primitives/Icon";
@@ -13,12 +10,16 @@ import { Paragraph } from "~/components/primitives/Paragraph";
import { logger } from "~/services/logger.server";
import { createPersonalAccessTokenFromAuthorizationCode } from "~/services/personalAccessToken.server";
import { requireUserId } from "~/services/session.server";
-import { rootPath } from "~/utils/pathBuilder";
const ParamsSchema = z.object({
authorizationCode: z.string(),
});
+const SearchParamsSchema = z.object({
+ source: z.string().optional(),
+ clientName: z.string().optional(),
+});
+
export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const userId = await requireUserId(request);
@@ -32,6 +33,14 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
});
}
+ const url = new URL(request.url);
+ const searchObject = Object.fromEntries(url.searchParams.entries());
+
+ const searchParams = SearchParamsSchema.safeParse(searchObject);
+
+ const source = (searchParams.success ? searchParams.data.source : undefined) ?? "cli";
+ const clientName = (searchParams.success ? searchParams.data.clientName : undefined) ?? "unknown";
+
try {
const personalAccessToken = await createPersonalAccessTokenFromAuthorizationCode(
parsedParams.data.authorizationCode,
@@ -39,6 +48,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
);
return typedjson({
success: true as const,
+ source,
+ clientName,
});
} catch (error) {
if (error instanceof Response) {
@@ -49,6 +60,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
return typedjson({
success: false as const,
error: error.message,
+ source,
+ clientName,
});
}
@@ -73,7 +86,7 @@ export default function Page() {
Successfully
authenticated
- Return to your terminal to continue.
+ {getInstructionsForSource(result.source, result.clientName)}
) : (
@@ -91,3 +104,21 @@ export default function Page() {
);
}
+
+const prettyClientNames: Record<string, string> = {
+ "claude-code": "Claude Code",
+ "cursor-vscode": "Cursor",
+ "Visual Studio Code": "VSCode",
+ "windsurf-client": "Windsurf",
+ "claude-ai": "Claude Desktop",
+};
+
+function getInstructionsForSource(source: string, clientName: string) {
+ if (source === "mcp") {
+ if (clientName) {
+ return `Return to your ${prettyClientNames[clientName] ?? clientName} to continue.`;
+ }
+ }
+
+ return `Return to your terminal to continue.`;
+}
diff --git a/apps/webapp/app/routes/api.v1.deployments.ts b/apps/webapp/app/routes/api.v1.deployments.ts
index 65410761b9..c80e180d89 100644
--- a/apps/webapp/app/routes/api.v1.deployments.ts
+++ b/apps/webapp/app/routes/api.v1.deployments.ts
@@ -1,10 +1,13 @@
import { ActionFunctionArgs, json } from "@remix-run/server-runtime";
import {
+ ApiDeploymentListSearchParams,
InitializeDeploymentRequestBody,
InitializeDeploymentResponseBody,
} from "@trigger.dev/core/v3";
+import { $replica } from "~/db.server";
import { authenticateApiRequest } from "~/services/apiAuth.server";
import { logger } from "~/services/logger.server";
+import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { ServiceValidationError } from "~/v3/services/baseService.server";
import { InitializeDeploymentService } from "~/v3/services/initializeDeployment.server";
@@ -60,3 +63,119 @@ export async function action({ request, params }: ActionFunctionArgs) {
}
}
}
+
+export const loader = createLoaderApiRoute(
+ {
+ searchParams: ApiDeploymentListSearchParams,
+ allowJWT: true,
+ corsStrategy: "none",
+ authorization: {
+ action: "read",
+ resource: () => ({ deployments: "list" }),
+ superScopes: ["read:deployments", "read:all", "admin"],
+ },
+ findResource: async () => 1, // This is a dummy function, we don't need to find a resource
+ },
+ async ({ searchParams, authentication }) => {
+ const limit = Math.max(Math.min(searchParams["page[size]"] ?? 20, 100), 5);
+
+ const afterDeployment = searchParams["page[after]"]
+ ? await $replica.workerDeployment.findFirst({
+ where: {
+ friendlyId: searchParams["page[after]"],
+ environmentId: authentication.environment.id,
+ },
+ })
+ : undefined;
+
+ const deployments = await $replica.workerDeployment.findMany({
+ where: {
+ environmentId: authentication.environment.id,
+ ...(afterDeployment ? { id: { lt: afterDeployment.id } } : {}),
+ ...getCreatedAtFilter(searchParams),
+ ...(searchParams.status ? { status: searchParams.status } : {}),
+ },
+ orderBy: {
+ id: "desc",
+ },
+ take: limit + 1,
+ });
+
+ const hasMore = deployments.length > limit;
+ const nextCursor = hasMore ? deployments[limit - 1].friendlyId : undefined;
+ const data = hasMore ? deployments.slice(0, limit) : deployments;
+
+ return json({
+ data: data.map((deployment) => ({
+ id: deployment.friendlyId,
+ createdAt: deployment.createdAt,
+ shortCode: deployment.shortCode,
+ version: deployment.version.toString(),
+ runtime: deployment.runtime,
+ runtimeVersion: deployment.runtimeVersion,
+ status: deployment.status,
+ deployedAt: deployment.deployedAt,
+ git: deployment.git,
+ error: deployment.errorData ?? undefined,
+ })),
+ pagination: {
+ next: nextCursor,
+ },
+ });
+ }
+);
+
+import parseDuration from "parse-duration";
+import { parseDate } from "@trigger.dev/core/v3/isomorphic";
+
+function getCreatedAtFilter(searchParams: ApiDeploymentListSearchParams) {
+ if (searchParams.period) {
+ const duration = parseDuration(searchParams.period, "ms");
+
+ if (!duration) {
+ throw new ServiceValidationError(
+ `Invalid search query parameter: period=${searchParams.period}`,
+ 400
+ );
+ }
+
+ return {
+ createdAt: {
+ gte: new Date(Date.now() - duration),
+ lte: new Date(),
+ },
+ };
+ }
+
+ if (searchParams.from && searchParams.to) {
+ const fromDate = safeDateFromString(searchParams.from, "from");
+ const toDate = safeDateFromString(searchParams.to, "to");
+
+ return {
+ createdAt: {
+ gte: fromDate,
+ lte: toDate,
+ },
+ };
+ }
+
+ if (searchParams.from) {
+ const fromDate = safeDateFromString(searchParams.from, "from");
+ return {
+ createdAt: {
+ gte: fromDate,
+ },
+ };
+ }
+
+ return {};
+}
+
+function safeDateFromString(value: string, paramName: string) {
+ const date = parseDate(value);
+
+ if (!date) {
+ throw new ServiceValidationError(`Invalid search query parameter: ${paramName}=${value}`, 400);
+ }
+ return date;
+}
diff --git a/apps/webapp/app/routes/api.v1.orgs.$orgParam.projects.ts b/apps/webapp/app/routes/api.v1.orgs.$orgParam.projects.ts
new file mode 100644
index 0000000000..9a23d12909
--- /dev/null
+++ b/apps/webapp/app/routes/api.v1.orgs.$orgParam.projects.ts
@@ -0,0 +1,138 @@
+import type { ActionFunctionArgs, LoaderFunctionArgs } from "@remix-run/server-runtime";
+import { json } from "@remix-run/server-runtime";
+import {
+ CreateProjectRequestBody,
+ GetProjectResponseBody,
+ GetProjectsResponseBody,
+} from "@trigger.dev/core/v3";
+import { z } from "zod";
+import { prisma } from "~/db.server";
+import { createProject } from "~/models/project.server";
+import { logger } from "~/services/logger.server";
+import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server";
+import { isCuid } from "cuid";
+
+const ParamsSchema = z.object({
+ orgParam: z.string(),
+});
+
+export async function loader({ request, params }: LoaderFunctionArgs) {
+ logger.info("get projects", { url: request.url });
+
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const { orgParam } = ParamsSchema.parse(params);
+
+ const projects = await prisma.project.findMany({
+ where: {
+ organization: {
+ ...orgParamWhereClause(orgParam),
+ deletedAt: null,
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ version: "V3",
+ deletedAt: null,
+ },
+ include: {
+ organization: true,
+ },
+ });
+
+ if (!projects) {
+ return json({ error: "Projects not found" }, { status: 404 });
+ }
+
+ const result: GetProjectsResponseBody = projects.map((project) => ({
+ id: project.id,
+ externalRef: project.externalRef,
+ name: project.name,
+ slug: project.slug,
+ createdAt: project.createdAt,
+ organization: {
+ id: project.organization.id,
+ title: project.organization.title,
+ slug: project.organization.slug,
+ createdAt: project.organization.createdAt,
+ },
+ }));
+
+ return json(result);
+}
+
+export async function action({ request, params }: ActionFunctionArgs) {
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const { orgParam } = ParamsSchema.parse(params);
+
+ const organization = await prisma.organization.findFirst({
+ where: {
+ ...orgParamWhereClause(orgParam),
+ deletedAt: null,
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ });
+
+ if (!organization) {
+ return json({ error: "Organization not found" }, { status: 404 });
+ }
+
+ const body = await request.json();
+ const parsedBody = CreateProjectRequestBody.safeParse(body);
+
+ if (!parsedBody.success) {
+ return json({ error: "Invalid request body" }, { status: 400 });
+ }
+
+ const project = await createProject({
+ organizationSlug: organization.slug,
+ name: parsedBody.data.name,
+ userId: authenticationResult.userId,
+ version: "v3",
+ });
+
+ const result: GetProjectResponseBody = {
+ id: project.id,
+ externalRef: project.externalRef,
+ name: project.name,
+ slug: project.slug,
+ createdAt: project.createdAt,
+ organization: {
+ id: project.organization.id,
+ title: project.organization.title,
+ slug: project.organization.slug,
+ createdAt: project.organization.createdAt,
+ },
+ };
+
+ return json(result);
+}
+
+function orgParamWhereClause(orgParam: string) {
+ // If the orgParam is an ID, or if it's a slug
+ // IDs are cuid
+ if (isCuid(orgParam)) {
+ return {
+ id: orgParam,
+ };
+ }
+
+ return {
+ slug: orgParam,
+ };
+}
diff --git a/apps/webapp/app/routes/api.v1.orgs.ts b/apps/webapp/app/routes/api.v1.orgs.ts
new file mode 100644
index 0000000000..626162f234
--- /dev/null
+++ b/apps/webapp/app/routes/api.v1.orgs.ts
@@ -0,0 +1,37 @@
+import type { LoaderFunctionArgs } from "@remix-run/server-runtime";
+import { json } from "@remix-run/server-runtime";
+import { GetOrgsResponseBody } from "@trigger.dev/core/v3";
+import { prisma } from "~/db.server";
+import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server";
+
+export async function loader({ request }: LoaderFunctionArgs) {
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const orgs = await prisma.organization.findMany({
+ where: {
+ deletedAt: null,
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ });
+
+ if (!orgs) {
+ return json({ error: "Orgs not found" }, { status: 404 });
+ }
+
+ const result: GetOrgsResponseBody = orgs.map((org) => ({
+ id: org.id,
+ title: org.title,
+ slug: org.slug,
+ createdAt: org.createdAt,
+ }));
+
+ return json(result);
+}
diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.jwt.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.jwt.ts
new file mode 100644
index 0000000000..2db054d4d4
--- /dev/null
+++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.jwt.ts
@@ -0,0 +1,109 @@
+import { ActionFunctionArgs, json } from "@remix-run/node";
+import { generateJWT as internal_generateJWT } from "@trigger.dev/core/v3";
+import { z } from "zod";
+import { prisma } from "~/db.server";
+import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server";
+import { getEnvironmentFromEnv } from "./api.v1.projects.$projectRef.$env";
+
+const ParamsSchema = z.object({
+ projectRef: z.string(),
+ env: z.enum(["dev", "staging", "prod", "preview"]),
+});
+
+const RequestBodySchema = z.object({
+ claims: z
+ .object({
+ scopes: z.array(z.string()).default([]),
+ })
+ .optional(),
+ expirationTime: z.union([z.number(), z.string()]).optional(),
+});
+
+export async function action({ request, params }: ActionFunctionArgs) {
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const parsedParams = ParamsSchema.safeParse(params);
+
+ if (!parsedParams.success) {
+ return json({ error: "Invalid Params" }, { status: 400 });
+ }
+
+ const { projectRef, env } = parsedParams.data;
+
+ const project = await prisma.project.findFirst({
+ where: {
+ externalRef: projectRef,
+ organization: {
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ },
+ });
+
+ if (!project) {
+ return json({ error: "Project not found" }, { status: 404 });
+ }
+
+ const envResult = await getEnvironmentFromEnv({
+ projectId: project.id,
+ userId: authenticationResult.userId,
+ env,
+ });
+
+ if (!envResult.success) {
+ return json({ error: envResult.error }, { status: 404 });
+ }
+
+ const runtimeEnv = envResult.environment;
+
+ const parsedBody = RequestBodySchema.safeParse(await request.json());
+
+ if (!parsedBody.success) {
+ return json(
+ { error: "Invalid request body", issues: parsedBody.error.issues },
+ { status: 400 }
+ );
+ }
+
+ const triggerBranch = request.headers.get("x-trigger-branch") ?? undefined;
+
+ let previewBranchEnvironmentId: string | undefined;
+
+ if (triggerBranch) {
+ const previewBranch = await prisma.runtimeEnvironment.findFirst({
+ where: {
+ projectId: project.id,
+ branchName: triggerBranch,
+ parentEnvironmentId: runtimeEnv.id,
+ archivedAt: null,
+ },
+ });
+
+ if (previewBranch) {
+ previewBranchEnvironmentId = previewBranch.id;
+ } else {
+ return json({ error: `Preview branch ${triggerBranch} not found` }, { status: 404 });
+ }
+ }
+
+ const claims = {
+ sub: previewBranchEnvironmentId ?? runtimeEnv.id,
+ pub: true,
+ ...parsedBody.data.claims,
+ };
+
+ const jwt = await internal_generateJWT({
+ secretKey: runtimeEnv.apiKey,
+ payload: claims,
+ expirationTime: parsedBody.data.expirationTime ?? "1h",
+ });
+
+ return json({ token: jwt });
+}
diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts
index cb5adeaf0d..c45a3c55ed 100644
--- a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts
+++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts
@@ -70,14 +70,16 @@ export async function loader({ request, params }: LoaderFunctionArgs) {
return json(result);
}
-async function getEnvironmentFromEnv({
+export async function getEnvironmentFromEnv({
projectId,
userId,
env,
+ branch,
}: {
projectId: string;
userId: string;
env: ParamsSchema["env"];
+ branch?: string;
}): Promise<
| {
success: true;
@@ -126,6 +128,49 @@ async function getEnvironmentFromEnv({
break;
}
+ if (slug === "preview") {
+ const previewEnvironment = await prisma.runtimeEnvironment.findFirst({
+ where: {
+ projectId,
+ slug: "preview",
+ },
+ });
+
+ if (!previewEnvironment) {
+ return {
+ success: false,
+ error: "Preview environment not found",
+ };
+ }
+
+ // If no branch is provided, just return the parent preview environment
+ if (!branch) {
+ return {
+ success: true,
+ environment: previewEnvironment,
+ };
+ }
+
+ const branchEnvironment = await prisma.runtimeEnvironment.findFirst({
+ where: {
+ parentEnvironmentId: previewEnvironment.id,
+ branchName: branch,
+ },
+ });
+
+ if (!branchEnvironment) {
+ return {
+ success: false,
+ error: `Preview branch ${branch} not found`,
+ };
+ }
+
+ return {
+ success: true,
+ environment: branchEnvironment,
+ };
+ }
+
const environment = await prisma.runtimeEnvironment.findFirst({
where: {
projectId,
diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.workers.$tagName.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.workers.$tagName.ts
new file mode 100644
index 0000000000..b26923716d
--- /dev/null
+++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.workers.$tagName.ts
@@ -0,0 +1,141 @@
+import { json, type LoaderFunctionArgs } from "@remix-run/server-runtime";
+import { z } from "zod";
+import { $replica, prisma } from "~/db.server";
+import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server";
+import { findCurrentWorkerFromEnvironment } from "~/v3/models/workerDeployment.server";
+import { getEnvironmentFromEnv } from "./api.v1.projects.$projectRef.$env";
+import { GetWorkerByTagResponse } from "@trigger.dev/core/v3/schemas";
+import { env as $env } from "~/env.server";
+import { v3RunsPath } from "~/utils/pathBuilder";
+
+const ParamsSchema = z.object({
+ projectRef: z.string(),
+ tagName: z.string(),
+ env: z.enum(["dev", "staging", "prod", "preview"]),
+});
+
+const HeadersSchema = z.object({
+ "x-trigger-branch": z.string().optional(),
+});
+
+type ParamsSchema = z.infer<typeof ParamsSchema>;
+
+export async function loader({ request, params }: LoaderFunctionArgs) {
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const parsedParams = ParamsSchema.safeParse(params);
+
+ if (!parsedParams.success) {
+ return json({ error: "Invalid Params" }, { status: 400 });
+ }
+
+ const parsedHeaders = HeadersSchema.safeParse(Object.fromEntries(request.headers));
+
+ const branch = parsedHeaders.success ? parsedHeaders.data["x-trigger-branch"] : undefined;
+
+ const { projectRef, env } = parsedParams.data;
+
+ const project = await prisma.project.findFirst({
+ where: {
+ externalRef: projectRef,
+ organization: {
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ },
+ select: {
+ id: true,
+ slug: true,
+ organization: {
+ select: {
+ slug: true,
+ },
+ },
+ },
+ });
+
+ if (!project) {
+ return json({ error: "Project not found" }, { status: 404 });
+ }
+
+ const envResult = await getEnvironmentFromEnv({
+ projectId: project.id,
+ userId: authenticationResult.userId,
+ env,
+ branch,
+ });
+
+ if (!envResult.success) {
+ return json({ error: envResult.error }, { status: 404 });
+ }
+
+ const runtimeEnv = envResult.environment;
+
+ const currentWorker = await findCurrentWorkerFromEnvironment(
+ {
+ id: runtimeEnv.id,
+ type: runtimeEnv.type,
+ },
+ $replica,
+ params.tagName
+ );
+
+ if (!currentWorker) {
+ return json({ error: "Worker not found" }, { status: 404 });
+ }
+
+ const tasks = await $replica.backgroundWorkerTask.findMany({
+ where: {
+ workerId: currentWorker.id,
+ },
+ select: {
+ friendlyId: true,
+ slug: true,
+ filePath: true,
+ triggerSource: true,
+ createdAt: true,
+ payloadSchema: true,
+ },
+ orderBy: {
+ slug: "asc",
+ },
+ });
+
+ const urls = {
+ runs: `${$env.APP_ORIGIN}${v3RunsPath(
+ { slug: project.organization.slug },
+ { slug: project.slug },
+ { slug: runtimeEnv.slug },
+ { versions: [currentWorker.version] }
+ )}`,
+ };
+
+ // Prepare the response object
+ const response: GetWorkerByTagResponse = {
+ worker: {
+ id: currentWorker.friendlyId,
+ version: currentWorker.version,
+ engine: currentWorker.engine,
+ sdkVersion: currentWorker.sdkVersion,
+ cliVersion: currentWorker.cliVersion,
+ tasks: tasks.map((task) => ({
+ id: task.friendlyId,
+ slug: task.slug,
+ filePath: task.filePath,
+ triggerSource: task.triggerSource,
+ createdAt: task.createdAt,
+ payloadSchema: task.payloadSchema,
+ })),
+ },
+ urls,
+ };
+
+ return json(response);
+}
diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts
index 21654580bf..6ae6a133e9 100644
--- a/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts
+++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts
@@ -1,4 +1,4 @@
-import { json, type ActionFunctionArgs } from "@remix-run/server-runtime";
+import { json, LoaderFunctionArgs, type ActionFunctionArgs } from "@remix-run/server-runtime";
import { tryCatch, UpsertBranchRequestBody } from "@trigger.dev/core/v3";
import { z } from "zod";
import { prisma } from "~/db.server";
@@ -93,3 +93,82 @@ export async function action({ request, params }: ActionFunctionArgs) {
return json(result.branch);
}
+
+export async function loader({ request, params }: LoaderFunctionArgs) {
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const parsedParams = ParamsSchema.safeParse(params);
+
+ if (!parsedParams.success) {
+ return json({ error: "Invalid Params" }, { status: 400 });
+ }
+
+ const { projectRef } = parsedParams.data;
+
+ const project = await prisma.project.findFirst({
+ select: {
+ id: true,
+ },
+ where: {
+ externalRef: projectRef,
+ organization: {
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ },
+ });
+
+ if (!project) {
+ return json({ error: "Project not found" }, { status: 404 });
+ }
+
+ const previewEnvironment = await prisma.runtimeEnvironment.findFirst({
+ select: {
+ id: true,
+ },
+ where: {
+ projectId: project.id,
+ slug: "preview",
+ },
+ });
+
+ if (!previewEnvironment) {
+ return json(
+ { error: "You don't have preview branches setup. Go to the dashboard to enable them." },
+ { status: 400 }
+ );
+ }
+
+ const branches = await prisma.runtimeEnvironment.findMany({
+ where: {
+ projectId: project.id,
+ parentEnvironmentId: previewEnvironment.id,
+ archivedAt: null,
+ },
+ select: {
+ id: true,
+ branchName: true,
+ createdAt: true,
+ updatedAt: true,
+ git: true,
+ paused: true,
+ },
+ });
+
+ return json({
+ branches: branches.map((branch) => ({
+ id: branch.id,
+ name: branch.branchName ?? "main",
+ createdAt: branch.createdAt,
+ updatedAt: branch.updatedAt,
+ git: branch.git ?? undefined,
+ isPaused: branch.paused,
+ })),
+ });
+}
diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.dev-status.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.dev-status.ts
new file mode 100644
index 0000000000..58171cc5bb
--- /dev/null
+++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.dev-status.ts
@@ -0,0 +1,59 @@
+import { json, type LoaderFunctionArgs } from "@remix-run/node";
+import { z } from "zod";
+import { prisma } from "~/db.server";
+import { devPresence } from "~/presenters/v3/DevPresence.server";
+import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server";
+import { getEnvironmentFromEnv } from "./api.v1.projects.$projectRef.$env";
+
+const ParamsSchema = z.object({
+ projectRef: z.string(),
+});
+
+export async function loader({ request, params }: LoaderFunctionArgs) {
+ const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request);
+
+ if (!authenticationResult) {
+ return json({ error: "Invalid or Missing Access Token" }, { status: 401 });
+ }
+
+ const parsedParams = ParamsSchema.safeParse(params);
+
+ if (!parsedParams.success) {
+ return json({ error: "Invalid Params" }, { status: 400 });
+ }
+
+ const { projectRef } = parsedParams.data;
+
+ const project = await prisma.project.findFirst({
+ where: {
+ externalRef: projectRef,
+ organization: {
+ members: {
+ some: {
+ userId: authenticationResult.userId,
+ },
+ },
+ },
+ },
+ });
+
+ if (!project) {
+ return json({ error: "Project not found" }, { status: 404 });
+ }
+
+ const envResult = await getEnvironmentFromEnv({
+ projectId: project.id,
+ userId: authenticationResult.userId,
+ env: "dev",
+ });
+
+ if (!envResult.success) {
+ return json({ error: envResult.error }, { status: 404 });
+ }
+
+ const runtimeEnv = envResult.environment;
+
+ const isConnected = await devPresence.isConnected(runtimeEnv.id);
+
+ return json({ isConnected });
+}
diff --git a/apps/webapp/app/routes/api.v1.projects.ts b/apps/webapp/app/routes/api.v1.projects.ts
index 3962560f5c..3a12417dce 100644
--- a/apps/webapp/app/routes/api.v1.projects.ts
+++ b/apps/webapp/app/routes/api.v1.projects.ts
@@ -1,4 +1,4 @@
-import type { LoaderFunctionArgs } from "@remix-run/server-runtime";
+import type { ActionFunctionArgs, LoaderFunctionArgs } from "@remix-run/server-runtime";
import { json } from "@remix-run/server-runtime";
import { GetProjectsResponseBody } from "@trigger.dev/core/v3";
import { prisma } from "~/db.server";
diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.trace.ts b/apps/webapp/app/routes/api.v1.runs.$runId.trace.ts
new file mode 100644
index 0000000000..8ab42d8c3c
--- /dev/null
+++ b/apps/webapp/app/routes/api.v1.runs.$runId.trace.ts
@@ -0,0 +1,57 @@
+import { json } from "@remix-run/server-runtime";
+import { BatchId } from "@trigger.dev/core/v3/isomorphic";
+import { z } from "zod";
+import { $replica } from "~/db.server";
+import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server";
+import { eventRepository } from "~/v3/eventRepository.server";
+import { getTaskEventStoreTableForRun } from "~/v3/taskEventStore.server";
+
+const ParamsSchema = z.object({
+ runId: z.string(), // This is the run friendly ID
+});
+
+export const loader = createLoaderApiRoute(
+ {
+ params: ParamsSchema,
+ allowJWT: true,
+ corsStrategy: "all",
+ findResource: (params, auth) => {
+ return $replica.taskRun.findFirst({
+ where: {
+ friendlyId: params.runId,
+ runtimeEnvironmentId: auth.environment.id,
+ },
+ });
+ },
+ shouldRetryNotFound: true,
+ authorization: {
+ action: "read",
+ resource: (run) => ({
+ runs: run.friendlyId,
+ tags: run.runTags,
+ batch: run.batchId ? BatchId.toFriendlyId(run.batchId) : undefined,
+ tasks: run.taskIdentifier,
+ }),
+ superScopes: ["read:runs", "read:all", "admin"],
+ },
+ },
+ async ({ resource: run }) => {
+ const traceSummary = await eventRepository.getTraceDetailedSummary(
+ getTaskEventStoreTableForRun(run),
+ run.traceId,
+ run.createdAt,
+ run.completedAt ?? undefined
+ );
+
+ if (!traceSummary) {
+ return json({ error: "Trace not found" }, { status: 404 });
+ }
+
+ return json(
+ {
+ trace: traceSummary,
+ },
+ { status: 200 }
+ );
+ }
+);
diff --git a/apps/webapp/app/routes/api.v1.runs.ts b/apps/webapp/app/routes/api.v1.runs.ts
index 17a664f6ef..b5191ee259 100644
--- a/apps/webapp/app/routes/api.v1.runs.ts
+++ b/apps/webapp/app/routes/api.v1.runs.ts
@@ -3,6 +3,7 @@ import {
ApiRunListPresenter,
ApiRunListSearchParams,
} from "~/presenters/v3/ApiRunListPresenter.server";
+import { logger } from "~/services/logger.server";
import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server";
export const loader = createLoaderApiRoute(
diff --git a/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts b/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts
index 1a32a8ce37..a05af273d8 100644
--- a/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts
+++ b/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts
@@ -1,53 +1,47 @@
-import type { ActionFunctionArgs } from "@remix-run/server-runtime";
import { json } from "@remix-run/server-runtime";
import { z } from "zod";
-import { prisma } from "~/db.server";
-import { authenticateApiRequest } from "~/services/apiAuth.server";
+import { $replica } from "~/db.server";
+import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { CancelTaskRunService } from "~/v3/services/cancelTaskRun.server";
const ParamsSchema = z.object({
runParam: z.string(),
});
-export async function action({ request, params }: ActionFunctionArgs) {
- // Ensure this is a POST request
- if (request.method.toUpperCase() !== "POST") {
- return { status: 405, body: "Method Not Allowed" };
- }
-
- // Authenticate the request
- const authenticationResult = await authenticateApiRequest(request);
-
- if (!authenticationResult) {
- return json({ error: "Invalid or Missing API Key" }, { status: 401 });
- }
-
- const parsed = ParamsSchema.safeParse(params);
-
- if (!parsed.success) {
- return json({ error: "Invalid or Missing run id" }, { status: 400 });
- }
-
- const { runParam } = parsed.data;
-
- const taskRun = await prisma.taskRun.findUnique({
- where: {
- friendlyId: runParam,
- runtimeEnvironmentId: authenticationResult.environment.id,
+const { action } = createActionApiRoute(
+ {
+ params: ParamsSchema,
+ allowJWT: true,
+ corsStrategy: "none",
+ authorization: {
+ action: "write",
+ resource: (params) => ({ runs: params.runParam }),
+ superScopes: ["write:runs", "admin"],
},
- });
+ findResource: async (params, auth) => {
+ return $replica.taskRun.findFirst({
+ where: {
+ friendlyId: params.runParam,
+ runtimeEnvironmentId: auth.environment.id,
+ },
+ });
+ },
+ },
+ async ({ resource }) => {
+ if (!resource) {
+ return json({ error: "Run not found" }, { status: 404 });
+ }
- if (!taskRun) {
- return json({ error: "Run not found" }, { status: 404 });
- }
+ const service = new CancelTaskRunService();
- const service = new CancelTaskRunService();
+ try {
+ await service.call(resource);
+ } catch (error) {
+ return json({ error: "Internal Server Error" }, { status: 500 });
+ }
- try {
- await service.call(taskRun);
- } catch (error) {
- return json({ error: "Internal Server Error" }, { status: 500 });
+ return json({ id: resource.friendlyId }, { status: 200 });
}
+);
- return json({ id: runParam }, { status: 200 });
-}
+export { action };
diff --git a/apps/webapp/app/routes/projects.$projectRef.ts b/apps/webapp/app/routes/projects.$projectRef.ts
new file mode 100644
index 0000000000..856a93c4ac
--- /dev/null
+++ b/apps/webapp/app/routes/projects.$projectRef.ts
@@ -0,0 +1,37 @@
+import { type LoaderFunctionArgs, redirect } from "@remix-run/server-runtime";
+import { z } from "zod";
+import { prisma } from "~/db.server";
+import { requireUserId } from "~/services/session.server";
+
+const ParamsSchema = z.object({
+ projectRef: z.string(),
+});
+
+export async function loader({ params, request }: LoaderFunctionArgs) {
+ const userId = await requireUserId(request);
+
+ const validatedParams = ParamsSchema.parse(params);
+
+ const project = await prisma.project.findFirst({
+ where: {
+ externalRef: validatedParams.projectRef,
+ organization: {
+ members: {
+ some: {
+ userId,
+ },
+ },
+ },
+ },
+ include: {
+ organization: true,
+ },
+ });
+
+ if (!project) {
+ return new Response("Not found", { status: 404 });
+ }
+
+ // Redirect to the project's runs page
+ return redirect(`/orgs/${project.organization.slug}/projects/${project.slug}`);
+}
diff --git a/apps/webapp/app/routes/projects.v3.$projectRef.ts b/apps/webapp/app/routes/projects.v3.$projectRef.ts
index 856a93c4ac..48b007a627 100644
--- a/apps/webapp/app/routes/projects.v3.$projectRef.ts
+++ b/apps/webapp/app/routes/projects.v3.$projectRef.ts
@@ -1,37 +1,12 @@
import { type LoaderFunctionArgs, redirect } from "@remix-run/server-runtime";
import { z } from "zod";
-import { prisma } from "~/db.server";
-import { requireUserId } from "~/services/session.server";
const ParamsSchema = z.object({
projectRef: z.string(),
});
export async function loader({ params, request }: LoaderFunctionArgs) {
- const userId = await requireUserId(request);
-
const validatedParams = ParamsSchema.parse(params);
- const project = await prisma.project.findFirst({
- where: {
- externalRef: validatedParams.projectRef,
- organization: {
- members: {
- some: {
- userId,
- },
- },
- },
- },
- include: {
- organization: true,
- },
- });
-
- if (!project) {
- return new Response("Not found", { status: 404 });
- }
-
- // Redirect to the project's runs page
- return redirect(`/orgs/${project.organization.slug}/projects/${project.slug}`);
+ return redirect(`/projects/${validatedParams.projectRef}`);
}
diff --git a/apps/webapp/app/services/authorization.server.ts b/apps/webapp/app/services/authorization.server.ts
index a7a3101165..15f85cc327 100644
--- a/apps/webapp/app/services/authorization.server.ts
+++ b/apps/webapp/app/services/authorization.server.ts
@@ -1,6 +1,6 @@
export type AuthorizationAction = "read" | "write" | string; // Add more actions as needed
-const ResourceTypes = ["tasks", "tags", "runs", "batch", "waitpoints"] as const;
+const ResourceTypes = ["tasks", "tags", "runs", "batch", "waitpoints", "deployments"] as const;
export type AuthorizationResources = {
[key in (typeof ResourceTypes)[number]]?: string | string[];
diff --git a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts
index 9e161abe07..9d06d3345c 100644
--- a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts
+++ b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts
@@ -430,7 +430,8 @@ type ApiKeyActionRouteBuilderOptions<
TParamsSchema extends AnyZodSchema | undefined = undefined,
TSearchParamsSchema extends AnyZodSchema | undefined = undefined,
THeadersSchema extends AnyZodSchema | undefined = undefined,
- TBodySchema extends AnyZodSchema | undefined = undefined
+ TBodySchema extends AnyZodSchema | undefined = undefined,
+ TResource = never
> = {
params?: TParamsSchema;
searchParams?: TSearchParamsSchema;
@@ -438,6 +439,17 @@ type ApiKeyActionRouteBuilderOptions<
allowJWT?: boolean;
corsStrategy?: "all" | "none";
method?: "POST" | "PUT" | "DELETE" | "PATCH";
+ findResource?: (
+ params: TParamsSchema extends z.ZodFirstPartySchemaTypes | z.ZodDiscriminatedUnion
+ ? z.infer
+ : undefined,
+ authentication: ApiAuthenticationResultSuccess,
+ searchParams: TSearchParamsSchema extends
+ | z.ZodFirstPartySchemaTypes
+ | z.ZodDiscriminatedUnion
+ ? z.infer
+ : undefined
+ ) => Promise;
authorization?: {
action: AuthorizationAction;
resource: (
@@ -466,7 +478,8 @@ type ApiKeyActionHandlerFunction<
TParamsSchema extends AnyZodSchema | undefined,
TSearchParamsSchema extends AnyZodSchema | undefined,
THeadersSchema extends AnyZodSchema | undefined = undefined,
- TBodySchema extends AnyZodSchema | undefined = undefined
+ TBodySchema extends AnyZodSchema | undefined = undefined,
+ TResource = never
> = (args: {
params: TParamsSchema extends z.ZodFirstPartySchemaTypes | z.ZodDiscriminatedUnion
? z.infer
@@ -484,25 +497,29 @@ type ApiKeyActionHandlerFunction<
: undefined;
authentication: ApiAuthenticationResultSuccess;
request: Request;
+ resource?: TResource;
}) => Promise;
export function createActionApiRoute<
TParamsSchema extends AnyZodSchema | undefined = undefined,
TSearchParamsSchema extends AnyZodSchema | undefined = undefined,
THeadersSchema extends AnyZodSchema | undefined = undefined,
- TBodySchema extends AnyZodSchema | undefined = undefined
+ TBodySchema extends AnyZodSchema | undefined = undefined,
+ TResource = never
>(
options: ApiKeyActionRouteBuilderOptions<
TParamsSchema,
TSearchParamsSchema,
THeadersSchema,
- TBodySchema
+ TBodySchema,
+ TResource
>,
handler: ApiKeyActionHandlerFunction<
TParamsSchema,
TSearchParamsSchema,
THeadersSchema,
- TBodySchema
+ TBodySchema,
+ TResource
>
) {
const {
@@ -682,6 +699,18 @@ export function createActionApiRoute<
}
}
+ const resource = options.findResource
+ ? await options.findResource(parsedParams, authenticationResult, parsedSearchParams)
+ : undefined;
+
+ if (options.findResource && !resource) {
+ return await wrapResponse(
+ request,
+ json({ error: "Resource not found" }, { status: 404 }),
+ corsStrategy !== "none"
+ );
+ }
+
const result = await handler({
params: parsedParams,
searchParams: parsedSearchParams,
@@ -689,6 +718,7 @@ export function createActionApiRoute<
body: parsedBody,
authentication: authenticationResult,
request,
+ resource,
});
return await wrapResponse(request, result, corsStrategy !== "none");
} catch (error) {
diff --git a/apps/webapp/app/v3/eventRepository.server.ts b/apps/webapp/app/v3/eventRepository.server.ts
index 687fbe9e76..547593266b 100644
--- a/apps/webapp/app/v3/eventRepository.server.ts
+++ b/apps/webapp/app/v3/eventRepository.server.ts
@@ -33,7 +33,7 @@ import { createRedisClient, RedisClient, RedisWithClusterOptions } from "~/redis
import { logger } from "~/services/logger.server";
import { singleton } from "~/utils/singleton";
import { DynamicFlushScheduler } from "./dynamicFlushScheduler.server";
-import { TaskEventStore, TaskEventStoreTable } from "./taskEventStore.server";
+import { DetailedTraceEvent, TaskEventStore, TaskEventStoreTable } from "./taskEventStore.server";
import { startActiveSpan } from "./tracer.server";
import { startSpan } from "./tracing.server";
@@ -146,6 +146,12 @@ export type PreparedEvent = Omit
style: TaskEventStyle;
};
+export type PreparedDetailedEvent = Omit & {
+ duration: number;
+ events: SpanEvents;
+ style: TaskEventStyle;
+};
+
export type RunPreparedEvent = PreparedEvent & {
taskSlug?: string;
};
@@ -186,6 +192,36 @@ export type SpanSummary = {
export type TraceSummary = { rootSpan: SpanSummary; spans: Array };
+export type SpanDetailedSummary = {
+ id: string;
+ parentId: string | undefined;
+ message: string;
+ data: {
+ runId: string;
+ taskSlug?: string;
+ taskPath?: string;
+ events: SpanEvents;
+ startTime: Date;
+ duration: number;
+ isError: boolean;
+ isPartial: boolean;
+ isCancelled: boolean;
+ level: NonNullable;
+ environmentType: CreatableEventEnvironmentType;
+ workerVersion?: string;
+ queueName?: string;
+ machinePreset?: string;
+ properties?: Attributes;
+ output?: Attributes;
+ };
+ children: Array;
+};
+
+export type TraceDetailedSummary = {
+ traceId: string;
+ rootSpan: SpanDetailedSummary;
+};
+
export type UpdateEventOptions = {
attributes: TraceAttributes;
endTime?: Date;
@@ -589,6 +625,121 @@ export class EventRepository {
});
}
+ public async getTraceDetailedSummary(
+ storeTable: TaskEventStoreTable,
+ traceId: string,
+ startCreatedAt: Date,
+ endCreatedAt?: Date,
+ options?: { includeDebugLogs?: boolean }
+ ): Promise {
+ return await startActiveSpan("getTraceDetailedSummary", async (span) => {
+ const events = await this.taskEventStore.findDetailedTraceEvents(
+ storeTable,
+ traceId,
+ startCreatedAt,
+ endCreatedAt,
+ { includeDebugLogs: options?.includeDebugLogs }
+ );
+
+ let preparedEvents: Array = [];
+ let rootSpanId: string | undefined;
+ const eventsBySpanId = new Map();
+
+ for (const event of events) {
+ preparedEvents.push(prepareDetailedEvent(event));
+
+ if (!rootSpanId && !event.parentId) {
+ rootSpanId = event.spanId;
+ }
+ }
+
+ for (const event of preparedEvents) {
+ const existingEvent = eventsBySpanId.get(event.spanId);
+
+ if (!existingEvent) {
+ eventsBySpanId.set(event.spanId, event);
+ continue;
+ }
+
+ if (event.isCancelled || !event.isPartial) {
+ eventsBySpanId.set(event.spanId, event);
+ }
+ }
+
+ preparedEvents = Array.from(eventsBySpanId.values());
+
+ if (!rootSpanId) {
+ return;
+ }
+
+ // Build hierarchical structure
+ const spanDetailedSummaryMap = new Map();
+
+ // First pass: create all span detailed summaries
+ for (const event of preparedEvents) {
+ const ancestorCancelled = isAncestorCancelled(eventsBySpanId, event.spanId);
+ const duration = calculateDurationIfAncestorIsCancelled(
+ eventsBySpanId,
+ event.spanId,
+ event.duration
+ );
+
+ const output = event.output ? (event.output as Attributes) : undefined;
+ const properties = event.properties
+ ? removePrivateProperties(event.properties as Attributes)
+ : {};
+
+ const spanDetailedSummary: SpanDetailedSummary = {
+ id: event.spanId,
+ parentId: event.parentId ?? undefined,
+ message: event.message,
+ data: {
+ runId: event.runId,
+ taskSlug: event.taskSlug ?? undefined,
+ taskPath: event.taskPath ?? undefined,
+ events: event.events?.filter((e) => !e.name.startsWith("trigger.dev")),
+ startTime: getDateFromNanoseconds(event.startTime),
+ duration: nanosecondsToMilliseconds(duration),
+ isError: event.isError,
+ isPartial: ancestorCancelled ? false : event.isPartial,
+ isCancelled: event.isCancelled === true ? true : event.isPartial && ancestorCancelled,
+ level: event.level,
+ environmentType: event.environmentType,
+ workerVersion: event.workerVersion ?? undefined,
+ queueName: event.queueName ?? undefined,
+ machinePreset: event.machinePreset ?? undefined,
+ properties,
+ output,
+ },
+ children: [],
+ };
+
+ spanDetailedSummaryMap.set(event.spanId, spanDetailedSummary);
+ }
+
+ // Second pass: build parent-child relationships
+ for (const spanSummary of spanDetailedSummaryMap.values()) {
+ if (spanSummary.parentId) {
+ const parent = spanDetailedSummaryMap.get(spanSummary.parentId);
+ if (parent) {
+ parent.children.push(spanSummary);
+ }
+ }
+ }
+
+ const rootSpan = spanDetailedSummaryMap.get(rootSpanId);
+
+ if (!rootSpan) {
+ return;
+ }
+
+ return {
+ traceId,
+ rootSpan,
+ };
+ });
+ }
+
public async getRunEvents(
storeTable: TaskEventStoreTable,
runId: string,
@@ -1517,6 +1668,15 @@ function prepareEvent(event: QueriedEvent): PreparedEvent {
};
}
+function prepareDetailedEvent(event: DetailedTraceEvent): PreparedDetailedEvent {
+ return {
+ ...event,
+ duration: Number(event.duration),
+ events: parseEventsField(event.events),
+ style: parseStyleField(event.style),
+ };
+}
+
function parseEventsField(events: Prisma.JsonValue): SpanEvents {
const unsafe = events
? (events as any[]).map((e) => ({
@@ -1548,7 +1708,10 @@ function parseStyleField(style: Prisma.JsonValue): TaskEventStyle {
return {};
}
-function isAncestorCancelled(events: Map, spanId: string) {
+function isAncestorCancelled(
+ events: Map,
+ spanId: string
+) {
const event = events.get(spanId);
if (!event) {
@@ -1567,7 +1730,16 @@ function isAncestorCancelled(events: Map, spanId: string)
}
function calculateDurationIfAncestorIsCancelled(
- events: Map,
+ events: Map<
+ string,
+ {
+ isCancelled: boolean;
+ parentId: string | null;
+ isPartial: boolean;
+ startTime: bigint;
+ events: SpanEvents;
+ }
+ >,
spanId: string,
defaultDuration: number
) {
@@ -1603,7 +1775,19 @@ function calculateDurationIfAncestorIsCancelled(
return defaultDuration;
}
-function findFirstCancelledAncestor(events: Map, spanId: string) {
+function findFirstCancelledAncestor(
+ events: Map<
+ string,
+ {
+ isCancelled: boolean;
+ parentId: string | null;
+ isPartial: boolean;
+ startTime: bigint;
+ events: SpanEvents;
+ }
+ >,
+ spanId: string
+) {
const event = events.get(spanId);
if (!event) {
@@ -1711,6 +1895,10 @@ export function getDateFromNanoseconds(nanoseconds: bigint) {
return new Date(Number(nanoseconds) / 1_000_000);
}
+function nanosecondsToMilliseconds(nanoseconds: bigint | number): number {
+ return Number(nanoseconds) / 1_000_000;
+}
+
function rehydrateJson(json: Prisma.JsonValue): any {
if (json === null) {
return undefined;
diff --git a/apps/webapp/app/v3/taskEventStore.server.ts b/apps/webapp/app/v3/taskEventStore.server.ts
index 269aab84b4..580541da9c 100644
--- a/apps/webapp/app/v3/taskEventStore.server.ts
+++ b/apps/webapp/app/v3/taskEventStore.server.ts
@@ -23,6 +23,32 @@ export type TraceEvent = Pick<
| "kind"
>;
+export type DetailedTraceEvent = Pick<
+ TaskEvent,
+ | "spanId"
+ | "parentId"
+ | "runId"
+ | "idempotencyKey"
+ | "message"
+ | "style"
+ | "startTime"
+ | "duration"
+ | "isError"
+ | "isPartial"
+ | "isCancelled"
+ | "level"
+ | "events"
+ | "environmentType"
+ | "kind"
+ | "taskSlug"
+ | "taskPath"
+ | "workerVersion"
+ | "queueName"
+ | "machinePreset"
+ | "properties"
+ | "output"
+>;
+
export type TaskEventStoreTable = "taskEvent" | "taskEventPartitioned";
export function getTaskEventStoreTableForRun(run: {
@@ -207,4 +233,95 @@ export class TaskEventStore {
`;
}
}
+
+ async findDetailedTraceEvents(
+ table: TaskEventStoreTable,
+ traceId: string,
+ startCreatedAt: Date,
+ endCreatedAt?: Date,
+ options?: { includeDebugLogs?: boolean }
+ ) {
+ const filterDebug =
+ options?.includeDebugLogs === false || options?.includeDebugLogs === undefined;
+
+ if (table === "taskEventPartitioned") {
+ const createdAtBufferInMillis = env.TASK_EVENT_PARTITIONED_WINDOW_IN_SECONDS * 1000;
+ const startCreatedAtWithBuffer = new Date(startCreatedAt.getTime() - createdAtBufferInMillis);
+ const $endCreatedAt = endCreatedAt ?? new Date();
+ const endCreatedAtWithBuffer = new Date($endCreatedAt.getTime() + createdAtBufferInMillis);
+
+ return await this.readReplica.$queryRaw`
+ SELECT
+ "spanId",
+ "parentId",
+ "runId",
+ "idempotencyKey",
+ message,
+ style,
+ "startTime",
+ duration,
+ "isError",
+ "isPartial",
+ "isCancelled",
+ level,
+ events,
+ "environmentType",
+ "kind",
+ "taskSlug",
+ "taskPath",
+ "workerVersion",
+ "queueName",
+ "machinePreset",
+ properties,
+ output
+ FROM "TaskEventPartitioned"
+ WHERE
+ "traceId" = ${traceId}
+ AND "createdAt" >= ${startCreatedAtWithBuffer.toISOString()}::timestamp
+ AND "createdAt" < ${endCreatedAtWithBuffer.toISOString()}::timestamp
+ ${
+ filterDebug
+ ? Prisma.sql`AND \"kind\" <> CAST('LOG'::text AS "public"."TaskEventKind")`
+ : Prisma.empty
+ }
+ ORDER BY "startTime" ASC
+ LIMIT ${env.MAXIMUM_TRACE_SUMMARY_VIEW_COUNT}
+ `;
+ } else {
+ return await this.readReplica.$queryRaw`
+ SELECT
+ "spanId",
+ "parentId",
+ "runId",
+ "idempotencyKey",
+ message,
+ style,
+ "startTime",
+ duration,
+ "isError",
+ "isPartial",
+ "isCancelled",
+ level,
+ events,
+ "environmentType",
+ "kind",
+ "taskSlug",
+ "taskPath",
+ "workerVersion",
+ "queueName",
+ "machinePreset",
+ properties,
+ output
+ FROM "TaskEvent"
+ WHERE "traceId" = ${traceId}
+ ${
+ filterDebug
+ ? Prisma.sql`AND \"kind\" <> CAST('LOG'::text AS "public"."TaskEventKind")`
+ : Prisma.empty
+ }
+ ORDER BY "startTime" ASC
+ LIMIT ${env.MAXIMUM_TRACE_SUMMARY_VIEW_COUNT}
+ `;
+ }
+ }
}
diff --git a/packages/cli-v3/install-mcp.sh b/packages/cli-v3/install-mcp.sh
new file mode 100755
index 0000000000..e2612a34e5
--- /dev/null
+++ b/packages/cli-v3/install-mcp.sh
@@ -0,0 +1,582 @@
+#!/bin/bash
+
+set -e # Exit on error
+
+# Default target
+TARGET="all"
+
+# Parse command line arguments
+show_help() {
+ echo "š Trigger.dev MCP Server Installer"
+ echo ""
+ echo "Usage: $0 [OPTIONS]"
+ echo ""
+ echo "Options:"
+ echo " -t, --target TARGET Install target: claude, claude-desktop, cursor, vscode, crush, windsurf, or all (default: all)"
+ echo " -h, --help Show this help message"
+ echo ""
+ echo "Targets:"
+ echo " claude Install for Claude Code (~/.claude.json)"
+ echo " claude-desktop Install for Claude Desktop (~/Library/Application Support/Claude/claude_desktop_config.json)"
+ echo " cursor Install for Cursor (~/.cursor/mcp.json)"
+ echo " vscode Install for VS Code (~/Library/Application Support/Code/User/mcp.json)"
+ echo " crush Install for Crush (~/.config/crush/crush.json)"
+ echo " windsurf Install for Windsurf (~/.codeium/windsurf/mcp_config.json)"
+ echo " all Install for all supported targets"
+ echo ""
+ echo "Examples:"
+ echo " $0 # Install for all targets"
+ echo " $0 -t claude # Install only for Claude Code"
+ echo " $0 -t claude-desktop # Install only for Claude Desktop"
+ echo " $0 -t cursor # Install only for Cursor"
+ echo " $0 -t vscode # Install only for VS Code"
+ echo " $0 -t crush # Install only for Crush"
+ echo " $0 -t windsurf # Install only for Windsurf"
+}
+
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+ case $1 in
+ -t|--target)
+ TARGET="$2"
+ shift 2
+ ;;
+ -h|--help)
+ show_help
+ exit 0
+ ;;
+ *)
+ echo "ā Unknown option: $1"
+ echo "Use -h or --help for usage information"
+ exit 1
+ ;;
+ esac
+done
+
+# Validate target
+case $TARGET in
+ claude|claude-desktop|cursor|vscode|crush|windsurf|all)
+ ;;
+ *)
+ echo "ā Invalid target: $TARGET"
+ echo "Valid targets are: claude, claude-desktop, cursor, vscode, crush, windsurf, all"
+ exit 1
+ ;;
+esac
+
+echo "š Installing Trigger.dev MCP Server for target: $TARGET"
+
+# Get the absolute path to the node binary
+NODE_PATH=$(which node)
+if [ -z "$NODE_PATH" ]; then
+ echo "ā Error: Node.js not found in PATH"
+ echo "Please ensure Node.js is installed and available in your PATH"
+ exit 1
+fi
+
+# Get the directory where this script is located
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+# Construct the path to the CLI index.js file
+CLI_PATH="$SCRIPT_DIR/dist/esm/index.js"
+
+# Construct the path to the MCP log file
+MCP_LOG_FILE="$SCRIPT_DIR/.mcp.log"
+
+# Make sure the MCP log file exists
+touch "$MCP_LOG_FILE"
+
+# Check if the CLI file exists
+if [ ! -f "$CLI_PATH" ]; then
+ echo "ā Error: CLI file not found at $CLI_PATH"
+ echo "Make sure to build the CLI first with: pnpm run build"
+ exit 1
+fi
+
+# Ensure the CLI is executable
+chmod +x "$CLI_PATH"
+
+echo "ā
Found Node.js at: $NODE_PATH"
+echo "ā
Found CLI at: $CLI_PATH"
+
+# Function to install for Claude Code
+install_claude() {
+ echo ""
+ echo "š§ Installing for Claude Code..."
+
+ local CLAUDE_CONFIG="$HOME/.claude.json"
+ echo "š Claude configuration file: $CLAUDE_CONFIG"
+
+ # Check if Claude config exists, create if it doesn't
+ if [ ! -f "$CLAUDE_CONFIG" ]; then
+ echo "š Creating new Claude configuration file..."
+ echo '{"mcpServers": {}}' > "$CLAUDE_CONFIG"
+ fi
+
+ # Use Node.js to manipulate the JSON
+ echo "š§ Updating Claude configuration..."
+
+ node -e "
+ const fs = require('fs');
+ const path = require('path');
+
+ const configPath = '$CLAUDE_CONFIG';
+ const nodePath = '$NODE_PATH';
+ const cliPath = '$CLI_PATH';
+ const logFile = '$MCP_LOG_FILE';
+
+ try {
+ // Read existing config
+ let config;
+ try {
+ const configContent = fs.readFileSync(configPath, 'utf8');
+ config = JSON.parse(configContent);
+ } catch (error) {
+ console.log('š Creating new configuration structure...');
+ config = {};
+ }
+
+ // Ensure mcpServers object exists
+ if (!config.mcpServers) {
+ config.mcpServers = {};
+ }
+
+ // Add/update trigger.dev entry
+ config.mcpServers['trigger'] = {
+ command: nodePath,
+ args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030']
+ };
+
+ // Write back to file with proper formatting
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
+
+ console.log('ā
Successfully installed Trigger.dev MCP server to Claude Code');
+ console.log('');
+ console.log('š Claude Code Configuration:');
+ console.log(' ⢠Config file:', configPath);
+ console.log(' ⢠Node.js path:', nodePath);
+ console.log(' ⢠CLI path:', cliPath);
+ console.log('');
+ console.log('💡 Try typing @ in Claude Code and select \"trigger\" to get started.');
+
+ } catch (error) {
+ console.error('❌ Error updating Claude configuration:', error.message);
+ process.exit(1);
+ }
+ "
+}
+
+# Function to install for Claude Desktop
+install_claude_desktop() {
+ echo ""
+ echo "š§ Installing for Claude Desktop..."
+
+ local CLAUDE_DESKTOP_DIR="$HOME/Library/Application Support/Claude"
+ local CLAUDE_DESKTOP_CONFIG="$CLAUDE_DESKTOP_DIR/claude_desktop_config.json"
+
+ echo "š Claude Desktop configuration file: $CLAUDE_DESKTOP_CONFIG"
+
+ # Create Claude Desktop directory if it doesn't exist
+ if [ ! -d "$CLAUDE_DESKTOP_DIR" ]; then
+ echo "š Creating Claude Desktop configuration directory..."
+ mkdir -p "$CLAUDE_DESKTOP_DIR"
+ fi
+
+ # Check if Claude Desktop config exists, create if it doesn't
+ if [ ! -f "$CLAUDE_DESKTOP_CONFIG" ]; then
+ echo "š Creating new Claude Desktop configuration file..."
+ echo '{"mcpServers": {}}' > "$CLAUDE_DESKTOP_CONFIG"
+ fi
+
+ # Use Node.js to manipulate the JSON
+ echo "š§ Updating Claude Desktop configuration..."
+
+ node -e "
+ const fs = require('fs');
+ const path = require('path');
+
+ const configPath = '$CLAUDE_DESKTOP_CONFIG';
+ const nodePath = '$NODE_PATH';
+ const cliPath = '$CLI_PATH';
+ const logFile = '$MCP_LOG_FILE';
+
+ try {
+ // Read existing config
+ let config;
+ try {
+ const configContent = fs.readFileSync(configPath, 'utf8');
+ config = JSON.parse(configContent);
+ } catch (error) {
+ console.log('š Creating new configuration structure...');
+ config = {};
+ }
+
+ // Ensure mcpServers object exists
+ if (!config.mcpServers) {
+ config.mcpServers = {};
+ }
+
+ // Add/update trigger.dev entry
+ config.mcpServers['trigger'] = {
+ command: nodePath,
+ args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030']
+ };
+
+ // Write back to file with proper formatting
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
+
+ console.log('ā
Successfully installed Trigger.dev MCP server to Claude Desktop');
+ console.log('');
+ console.log('š Claude Desktop Configuration:');
+ console.log(' ⢠Config file:', configPath);
+ console.log(' ⢠Node.js path:', nodePath);
+ console.log(' ⢠CLI path:', cliPath);
+ console.log('');
+ console.log('💡 You can now use Trigger.dev MCP commands in Claude Desktop.');
+
+ } catch (error) {
+ console.error('❌ Error updating Claude Desktop configuration:', error.message);
+ process.exit(1);
+ }
+ "
+}
+
+# Function to install for Cursor
+install_cursor() {
+ echo ""
+ echo "š§ Installing for Cursor..."
+
+ local CURSOR_DIR="$HOME/.cursor"
+ local CURSOR_CONFIG="$CURSOR_DIR/mcp.json"
+
+ echo "š Cursor configuration file: $CURSOR_CONFIG"
+
+ # Create Cursor directory if it doesn't exist
+ if [ ! -d "$CURSOR_DIR" ]; then
+ echo "š Creating Cursor configuration directory..."
+ mkdir -p "$CURSOR_DIR"
+ fi
+
+ # Check if Cursor config exists, create if it doesn't
+ if [ ! -f "$CURSOR_CONFIG" ]; then
+ echo "š Creating new Cursor configuration file..."
+ echo '{"mcpServers": {}}' > "$CURSOR_CONFIG"
+ fi
+
+ # Use Node.js to manipulate the JSON
+ echo "š§ Updating Cursor configuration..."
+
+ node -e "
+ const fs = require('fs');
+ const path = require('path');
+
+ const configPath = '$CURSOR_CONFIG';
+ const nodePath = '$NODE_PATH';
+ const cliPath = '$CLI_PATH';
+ const logFile = '$MCP_LOG_FILE';
+
+ try {
+ // Read existing config
+ let config;
+ try {
+ const configContent = fs.readFileSync(configPath, 'utf8');
+ config = JSON.parse(configContent);
+ } catch (error) {
+ console.log('š Creating new configuration structure...');
+ config = {};
+ }
+
+ // Ensure mcpServers object exists
+ if (!config.mcpServers) {
+ config.mcpServers = {};
+ }
+
+ // Add/update trigger.dev entry
+ config.mcpServers['trigger'] = {
+ command: nodePath,
+ args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030']
+ };
+
+ // Write back to file with proper formatting
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
+
+ console.log('ā
Successfully installed Trigger.dev MCP server to Cursor');
+ console.log('');
+ console.log('š Cursor Configuration:');
+ console.log(' ⢠Config file:', configPath);
+ console.log(' ⢠Node.js path:', nodePath);
+ console.log(' ⢠CLI path:', cliPath);
+ console.log('');
+ console.log('💡 You can now use Trigger.dev MCP commands in Cursor.');
+
+ } catch (error) {
+ console.error('❌ Error updating Cursor configuration:', error.message);
+ process.exit(1);
+ }
+ "
+}
+
+# Function to install for VS Code
+install_vscode() {
+ echo ""
+ echo "š§ Installing for VS Code..."
+
+ local VSCODE_DIR="$HOME/Library/Application Support/Code/User"
+ local VSCODE_CONFIG="$VSCODE_DIR/mcp.json"
+
+ echo "š VS Code configuration file: $VSCODE_CONFIG"
+
+ # Create VS Code User directory if it doesn't exist
+ if [ ! -d "$VSCODE_DIR" ]; then
+ echo "š Creating VS Code User configuration directory..."
+ mkdir -p "$VSCODE_DIR"
+ fi
+
+ # Check if VS Code config exists, create if it doesn't
+ if [ ! -f "$VSCODE_CONFIG" ]; then
+ echo "š Creating new VS Code configuration file..."
+ echo '{"servers": {}}' > "$VSCODE_CONFIG"
+ fi
+
+ # Use Node.js to manipulate the JSON
+ echo "š§ Updating VS Code configuration..."
+
+ node -e "
+ const fs = require('fs');
+ const path = require('path');
+
+ const configPath = '$VSCODE_CONFIG';
+ const nodePath = '$NODE_PATH';
+ const cliPath = '$CLI_PATH';
+ const logFile = '$MCP_LOG_FILE';
+
+ try {
+ // Read existing config
+ let config;
+ try {
+ const configContent = fs.readFileSync(configPath, 'utf8');
+ config = JSON.parse(configContent);
+ } catch (error) {
+ console.log('š Creating new configuration structure...');
+ config = {};
+ }
+
+ // Ensure servers object exists
+ if (!config.servers) {
+ config.servers = {};
+ }
+
+ // Add/update trigger.dev entry
+ config.servers['trigger'] = {
+ command: nodePath,
+ args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030']
+ };
+
+ // Write back to file with proper formatting
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
+
+ console.log('ā
Successfully installed Trigger.dev MCP server to VS Code');
+ console.log('');
+ console.log('š VS Code Configuration:');
+ console.log(' ⢠Config file:', configPath);
+ console.log(' ⢠Node.js path:', nodePath);
+ console.log(' ⢠CLI path:', cliPath);
+ console.log('');
+ console.log('💡 You can now use Trigger.dev MCP commands in VS Code.');
+
+ } catch (error) {
+ console.error('❌ Error updating VS Code configuration:', error.message);
+ process.exit(1);
+ }
+ "
+}
+
+# Function to install for Crush
+install_crush() {
+ echo ""
+ echo "š§ Installing for Crush..."
+
+ local CRUSH_DIR="$HOME/.config/crush"
+ local CRUSH_CONFIG="$CRUSH_DIR/crush.json"
+
+ echo "š Crush configuration file: $CRUSH_CONFIG"
+
+ # Create Crush config directory if it doesn't exist
+ if [ ! -d "$CRUSH_DIR" ]; then
+ echo "š Creating Crush configuration directory..."
+ mkdir -p "$CRUSH_DIR"
+ fi
+
+ # Check if Crush config exists, create if it doesn't
+ if [ ! -f "$CRUSH_CONFIG" ]; then
+ echo "š Creating new Crush configuration file..."
+ echo '{"$schema": "https://charm.land/crush.json", "mcp": {}}' > "$CRUSH_CONFIG"
+ fi
+
+ # Use Node.js to manipulate the JSON
+ echo "š§ Updating Crush configuration..."
+
+ node -e "
+ const fs = require('fs');
+ const path = require('path');
+
+ const configPath = '$CRUSH_CONFIG';
+ const nodePath = '$NODE_PATH';
+ const cliPath = '$CLI_PATH';
+ const logFile = '$MCP_LOG_FILE';
+
+ try {
+ // Read existing config
+ let config;
+ try {
+ const configContent = fs.readFileSync(configPath, 'utf8');
+ config = JSON.parse(configContent);
+ } catch (error) {
+ console.log('š Creating new configuration structure...');
+ config = {};
+ }
+
+ // Ensure schema and mcp object exists
+ if (!config['\$schema']) {
+ config['\$schema'] = 'https://charm.land/crush.json';
+ }
+ if (!config.mcp) {
+ config.mcp = {};
+ }
+
+ // Add/update trigger.dev entry
+ config.mcp['trigger'] = {
+ type: 'stdio',
+ command: nodePath,
+ args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030']
+ };
+
+ // Write back to file with proper formatting
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
+
+ console.log('ā
Successfully installed Trigger.dev MCP server to Crush');
+ console.log('');
+ console.log('š Crush Configuration:');
+ console.log(' ⢠Config file:', configPath);
+ console.log(' ⢠Node.js path:', nodePath);
+ console.log(' ⢠CLI path:', cliPath);
+ console.log('');
+ console.log('💡 You can now use Trigger.dev MCP commands in Crush.');
+
+ } catch (error) {
+ console.error('❌ Error updating Crush configuration:', error.message);
+ process.exit(1);
+ }
+ "
+}
+
+# Function to install for Windsurf
+install_windsurf() {
+ echo ""
+ echo "š§ Installing for Windsurf..."
+
+ local WINDSURF_DIR="$HOME/.codeium/windsurf"
+ local WINDSURF_CONFIG="$WINDSURF_DIR/mcp_config.json"
+
+ echo "š Windsurf configuration file: $WINDSURF_CONFIG"
+
+ # Create Windsurf config directory if it doesn't exist
+ if [ ! -d "$WINDSURF_DIR" ]; then
+ echo "š Creating Windsurf configuration directory..."
+ mkdir -p "$WINDSURF_DIR"
+ fi
+
+ # Check if Windsurf config exists, create if it doesn't
+ if [ ! -f "$WINDSURF_CONFIG" ]; then
+ echo "š Creating new Windsurf configuration file..."
+ echo '{"mcpServers": {}}' > "$WINDSURF_CONFIG"
+ fi
+
+ # Use Node.js to manipulate the JSON
+ echo "š§ Updating Windsurf configuration..."
+
+ node -e "
+ const fs = require('fs');
+ const path = require('path');
+
+ const configPath = '$WINDSURF_CONFIG';
+ const nodePath = '$NODE_PATH';
+ const cliPath = '$CLI_PATH';
+ const logFile = '$MCP_LOG_FILE';
+
+ try {
+ // Read existing config
+ let config;
+ try {
+ const configContent = fs.readFileSync(configPath, 'utf8');
+ config = JSON.parse(configContent);
+ } catch (error) {
+ console.log('š Creating new configuration structure...');
+ config = {};
+ }
+
+ // Ensure mcpServers object exists
+ if (!config.mcpServers) {
+ config.mcpServers = {};
+ }
+
+ // Add/update trigger.dev entry
+ config.mcpServers['trigger'] = {
+ command: nodePath,
+ args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030']
+ };
+
+ // Write back to file with proper formatting
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
+
+ console.log('ā
Successfully installed Trigger.dev MCP server to Windsurf');
+ console.log('');
+ console.log('š Windsurf Configuration:');
+ console.log(' ⢠Config file:', configPath);
+ console.log(' ⢠Node.js path:', nodePath);
+ console.log(' ⢠CLI path:', cliPath);
+ console.log('');
+ console.log('💡 You can now use Trigger.dev MCP commands in Windsurf.');
+
+ } catch (error) {
+ console.error('❌ Error updating Windsurf configuration:', error.message);
+ process.exit(1);
+ }
+ "
+}
+
+# Install based on target
+case $TARGET in
+ claude)
+ install_claude
+ ;;
+ claude-desktop)
+ install_claude_desktop
+ ;;
+ cursor)
+ install_cursor
+ ;;
+ vscode)
+ install_vscode
+ ;;
+ crush)
+ install_crush
+ ;;
+ windsurf)
+ install_windsurf
+ ;;
+ all)
+ install_claude
+ install_claude_desktop
+ install_cursor
+ install_vscode
+ install_crush
+ install_windsurf
+ ;;
+esac
+
+echo ""
+echo "š Installation complete!"
+echo ""
+echo "š You can test the MCP server with:"
+echo " pnpm run inspector"
diff --git a/packages/cli-v3/package.json b/packages/cli-v3/package.json
index 184e36ef07..b50504aae3 100644
--- a/packages/cli-v3/package.json
+++ b/packages/cli-v3/package.json
@@ -75,12 +75,14 @@
"dev": "tshy --watch",
"test": "vitest",
"test:e2e": "vitest --run -c ./e2e/vitest.config.ts",
- "update-version": "tsx ../../scripts/updateVersion.ts"
+ "update-version": "tsx ../../scripts/updateVersion.ts",
+ "install-mcp": "./install-mcp.sh",
+ "inspector": "npx @modelcontextprotocol/inspector dist/esm/index.js mcp --log-file .mcp.log --api-url http://localhost:3030"
},
"dependencies": {
- "@clack/prompts": "^0.10.0",
+ "@clack/prompts": "0.11.0",
"@depot/cli": "0.0.1-cli.2.80.0",
- "@modelcontextprotocol/sdk": "^1.6.1",
+ "@modelcontextprotocol/sdk": "^1.17.0",
"@opentelemetry/api": "1.9.0",
"@opentelemetry/api-logs": "0.203.0",
"@opentelemetry/exporter-trace-otlp-http": "0.203.0",
@@ -99,6 +101,7 @@
"chokidar": "^3.6.0",
"cli-table3": "^0.6.3",
"commander": "^9.4.1",
+ "confbox": "^0.2.2",
"defu": "^6.1.4",
"dotenv": "^16.4.5",
"esbuild": "^0.23.0",
@@ -129,6 +132,7 @@
"socket.io-client": "4.7.5",
"source-map-support": "0.5.21",
"std-env": "^3.7.0",
+ "strip-ansi": "^7.1.0",
"supports-color": "^10.0.0",
"tiny-invariant": "^1.2.0",
"tinyexec": "^0.3.1",
diff --git a/packages/cli-v3/src/apiClient.ts b/packages/cli-v3/src/apiClient.ts
index 5056c1c47a..b5a9ed6a43 100644
--- a/packages/cli-v3/src/apiClient.ts
+++ b/packages/cli-v3/src/apiClient.ts
@@ -31,6 +31,12 @@ import {
WorkersCreateRequestBody,
WorkersCreateResponseBody,
WorkersListResponseBody,
+ CreateProjectRequestBody,
+ GetOrgsResponseBody,
+ GetWorkerByTagResponse,
+ GetJWTRequestBody,
+ GetJWTResponse,
+ ApiBranchListResponseBody,
} from "@trigger.dev/core/v3";
import {
WorkloadDebugLogRequestBody,
@@ -136,6 +142,75 @@ export class CliApiClient {
});
}
+ async getOrgs() {
+ if (!this.accessToken) {
+ throw new Error("getOrgs: No access token");
+ }
+
+ return wrapZodFetch(GetOrgsResponseBody, `${this.apiURL}/api/v1/orgs`, {
+ headers: {
+ Authorization: `Bearer ${this.accessToken}`,
+ "Content-Type": "application/json",
+ },
+ });
+ }
+
+ async createProject(orgParam: string, body: CreateProjectRequestBody) {
+ if (!this.accessToken) {
+ throw new Error("createProject: No access token");
+ }
+
+ return wrapZodFetch(GetProjectResponseBody, `${this.apiURL}/api/v1/orgs/${orgParam}/projects`, {
+ method: "POST",
+ headers: this.getHeaders(),
+ body: JSON.stringify(body),
+ });
+ }
+
+ async getWorkerByTag(projectRef: string, envName: string, tagName: string = "current") {
+ if (!this.accessToken) {
+ throw new Error("getWorkerByTag: No access token");
+ }
+
+ return wrapZodFetch(
+ GetWorkerByTagResponse,
+ `${this.apiURL}/api/v1/projects/${projectRef}/${envName}/workers/${tagName}`,
+ {
+ headers: this.getHeaders(),
+ }
+ );
+ }
+
+ async getJWT(projectRef: string, envName: string, body: GetJWTRequestBody) {
+ if (!this.accessToken) {
+ throw new Error("getJWT: No access token");
+ }
+
+ return wrapZodFetch(
+ GetJWTResponse,
+ `${this.apiURL}/api/v1/projects/${projectRef}/${envName}/jwt`,
+ {
+ method: "POST",
+ headers: this.getHeaders(),
+ body: JSON.stringify(body),
+ }
+ );
+ }
+
+ async getDevStatus(projectRef: string) {
+ if (!this.accessToken) {
+ throw new Error("getDevStatus: No access token");
+ }
+
+ return wrapZodFetch(
+ z.object({ isConnected: z.boolean() }),
+ `${this.apiURL}/api/v1/projects/${projectRef}/dev-status`,
+ {
+ headers: this.getHeaders(),
+ }
+ );
+ }
+
async createBackgroundWorker(projectRef: string, body: CreateBackgroundWorkerRequestBody) {
if (!this.accessToken) {
throw new Error("createBackgroundWorker: No access token");
@@ -204,6 +279,20 @@ export class CliApiClient {
);
}
+ async listBranches(projectRef: string) {
+ if (!this.accessToken) {
+ throw new Error("listBranches: No access token");
+ }
+
+ return wrapZodFetch(
+ ApiBranchListResponseBody,
+ `${this.apiURL}/api/v1/projects/${projectRef}/branches`,
+ {
+ headers: this.getHeaders(),
+ }
+ );
+ }
+
async getEnvironmentVariables(projectRef: string) {
if (!this.accessToken) {
throw new Error("getEnvironmentVariables: No access token");
diff --git a/packages/cli-v3/src/cli/common.ts b/packages/cli-v3/src/cli/common.ts
index 3cf9f2aba1..f1508c47b9 100644
--- a/packages/cli-v3/src/cli/common.ts
+++ b/packages/cli-v3/src/cli/common.ts
@@ -68,7 +68,7 @@ export async function wrapCommandAction(
if (e instanceof SkipLoggingError) {
// do nothing
} else if (e instanceof OutroCommandError) {
- outro("Operation cancelled");
+ outro(e.message ?? "Operation cancelled");
} else if (e instanceof SkipCommandError) {
// do nothing
} else if (e instanceof BundleError) {
diff --git a/packages/cli-v3/src/cli/index.ts b/packages/cli-v3/src/cli/index.ts
index 4a575831a5..bea6eacd04 100644
--- a/packages/cli-v3/src/cli/index.ts
+++ b/packages/cli-v3/src/cli/index.ts
@@ -1,21 +1,22 @@
import { Command } from "commander";
+import { configureAnalyzeCommand } from "../commands/analyze.js";
+import { configureDeployCommand } from "../commands/deploy.js";
import { configureDevCommand } from "../commands/dev.js";
import { configureInitCommand } from "../commands/init.js";
+import { configureListProfilesCommand } from "../commands/list-profiles.js";
import { configureLoginCommand } from "../commands/login.js";
import { configureLogoutCommand } from "../commands/logout.js";
+import { configurePreviewCommand } from "../commands/preview.js";
+import { configurePromoteCommand } from "../commands/promote.js";
+import { configureSwitchProfilesCommand } from "../commands/switch.js";
+import { configureUpdateCommand } from "../commands/update.js";
import { configureWhoamiCommand } from "../commands/whoami.js";
+import { configureMcpCommand } from "../commands/mcp.js";
import { COMMAND_NAME } from "../consts.js";
-import { configureListProfilesCommand } from "../commands/list-profiles.js";
-import { configureAnalyzeCommand } from "../commands/analyze.js";
-import { configureUpdateCommand } from "../commands/update.js";
import { VERSION } from "../version.js";
-import { configureDeployCommand } from "../commands/deploy.js";
import { installExitHandler } from "./common.js";
-import { configureWorkersCommand } from "../commands/workers/index.js";
-import { configureSwitchProfilesCommand } from "../commands/switch.js";
-import { configureTriggerTaskCommand } from "../commands/trigger.js";
-import { configurePromoteCommand } from "../commands/promote.js";
-import { configurePreviewCommand } from "../commands/preview.js";
+import { configureInstallMcpCommand } from "../commands/install-mcp.js";
+import { configureInstallRulesCommand } from "../commands/install-rules.js";
export const program = new Command();
@@ -36,7 +37,8 @@ configureSwitchProfilesCommand(program);
configureUpdateCommand(program);
configurePreviewCommand(program);
configureAnalyzeCommand(program);
-// configureWorkersCommand(program);
-// configureTriggerTaskCommand(program);
+configureMcpCommand(program);
+configureInstallMcpCommand(program);
+configureInstallRulesCommand(program);
installExitHandler();
diff --git a/packages/cli-v3/src/commands/deploy.ts b/packages/cli-v3/src/commands/deploy.ts
index b64e600014..87dbbc9787 100644
--- a/packages/cli-v3/src/commands/deploy.ts
+++ b/packages/cli-v3/src/commands/deploy.ts
@@ -68,7 +68,7 @@ export function configureDeployCommand(program: Command) {
commonOptions(
program
.command("deploy")
- .description("Deploy your Trigger.dev v3 project to the cloud.")
+ .description("Deploy your Trigger.dev project to the cloud.")
.argument("[path]", "The path to the project", ".")
.option(
"-e, --env <env>",
diff --git a/packages/cli-v3/src/commands/dev.ts b/packages/cli-v3/src/commands/dev.ts
index d3041ba0f8..7c82b46978 100644
--- a/packages/cli-v3/src/commands/dev.ts
+++ b/packages/cli-v3/src/commands/dev.ts
@@ -13,6 +13,15 @@ import { runtimeChecks } from "../utilities/runtimeCheck.js";
import { getProjectClient, LoginResultOk } from "../utilities/session.js";
import { login } from "./login.js";
import { updateTriggerPackages } from "./update.js";
+import {
+ readConfigHasSeenMCPInstallPrompt,
+ writeConfigHasSeenMCPInstallPrompt,
+} from "../utilities/configFiles.js";
+import { confirm, isCancel, log } from "@clack/prompts";
+import { installMcpServer } from "./install-mcp.js";
+import { tryCatch } from "@trigger.dev/core/utils";
+import { VERSION } from "@trigger.dev/core";
+import { initiateRulesInstallWizard } from "./install-rules.js";
const DevCommandOptions = CommonCommandOptions.extend({
debugOtel: z.boolean().default(false),
@@ -26,6 +35,10 @@ const DevCommandOptions = CommonCommandOptions.extend({
mcpPort: z.coerce.number().optional().default(3333),
analyze: z.boolean().default(false),
disableWarnings: z.boolean().default(false),
+ skipMCPInstall: z.boolean().default(false),
+ skipRulesInstall: z.boolean().default(false),
+ rulesInstallManifestPath: z.string().optional(),
+ rulesInstallBranch: z.string().optional(),
});
export type DevCommandOptions = z.infer<typeof DevCommandOptions>;
@@ -59,6 +72,30 @@ export function configureDevCommand(program: Command) {
.addOption(
new CommandOption("--analyze", "Analyze the build output and import timings").hideHelp()
)
+ .addOption(
+ new CommandOption(
+ "--skip-mcp-install",
+ "Skip the Trigger.dev MCP server install wizard"
+ ).hideHelp()
+ )
+ .addOption(
+ new CommandOption(
+ "--skip-rules-install",
+ "Skip the Trigger.dev Agent rules install wizard"
+ ).hideHelp()
+ )
+ .addOption(
+ new CommandOption(
+ "--rules-install-manifest-path <manifestPath>",
+ "The path to the rules install manifest"
+ ).hideHelp()
+ )
+ .addOption(
+ new CommandOption(
+ "--rules-install-branch <branch>",
+ "The branch to install the rules from"
+ ).hideHelp()
+ )
.addOption(new CommandOption("--disable-warnings", "Suppress warnings output").hideHelp())
).action(async (options) => {
wrapCommandAction("dev", DevCommandOptions, options, async (opts) => {
@@ -70,6 +107,49 @@ export function configureDevCommand(program: Command) {
export async function devCommand(options: DevCommandOptions) {
runtimeChecks();
+ const skipMCPInstall = typeof options.skipMCPInstall === "boolean" && options.skipMCPInstall;
+
+ if (!skipMCPInstall) {
+ const hasSeenMCPInstallPrompt = readConfigHasSeenMCPInstallPrompt();
+
+ if (!hasSeenMCPInstallPrompt) {
+ const installChoice = await confirm({
+ message: "Would you like to install the Trigger.dev MCP server?",
+ initialValue: true,
+ });
+
+ writeConfigHasSeenMCPInstallPrompt(true);
+
+ const skipInstall = isCancel(installChoice) || !installChoice;
+
+ if (!skipInstall) {
+ log.step("Welcome to the Trigger.dev MCP server install wizard 🧙");
+
+ const [installError] = await tryCatch(
+ installMcpServer({
+ yolo: false,
+ tag: VERSION as string,
+ logLevel: options.logLevel,
+ })
+ );
+
+ if (installError) {
+ log.error(`Failed to install MCP server: ${installError.message}`);
+ }
+ }
+ }
+ }
+
+ const skipRulesInstall =
+ typeof options.skipRulesInstall === "boolean" && options.skipRulesInstall;
+
+ if (!skipRulesInstall) {
+ await initiateRulesInstallWizard({
+ manifestPath: options.rulesInstallManifestPath,
+ branch: options.rulesInstallBranch,
+ });
+ }
+
const authorization = await login({
embedded: true,
silent: true,
diff --git a/packages/cli-v3/src/commands/init.ts b/packages/cli-v3/src/commands/init.ts
index fcae774961..9e7ca46697 100644
--- a/packages/cli-v3/src/commands/init.ts
+++ b/packages/cli-v3/src/commands/init.ts
@@ -1,13 +1,18 @@
import { intro, isCancel, log, outro, select, text } from "@clack/prompts";
import { context, trace } from "@opentelemetry/api";
-import { GetProjectResponseBody, flattenAttributes } from "@trigger.dev/core/v3";
+import {
+ GetProjectResponseBody,
+ LogLevel,
+ flattenAttributes,
+ tryCatch,
+} from "@trigger.dev/core/v3";
import { recordSpanException } from "@trigger.dev/core/v3/workers";
import chalk from "chalk";
import { Command, Option as CommandOption } from "commander";
import { applyEdits, findNodeAtLocation, getNodeValue, modify, parseTree } from "jsonc-parser";
import { writeFile } from "node:fs/promises";
import { join, relative, resolve } from "node:path";
-import { addDependency, addDevDependency, detectPackageManager } from "nypm";
+import { addDependency, addDevDependency } from "nypm";
import { resolveTSConfig } from "pkg-types";
import { z } from "zod";
import { CliApiClient } from "../apiClient.js";
@@ -31,8 +36,13 @@ import { createFile, pathExists, readFile } from "../utilities/fileSystem.js";
import { printStandloneInitialBanner } from "../utilities/initialBanner.js";
import { logger } from "../utilities/logger.js";
import { spinner } from "../utilities/windows.js";
-import { login } from "./login.js";
import { VERSION } from "../version.js";
+import { login } from "./login.js";
+import {
+ readConfigHasSeenMCPInstallPrompt,
+ writeConfigHasSeenMCPInstallPrompt,
+} from "../utilities/configFiles.js";
+import { installMcpServer } from "./install-mcp.js";
const cliVersion = VERSION as string;
const cliTag = cliVersion.includes("v4-beta") ? "v4-beta" : "latest";
@@ -46,6 +56,7 @@ const InitCommandOptions = CommonCommandOptions.extend({
pkgArgs: z.string().optional(),
gitRef: z.string().default("main"),
javascript: z.boolean().default(false),
+ yes: z.boolean().default(false),
});
type InitCommandOptions = z.infer<typeof InitCommandOptions>;
@@ -77,6 +88,7 @@ export function configureInitCommand(program: Command) {
"--pkg-args <pkgArgs>",
"Additional arguments to pass to the package manager, accepts CSV for multiple args"
)
+ .option("-y, --yes", "Skip all prompts and use defaults (requires --project-ref)")
)
.addOption(
new CommandOption(
@@ -101,6 +113,50 @@ export async function initCommand(dir: string, options: unknown) {
async function _initCommand(dir: string, options: InitCommandOptions) {
const span = trace.getSpan(context.active());
+ // Validate --yes flag requirements
+ if (options.yes && !options.projectRef) {
+ throw new Error("--project-ref is required when using --yes flag");
+ }
+
+ const hasSeenMCPInstallPrompt = readConfigHasSeenMCPInstallPrompt();
+
+ if (!hasSeenMCPInstallPrompt) {
+ const installChoice = await select({
+ message: "Choose how you want to initialize your project:",
+ options: [
+ {
+ value: "mcp",
+ label: "Trigger.dev MCP",
+ hint: "Automatically install the Trigger.dev MCP server and then vibe your way to a new project.",
+ },
+ { value: "cli", label: "CLI", hint: "Continue with the CLI" },
+ ],
+ });
+
+ writeConfigHasSeenMCPInstallPrompt(true);
+
+ const continueWithCLI = isCancel(installChoice) || installChoice === "cli";
+
+ if (!continueWithCLI) {
+ log.step("Welcome to the Trigger.dev MCP server install wizard 🧙");
+
+ const [installError] = await tryCatch(
+ installMcpServer({
+ yolo: false,
+ tag: options.tag,
+ logLevel: options.logLevel,
+ })
+ );
+
+ if (installError) {
+ outro(`Failed to install MCP server: ${installError.message}`);
+ return;
+ }
+
+ return;
+ }
+ }
+
intro("Initializing project");
const cwd = resolve(process.cwd(), dir);
@@ -167,7 +223,11 @@ async function _initCommand(dir: string, options: InitCommandOptions) {
// Install @trigger.dev/sdk package
if (!options.skipPackageInstall) {
- await installPackages(dir, options);
+ await installPackages(
+ cwd,
+ options.tag,
+ new CLIInstallPackagesOutputter(options.logLevel, options.tag)
+ );
} else {
log.info("Skipping package installation");
}
@@ -193,7 +253,7 @@ async function _initCommand(dir: string, options: InitCommandOptions) {
`${authorization.dashboardUrl}/projects/v3/${selectedProject.externalRef}`
);
- log.success("Successfully initialized project for Trigger.dev v3 🫡");
+ log.success("Successfully initialized your Trigger.dev project 🫡");
log.info("Next steps:");
log.info(
` 1. To start developing, run ${chalk.green(
@@ -223,14 +283,44 @@ async function createTriggerDir(
try {
const defaultValue = join(dir, "src", "trigger");
- const location = await text({
- message: "Where would you like to create the Trigger.dev directory?",
- defaultValue: defaultValue,
- placeholder: defaultValue,
- });
+ let location: string;
+ let example: string;
- if (isCancel(location)) {
- throw new OutroCommandError();
+ if (options.yes) {
+ // Use defaults when --yes flag is set
+ location = defaultValue;
+ example = "simple";
+ } else {
+ const locationPrompt = await text({
+ message: "Where would you like to create the Trigger.dev directory?",
+ defaultValue: defaultValue,
+ placeholder: defaultValue,
+ });
+
+ if (isCancel(locationPrompt)) {
+ throw new OutroCommandError();
+ }
+
+ location = locationPrompt;
+
+ const exampleSelection = await select({
+ message: `Choose an example to create in the ${location} directory`,
+ options: [
+ { value: "simple", label: "Simple (Hello World)" },
+ { value: "schedule", label: "Scheduled Task" },
+ {
+ value: "none",
+ label: "None",
+ hint: "skip creating an example",
+ },
+ ],
+ });
+
+ if (isCancel(exampleSelection)) {
+ throw new OutroCommandError();
+ }
+
+ example = exampleSelection as string;
}
// Ensure that the path is always relative by stripping leading '/' if present
@@ -248,25 +338,6 @@ async function createTriggerDir(
throw new Error(`Directory already exists at ${triggerDir}`);
}
- const exampleSelection = await select({
- message: `Choose an example to create in the ${location} directory`,
- options: [
- { value: "simple", label: "Simple (Hello World)" },
- { value: "schedule", label: "Scheduled Task" },
- {
- value: "none",
- label: "None",
- hint: "skip creating an example",
- },
- ],
- });
-
- if (isCancel(exampleSelection)) {
- throw new OutroCommandError();
- }
-
- const example = exampleSelection as string;
-
span.setAttributes({
"cli.example": example,
});
@@ -424,54 +495,84 @@ async function addConfigFileToTsConfig(tsconfigPath: string, options: InitComman
});
}
-async function installPackages(dir: string, options: InitCommandOptions) {
- return await tracer.startActiveSpan("installPackages", async (span) => {
- const projectDir = resolve(process.cwd(), dir);
+export interface InstallPackagesOutputter {
+ startSDK: () => void;
+ installedSDK: () => void;
+ startBuild: () => void;
+ installedBuild: () => void;
+ stoppedWithError: () => void;
+}
- const installSpinner = spinner();
- const packageManager = await detectPackageManager(projectDir);
+class CLIInstallPackagesOutputter implements InstallPackagesOutputter {
+ private installSpinner: ReturnType<typeof spinner>;
- try {
- span.setAttributes({
- "cli.projectDir": projectDir,
- "cli.packageManager": packageManager?.name,
- "cli.tag": options.tag,
- });
+ constructor(
+ private readonly logLevel: LogLevel,
+ private readonly tag: string
+ ) {
+ this.installSpinner = spinner();
+ }
- installSpinner.start(`Adding @trigger.dev/sdk@${options.tag}`);
+ startSDK() {
+ this.installSpinner.start(`Adding @trigger.dev/sdk@${this.tag}`);
+ }
- await addDependency(`@trigger.dev/sdk@${options.tag}`, { cwd: projectDir, silent: true });
+ installedSDK() {
+ this.installSpinner.stop(`@trigger.dev/sdk@${this.tag} installed`);
+ }
- installSpinner.stop(`@trigger.dev/sdk@${options.tag} installed`);
+ startBuild() {
+ this.installSpinner.start(`Adding @trigger.dev/build@${this.tag} to devDependencies`);
+ }
- installSpinner.start(`Adding @trigger.dev/build@${options.tag} to devDependencies`);
+ installedBuild() {
+ this.installSpinner.stop(`@trigger.dev/build@${this.tag} installed`);
+ }
- await addDevDependency(`@trigger.dev/build@${options.tag}`, {
- cwd: projectDir,
- silent: true,
- });
+ stoppedWithError() {
+ if (this.logLevel === "debug") {
+ this.installSpinner.stop(`Failed to install @trigger.dev/sdk@${this.tag}.`);
+ } else {
+ this.installSpinner.stop(
+ `Failed to install @trigger.dev/sdk@${this.tag}. Rerun command with --log-level debug for more details.`
+ );
+ }
+ }
+}
- installSpinner.stop(`@trigger.dev/build@${options.tag} installed`);
+class SilentInstallPackagesOutputter implements InstallPackagesOutputter {
+ startSDK() {}
+ installedSDK() {}
+ startBuild() {}
+ installedBuild() {}
+ stoppedWithError() {}
+}
- span.end();
- } catch (e) {
- if (options.logLevel === "debug") {
- installSpinner.stop(`Failed to install @trigger.dev/sdk@${options.tag}.`);
- } else {
- installSpinner.stop(
- `Failed to install @trigger.dev/sdk@${options.tag}. Rerun command with --log-level debug for more details.`
- );
- }
+export async function installPackages(
+ projectDir: string,
+ tag: string,
+ outputter: InstallPackagesOutputter = new SilentInstallPackagesOutputter()
+) {
+ try {
+ outputter.startSDK();
- if (!(e instanceof SkipCommandError)) {
- recordSpanException(span, e);
- }
+ await addDependency(`@trigger.dev/sdk@${tag}`, { cwd: projectDir, silent: true });
- span.end();
+ outputter.installedSDK();
- throw e;
- }
- });
+ outputter.startBuild();
+
+ await addDevDependency(`@trigger.dev/build@${tag}`, {
+ cwd: projectDir,
+ silent: true,
+ });
+
+ outputter.installedBuild();
+ } catch (e) {
+ outputter.stoppedWithError();
+
+ throw e;
+ }
}
async function writeConfigFile(
diff --git a/packages/cli-v3/src/commands/install-mcp.ts b/packages/cli-v3/src/commands/install-mcp.ts
new file mode 100644
index 0000000000..07219e07dd
--- /dev/null
+++ b/packages/cli-v3/src/commands/install-mcp.ts
@@ -0,0 +1,702 @@
+import { confirm, intro, isCancel, log, multiselect, select } from "@clack/prompts";
+import chalk from "chalk";
+import { Command } from "commander";
+import { extname } from "node:path";
+import { z } from "zod";
+import { OutroCommandError, wrapCommandAction } from "../cli/common.js";
+import { cliLink } from "../utilities/cliOutput.js";
+import { writeConfigHasSeenMCPInstallPrompt } from "../utilities/configFiles.js";
+import {
+ expandTilde,
+ safeReadJSONCFile,
+ safeReadTomlFile,
+ writeJSONFile,
+ writeTomlFile,
+} from "../utilities/fileSystem.js";
+import { printStandloneInitialBanner } from "../utilities/initialBanner.js";
+import { VERSION } from "../version.js";
+import { spinner } from "../utilities/windows.js";
+
+const cliVersion = VERSION as string;
+const cliTag = cliVersion.includes("v4-beta") ? "v4-beta" : "latest";
+
+const clients = [
+ "claude-code",
+ "cursor",
+ "vscode",
+ "zed",
+ "windsurf",
+ "gemini-cli",
+ "crush",
+ "cline",
+ "openai-codex",
+ "opencode",
+ "amp",
+ "ruler",
+] as const;
+const scopes = ["user", "project", "local"] as const;
+
+type ClientScopes = {
+ [key in (typeof clients)[number]]: {
+ [key in (typeof scopes)[number]]?: string;
+ };
+};
+
+type ClientLabels = {
+ [key in (typeof clients)[number]]: string;
+};
+
+const clientScopes: ClientScopes = {
+ "claude-code": {
+ user: "~/.claude.json",
+ project: "./.mcp.json",
+ local: "~/.claude.json",
+ },
+ cursor: {
+ user: "~/.cursor/mcp.json",
+ project: "./.cursor/mcp.json",
+ },
+ vscode: {
+ user: "~/Library/Application Support/Code/User/mcp.json",
+ project: "./.vscode/mcp.json",
+ },
+ zed: {
+ user: "~/.config/zed/settings.json",
+ },
+ windsurf: {
+ user: "~/.codeium/windsurf/mcp_config.json",
+ },
+ "gemini-cli": {
+ user: "~/.gemini/settings.json",
+ project: "./.gemini/settings.json",
+ },
+ crush: {
+ user: "~/.config/crush/crush.json",
+ project: "./crush.json",
+ local: "./.crush.json",
+ },
+ cline: {
+ user: "~/Library/Application Support/Code/User/globalStorage/saoudrizwan.claude-dev/settings/cline_mcp_settings.json",
+ },
+ amp: {
+ user: "~/.config/amp/settings.json",
+ },
+ "openai-codex": {
+ user: "~/.codex/config.toml",
+ },
+ opencode: {
+ user: "~/.config/opencode/opencode.json",
+ project: "./opencode.json",
+ },
+ ruler: {
+ project: "./.ruler/mcp.json",
+ },
+};
+
+const clientLabels: ClientLabels = {
+ "claude-code": "Claude Code",
+ cursor: "Cursor",
+ vscode: "VSCode",
+ zed: "Zed",
+ windsurf: "Windsurf",
+ "gemini-cli": "Gemini CLI",
+ crush: "Charm Crush",
+ cline: "Cline",
+ "openai-codex": "OpenAI Codex CLI",
+ amp: "Sourcegraph AMP",
+ opencode: "opencode",
+ ruler: "Ruler",
+};
+
+type SupportedClients = (typeof clients)[number];
+type ResolvedClients = SupportedClients | "unsupported";
+
+const InstallMcpCommandOptions = z.object({
+ projectRef: z.string().optional(),
+ tag: z.string().default(cliVersion),
+ devOnly: z.boolean().optional(),
+ yolo: z.boolean().default(false),
+ scope: z.enum(scopes).optional(),
+ client: z.enum(clients).array().optional(),
+ logFile: z.string().optional(),
+ apiUrl: z.string().optional(),
+ logLevel: z.enum(["debug", "info", "log", "warn", "error", "none"]).default("log"),
+});
+
+type InstallMcpCommandOptions = z.infer<typeof InstallMcpCommandOptions>;
+
+export function configureInstallMcpCommand(program: Command) {
+ return program
+ .command("install-mcp")
+ .description("Install the Trigger.dev MCP server")
+ .option(
+ "-p, --project-ref <projectRef>",
+ "Scope the mcp server to a specific Trigger.dev project by providing its project ref"
+ )
+ .option(
+ "-t, --tag <tag>",
+ "The version of the trigger.dev CLI package to use for the MCP server",
+ cliTag
+ )
+ .option("--dev-only", "Restrict the MCP server to the dev environment only")
+ .option("--yolo", "Install the MCP server into all supported clients")
+ .option("--scope <scope>", "Choose the scope of the MCP server, either user or project")
+ .option(
+ "--client <clients...>",
+ "Choose the client (or clients) to install the MCP server into. We currently support: " +
+ clients.join(", ")
+ )
+ .option("--log-file <logFile>", "Configure the MCP server to write logs to a file")
+ .option(
+ "-a, --api-url <apiUrl>",
+ "Configure the MCP server to specify a custom Trigger.dev API URL"
+ )
+ .option(
+ "-l, --log-level <level>",
+ "The CLI log level to use (debug, info, log, warn, error, none). This does not effect the log level of your trigger.dev tasks.",
+ "log"
+ )
+ .action(async (options) => {
+ await printStandloneInitialBanner(true);
+ await installMcpCommand(options);
+ });
+}
+
+export async function installMcpCommand(options: unknown) {
+ return await wrapCommandAction(
+ "installMcpCommand",
+ InstallMcpCommandOptions,
+ options,
+ async (opts) => {
+ return await _installMcpCommand(opts);
+ }
+ );
+}
+
+async function _installMcpCommand(options: InstallMcpCommandOptions) {
+ intro("Welcome to the Trigger.dev MCP server install wizard 🧙");
+
+ await installMcpServer(options);
+}
+
+type InstallMcpServerResults = Array<InstallMcpServerResult>;
+
+type InstallMcpServerResult = {
+ configPath: string;
+ clientName: (typeof clients)[number];
+ scope: McpServerScope;
+};
+
+export async function installMcpServer(
+ options: InstallMcpCommandOptions
+): Promise<InstallMcpServerResults> {
+ const opts = InstallMcpCommandOptions.parse(options);
+
+ writeConfigHasSeenMCPInstallPrompt(true);
+
+ const devOnly = await resolveDevOnly(opts);
+
+ opts.devOnly = devOnly;
+
+ const clientNames = await resolveClients(opts);
+
+ if (clientNames.length === 1 && clientNames.includes("unsupported")) {
+ return handleUnsupportedClientOnly(opts);
+ }
+
+ const results = [];
+
+ for (const clientName of clientNames) {
+ const result = await installMcpServerForClient(clientName, opts);
+
+ if (result) {
+ results.push(result);
+ }
+ }
+
+ if (results.length > 0) {
+ log.step("Installed to:");
+ for (const r of results) {
+ const scopeLabel = `${r.scope.scope}`;
+ log.message(` • ${r.clientName} (${scopeLabel}) → ${chalk.gray(r.configPath)}`);
+ }
+ }
+
+ log.info("Next steps:");
+ log.message(" 1. Restart your MCP client(s) to load the new configuration.");
+ log.message(
+ ' 2. In your client, look for a server named "trigger". It should connect automatically.'
+ );
+ log.message(" 3. Get started with Trigger.dev");
+ log.message(
+ ` Try asking your vibe-coding friend to ${chalk.green("Add trigger.dev to my project")}`
+ );
+
+ log.info("More examples:");
+ log.message(` • ${chalk.green('"List my Trigger.dev projects"')}`);
+ log.message(` • ${chalk.green('"Create a new Trigger.dev project called MyApp"')}`);
+ log.message(` • ${chalk.green('"Show me all tasks in my project"')}`);
+ log.message(` • ${chalk.green('"Trigger the email-notification task"')}`);
+ log.message(` • ${chalk.green('"How do I create a scheduled task in Trigger.dev?"')}`);
+ log.message(` • ${chalk.green('"Search Trigger.dev docs for webhook examples"')}`);
+
+ log.info("Helpful links:");
+ log.message(` • ${cliLink("Trigger.dev docs", "https://trigger.dev/docs")}`);
+ log.message(` • ${cliLink("MCP docs", "https://trigger.dev/docs/mcp")}`);
+ log.message(
+ ` • Need help? ${cliLink(
+ "Join our Discord",
+ "https://trigger.dev/discord"
+ )} or email help@trigger.dev`
+ );
+
+ return results;
+}
+
+function handleUnsupportedClientOnly(options: InstallMcpCommandOptions): InstallMcpServerResults {
+ log.info("Manual MCP server configuration");
+
+ const args = [`trigger.dev@${options.tag}`, "mcp"];
+
+ if (options.logFile) {
+ args.push("--log-file", options.logFile);
+ }
+
+ if (options.apiUrl) {
+ args.push("--api-url", options.apiUrl);
+ }
+
+ if (options.devOnly) {
+ args.push("--dev-only");
+ }
+
+ if (options.projectRef) {
+ args.push("--project-ref", options.projectRef);
+ }
+
+ if (options.logLevel && options.logLevel !== "log") {
+ args.push("--log-level", options.logLevel);
+ }
+
+ log.message(
+ "Since your client isn't directly supported yet, you'll need to configure it manually:"
+ );
+ log.message("");
+ log.message(`${chalk.yellow("Command:")} ${chalk.green("npx")}`);
+ log.message(`${chalk.yellow("Arguments:")} ${chalk.green(args.join(" "))}`);
+ log.message("");
+ log.message("Add this MCP server configuration to your client's settings:");
+ log.message(` • ${chalk.cyan("Server name:")} trigger`);
+ log.message(` • ${chalk.cyan("Command:")} npx`);
+ log.message(` • ${chalk.cyan("Args:")} ${args.map((arg) => `"${arg}"`).join(", ")}`);
+ log.message("");
+ log.message("Most MCP clients use a JSON configuration format like:");
+ log.message(
+ chalk.dim(`{
+ "mcpServers": {
+ "trigger": {
+ "command": "npx",
+ "args": [${args.map((arg) => `"${arg}"`).join(", ")}]
+ }
+ }
+}`)
+ );
+
+ return [];
+}
+
+async function installMcpServerForClient(
+ clientName: ResolvedClients,
+ options: InstallMcpCommandOptions
+) {
+ if (clientName === "unsupported") {
+ // This should not happen as unsupported clients are handled separately
+ // but if it does, provide helpful output
+ log.message(
`${chalk.yellow("⚠")} Skipping unsupported client - see manual configuration above`
+ );
+ return;
+ }
+
+ const clientSpinner = spinner();
+
+ clientSpinner.start(`Installing in ${clientName}`);
+
+ const scope = await resolveScopeForClient(clientName, options);
+
+ clientSpinner.message(`Installing in ${scope.scope} scope at ${scope.location}`);
+
+ const configPath = await performInstallForClient(clientName, scope, options);
+
+ clientSpinner.stop(`Successfully installed in ${clientName} (${configPath})`);
+
+ return { configPath, clientName, scope };
+}
+
+type McpServerConfig = Record<string, string | string[] | Record<string, string> | boolean | undefined>;
+type McpServerScope = {
+ scope: (typeof scopes)[number];
+ location: string;
+};
+
+async function performInstallForClient(
+ clientName: (typeof clients)[number],
+ scope: McpServerScope,
+ options: InstallMcpCommandOptions
+) {
+ const config = resolveMcpServerConfig(clientName, options);
+ const pathComponents = resolveMcpServerConfigJsonPath(clientName, scope);
+
+ return await writeMcpServerConfig(scope.location, pathComponents, config);
+}
+
+async function writeMcpServerConfig(
+ location: string,
+ pathComponents: string[],
+ config: McpServerConfig
+) {
+ const fullPath = expandTilde(location);
+
+ const extension = extname(fullPath);
+
+ switch (extension) {
+ case ".json": {
+ let existingConfig = await safeReadJSONCFile(fullPath);
+
+ if (!existingConfig) {
+ existingConfig = {};
+ }
+
+ const newConfig = applyConfigToExistingConfig(existingConfig, pathComponents, config);
+
+ await writeJSONFile(fullPath, newConfig, true);
+ break;
+ }
+ case ".toml": {
+ let existingConfig = await safeReadTomlFile(fullPath);
+
+ if (!existingConfig) {
+ existingConfig = {};
+ }
+
+ const newConfig = applyConfigToExistingConfig(existingConfig, pathComponents, config);
+
+ await writeTomlFile(fullPath, newConfig);
+ break;
+ }
+ }
+
+ return fullPath;
+}
+
+function applyConfigToExistingConfig(
+ existingConfig: any,
+ pathComponents: string[],
+ config: McpServerConfig
+) {
+ const clonedConfig = structuredClone(existingConfig);
+
+ let currentValueAtPath = clonedConfig;
+
+ for (let i = 0; i < pathComponents.length; i++) {
+ const currentPathSegment = pathComponents[i];
+
+ if (!currentPathSegment) {
+ break;
+ }
+
+ if (i === pathComponents.length - 1) {
+ currentValueAtPath[currentPathSegment] = config;
+ break;
+ } else {
+ currentValueAtPath[currentPathSegment] = currentValueAtPath[currentPathSegment] || {};
+ currentValueAtPath = currentValueAtPath[currentPathSegment];
+ }
+ }
+
+ return clonedConfig;
+}
+
+function resolveMcpServerConfigJsonPath(
+ clientName: (typeof clients)[number],
+ scope: McpServerScope
+) {
+ switch (clientName) {
+ case "cursor": {
+ return ["mcpServers", "trigger"];
+ }
+ case "vscode": {
+ return ["servers", "trigger"];
+ }
+ case "crush": {
+ return ["mcp", "trigger"];
+ }
+ case "windsurf": {
+ return ["mcpServers", "trigger"];
+ }
+ case "gemini-cli": {
+ return ["mcpServers", "trigger"];
+ }
+ case "cline": {
+ return ["mcpServers", "trigger"];
+ }
+ case "amp": {
+ return ["amp.mcpServers", "trigger"];
+ }
+ case "zed": {
+ return ["context_servers", "trigger"];
+ }
+ case "claude-code": {
+ if (scope.scope === "local") {
+ const projectPath = process.cwd();
+
+ return ["projects", projectPath, "mcpServers", "trigger"];
+ } else {
+ return ["mcpServers", "trigger"];
+ }
+ }
+ case "openai-codex": {
+ return ["mcp_servers", "trigger"];
+ }
+ case "opencode": {
+ return ["mcp", "trigger"];
+ }
+ case "ruler": {
+ return ["mcpServers", "trigger"];
+ }
+ }
+}
+
+function resolveMcpServerConfig(
+ clientName: (typeof clients)[number],
+ options: InstallMcpCommandOptions
+): McpServerConfig {
+ const args = [`trigger.dev@${options.tag}`, "mcp"];
+
+ if (options.logFile) {
+ args.push("--log-file", options.logFile);
+ }
+
+ if (options.apiUrl) {
+ args.push("--api-url", options.apiUrl);
+ }
+
+ if (options.devOnly) {
+ args.push("--dev-only");
+ }
+
+ if (options.projectRef) {
+ args.push("--project-ref", options.projectRef);
+ }
+
+ switch (clientName) {
+ case "claude-code": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "cursor": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "vscode": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "crush": {
+ return {
+ type: "stdio",
+ command: "npx",
+ args,
+ };
+ }
+ case "windsurf": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "gemini-cli": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "cline": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "amp": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "openai-codex": {
+ return {
+ command: "npx",
+ args,
+ };
+ }
+ case "zed": {
+ return {
+ source: "custom",
+ command: "npx",
+ args,
+ };
+ }
+ case "opencode": {
+ return {
+ type: "local",
+ command: ["npx", ...args],
+ enabled: true,
+ };
+ }
+ case "ruler": {
+ return {
+ type: "stdio",
+ command: "npx",
+ args,
+ };
+ }
+ }
+}
+
+async function resolveScopeForClient(
+ clientName: (typeof clients)[number],
+ options: InstallMcpCommandOptions
+) {
+ if (options.scope) {
+ const location = clientScopes[clientName][options.scope];
+
+ if (!location) {
+ throw new OutroCommandError(
+ `The ${clientName} client does not support the ${
+ options.scope
+ } scope, it only supports ${Object.keys(clientScopes[clientName]).join(", ")} scopes`
+ );
+ }
+
+ return {
+ scope: options.scope,
+ location,
+ };
+ }
+
+ const scopeOptions = resolveScopeOptionsForClient(clientName);
+
+ if (scopeOptions.length === 1) {
+ return {
+ scope: scopeOptions[0]!.value.scope,
+ location: scopeOptions[0]!.value.location,
+ };
+ }
+
+ const selectedScope = await select({
+ message: `Where should the MCP server for ${clientName} be installed?`,
+ options: scopeOptions,
+ });
+
+ if (isCancel(selectedScope)) {
+ throw new OutroCommandError("No scope selected");
+ }
+
+ return selectedScope;
+}
+
+function resolveScopeOptionsForClient(clientName: (typeof clients)[number]): Array<{
+ value: { location: string; scope: (typeof scopes)[number] };
+ label: string;
+ hint: string;
+}> {
+ const $clientScopes = clientScopes[clientName];
+
+ const options = Object.entries($clientScopes).map(([scope, location]) => ({
+ value: { location, scope: scope as (typeof scopes)[number] },
+ label: scope,
+ hint: scopeHint(scope as (typeof scopes)[number], location),
+ }));
+
+ return options;
+}
+
+function scopeHint(scope: (typeof scopes)[number], location: string) {
+ switch (scope) {
+ case "user": {
+ return `Install for your user account on your machine (${location})`;
+ }
+ case "project": {
+ return `Install in the current project shared with your team (${location})`;
+ }
+ case "local": {
+ return `Install in the current project, local to you only (${location})`;
+ }
+ }
+}
+
+async function resolveClients(options: InstallMcpCommandOptions): Promise<ResolvedClients[]> {
+ if (options.client) {
+ return options.client;
+ }
+
+ if (options.yolo) {
+ return [...clients];
+ }
+
+ const selectOptions: Array<{
+ value: string;
+ label: string;
+ hint?: string;
+ }> = clients.map((client) => ({
+ value: client,
+ label: clientLabels[client],
+ }));
+
+ selectOptions.push({
+ value: "unsupported",
+ label: "Unsupported client",
+ hint: "We don't support this client yet, but you can still install the MCP server manually.",
+ });
+
+ const $selectOptions = selectOptions as Array<{
+ value: ResolvedClients;
+ label: string;
+ hint?: string;
+ }>;
+
+ const selectedClients = await multiselect({
+ message: "Select one or more clients to install the MCP server into",
+ options: $selectOptions,
+ required: true,
+ });
+
+ if (isCancel(selectedClients)) {
+ throw new OutroCommandError("No clients selected");
+ }
+
+ return selectedClients;
+}
+
+async function resolveDevOnly(options: InstallMcpCommandOptions) {
+ if (typeof options.devOnly === "boolean") {
+ return options.devOnly;
+ }
+
+ const devOnly = await confirm({
+ message: "Restrict the MCP server to the dev environment only?",
+ initialValue: false,
+ });
+
+ if (isCancel(devOnly)) {
+ return false;
+ }
+
+ return devOnly;
+}
diff --git a/packages/cli-v3/src/commands/install-rules.ts b/packages/cli-v3/src/commands/install-rules.ts
new file mode 100644
index 0000000000..284f2ad73a
--- /dev/null
+++ b/packages/cli-v3/src/commands/install-rules.ts
@@ -0,0 +1,604 @@
+import { confirm, intro, isCancel, log, multiselect, outro } from "@clack/prompts";
+import { ResolvedConfig } from "@trigger.dev/core/v3/build";
+import chalk from "chalk";
+import { Command, Option as CommandOption } from "commander";
+import { join } from "node:path";
+import * as semver from "semver";
+import { z } from "zod";
+import { OutroCommandError, wrapCommandAction } from "../cli/common.js";
+import { loadConfig } from "../config.js";
+import {
+ GithubRulesManifestLoader,
+ loadRulesManifest,
+ LocalRulesManifestLoader,
+ ManifestVersion,
+ RulesManifest,
+ RulesManifestVersionOption,
+} from "../rules/manifest.js";
+import { cliLink } from "../utilities/cliOutput.js";
+import {
+ readConfigHasSeenRulesInstallPrompt,
+ readConfigLastRulesInstallPromptVersion,
+ writeConfigHasSeenRulesInstallPrompt,
+ writeConfigLastRulesInstallPromptVersion,
+} from "../utilities/configFiles.js";
+import { pathExists, readFile, safeWriteFile } from "../utilities/fileSystem.js";
+import { printStandloneInitialBanner } from "../utilities/initialBanner.js";
+import { logger } from "../utilities/logger.js";
+
+const targets = [
+ "claude-code",
+ "cursor",
+ "vscode",
+ "windsurf",
+ "gemini-cli",
+ "cline",
+ "agents.md",
+ "amp",
+ "kilo",
+ "ruler",
+] as const;
+
+type TargetLabels = {
+ [key in (typeof targets)[number]]: string;
+};
+
+const targetLabels: TargetLabels = {
+ "claude-code": "Claude Code",
+ cursor: "Cursor",
+ vscode: "VSCode",
+ windsurf: "Windsurf",
+ "gemini-cli": "Gemini CLI",
+ cline: "Cline",
+ "agents.md": "AGENTS.md (OpenAI Codex CLI, Jules, OpenCode)",
+ amp: "Sourcegraph AMP",
+ kilo: "Kilo Code",
+ ruler: "Ruler",
+};
+
+type SupportedTargets = (typeof targets)[number];
+type ResolvedTargets = SupportedTargets | "unsupported";
+
+const InstallRulesCommandOptions = z.object({
+ target: z.enum(targets).array().optional(),
+ manifestPath: z.string().optional(),
+ branch: z.string().optional(),
+ logLevel: z.enum(["debug", "info", "log", "warn", "error", "none"]).optional(),
+ forceWizard: z.boolean().optional(),
+});
+
+type InstallRulesCommandOptions = z.infer<typeof InstallRulesCommandOptions>;
+
+export function configureInstallRulesCommand(program: Command) {
+ return program
+ .command("install-rules")
+ .description("Install the Trigger.dev Agent rules files")
+ .option(
+ "--target ",
+ "Choose the target (or targets) to install the Trigger.dev rules into. We currently support: " +
+ targets.join(", ")
+ )
+ .option(
+ "-l, --log-level <level>",
+ "The CLI log level to use (debug, info, log, warn, error, none). This does not effect the log level of your trigger.dev tasks.",
+ "log"
+ )
+ .addOption(
+ new CommandOption(
+ "--manifest-path <path>",
+ "The path to the rules manifest file. This is useful if you want to install the rules from a local file."
+ ).hideHelp()
+ )
+ .addOption(
+ new CommandOption(
+ "--branch <branch>",
+ "The branch to install the rules from, the default is main"
+ ).hideHelp()
+ )
+ .addOption(
+ new CommandOption(
+ "--force-wizard",
+ "Force the rules install wizard to run even if the rules have already been installed."
+ ).hideHelp()
+ )
+ .action(async (options) => {
+ await printStandloneInitialBanner(true);
+ await installRulesCommand(options);
+ });
+}
+
+export async function installRulesCommand(options: unknown) {
+ return await wrapCommandAction(
+ "installRulesCommand",
+ InstallRulesCommandOptions,
+ options,
+ async (opts) => {
+ if (opts.logLevel) {
+ logger.loggerLevel = opts.logLevel;
+ }
+
+ return await _installRulesCommand(opts);
+ }
+ );
+}
+
+async function _installRulesCommand(options: InstallRulesCommandOptions) {
+ if (options.forceWizard) {
+ await initiateRulesInstallWizard(options);
+ return;
+ }
+
+ intro("Welcome to the Trigger.dev Agent rules install wizard ");
+
+ const manifestLoader = options.manifestPath
+ ? new LocalRulesManifestLoader(options.manifestPath)
+ : new GithubRulesManifestLoader(options.branch ?? "main");
+
+ const manifest = await loadRulesManifest(manifestLoader);
+
+ writeConfigLastRulesInstallPromptVersion(manifest.currentVersion);
+ writeConfigHasSeenRulesInstallPrompt(true);
+
+ await installRules(manifest, options);
+
+ outro("You're all set! ");
+}
+
+type InstallRulesResults = Array<InstallRulesResult>;
+
+type InstallRulesResult = {
+ configPath: string;
+ targetName: (typeof targets)[number];
+};
+
+export type InstallRulesWizardOptions = {
+ target?: Array<(typeof targets)[number]>;
+ manifestPath?: string;
+ branch?: string;
+};
+
+export async function initiateRulesInstallWizard(options: InstallRulesWizardOptions) {
+ const manifestLoader = options.manifestPath
+ ? new LocalRulesManifestLoader(options.manifestPath)
+ : new GithubRulesManifestLoader(options.branch ?? "main");
+
+ const manifest = await loadRulesManifest(manifestLoader);
+
+ const hasSeenRulesInstallPrompt = readConfigHasSeenRulesInstallPrompt();
+
+ if (!hasSeenRulesInstallPrompt) {
+ writeConfigHasSeenRulesInstallPrompt(true);
+ writeConfigLastRulesInstallPromptVersion(manifest.currentVersion);
+
+ const installChoice = await confirm({
+ message: "Would you like to install the Trigger.dev code agent rules?",
+ initialValue: true,
+ });
+
+ const skipInstall = isCancel(installChoice) || !installChoice;
+
+ if (skipInstall) {
+ return;
+ }
+
+ await installRules(manifest, options);
+ return;
+ }
+
+ const lastRulesInstallPromptVersion = readConfigLastRulesInstallPromptVersion();
+
+ if (!lastRulesInstallPromptVersion) {
+ writeConfigHasSeenRulesInstallPrompt(true);
+ writeConfigLastRulesInstallPromptVersion(manifest.currentVersion);
+
+ const installChoice = await confirm({
+ message: `A new version of the trigger.dev agent rules is available (${manifest.currentVersion}). Do you want to install it?`,
+ initialValue: true,
+ });
+
+ const skipInstall = isCancel(installChoice) || !installChoice;
+
+ if (skipInstall) {
+ return;
+ }
+
+ await installRules(manifest, options);
+ return;
+ }
+
+ if (semver.gt(manifest.currentVersion, lastRulesInstallPromptVersion)) {
+ writeConfigHasSeenRulesInstallPrompt(true);
+ writeConfigLastRulesInstallPromptVersion(manifest.currentVersion);
+
+ const confirmed = await confirm({
+ message: `A new version of the trigger.dev agent rules is available (${lastRulesInstallPromptVersion} → ${chalk.greenBright(
+ manifest.currentVersion
+ )}). Do you want to install it?`,
+ initialValue: true,
+ });
+
+ if (isCancel(confirmed) || !confirmed) {
+ return;
+ }
+
+ await installRules(manifest, options);
+ }
+
+ return;
+}
+
+async function installRules(manifest: RulesManifest, opts: InstallRulesWizardOptions) {
+ const config = await loadConfig({
+ cwd: process.cwd(),
+ });
+
+ const currentVersion = await manifest.getCurrentVersion();
+
+ const targetNames = await resolveTargets(opts);
+
+ if (targetNames.length === 1 && targetNames.includes("unsupported")) {
+ handleUnsupportedTargetOnly(opts);
+ return;
+ }
+
+ const results = [];
+
+ for (const targetName of targetNames) {
+ const result = await installRulesForTarget(targetName, currentVersion, config, opts);
+
+ if (result) {
+ results.push(result);
+ }
+ }
+
+ if (results.length > 0) {
+ log.step("Installed the following rules files:");
+
+ for (const r of results) {
+ const installationsByLocation = r.installations.reduce(
+ (acc, i) => {
+ if (!acc[i.location]) {
+ acc[i.location] = [];
+ }
+
+ acc[i.location]!.push(i.option);
+
+ return acc;
+ },
+ {} as Record<string, RulesManifestVersionOption[]>
+ );
+
+ const locationOutput = Object.entries(installationsByLocation).map(
+ ([location]) => `${chalk.greenBright(location)}`
+ );
+
+ for (const message of locationOutput) {
+ log.info(message);
+ }
+ }
+
+ log.info(
+ `${cliLink("Learn how to use our rules", "https://trigger.dev/docs/agents/rules/overview")}`
+ );
+ }
+}
+
+function handleUnsupportedTargetOnly(options: InstallRulesCommandOptions): InstallRulesResults {
+ log.info(
+ `${cliLink("Install the rules manually", "https://trigger.dev/docs/agents/rules/overview")}`
+ );
+
+ return [];
+}
+
+async function installRulesForTarget(
+ targetName: ResolvedTargets,
+ currentVersion: ManifestVersion,
+ config: ResolvedConfig,
+ options: InstallRulesCommandOptions
+) {
+ if (targetName === "unsupported") {
+ // This should not happen as unsupported targets are handled separately
+ // but if it does, provide helpful output
+ log.message(
+ `${chalk.yellow("⚠")} Skipping unsupported target - see manual configuration above`
+ );
+ return;
+ }
+
+ const result = await performInstallForTarget(targetName, currentVersion, config, options);
+
+ return result;
+}
+
+async function performInstallForTarget(
+ targetName: (typeof targets)[number],
+ currentVersion: ManifestVersion,
+ config: ResolvedConfig,
+ cmdOptions: InstallRulesCommandOptions
+) {
+ const options = await resolveOptionsForTarget(targetName, currentVersion, cmdOptions);
+
+ const installations = await performInstallOptionsForTarget(targetName, options, config);
+
+ return {
+ targetName,
+ installations,
+ };
+}
+
+async function performInstallOptionsForTarget(
+ targetName: (typeof targets)[number],
+ options: Array<RulesManifestVersionOption>,
+ config: ResolvedConfig
+) {
+ const results = [];
+
+ for (const option of options) {
+ const result = await performInstallOptionForTarget(targetName, option, config);
+ results.push(result);
+ }
+
+ return results;
+}
+
+async function performInstallOptionForTarget(
+ targetName: (typeof targets)[number],
+ option: RulesManifestVersionOption,
+ config: ResolvedConfig
+) {
+ switch (option.installStrategy) {
+ case "default": {
+ return performInstallDefaultOptionForTarget(targetName, option, config);
+ }
+ case "claude-code-subagent": {
+ return performInstallClaudeCodeSubagentOptionForTarget(option);
+ }
+ default: {
+ throw new Error(`Unknown install strategy: ${option.installStrategy}`);
+ }
+ }
+}
+
+async function performInstallDefaultOptionForTarget(
+ targetName: (typeof targets)[number],
+ option: RulesManifestVersionOption,
+ config: ResolvedConfig
+) {
+ // Get the path to the rules file
+ const rulesFilePath = resolveRulesFilePathForTargetOption(targetName, option);
+ const rulesFileContents = await resolveRulesFileContentsForTarget(targetName, option, config);
+ const mergeStrategy = await resolveRulesFileMergeStrategyForTarget(targetName);
+
+ // Try and read the existing rules file
+ const rulesFileAbsolutePath = join(process.cwd(), rulesFilePath);
+ await writeToFile(rulesFileAbsolutePath, rulesFileContents, mergeStrategy, option.name);
+
+ return { option, location: rulesFilePath };
+}
+
+async function writeToFile(
+ path: string,
+ contents: string,
+ mergeStrategy: "overwrite" | "replace" = "overwrite",
+ sectionName: string
+) {
+ const exists = await pathExists(path);
+
+ if (exists) {
+ switch (mergeStrategy) {
+ case "overwrite": {
+ await safeWriteFile(path, contents);
+ break;
+ }
+ case "replace": {
+ const existingContents = await readFile(path);
+
+ const pattern = new RegExp(
+ `<!-- ${sectionName} start -->.*?<!-- ${sectionName} end -->`,
+ "gs"
+ );
+
+ // If the section name is not found, just append the new content
+ if (!pattern.test(existingContents)) {
+ await safeWriteFile(path, existingContents + "\n\n" + contents);
+ break;
+ }
+
+ const updatedContent = existingContents.replace(pattern, contents);
+
+ await safeWriteFile(path, updatedContent);
+ break;
+ }
+ default: {
+ throw new Error(`Unknown merge strategy: ${mergeStrategy}`);
+ }
+ }
+ } else {
+ await safeWriteFile(path, contents);
+ }
+}
+
+async function performInstallClaudeCodeSubagentOptionForTarget(option: RulesManifestVersionOption) {
+ const rulesFilePath = ".claude/agents/trigger-dev-task-writer.md";
+ const rulesFileContents = option.contents;
+
+ await writeToFile(rulesFilePath, rulesFileContents, "overwrite", option.name);
+
+ return { option, location: rulesFilePath };
+}
+
+function resolveRulesFilePathForTargetOption(
+ targetName: (typeof targets)[number],
+ option: RulesManifestVersionOption
+): string {
+ if (option.installStrategy === "claude-code-subagent") {
+ return ".claude/agents/trigger-dev-task-writer.md";
+ }
+
+ switch (targetName) {
+ case "claude-code": {
+ return "CLAUDE.md";
+ }
+ case "cursor": {
+ return `.cursor/rules/trigger.${option.name}.mdc`;
+ }
+ case "vscode": {
+ return `.github/instructions/trigger-${option.name}.instructions.md`;
+ }
+ case "windsurf": {
+ return `.windsurf/rules/trigger-${option.name}.md`;
+ }
+ case "gemini-cli": {
+ return `GEMINI.md`;
+ }
+ case "cline": {
+ return `.clinerules/trigger-${option.name}.md`;
+ }
+ case "agents.md": {
+ return "AGENTS.md";
+ }
+ case "amp": {
+ return "AGENT.md";
+ }
+ case "kilo": {
+ return `.kilocode/rules/trigger-${option.name}.md`;
+ }
+ case "ruler": {
+ return `.ruler/trigger-${option.name}.md`;
+ }
+ default: {
+ throw new Error(`Unknown target: ${targetName}`);
+ }
+ }
+}
+
+async function resolveRulesFileMergeStrategyForTarget(targetName: (typeof targets)[number]) {
+ switch (targetName) {
+ case "amp":
+ case "agents.md":
+ case "gemini-cli":
+ case "claude-code": {
+ return "replace";
+ }
+ default: {
+ return "overwrite";
+ }
+ }
+}
+
+async function resolveRulesFileContentsForTarget(
+ targetName: (typeof targets)[number],
+ option: RulesManifestVersionOption,
+ config: ResolvedConfig
+) {
+ switch (targetName) {
+ case "cursor": {
+ return $output(
+ frontmatter({
+ description: option.label,
+ globs: option.applyTo ?? "**/trigger/**/*.ts",
+ alwaysApply: false,
+ }),
+ option.contents
+ );
+ }
+ case "vscode": {
+ return $output(
+ frontmatter({
+ applyTo: option.applyTo ?? "**/trigger/**/*.ts",
+ }),
+ option.contents
+ );
+ }
+ case "windsurf": {
+ return $output(
+ frontmatter({
+ trigger: "glob",
+ globs: option.applyTo ?? "**/trigger/**/*.ts",
+ }),
+ option.contents
+ );
+ }
+ default: {
+ return $output(
+ `<!-- ${option.name} start -->`,
+ option.contents,
+ `<!-- ${option.name} end -->`
+ );
+ }
+ }
+}
+
+function frontmatter(data: Record<string, string | boolean>) {
+ return $output("---", ...Object.entries(data).map(([key, value]) => `${key}: ${value}`), "---");
+}
+
+function $output(...strings: string[]) {
+ return strings.map((s) => s).join("\n");
+}
+
+async function resolveOptionsForTarget(
+ targetName: (typeof targets)[number],
+ currentVersion: ManifestVersion,
+ cmdOptions: InstallRulesCommandOptions
+) {
+ const possibleOptions = currentVersion.options.filter(
+ (option) => !option.client || option.client === targetName
+ );
+
+ const selectedOptions = await multiselect({
+ message: `Choose the rules you want to install for ${targetLabels[targetName]}`,
+ options: possibleOptions.map((option) => ({
+ value: option,
+ label: option.title,
+ hint: `${option.label} [~${option.tokens} tokens]`,
+ })),
+ required: true,
+ });
+
+ if (isCancel(selectedOptions)) {
+ throw new OutroCommandError("No options selected");
+ }
+
+ return selectedOptions;
+}
+
+async function resolveTargets(options: InstallRulesCommandOptions): Promise<ResolvedTargets[]> {
+ if (options.target) {
+ return options.target;
+ }
+
+ const selectOptions: Array<{
+ value: string;
+ label: string;
+ hint?: string;
+ }> = targets.map((target) => ({
+ value: target,
+ label: targetLabels[target],
+ }));
+
+ selectOptions.push({
+ value: "unsupported",
+ label: "Unsupported target",
+ hint: "We don't support this target yet, but you can still install the rules manually.",
+ });
+
+ const $selectOptions = selectOptions as Array<{
+ value: ResolvedTargets;
+ label: string;
+ hint?: string;
+ }>;
+
+ const selectedTargets = await multiselect({
+ message: "Select one or more targets to install the rules into",
+ options: $selectOptions,
+ required: true,
+ });
+
+ if (isCancel(selectedTargets)) {
+ throw new OutroCommandError("No targets selected");
+ }
+
+ return selectedTargets;
+}
diff --git a/packages/cli-v3/src/commands/login.ts b/packages/cli-v3/src/commands/login.ts
index 953a0c796f..da8b080580 100644
--- a/packages/cli-v3/src/commands/login.ts
+++ b/packages/cli-v3/src/commands/login.ts
@@ -346,7 +346,7 @@ export async function login(options?: LoginOptions): Promise {
});
}
-async function getPersonalAccessToken(apiClient: CliApiClient, authorizationCode: string) {
+export async function getPersonalAccessToken(apiClient: CliApiClient, authorizationCode: string) {
return await tracer.startActiveSpan("getPersonalAccessToken", async (span) => {
try {
const token = await apiClient.getPersonalAccessToken(authorizationCode);
diff --git a/packages/cli-v3/src/commands/mcp.ts b/packages/cli-v3/src/commands/mcp.ts
new file mode 100644
index 0000000000..8604a455da
--- /dev/null
+++ b/packages/cli-v3/src/commands/mcp.ts
@@ -0,0 +1,119 @@
+import { intro, outro } from "@clack/prompts";
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
+import { VERSION } from "@trigger.dev/core";
+import { tryCatch } from "@trigger.dev/core/utils";
+import { Command, Option as CommandOption } from "commander";
+import { z } from "zod";
+import { CommonCommandOptions, commonOptions, wrapCommandAction } from "../cli/common.js";
+import { CLOUD_API_URL } from "../consts.js";
+import { McpContext } from "../mcp/context.js";
+import { FileLogger } from "../mcp/logger.js";
+import { registerTools } from "../mcp/tools.js";
+import { printStandloneInitialBanner } from "../utilities/initialBanner.js";
+import { logger } from "../utilities/logger.js";
+import { installMcpServer } from "./install-mcp.js";
+import { serverMetadata } from "../mcp/config.js";
+import { initiateRulesInstallWizard } from "./install-rules.js";
+
+const McpCommandOptions = CommonCommandOptions.extend({
+ projectRef: z.string().optional(),
+ logFile: z.string().optional(),
+ devOnly: z.boolean().default(false),
+ rulesInstallManifestPath: z.string().optional(),
+ rulesInstallBranch: z.string().optional(),
+});
+
+export type McpCommandOptions = z.infer<typeof McpCommandOptions>;
+
+export function configureMcpCommand(program: Command) {
+ return commonOptions(
+ program
+ .command("mcp")
+ .description("Run the MCP server")
+ .option("-p, --project-ref <project ref>", "The project ref to use")
+ .option(
+ "--dev-only",
+ "Only run the MCP server for the dev environment. Attempts to access other environments will fail."
+ )
+ .option("--log-file <path>", "The file to log to")
+ .addOption(
+ new CommandOption(
+ "--rules-install-manifest-path <path>",
+ "The path to the rules install manifest"
+ ).hideHelp()
+ )
+ .addOption(
+ new CommandOption(
+ "--rules-install-branch <branch>",
+ "The branch to install the rules from"
+ ).hideHelp()
+ )
+ ).action(async (options) => {
+ wrapCommandAction("mcp", McpCommandOptions, options, async (opts) => {
+ await mcpCommand(opts);
+ });
+ });
+}
+
+export async function mcpCommand(options: McpCommandOptions) {
+ if (process.stdout.isTTY) {
+ await printStandloneInitialBanner(true);
+
+ intro("Welcome to the Trigger.dev MCP server install wizard 🧙");
+
+ const [installError] = await tryCatch(
+ installMcpServer({
+ yolo: false,
+ tag: VERSION as string,
+ logLevel: "log",
+ })
+ );
+
+ if (installError) {
+ outro(`Failed to install MCP server: ${installError.message}`);
+ return;
+ }
+
+ await initiateRulesInstallWizard({
+ manifestPath: options.rulesInstallManifestPath,
+ branch: options.rulesInstallBranch,
+ });
+
+ return;
+ }
+
+ logger.loggerLevel = "none";
+
+ const server = new McpServer(
+ {
+ name: serverMetadata.name,
+ version: serverMetadata.version,
+ },
+ {
+ instructions: serverMetadata.instructions,
+ }
+ );
+
+ server.server.oninitialized = async () => {
+ fileLogger?.log("initialized mcp command", { options, argv: process.argv });
+ };
+
+ // Start receiving messages on stdin and sending messages on stdout
+ const transport = new StdioServerTransport();
+
+ const fileLogger: FileLogger | undefined = options.logFile
+ ? new FileLogger(options.logFile, server)
+ : undefined;
+
+ const context = new McpContext(server, {
+ projectRef: options.projectRef,
+ fileLogger,
+ apiUrl: options.apiUrl ?? CLOUD_API_URL,
+ profile: options.profile,
+ });
+
+ registerTools(context);
+
+ await server.connect(transport);
+}
diff --git a/packages/cli-v3/src/commands/update.ts b/packages/cli-v3/src/commands/update.ts
index 92fd2ec8f8..f67e9bf7db 100644
--- a/packages/cli-v3/src/commands/update.ts
+++ b/packages/cli-v3/src/commands/update.ts
@@ -1,7 +1,7 @@
import { confirm, intro, isCancel, log, outro } from "@clack/prompts";
import { Command } from "commander";
import { detectPackageManager, installDependencies } from "nypm";
-import { basename, dirname, resolve } from "path";
+import { basename, dirname, join, resolve } from "path";
import { PackageJson, readPackageJSON, type ResolveOptions, resolvePackageJSON } from "pkg-types";
import { z } from "zod";
import { CommonCommandOptions, OutroCommandError, wrapCommandAction } from "../cli/common.js";
@@ -319,7 +319,7 @@ async function getTriggerDependencies(
continue;
}
- const $version = await tryResolveTriggerPackageVersion(name, packageJsonPath);
+ const $version = await tryResolveTriggerPackageVersion(name, dirname(packageJsonPath));
deps.push({ type, name, version: $version ?? version });
}
@@ -328,13 +328,13 @@ async function getTriggerDependencies(
return deps;
}
-async function tryResolveTriggerPackageVersion(
+export async function tryResolveTriggerPackageVersion(
name: string,
- packageJsonPath: string
+ basedir?: string
): Promise {
try {
const resolvedPath = nodeResolve.sync(name, {
- basedir: dirname(packageJsonPath),
+ basedir,
});
logger.debug(`Resolved ${name} package version path`, { name, resolvedPath });
@@ -342,11 +342,11 @@ async function tryResolveTriggerPackageVersion(
const { packageJson } = await getPackageJson(dirname(resolvedPath), {
test: (filePath) => {
// We need to skip any type-marker files
- if (filePath.includes("dist/commonjs")) {
+ if (filePath.includes(join("dist", "commonjs"))) {
return false;
}
- if (filePath.includes("dist/esm")) {
+ if (filePath.includes(join("dist", "esm"))) {
return false;
}
diff --git a/packages/cli-v3/src/dev/devOutput.ts b/packages/cli-v3/src/dev/devOutput.ts
index f53b6f0e2e..6365eee2ed 100644
--- a/packages/cli-v3/src/dev/devOutput.ts
+++ b/packages/cli-v3/src/dev/devOutput.ts
@@ -41,11 +41,11 @@ export function startDevOutput(options: DevOutputOptions) {
const baseUrl = `${dashboardUrl}/projects/v3/${config.project}`;
const rebuildStarted = (...[target]: EventBusEventArgs<"rebuildStarted">) => {
- logger.log(chalkGrey("○ Rebuilding background worker…"));
+ logger.log(chalkGrey("○ Rebuilding local worker…"));
};
const buildStarted = (...[target]: EventBusEventArgs<"buildStarted">) => {
- logger.log(chalkGrey("○ Building background worker…"));
+ logger.log(chalkGrey("○ Building local worker…"));
};
const buildFailed = (...[target, error]: EventBusEventArgs<"buildFailed">) => {
@@ -89,7 +89,7 @@ export function startDevOutput(options: DevOutputOptions) {
const runsLink = chalkLink(cliLink("View runs", runsUrl));
const runtime = chalkGrey(`[${worker.build.runtime}]`);
- const workerStarted = chalkGrey("Background worker ready");
+ const workerStarted = chalkGrey("Local worker ready");
const workerVersion = chalkWorker(worker.serverWorker!.version);
logParts.push(workerStarted, runtime, arrow, workerVersion);
diff --git a/packages/cli-v3/src/mcp/auth.ts b/packages/cli-v3/src/mcp/auth.ts
new file mode 100644
index 0000000000..5079fc8b66
--- /dev/null
+++ b/packages/cli-v3/src/mcp/auth.ts
@@ -0,0 +1,211 @@
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+import { env } from "std-env";
+import { CliApiClient } from "../apiClient.js";
+import { CLOUD_API_URL } from "../consts.js";
+import { readAuthConfigProfile, writeAuthConfigProfile } from "../utilities/configFiles.js";
+import {
+ isPersonalAccessToken,
+ NotPersonalAccessTokenError,
+} from "../utilities/isPersonalAccessToken.js";
+import { LoginResult, LoginResultOk } from "../utilities/session.js";
+import { getPersonalAccessToken } from "../commands/login.js";
+import open from "open";
+import pRetry from "p-retry";
+import { McpContext } from "./context.js";
+import { ApiClient } from "@trigger.dev/core/v3";
+
+export type McpAuthOptions = {
+ server: McpServer;
+ context: McpContext;
+ defaultApiUrl?: string;
+ profile?: string;
+};
+
+export async function mcpAuth(options: McpAuthOptions): Promise<LoginResult> {
+ const opts = {
+ defaultApiUrl: CLOUD_API_URL,
+ ...options,
+ };
+
+ const accessTokenFromEnv = env.TRIGGER_ACCESS_TOKEN;
+
+ if (accessTokenFromEnv) {
+ if (!isPersonalAccessToken(accessTokenFromEnv)) {
+ throw new NotPersonalAccessTokenError(
+ "Your TRIGGER_ACCESS_TOKEN is not a Personal Access Token, they start with 'tr_pat_'. You can generate one here: https://cloud.trigger.dev/account/tokens"
+ );
+ }
+
+ const auth = {
+ accessToken: accessTokenFromEnv,
+ apiUrl: env.TRIGGER_API_URL ?? opts.defaultApiUrl ?? CLOUD_API_URL,
+ };
+
+ const apiClient = new CliApiClient(auth.apiUrl, auth.accessToken);
+ const userData = await apiClient.whoAmI();
+
+ if (!userData.success) {
+ throw new Error(userData.error);
+ }
+
+ return {
+ ok: true as const,
+ profile: options?.profile ?? "default",
+ userId: userData.data.userId,
+ email: userData.data.email,
+ dashboardUrl: userData.data.dashboardUrl,
+ auth: {
+ accessToken: auth.accessToken,
+ apiUrl: auth.apiUrl,
+ },
+ };
+ }
+
+ const authConfig = readAuthConfigProfile(options?.profile);
+
+ if (authConfig && authConfig.accessToken) {
+ const apiClient = new CliApiClient(
+ authConfig.apiUrl ?? opts.defaultApiUrl,
+ authConfig.accessToken
+ );
+ const userData = await apiClient.whoAmI();
+
+ if (!userData.success) {
+ throw new Error(userData.error);
+ }
+
+ return {
+ ok: true as const,
+ profile: options?.profile ?? "default",
+ userId: userData.data.userId,
+ email: userData.data.email,
+ dashboardUrl: userData.data.dashboardUrl,
+ auth: {
+ accessToken: authConfig.accessToken,
+ apiUrl: authConfig.apiUrl ?? opts.defaultApiUrl,
+ },
+ };
+ }
+
+ const apiClient = new CliApiClient(authConfig?.apiUrl ?? opts.defaultApiUrl);
+
+ //generate authorization code
+ const authorizationCodeResult = await createAuthorizationCode(apiClient);
+
+ const url = new URL(authorizationCodeResult.url);
+
+ url.searchParams.set("source", "mcp");
+
+ const clientName = options.server.server.getClientVersion()?.name;
+
+ if (clientName) {
+ url.searchParams.set("clientName", clientName);
+ }
+ // Only elicitInput if the client has the elicitation capability
+
+ // Elicit the user to visit the authorization code URL
+ const allowLogin = await askForLoginPermission(opts.server, url.toString());
+
+ if (!allowLogin) {
+ return {
+ ok: false as const,
+ error: "User did not allow login",
+ };
+ }
+
+ // Open the authorization code URL in the browser
+ await open(url.toString());
+
+ // Poll for the personal access token
+ const indexResult = await pRetry(
+ () => getPersonalAccessToken(apiClient, authorizationCodeResult.authorizationCode),
+ {
+ //this means we're polling, same distance between each attempt
+ factor: 1,
+ retries: 60,
+ minTimeout: 1000,
+ }
+ );
+
+ writeAuthConfigProfile(
+ { accessToken: indexResult.token, apiUrl: opts.defaultApiUrl },
+ options?.profile
+ );
+
+ const client = new CliApiClient(opts.defaultApiUrl, indexResult.token);
+ const userData = await client.whoAmI();
+
+ if (!userData.success) {
+ throw new Error(userData.error);
+ }
+
+ return {
+ ok: true as const,
+ profile: options?.profile ?? "default",
+ userId: userData.data.userId,
+ email: userData.data.email,
+ dashboardUrl: userData.data.dashboardUrl,
+ auth: {
+ accessToken: indexResult.token,
+ apiUrl: opts.defaultApiUrl,
+ },
+ };
+}
+
+async function createAuthorizationCode(apiClient: CliApiClient) {
+ const authorizationCodeResult = await apiClient.createAuthorizationCode();
+
+ if (!authorizationCodeResult.success) {
+ throw new Error(`Failed to create authorization code\n${authorizationCodeResult.error}`);
+ }
+
+ return authorizationCodeResult.data;
+}
+
+async function askForLoginPermission(server: McpServer, authorizationCodeUrl: string) {
+ const capabilities = server.server.getClientCapabilities();
+
+ if (typeof capabilities?.elicitation !== "object") {
+ return true;
+ }
+
+ const result = await server.server.elicitInput({
+ message: `You are not currently logged in. Would you like to login now? We'll automatically open the authorization code URL (${authorizationCodeUrl}) in your browser.`,
+ requestedSchema: {
+ type: "object",
+ properties: {
+ allowLogin: {
+ type: "boolean",
+ default: false,
+ title: "Allow Login",
+ description: "Whether to allow the user to login",
+ },
+ },
+ required: ["allowLogin"],
+ },
+ });
+
+ return result.action === "accept" && result.content?.allowLogin;
+}
+
+export async function createApiClientWithPublicJWT(
+ auth: LoginResultOk,
+ projectRef: string,
+ envName: string,
+ scopes: string[],
+ previewBranch?: string
+) {
+ const cliApiClient = new CliApiClient(auth.auth.apiUrl, auth.auth.accessToken, previewBranch);
+
+ const jwt = await cliApiClient.getJWT(projectRef, envName, {
+ claims: {
+ scopes,
+ },
+ });
+
+ if (!jwt.success) {
+ return;
+ }
+
+ return new ApiClient(auth.auth.apiUrl, jwt.data.token);
+}
diff --git a/packages/cli-v3/src/mcp/capabilities.ts b/packages/cli-v3/src/mcp/capabilities.ts
new file mode 100644
index 0000000000..8c4e42581c
--- /dev/null
+++ b/packages/cli-v3/src/mcp/capabilities.ts
@@ -0,0 +1,31 @@
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+
+export function hasRootsCapability(server: McpServer) {
+ const capabilities = server.server.getClientCapabilities();
+
+ if (!capabilities) {
+ return false;
+ }
+
+ return "roots" in capabilities && typeof capabilities.roots === "object";
+}
+
+export function hasSamplingCapability(server: McpServer) {
+ const capabilities = server.server.getClientCapabilities();
+
+ if (!capabilities) {
+ return false;
+ }
+
+ return "sampling" in capabilities && typeof capabilities.sampling === "object";
+}
+
+export function hasElicitationCapability(server: McpServer) {
+ const capabilities = server.server.getClientCapabilities();
+
+ if (!capabilities) {
+ return false;
+ }
+
+ return "elicitation" in capabilities && typeof capabilities.elicitation === "object";
+}
diff --git a/packages/cli-v3/src/mcp/config.ts b/packages/cli-v3/src/mcp/config.ts
new file mode 100644
index 0000000000..bfa16437ec
--- /dev/null
+++ b/packages/cli-v3/src/mcp/config.ts
@@ -0,0 +1,97 @@
+import { VERSION } from "../version.js";
+
+export const serverMetadata = {
+ name: "trigger",
+ version: VERSION,
+ instructions: `Trigger.dev MCP server to automate your Trigger.dev projects and answer questions about Trigger.dev by searching the docs.
+If you need help setting up Trigger.dev in your project please refer to https://trigger.dev/docs/manual-setup.
+If the user asks for help with adding Trigger.dev to their project, please refer to https://trigger.dev/docs/manual-setup.
+ `,
+};
+
+export const toolsMetadata = {
+ search_docs: {
+ name: "search_docs",
+ title: "Search Docs",
+ description:
+ "Search across the Trigger.dev documentation to find relevant information, code examples, API references, and guides. Use this tool when you need to answer questions about Trigger.dev, find specific documentation, understand how features work, or locate implementation details. The search returns contextual content with titles and direct links to the documentation pages",
+ },
+ list_projects: {
+ name: "list_projects",
+ title: "List Projects",
+ description:
+ "List all projects for the current user, useful for when searching for a project and for looking up a projectRef",
+ },
+ list_orgs: {
+ name: "list_orgs",
+ title: "List Organizations",
+ description:
+ "List all organizations for the current user. Useful when looking up an org slug or ID.",
+ },
+ create_project_in_org: {
+ name: "create_project_in_org",
+ title: "Create Project in Organization",
+ description:
+ "Create a new project in an organization. Only do this if the user wants to add Trigger.dev to an existing project. If there is already a trigger.config.ts file present, then you should not create a new project.",
+ },
+ initialize_project: {
+ name: "initialize_project",
+ title: "Initialize Project",
+ description:
+ "Initialize Trigger.dev in your project. This will create a new project in the organization you select and add Trigger.dev to your project.",
+ },
+ get_tasks: {
+ name: "get_tasks",
+ title: "Get Tasks",
+ description:
+ "Get all tasks in the project. Useful when searching for a task and for looking up a task identifier/slug",
+ },
+ get_current_worker: {
+ name: "get_current_worker",
+ title: "Get Current Worker",
+ description:
+ "Get the current worker for the project. Useful when searching for a task and for looking up a task identifier/slug and payload schema, or looking for the latest version in a specific environment.",
+ },
+ trigger_task: {
+ name: "trigger_task",
+ title: "Trigger Task",
+ description:
+ "Trigger a task in the project. Use the get_tasks tool to get a list of tasks and ask the user to select one if it's not clear which one to use.",
+ },
+ get_run_details: {
+ name: "get_run_details",
+ title: "Get Run Details",
+ description:
+ "Get the details of a run. The run ID is the ID of the run that was triggered. It starts with run_",
+ },
+ cancel_run: {
+ name: "cancel_run",
+ title: "Cancel Run",
+ description:
+ "Cancel a run. The run ID is the ID of the run that was triggered. It starts with run_",
+ },
+ list_runs: {
+ name: "list_runs",
+ title: "List Runs",
+ description:
+ "List all runs for a project. Use this tool when you need to search for a run or list all runs for a project.",
+ },
+ deploy: {
+ name: "deploy",
+ title: "Deploy",
+ description:
+ "Deploy a project. Use this tool when you need to deploy a project. This will trigger a deployment for the project. This is a long running operation and including a progress token will allow you to display the progress to the user.",
+ },
+ list_deploys: {
+ name: "list_deploys",
+ title: "List Deploys",
+ description:
+ "List all deploys for a project. Use this tool when you need to search for a deploy or list all deploys for a project.",
+ },
+ list_preview_branches: {
+ name: "list_preview_branches",
+ title: "List Preview Branches",
+ description:
+ "List all preview branches for a project. Use this tool when you need to search for a preview branch or list all preview branches for a project.",
+ },
+};
diff --git a/packages/cli-v3/src/mcp/context.ts b/packages/cli-v3/src/mcp/context.ts
new file mode 100644
index 0000000000..75f6abd2a3
--- /dev/null
+++ b/packages/cli-v3/src/mcp/context.ts
@@ -0,0 +1,187 @@
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+import { tryCatch } from "@trigger.dev/core/utils";
+import { ApiClient } from "@trigger.dev/core/v3";
+import path from "node:path";
+import { CliApiClient } from "../apiClient.js";
+import { loadConfig } from "../config.js";
+import { mcpAuth } from "./auth.js";
+import {
+ hasElicitationCapability,
+ hasRootsCapability,
+ hasSamplingCapability,
+} from "./capabilities.js";
+import { FileLogger } from "./logger.js";
+import { fileURLToPath } from "node:url";
+
+export type McpContextOptions = {
+ projectRef?: string;
+ fileLogger?: FileLogger;
+ apiUrl?: string;
+ profile?: string;
+ devOnly?: boolean;
+};
+
+export class McpContext {
+ public readonly server: McpServer;
+ public readonly options: McpContextOptions;
+
+ constructor(server: McpServer, options: McpContextOptions) {
+ this.server = server;
+ this.options = options;
+ }
+
+ get logger() {
+ return this.options.fileLogger;
+ }
+
+ public async getAuth() {
+ const auth = await mcpAuth({
+ server: this.server,
+ defaultApiUrl: this.options.apiUrl,
+ profile: this.options.profile,
+ context: this,
+ });
+
+ if (!auth.ok) {
+ throw new Error(auth.error);
+ }
+
+ return auth;
+ }
+
+ public async getCliApiClient(branch?: string) {
+ const auth = await this.getAuth();
+
+ return new CliApiClient(auth.auth.apiUrl, auth.auth.accessToken, branch);
+ }
+
+ public async getApiClient(options: {
+ projectRef: string;
+ environment: string;
+ scopes: string[];
+ branch?: string;
+ }) {
+ const cliApiClient = await this.getCliApiClient(options.branch);
+
+ const jwt = await cliApiClient.getJWT(options.projectRef, options.environment, {
+ claims: {
+ scopes: options.scopes,
+ },
+ });
+
+ if (!jwt.success) {
+ throw new Error(
+ `Could not get the authentication token for the project ${options.projectRef} in the ${options.environment} environment. Please try again.`
+ );
+ }
+
+ return new ApiClient(cliApiClient.apiURL, jwt.data.token);
+ }
+
+ public async getCwd() {
+ if (!this.hasRootsCapability) {
+ return undefined;
+ }
+
+ const response = await this.server.server.listRoots();
+
+ if (response.roots.length >= 1) {
+ return response.roots[0]?.uri ? fileURLToPath(response.roots[0].uri) : undefined;
+ }
+
+ return undefined;
+ }
+
+ public async getProjectRef(options: { projectRef?: string; cwd?: string }) {
+ if (options.projectRef) {
+ return options.projectRef;
+ }
+
+ const projectDir = await this.getProjectDir({ cwd: options.cwd });
+
+ if (!projectDir.ok) {
+ throw new Error(projectDir.error);
+ }
+
+ const [_, config] = await tryCatch(loadConfig({ cwd: projectDir.cwd }));
+
+ if (
+ config?.configFile &&
+ typeof config.project === "string" &&
+ config.project.startsWith("proj_")
+ ) {
+ return config.project;
+ }
+
+ throw new Error("No project ref found. Please provide a projectRef.");
+ }
+
+ public async getProjectDir({ cwd }: { cwd?: string }) {
+ // If cwd is a path to the actual trigger.config.ts file, then we should set the cwd to the directory of the file
+ let $cwd = cwd ? (path.extname(cwd) !== "" ? path.dirname(cwd) : cwd) : undefined;
+
+ function isRelativePath(filePath: string) {
+ return !path.isAbsolute(filePath);
+ }
+
+ if (!cwd) {
+ if (!this.hasRootsCapability) {
+ return {
+ ok: false,
+ error:
+ "The current MCP server does not support the roots capability, so please call the tool again with a projectRef or an absolute path as cwd parameter",
+ };
+ }
+
+ $cwd = await this.getCwd();
+ } else if (isRelativePath(cwd)) {
+ if (!this.hasRootsCapability) {
+ return {
+ ok: false,
+ error:
+ "The current MCP server does not support the roots capability, so please call the tool again with a projectRef or an absolute path as cwd parameter",
+ };
+ }
+
+ const resolvedCwd = await this.getCwd();
+
+ if (!resolvedCwd) {
+ return {
+ ok: false,
+ error: "No current working directory found. Please provide a projectRef or a cwd.",
+ };
+ }
+
+ $cwd = path.resolve(resolvedCwd, cwd);
+ }
+
+ if (!$cwd) {
+ return {
+ ok: false,
+ error: "No current working directory found. Please provide a projectRef or a cwd.",
+ };
+ }
+
+ return {
+ ok: true,
+ cwd: $cwd,
+ };
+ }
+
+ public async getDashboardUrl(path: string) {
+ const auth = await this.getAuth();
+ return `${auth.dashboardUrl}${path}`;
+ }
+
+ public get hasRootsCapability() {
+ return hasRootsCapability(this.server);
+ }
+
+ public get hasSamplingCapability() {
+ return hasSamplingCapability(this.server);
+ }
+
+ public get hasElicitationCapability() {
+ return hasElicitationCapability(this.server);
+ }
+}
diff --git a/packages/cli-v3/src/mcp/formatters.ts b/packages/cli-v3/src/mcp/formatters.ts
new file mode 100644
index 0000000000..8f693c5a89
--- /dev/null
+++ b/packages/cli-v3/src/mcp/formatters.ts
@@ -0,0 +1,380 @@
+import {
+ ListRunResponseItem,
+ RetrieveRunResponse,
+ RetrieveRunTraceResponseBody,
+} from "@trigger.dev/core/v3/schemas";
+import type { CursorPageResponse } from "@trigger.dev/core/v3/zodfetch";
+
+export function formatRun(run: RetrieveRunResponse): string {
+ const lines: string[] = [];
+
+ // Header with basic info
+ lines.push(`Run ${run.id}`);
+ lines.push(`Task: ${run.taskIdentifier}`);
+ lines.push(`Status: ${formatStatus(run.status)}`);
+
+ // Timing information
+ const timing = formatTiming(run);
+ if (timing) {
+ lines.push(`Timing: ${timing}`);
+ }
+
+ // Duration and cost
+ if (run.durationMs > 0) {
+ lines.push(`Duration: ${formatDuration(run.durationMs)}`);
+ }
+
+ if (run.costInCents > 0) {
+ lines.push(`Cost: $${(run.costInCents / 100).toFixed(4)}`);
+ }
+
+ // Attempt count
+ if (run.attemptCount > 1) {
+ lines.push(`Attempts: ${run.attemptCount}`);
+ }
+
+ // Version and trigger info
+ if (run.version) {
+ lines.push(`Version: ${run.version}`);
+ }
+
+ // Tags
+ if (run.tags && run.tags.length > 0) {
+ lines.push(`Tags: ${run.tags.join(", ")}`);
+ }
+
+ // Error information
+ if (run.error) {
+ lines.push(`Error: ${run.error.name || "Error"}: ${run.error.message}`);
+ if (run.error.stackTrace) {
+ lines.push(`Stack: ${run.error.stackTrace.split("\n")[0]}`); // First line only
+ }
+ }
+
+ // Related runs
+ const relatedInfo = formatRelatedRuns(run.relatedRuns);
+ if (relatedInfo) {
+ lines.push(relatedInfo);
+ }
+
+ // Schedule info
+ if (run.schedule) {
+ lines.push(`Schedule: ${run.schedule.generator.expression} (${run.schedule.id})`);
+ }
+
+ // Batch info
+ if (run.batchId) {
+ lines.push(`Batch: ${run.batchId}`);
+ }
+
+ // Test flag
+ if (run.isTest) {
+ lines.push(`Test run`);
+ }
+
+ // TTL info
+ if (run.ttl) {
+ lines.push(`TTL: ${run.ttl}`);
+ }
+
+ // Payload and Output data
+ if (run.payload) {
+ lines.push(`Payload: ${JSON.stringify(run.payload, null, 2)}`);
+ } else if (run.payloadPresignedUrl) {
+ lines.push(`Payload: (large payload available via presigned URL: ${run.payloadPresignedUrl})`);
+ }
+
+ if (run.output) {
+ lines.push(`Output: ${JSON.stringify(run.output, null, 2)}`);
+ } else if (run.outputPresignedUrl) {
+ lines.push(`Output: (large output available via presigned URL: ${run.outputPresignedUrl})`);
+ }
+
+ // Metadata
+ if (run.metadata && Object.keys(run.metadata).length > 0) {
+ lines.push(`Metadata: ${Object.keys(run.metadata).length} fields`);
+ }
+
+ return lines.join("\n");
+}
+
+function formatStatus(status: string): string {
+ return status.toLowerCase().replace(/_/g, " ");
+}
+
+function formatTiming(run: RetrieveRunResponse): string | null {
+ const parts: string[] = [];
+
+ parts.push(`created ${formatDateTime(run.createdAt)}`);
+
+ if (run.startedAt) {
+ parts.push(`started ${formatDateTime(run.startedAt)}`);
+ }
+
+ if (run.finishedAt) {
+ parts.push(`finished ${formatDateTime(run.finishedAt)}`);
+ } else if (run.delayedUntil) {
+ parts.push(`delayed until ${formatDateTime(run.delayedUntil)}`);
+ }
+
+ return parts.length > 0 ? parts.join(", ") : null;
+}
+
+function formatDateTime(date: Date | undefined): string {
+ if (!date) return "unknown";
+
+ try {
+ return date
+ .toISOString()
+ .replace("T", " ")
+ .replace(/\.\d{3}Z$/, " UTC");
+ } catch {
+ return "unknown";
+ }
+}
+
+function formatDuration(durationMs: number): string {
+ if (durationMs < 1000) return `${durationMs}ms`;
+ if (durationMs < 60000) return `${(durationMs / 1000).toFixed(1)}s`;
+ if (durationMs < 3600000) return `${(durationMs / 60000).toFixed(1)}m`;
+ return `${(durationMs / 3600000).toFixed(1)}h`;
+}
+
+function formatRelatedRuns(relatedRuns: RetrieveRunResponse["relatedRuns"]): string | null {
+ const parts: string[] = [];
+
+ if (relatedRuns.parent) {
+ parts.push(`parent: ${relatedRuns.parent.id} (${relatedRuns.parent.status.toLowerCase()})`);
+ }
+
+ if (relatedRuns.root && relatedRuns.root.id !== relatedRuns.parent?.id) {
+ parts.push(`root: ${relatedRuns.root.id} (${relatedRuns.root.status.toLowerCase()})`);
+ }
+
+ if (relatedRuns.children && relatedRuns.children.length > 0) {
+ const childStatuses = relatedRuns.children.reduce(
+ (acc, child) => {
+ acc[child.status.toLowerCase()] = (acc[child.status.toLowerCase()] || 0) + 1;
+ return acc;
+ },
+ {} as Record<string, number>
+ );
+
+ const statusSummary = Object.entries(childStatuses)
+ .map(([status, count]) => `${count} ${status}`)
+ .join(", ");
+
+ parts.push(`children: ${relatedRuns.children.length} runs (${statusSummary})`);
+ }
+
+ return parts.length > 0 ? `Related: ${parts.join("; ")}` : null;
+}
+
+export function formatRunTrace(trace: RetrieveRunTraceResponseBody["trace"]): string {
+ const lines: string[] = [];
+
+ lines.push(`Trace ID: ${trace.traceId}`);
+ lines.push("");
+
+ // Format the root span and its children recursively
+ formatSpan(trace.rootSpan, lines, 0);
+
+ return lines.join("\n");
+}
+
+function formatSpan(
+ span: RetrieveRunTraceResponseBody["trace"]["rootSpan"],
+ lines: string[],
+ depth: number
+): void {
+ const indent = " ".repeat(depth);
+ const prefix = depth === 0 ? "āā" : "āā";
+
+ // Format span header
+ const statusIndicator = getStatusIndicator(span.data);
+ const duration = formatDuration(span.data.duration);
+ const startTime = formatDateTime(span.data.startTime);
+
+ lines.push(`${indent}${prefix} ${span.message} ${statusIndicator}`);
+ lines.push(`${indent} Duration: ${duration}`);
+ lines.push(`${indent} Started: ${startTime}`);
+
+ if (span.data.taskSlug) {
+ lines.push(`${indent} Task: ${span.data.taskSlug}`);
+ }
+
+ if (span.data.taskPath) {
+ lines.push(`${indent} Path: ${span.data.taskPath}`);
+ }
+
+ if (span.data.queueName) {
+ lines.push(`${indent} Queue: ${span.data.queueName}`);
+ }
+
+ if (span.data.machinePreset) {
+ lines.push(`${indent} Machine: ${span.data.machinePreset}`);
+ }
+
+ if (span.data.workerVersion) {
+ lines.push(`${indent} Worker: ${span.data.workerVersion}`);
+ }
+
+ // Show properties if they exist
+ if (span.data.properties && Object.keys(span.data.properties).length > 0) {
+ lines.push(
+ `${indent} Properties: ${JSON.stringify(span.data.properties, null, 2).replace(
+ /\n/g,
+ "\n" + indent + " "
+ )}`
+ );
+ }
+
+ // Show output if it exists
+ if (span.data.output && Object.keys(span.data.output).length > 0) {
+ lines.push(
+ `${indent} Output: ${JSON.stringify(span.data.output, null, 2).replace(
+ /\n/g,
+ "\n" + indent + " "
+ )}`
+ );
+ }
+
+ // Show events if they exist and are meaningful
+ if (span.data.events && span.data.events.length > 0) {
+ lines.push(`${indent} Events: ${span.data.events.length} events`);
+ // Optionally show first few events for context
+ const maxEvents = 3;
+ for (let i = 0; i < Math.min(span.data.events.length, maxEvents); i++) {
+ const event = span.data.events[i];
+ if (typeof event === "object" && event !== null) {
+ const eventStr = JSON.stringify(event, null, 2).replace(/\n/g, "\n" + indent + " ");
+ lines.push(`${indent} [${i + 1}] ${eventStr}`);
+ }
+ }
+ if (span.data.events.length > maxEvents) {
+ lines.push(`${indent} ... and ${span.data.events.length - maxEvents} more events`);
+ }
+ }
+
+ // Add spacing between spans
+ if (span.children && span.children.length > 0) {
+ lines.push("");
+ }
+
+ // Recursively format children
+ if (span.children) {
+ span.children.forEach((child, index) => {
+ formatSpan(child, lines, depth + 1);
+ // Add spacing between sibling spans (except for the last one)
+ if (index < span.children.length - 1) {
+ lines.push("");
+ }
+ });
+ }
+}
+
+function getStatusIndicator(
+ spanData: RetrieveRunTraceResponseBody["trace"]["rootSpan"]["data"]
+): string {
+ if (spanData.isCancelled) return "[CANCELLED]";
+ if (spanData.isError) return "[ERROR]";
+ if (spanData.isPartial) return "[PARTIAL]";
+ return "[COMPLETED]";
+}
+
+export function formatRunList(runsPage: CursorPageResponse<ListRunResponseItem>): string {
+ const lines: string[] = [];
+
+ // Header with count info
+ const totalRuns = runsPage.data.length;
+ lines.push(`Found ${totalRuns} run${totalRuns === 1 ? "" : "s"}`);
+ lines.push("");
+
+ if (totalRuns === 0) {
+ lines.push("No runs found.");
+ return lines.join("\n");
+ }
+
+ // Format each run in a compact table-like format
+ runsPage.data.forEach((run, index) => {
+ lines.push(`${index + 1}. ${formatRunSummary(run)}`);
+ });
+
+ // Pagination info
+ lines.push("");
+ const paginationInfo = [];
+ if (runsPage.pagination.previous) {
+ paginationInfo.push("← Previous page available");
+ }
+ if (runsPage.pagination.next) {
+ paginationInfo.push("Next page available →");
+ }
+
+ if (paginationInfo.length > 0) {
+ lines.push(`Pagination: ${paginationInfo.join(" | ")}`);
+ if (runsPage.pagination.next) {
+ lines.push(`Next cursor: ${runsPage.pagination.next}`);
+ }
+ if (runsPage.pagination.previous) {
+ lines.push(`Previous cursor: ${runsPage.pagination.previous}`);
+ }
+ }
+
+ return lines.join("\n");
+}
+
+function formatRunSummary(run: ListRunResponseItem): string {
+ const parts: string[] = [];
+
+ // Basic info: ID, task, status
+ parts.push(`${run.id}`);
+ parts.push(`${run.taskIdentifier}`);
+ parts.push(`${formatStatus(run.status)}`);
+
+ // Environment
+ parts.push(`env:${run.env.name}`);
+
+ // Timing - show the most relevant time
+ let timeInfo = "";
+ if (run.finishedAt) {
+ timeInfo = `finished ${formatDateTime(run.finishedAt)}`;
+ } else if (run.startedAt) {
+ timeInfo = `started ${formatDateTime(run.startedAt)}`;
+ } else if (run.delayedUntil) {
+ timeInfo = `delayed until ${formatDateTime(run.delayedUntil)}`;
+ } else {
+ timeInfo = `created ${formatDateTime(run.createdAt)}`;
+ }
+ parts.push(timeInfo);
+
+ // Duration if available
+ if (run.durationMs > 0) {
+ parts.push(`took ${formatDuration(run.durationMs)}`);
+ }
+
+ // Cost if significant
+ if (run.costInCents > 0) {
+ parts.push(`$${(run.costInCents / 100).toFixed(4)}`);
+ }
+
+ // Tags if present
+ if (run.tags && run.tags.length > 0) {
+ const tagStr =
+ run.tags.length > 2
+ ? `${run.tags.slice(0, 2).join(", ")}+${run.tags.length - 2}`
+ : run.tags.join(", ");
+ parts.push(`tags:[${tagStr}]`);
+ }
+
+ // Test flag
+ if (run.isTest) {
+ parts.push("[TEST]");
+ }
+
+ // Version if available
+ if (run.version) {
+ parts.push(`v${run.version}`);
+ }
+
+ return parts.join(" | ");
+}
diff --git a/packages/cli-v3/src/mcp/logger.ts b/packages/cli-v3/src/mcp/logger.ts
new file mode 100644
index 0000000000..b30576a331
--- /dev/null
+++ b/packages/cli-v3/src/mcp/logger.ts
@@ -0,0 +1,47 @@
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+import { appendFileSync } from "node:fs";
+import util from "node:util";
+
+export class FileLogger {
+ private filePath: string;
+ private server: McpServer;
+
+ constructor(filePath: string, server: McpServer) {
+ this.filePath = filePath;
+ this.server = server;
+ }
+
+ log(message: string, ...args: unknown[]) {
+ const logMessage = `[${new Date().toISOString()}][${this.formatServerInfo()}] ${message} - ${util.inspect(
+ args,
+ {
+ depth: null,
+ colors: false,
+ }
+ )}\n`;
+ appendFileSync(this.filePath, logMessage);
+ }
+
+ private formatServerInfo() {
+ return `${this.formatClientName()} ${this.formatClientVersion()} ${this.formatClientCapabilities()}`;
+ }
+
+ private formatClientName() {
+ const clientName = this.server.server.getClientVersion()?.name;
+ return `client=${clientName ?? "unknown"}`;
+ }
+
+ private formatClientVersion() {
+ const clientVersion = this.server.server.getClientVersion();
+
+ return `version=${clientVersion?.version ?? "unknown"}`;
+ }
+
+ private formatClientCapabilities() {
+ const clientCapabilities = this.server.server.getClientCapabilities();
+
+ const keys = Object.keys(clientCapabilities ?? {});
+
+ return `capabilities=${keys.join(",")}`;
+ }
+}
diff --git a/packages/cli-v3/src/mcp/mintlifyClient.ts b/packages/cli-v3/src/mcp/mintlifyClient.ts
new file mode 100644
index 0000000000..16fe41b411
--- /dev/null
+++ b/packages/cli-v3/src/mcp/mintlifyClient.ts
@@ -0,0 +1,74 @@
+export async function performSearch(query: string, signal: AbortSignal) {
+ const body = callToolBody("Search", { query });
+
+ const response = await fetch("https://trigger.dev/docs/mcp", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Accept: "application/json, text/event-stream",
+ "MCP-Protocol-Version": "2025-06-18",
+ },
+ signal,
+ body: JSON.stringify(body),
+ });
+
+ const data = await parseResponse(response);
+ return data;
+}
+
+async function parseResponse(response: Response) {
+ if (response.headers.get("content-type")?.includes("text/event-stream")) {
+ return parseSSEResponse(response);
+ } else {
+ return parseJSONResponse(response);
+ }
+}
+
+async function parseJSONResponse(response: Response) {
+ const data = await response.json();
+ return data;
+}
+
+// Get the first data: event and return the parsed JSON of the event
+async function parseSSEResponse(response: Response) {
+ const reader = response.body?.getReader();
+ const decoder = new TextDecoder();
+
+ if (!reader) {
+ throw new Error("No reader found");
+ }
+
+ let buffer = "";
+
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) throw new Error("SSE stream closed before data arrived");
+
+ buffer += decoder.decode(value, { stream: true });
+ const events = buffer.split("\n\n"); // SSE delimiter
+ buffer = events.pop()!; // keep incomplete
+
+ for (const evt of events) {
+ for (const line of evt.split("\n")) {
+ if (line.startsWith("data:")) {
+ const json = line.slice(5).trim();
+ return JSON.parse(json); // ā
got it
+ }
+ }
+ }
+ }
+
+ throw new Error("No data: event found");
+}
+
+function callToolBody(tool: string, args: Record<string, unknown>) {
+ return {
+ jsonrpc: "2.0",
+ id: 1,
+ method: "tools/call",
+ params: {
+ name: tool,
+ arguments: args,
+ },
+ };
+}
diff --git a/packages/cli-v3/src/mcp/schemas.ts b/packages/cli-v3/src/mcp/schemas.ts
new file mode 100644
index 0000000000..104951647f
--- /dev/null
+++ b/packages/cli-v3/src/mcp/schemas.ts
@@ -0,0 +1,197 @@
+import {
+ ApiDeploymentListParams,
+ MachinePresetName,
+ RunStatus,
+} from "@trigger.dev/core/v3/schemas";
+import { z } from "zod";
+
+// Optional project ref ("proj_…"); auto-detected from a trigger.config.ts in
+// the working directory or from the --project-ref server option when omitted.
+export const ProjectRefSchema = z
+  .string()
+  .describe(
+    "The trigger.dev project ref, starts with proj_. We will attempt to automatically detect the project ref if running inside a directory that includes a trigger.config.ts file, or if you pass the --project-ref option to the MCP server."
+  )
+  .optional();
+
+// Input for creating a project inside an existing organization.
+export const CreateProjectInOrgInput = z.object({
+  orgParam: z
+    .string()
+    .describe(
+      "The organization to create the project in, can either be the organization slug or the ID. Use the list_orgs tool to get a list of organizations and ask the user to select one."
+    ),
+  name: z.string().describe("The name of the project to create."),
+});
+
+export type CreateProjectInOrgInput = z.output<typeof CreateProjectInOrgInput>;
+
+// Input for initializing trigger.dev in a project, creating the project in
+// the org first when no projectRef is supplied.
+export const InitializeProjectInput = z.object({
+  orgParam: z
+    .string()
+    .describe(
+      "The organization to create the project in, can either be the organization slug or the ID. Use the list_orgs tool to get a list of organizations and ask the user to select one."
+    ),
+  projectRef: ProjectRefSchema,
+  projectName: z
+    .string()
+    .describe(
+      "The name of the project to create. If projectRef is not provided, we will use this name to create a new project in the organization you select."
+    ),
+  cwd: z.string().describe("The current working directory of the project").optional(),
+});
+
+export type InitializeProjectInput = z.output<typeof InitializeProjectInput>;
+
+// Base input shared by project-scoped tools: project ref, optional config
+// path (monorepos), environment and preview branch.
+export const CommonProjectsInput = z.object({
+  projectRef: ProjectRefSchema,
+  configPath: z
+    .string()
+    .describe(
+      "The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the root dir (like in a monorepo setup). If not provided, we will try to find the config file in the current working directory"
+    )
+    .optional(),
+  environment: z
+    .enum(["dev", "staging", "prod", "preview"])
+    .describe("The environment to get tasks for")
+    .default("dev"),
+  branch: z
+    .string()
+    .describe("The branch to get tasks for, only used for preview environments")
+    .optional(),
+});
+
+export type CommonProjectsInput = z.output<typeof CommonProjectsInput>;
+
+// Input for triggering a task run, mirroring the SDK's trigger options.
+export const TriggerTaskInput = CommonProjectsInput.extend({
+  taskId: z
+    .string()
+    .describe(
+      "The ID/slug of the task to trigger. Use the get_tasks tool to get a list of tasks and ask the user to select one if it's not clear which one to use."
+    ),
+  payload: z
+    .any()
+    .describe(
+      "The payload to trigger the task with. Should match the task's payload schema. Not a JSON string, but the actual payload object"
+    ),
+  options: z
+    .object({
+      queue: z
+        .object({
+          name: z
+            .string()
+            .describe(
+              "The name of the queue to trigger the task in, by default will use the queue configured in the task"
+            ),
+        })
+        .optional(),
+      delay: z
+        .string()
+        .or(z.coerce.date())
+        .describe("The delay before the task run is executed")
+        .optional(),
+      idempotencyKey: z.string().describe("The idempotency key to use for the task run").optional(),
+      machine: MachinePresetName.describe("The machine preset to use for the task run").optional(),
+      maxAttempts: z
+        .number()
+        .int()
+        .describe("The maximum number of attempts to retry the task run")
+        .optional(),
+      maxDuration: z
+        .number()
+        .describe("The maximum duration in seconds of the task run")
+        .optional(),
+      tags: z
+        .array(z.string())
+        .describe(
+          "Tags to add to the task run. Must be less than 128 characters and cannot have more than 5"
+        )
+        .optional(),
+      ttl: z
+        .string()
+        .or(z.number().nonnegative().int())
+        .describe(
+          "The time to live of the task run. If the run doesn't start executing within this time, it will be automatically cancelled."
+        )
+        .default("10m"),
+    })
+    .optional(),
+});
+
+export type TriggerTaskInput = z.output<typeof TriggerTaskInput>;
+
+// Base input for run-scoped tools: project scope plus a run ID.
+export const CommonRunsInput = CommonProjectsInput.extend({
+  runId: z.string().describe("The ID of the run to get the details of, starts with run_"),
+});
+
+export type CommonRunsInput = z.output<typeof CommonRunsInput>;
+
+export const GetRunDetailsInput = CommonRunsInput.extend({});
+
+export type GetRunDetailsInput = z.output<typeof GetRunDetailsInput>;
+
+// Input for listing runs with cursor pagination and the API's run filters.
+export const ListRunsInput = CommonProjectsInput.extend({
+  cursor: z.string().describe("The cursor to use for pagination, starts with run_").optional(),
+  limit: z
+    .number()
+    .int()
+    .describe("The number of runs to list in a single page. Up to 100")
+    .optional(),
+  status: RunStatus.describe("Filter for runs with this run status").optional(),
+  taskIdentifier: z.string().describe("Filter for runs that match this task identifier").optional(),
+  version: z
+    .string()
+    .describe("Filter for runs that match this version, e.g. 20250808.3")
+    .optional(),
+  tag: z.string().describe("Filter for runs that include this tag").optional(),
+  from: z.string().describe("Filter for runs created after this ISO 8601 timestamp").optional(),
+  to: z.string().describe("Filter for runs created before this ISO 8601 timestamp").optional(),
+  period: z
+    .string()
+    .describe("Filter for runs created in the last N time period. e.g. 7d, 30d, 365d")
+    .optional(),
+  machine: MachinePresetName.describe("Filter for runs that match this machine preset").optional(),
+});
+
+export type ListRunsInput = z.output<typeof ListRunsInput>;
+
+// Deploy tools never target "dev", so swap the environment enum for one
+// without it (defaulting to prod).
+export const CommonDeployInput = CommonProjectsInput.omit({
+  environment: true,
+}).extend({
+  environment: z
+    .enum(["staging", "prod", "preview"])
+    .describe("The environment to trigger the task in")
+    .default("prod"),
+});
+
+export type CommonDeployInput = z.output<typeof CommonDeployInput>;
+
+// Input for the deploy tool: skip flags mirror the CLI's deploy options.
+export const DeployInput = CommonDeployInput.extend({
+  skipPromotion: z
+    .boolean()
+    .describe("Skip promoting the deployment to the current deployment for the environment")
+    .optional(),
+  skipSyncEnvVars: z
+    .boolean()
+    .describe("Skip syncing environment variables when using the syncEnvVars extension")
+    .optional(),
+  skipUpdateCheck: z
+    .boolean()
+    .describe("Skip checking for @trigger.dev package updates")
+    .optional(),
+});
+
+export type DeployInput = z.output<typeof DeployInput>;
+
+// NOTE(review): .extend expects a raw shape — assumes ApiDeploymentListParams
+// is a plain shape object; if it is a ZodObject, this should be its .shape.
+export const ListDeploysInput = CommonDeployInput.extend(ApiDeploymentListParams);
+
+export type ListDeploysInput = z.output<typeof ListDeploysInput>;
+
+// Input for listing preview branches: project ref plus optional config path.
+export const ListPreviewBranchesInput = z.object({
+  projectRef: ProjectRefSchema,
+  configPath: z
+    .string()
+    .describe(
+      "The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the root dir (like in a monorepo setup). If not provided, we will try to find the config file in the current working directory"
+    )
+    .optional(),
+});
+
+export type ListPreviewBranchesInput = z.output<typeof ListPreviewBranchesInput>;
diff --git a/packages/cli-v3/src/mcp/tools.ts b/packages/cli-v3/src/mcp/tools.ts
new file mode 100644
index 0000000000..8bcb8280e0
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools.ts
@@ -0,0 +1,49 @@
+import { McpContext } from "./context.js";
+import { deployTool, listDeploysTool } from "./tools/deploys.js";
+import { searchDocsTool } from "./tools/docs.js";
+import {
+ createProjectInOrgTool,
+ initializeProjectTool,
+ listOrgsTool,
+ listProjectsTool,
+} from "./tools/orgs.js";
+import { listPreviewBranchesTool } from "./tools/previewBranches.js";
+import { cancelRunTool, getRunDetailsTool, listRunsTool } from "./tools/runs.js";
+import { getCurrentWorker, triggerTaskTool } from "./tools/tasks.js";
+import { respondWithError } from "./utils.js";
+
+// Registers every MCP tool on the server, wrapping each handler so failures
+// are returned as MCP error results instead of crashing the server.
+export function registerTools(context: McpContext) {
+  const tools = [
+    searchDocsTool,
+    listOrgsTool,
+    listProjectsTool,
+    createProjectInOrgTool,
+    initializeProjectTool,
+    getCurrentWorker,
+    triggerTaskTool,
+    listRunsTool,
+    getRunDetailsTool,
+    cancelRunTool,
+    deployTool,
+    listDeploysTool,
+    listPreviewBranchesTool,
+  ];
+
+  for (const tool of tools) {
+    context.server.registerTool(
+      tool.name,
+      {
+        annotations: { title: tool.title },
+        description: tool.description,
+        inputSchema: tool.inputSchema,
+      },
+      async (input, extra) => {
+        try {
+          // Await so async handler rejections are caught here; returning the
+          // bare promise would let them escape this try/catch.
+          return await tool.handler(input, { ...extra, ctx: context });
+        } catch (error) {
+          return respondWithError(error);
+        }
+      }
+    );
+  }
+}
diff --git a/packages/cli-v3/src/mcp/tools/deploys.ts b/packages/cli-v3/src/mcp/tools/deploys.ts
new file mode 100644
index 0000000000..1c90150840
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools/deploys.ts
@@ -0,0 +1,226 @@
+import { DeployInput, ListDeploysInput } from "../schemas.js";
+import { toolsMetadata } from "../config.js";
+import { ToolMeta } from "../types.js";
+import { respondWithError, toolHandler } from "../utils.js";
+import { McpContext } from "../context.js";
+import { x } from "tinyexec";
+import { getPackageJson, tryResolveTriggerPackageVersion } from "../../commands/update.js";
+import { VERSION } from "../../version.js";
+import { resolveSync as esmResolve } from "mlly";
+import { fileURLToPath } from "node:url";
+import stripAnsi from "strip-ansi";
+
+// Runs `trigger.dev deploy` as a child process, streaming its output into
+// MCP progress notifications and returning the collected (ANSI-stripped) logs.
+export const deployTool = {
+  name: toolsMetadata.deploy.name,
+  title: toolsMetadata.deploy.title,
+  description: toolsMetadata.deploy.description,
+  inputSchema: DeployInput.shape,
+  handler: toolHandler(DeployInput.shape, async (input, { ctx, createProgressTracker, _meta }) => {
+    ctx.logger?.log("calling deploy", { input });
+
+    // Deploys never target dev, so the tool is unavailable under --dev-only.
+    if (ctx.options.devOnly) {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. The deploy command is not allowed with the --dev-only flag.`
+      );
+    }
+
+    const cwd = await ctx.getProjectDir({ cwd: input.configPath });
+
+    if (!cwd.ok) {
+      return respondWithError(cwd.error);
+    }
+
+    const auth = await ctx.getAuth();
+
+    const args = ["deploy", "--env", input.environment, "--api-url", auth.auth.apiUrl];
+
+    if (input.environment === "preview" && input.branch) {
+      args.push("--branch", input.branch);
+    }
+
+    if (ctx.options.profile) {
+      args.push("--profile", ctx.options.profile);
+    }
+
+    if (input.skipPromotion) {
+      args.push("--skip-promotion");
+    }
+
+    if (input.skipSyncEnvVars) {
+      args.push("--skip-sync-env-vars");
+    }
+
+    if (input.skipUpdateCheck) {
+      args.push("--skip-update-check");
+    }
+
+    const [nodePath, cliPath] = await resolveCLIExec(ctx, cwd.cwd);
+
+    ctx.logger?.log("deploy process args", {
+      nodePath,
+      cliPath,
+      args,
+      meta: _meta,
+    });
+
+    const progressTracker = createProgressTracker(100);
+    await progressTracker.updateProgress(
+      5,
+      `Starting deploy to ${input.environment}${input.branch ? ` on branch ${input.branch}` : ""}`
+    );
+
+    const deployProcess = x(nodePath, [cliPath, ...args], {
+      nodeOptions: {
+        cwd: cwd.cwd,
+        env: {
+          TRIGGER_MCP_SERVER: "1",
+          CI: "true",
+        },
+      },
+    });
+
+    // Explicit type: a bare `const logs = []` is an implicit any[] in strict mode.
+    const logs: string[] = [];
+
+    for await (const line of deployProcess) {
+      const lineWithoutAnsi = stripAnsi(line);
+
+      const buildingVersion = lineWithoutAnsi.match(/Building version (\d+\.\d+)/);
+
+      if (buildingVersion) {
+        await progressTracker.incrementProgress(1, `Building version ${buildingVersion[1]}`);
+      } else {
+        await progressTracker.incrementProgress(1);
+      }
+
+      // Reuse the already-stripped line rather than stripping ANSI a second time.
+      logs.push(lineWithoutAnsi);
+    }
+
+    await progressTracker.complete("Deploy complete");
+
+    ctx.logger?.log("deploy deployProcess", {
+      logs,
+    });
+
+    if (deployProcess.exitCode !== 0) {
+      return respondWithError(logs.join("\n"));
+    }
+
+    return {
+      content: [{ type: "text", text: logs.join("\n") }],
+    };
+  }),
+};
+
+// Lists deployments for the resolved project/environment via the API client.
+export const listDeploysTool = {
+  name: toolsMetadata.list_deploys.name,
+  title: toolsMetadata.list_deploys.title,
+  description: toolsMetadata.list_deploys.description,
+  inputSchema: ListDeploysInput.shape,
+  handler: toolHandler(ListDeploysInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling list_deploys", { input });
+
+    // Deployments never exist in dev, so the tool is unavailable under --dev-only.
+    if (ctx.options.devOnly) {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.`
+      );
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    // Scoped client: only deployment reads are permitted with this token.
+    const apiClient = await ctx.getApiClient({
+      projectRef,
+      environment: input.environment,
+      scopes: ["read:deployments"],
+      branch: input.branch,
+    });
+
+    const result = await apiClient.listDeployments(input);
+
+    return {
+      content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
+    };
+  }),
+};
+
+// Picks the [node binary, CLI entrypoint] pair to run the deploy with,
+// preferring in order: a locally installed trigger.dev CLI, the currently
+// running CLI when its version matches the project's @trigger.dev/sdk, and
+// finally an npx fallback pinned to the SDK version.
+async function resolveCLIExec(context: McpContext, cwd?: string): Promise<[string, string]> {
+  // Lets first try to get the version of the CLI package
+  const installedCLI = await tryResolveTriggerCLIPath(context, cwd);
+
+  if (installedCLI) {
+    context.logger?.log("resolve_cli_exec installedCLI", { installedCLI });
+
+    return [process.argv[0] ?? "node", installedCLI.path];
+  }
+
+  const sdkVersion = await tryResolveTriggerPackageVersion("@trigger.dev/sdk", cwd);
+
+  if (!sdkVersion) {
+    context.logger?.log("resolve_cli_exec no sdk version found", { cwd });
+
+    // No SDK found: fall back to re-running whatever launched this process.
+    return [process.argv[0] ?? "npx", process.argv[1] ?? "trigger.dev@latest"];
+  }
+
+  if (sdkVersion === VERSION) {
+    context.logger?.log("resolve_cli_exec sdk version is the same as the current version", {
+      sdkVersion,
+    });
+
+    if (typeof process.argv[0] === "string" && typeof process.argv[1] === "string") {
+      return [process.argv[0], process.argv[1]];
+    }
+
+    return ["npx", "trigger.dev@latest"];
+  }
+
+  // Pin the CLI to the project's SDK version to avoid a version mismatch.
+  return ["npx", `trigger.dev@${sdkVersion}`];
+}
+
+// Attempts to resolve a locally installed "trigger.dev" CLI from basedir,
+// returning its entrypoint path and package version, or undefined when
+// resolution fails for any reason (errors are logged, never thrown).
+async function tryResolveTriggerCLIPath(
+  context: McpContext,
+  basedir?: string
+): Promise<
+  | {
+      path: string;
+      version: string;
+    }
+  | undefined
+> {
+  try {
+    const resolvedPathFileURI = esmResolve("trigger.dev", {
+      url: basedir,
+    });
+
+    const resolvedPath = fileURLToPath(resolvedPathFileURI);
+
+    context.logger?.log("resolve_cli_exec resolvedPathFileURI", { resolvedPathFileURI });
+
+    const { packageJson } = await getPackageJson(resolvedPath, {
+      test: (filePath) => {
+        // We need to skip any type-marker files
+        if (filePath.includes("dist/commonjs")) {
+          return false;
+        }
+
+        if (filePath.includes("dist/esm")) {
+          return false;
+        }
+
+        return true;
+      },
+    });
+
+    if (packageJson.version) {
+      context.logger?.log("resolve_cli_exec packageJson", { packageJson });
+
+      return { path: resolvedPath, version: packageJson.version };
+    }
+
+    // package.json had no version field — treat as unresolved.
+    return;
+  } catch (error) {
+    context.logger?.log("resolve_cli_exec error", { error });
+    return undefined;
+  }
+}
diff --git a/packages/cli-v3/src/mcp/tools/docs.ts b/packages/cli-v3/src/mcp/tools/docs.ts
new file mode 100644
index 0000000000..7b5a9d27e8
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools/docs.ts
@@ -0,0 +1,20 @@
+import { z } from "zod";
+import { toolsMetadata } from "../config.js";
+import { toolHandler } from "../utils.js";
+import { performSearch } from "../mintlifyClient.js";
+
+// Single shared shape so the registered schema and the runtime validator
+// inside toolHandler cannot drift apart.
+const searchDocsInput = {
+  query: z.string(),
+};
+
+// Searches the trigger.dev docs by delegating to the hosted docs MCP server.
+export const searchDocsTool = {
+  name: toolsMetadata.search_docs.name,
+  title: toolsMetadata.search_docs.title,
+  description: toolsMetadata.search_docs.description,
+  inputSchema: searchDocsInput,
+  handler: toolHandler(searchDocsInput, async (input, { ctx, signal }) => {
+    ctx.logger?.log("calling search_docs", { input });
+
+    const results = await performSearch(input.query, signal);
+
+    // performSearch returns the parsed JSON-RPC response; forward its result.
+    return results.result;
+  }),
+};
diff --git a/packages/cli-v3/src/mcp/tools/orgs.ts b/packages/cli-v3/src/mcp/tools/orgs.ts
new file mode 100644
index 0000000000..9f8244b586
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools/orgs.ts
@@ -0,0 +1,243 @@
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { GetProjectsResponseBody } from "@trigger.dev/core/v3";
+import { toolsMetadata } from "../config.js";
+import { CreateProjectInOrgInput, InitializeProjectInput } from "../schemas.js";
+import { ToolMeta } from "../types.js";
+import { respondWithError, toolHandler } from "../utils.js";
+import { loadConfig } from "../../config.js";
+import { tryCatch } from "@trigger.dev/core/utils";
+
+// Lists the user's organizations as a simple bullet list of slug/id pairs.
+export const listOrgsTool = {
+  name: toolsMetadata.list_orgs.name,
+  title: toolsMetadata.list_orgs.title,
+  description: toolsMetadata.list_orgs.description,
+  inputSchema: {},
+  handler: async (input: unknown, { ctx }: ToolMeta): Promise<CallToolResult> => {
+    ctx.logger?.log("calling list_orgs", { input });
+
+    const cliApiClient = await ctx.getCliApiClient();
+
+    const orgs = await cliApiClient.getOrgs();
+
+    if (!orgs.success) {
+      return respondWithError(orgs.error);
+    }
+
+    ctx.logger?.log("list_orgs", { orgs: orgs.data });
+
+    const contents = orgs.data.map((org) => {
+      return `- ${org.title} (id=${org.id}) (slug=${org.slug}) (createdAt=${org.createdAt})`;
+    });
+
+    return {
+      content: [{ type: "text", text: contents.join("\n") }],
+    };
+  },
+};
+
+// Lists the user's projects grouped by organization as markdown sections.
+export const listProjectsTool = {
+  name: toolsMetadata.list_projects.name,
+  title: toolsMetadata.list_projects.title,
+  description: toolsMetadata.list_projects.description,
+  inputSchema: {},
+  handler: async (input: unknown, { ctx }: ToolMeta): Promise<CallToolResult> => {
+    ctx.logger?.log("calling list_projects", { input });
+
+    const cliApiClient = await ctx.getCliApiClient();
+
+    const projects = await cliApiClient.getProjects();
+
+    if (!projects.success) {
+      return respondWithError(projects.error);
+    }
+
+    ctx.logger?.log("list_projects", { projects: projects.data });
+
+    // Group projects under their organization; projects without one are skipped.
+    const groupedByOrg = projects.data.reduce(
+      (acc, project) => {
+        if (!project.organization) {
+          return acc;
+        }
+
+        acc[project.organization.id] = acc[project.organization.id] || {
+          organization: project.organization,
+          projects: [],
+        };
+        acc[project.organization.id]!.projects.push(project);
+
+        return acc;
+      },
+      {} as Record<
+        string,
+        {
+          organization: GetProjectsResponseBody[number]["organization"];
+          projects: GetProjectsResponseBody[number][];
+        }
+      >
+    );
+
+    const contents = Object.values(groupedByOrg)
+      .map((org) => {
+        const parts = [
+          `## Organization ${org.organization.title} (id=${org.organization.id}) (slug=${org.organization.slug}) projects:`,
+        ];
+
+        for (const project of org.projects) {
+          parts.push(
+            `- ${project.name} (projectRef=${project.externalRef}) (slug=${project.slug}) (createdAt=${project.createdAt})`
+          );
+        }
+
+        return parts.join("\n");
+      })
+      .join("\n");
+
+    return {
+      content: [
+        {
+          type: "text",
+          text: contents,
+        },
+      ],
+    };
+  },
+};
+
+// Creates a new project inside the given organization and reports its ref.
+export const createProjectInOrgTool = {
+  name: toolsMetadata.create_project_in_org.name,
+  title: toolsMetadata.create_project_in_org.title,
+  description: toolsMetadata.create_project_in_org.description,
+  inputSchema: CreateProjectInOrgInput.shape,
+  handler: toolHandler(CreateProjectInOrgInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling create_project_in_org", { input });
+
+    const cliApiClient = await ctx.getCliApiClient();
+
+    const project = await cliApiClient.createProject(input.orgParam, {
+      name: input.name,
+    });
+
+    if (!project.success) {
+      return respondWithError(project.error);
+    }
+
+    ctx.logger?.log("create_project_in_org", { project: project.data });
+
+    const contents = [
+      `Project created successfully: ${project.data.name} (projectRef=${project.data.externalRef}) (slug=${project.data.slug}) (createdAt=${project.data.createdAt})`,
+    ];
+
+    return {
+      content: [{ type: "text", text: contents.join("\n") }],
+    };
+  }),
+};
+
+// Initializes trigger.dev for a project: reuses an existing trigger.config.ts
+// project ref when present, otherwise creates a new project in the given org,
+// then returns the manual setup guide populated with the dev credentials.
+export const initializeProjectTool = {
+  name: toolsMetadata.initialize_project.name,
+  title: toolsMetadata.initialize_project.title,
+  description: toolsMetadata.initialize_project.description,
+  inputSchema: InitializeProjectInput.shape,
+  handler: toolHandler(InitializeProjectInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling initialize_project", { input });
+
+    let projectRef: string | undefined = input.projectRef;
+
+    if (!projectRef) {
+      const cwd = input.cwd ?? (await ctx.getCwd());
+
+      if (!cwd) {
+        return respondWithError(
+          "No current working directory found. Please provide a projectRef or a cwd."
+        );
+      }
+
+      // Try to load the config file
+      const [_, config] = await tryCatch(loadConfig({ cwd }));
+
+      if (config?.configFile) {
+        // A config file exists: either it already has a usable project ref
+        // (skip initialization) or it's unusable and the caller must supply one.
+        if (typeof config.project === "string" && config.project.startsWith("proj_")) {
+          ctx.logger?.log("initialize_project existing project", {
+            config,
+            projectRef: config.project,
+          });
+
+          return {
+            content: [
+              {
+                type: "text",
+                text: `We found an existing trigger.config.ts file in the current working directory. Skipping initialization.`,
+              },
+            ],
+          };
+        } else {
+          return respondWithError(
+            "Could not find the project ref in the config file. Please provide a projectRef."
+          );
+        }
+      }
+
+      // No config file: create a brand-new project in the chosen org.
+      const cliApiClient = await ctx.getCliApiClient();
+
+      const project = await cliApiClient.createProject(input.orgParam, {
+        name: input.projectName,
+      });
+
+      if (!project.success) {
+        return respondWithError(
+          `Failed to create project ${input.projectName} in organization ${input.orgParam}: ${project.error}`
+        );
+      }
+
+      ctx.logger?.log("initialize_project new project", {
+        project: project.data,
+      });
+
+      projectRef = project.data.externalRef;
+    }
+
+    const cliApiClient = await ctx.getCliApiClient();
+
+    // Fetch the dev API key/URL; the guide degrades to placeholders on failure.
+    const projectEnv = await cliApiClient.getProjectEnv({
+      projectRef: projectRef,
+      env: "dev",
+    });
+
+    const manualSetupGuide = await getManualSetupGuide(
+      projectRef,
+      projectEnv.success ? projectEnv.data.apiKey : undefined,
+      projectEnv.success ? projectEnv.data.apiUrl : undefined
+    );
+
+    return {
+      content: [
+        {
+          type: "text",
+          text: manualSetupGuide,
+        },
+      ],
+    };
+  }),
+};
+
+// Fetches the hosted manual-setup guide and substitutes this project's ref,
+// dev API key, and API URL into its placeholders, prefixed with a summary.
+async function getManualSetupGuide(projectRef: string, apiKey?: string, apiUrl?: string) {
+  const response = await fetch("https://trigger.dev/docs/manual-setup.md");
+  let text = await response.text();
+
+  // NOTE(review): replacing the empty string prepends projectRef to the
+  // document instead of substituting a placeholder — the target token looks
+  // like it was lost; verify against manual-setup.md's actual placeholder.
+  text = text.replace("", projectRef);
+
+  text = text.replace("tr_dev_xxxxxxxxxx", apiKey ?? "tr_dev_xxxxxxxxxx");
+  text = text.replace(
+    "https://your-trigger-instance.com",
+    apiUrl ?? "https://your-trigger-instance.com"
+  );
+
+  return `
+Use the following manual setup guide to initialize Trigger.dev in your project. Make sure to use the correct project ref: ${projectRef}, and the following environment variables:
+
+TRIGGER_PROJECT_REF=${projectRef}
+TRIGGER_SECRET_KEY=${apiKey ?? "tr_dev_xxxxxxxxxx"}
+${apiUrl ? `TRIGGER_API_URL=${apiUrl}` : ""}
+
+${text}`;
+}
diff --git a/packages/cli-v3/src/mcp/tools/previewBranches.ts b/packages/cli-v3/src/mcp/tools/previewBranches.ts
new file mode 100644
index 0000000000..abadda29ab
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools/previewBranches.ts
@@ -0,0 +1,35 @@
+import { ListPreviewBranchesInput } from "../schemas.js";
+import { toolsMetadata } from "../config.js";
+import { ToolMeta } from "../types.js";
+import { respondWithError, toolHandler } from "../utils.js";
+
+// Lists the project's preview branches as pretty-printed JSON.
+export const listPreviewBranchesTool = {
+  name: toolsMetadata.list_preview_branches.name,
+  title: toolsMetadata.list_preview_branches.title,
+  description: toolsMetadata.list_preview_branches.description,
+  inputSchema: ListPreviewBranchesInput.shape,
+  handler: toolHandler(ListPreviewBranchesInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling list_preview_branches", { input });
+
+    // Preview branches are not a dev concept, so block under --dev-only.
+    if (ctx.options.devOnly) {
+      return respondWithError(`This MCP server is only available for the dev environment. `);
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    const cliApiClient = await ctx.getCliApiClient();
+
+    const branches = await cliApiClient.listBranches(projectRef);
+
+    if (!branches.success) {
+      return respondWithError(branches.error);
+    }
+
+    return {
+      content: [{ type: "text", text: JSON.stringify(branches.data, null, 2) }],
+    };
+  }),
+};
diff --git a/packages/cli-v3/src/mcp/tools/runs.ts b/packages/cli-v3/src/mcp/tools/runs.ts
new file mode 100644
index 0000000000..ebe17be904
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools/runs.ts
@@ -0,0 +1,151 @@
+import { toolsMetadata } from "../config.js";
+import { formatRun, formatRunList, formatRunTrace } from "../formatters.js";
+import { CommonRunsInput, GetRunDetailsInput, ListRunsInput } from "../schemas.js";
+import { respondWithError, toolHandler } from "../utils.js";
+
+// Fetches a run and its trace in parallel, formats both, and returns them as
+// markdown together with a dashboard link.
+export const getRunDetailsTool = {
+  name: toolsMetadata.get_run_details.name,
+  title: toolsMetadata.get_run_details.title,
+  description: toolsMetadata.get_run_details.description,
+  inputSchema: GetRunDetailsInput.shape,
+  handler: toolHandler(GetRunDetailsInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling get_run_details", { input });
+
+    if (ctx.options.devOnly && input.environment !== "dev") {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.`
+      );
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    // Token is scoped to reading this single run.
+    const apiClient = await ctx.getApiClient({
+      projectRef,
+      environment: input.environment,
+      scopes: [`read:runs:${input.runId}`],
+      branch: input.branch,
+    });
+
+    // The two fetches are independent, so issue them concurrently.
+    const [runResult, traceResult] = await Promise.all([
+      apiClient.retrieveRun(input.runId),
+      apiClient.retrieveRunTrace(input.runId),
+    ]);
+
+    const formattedRun = formatRun(runResult);
+    const formattedTrace = formatRunTrace(traceResult.trace);
+
+    const runUrl = await ctx.getDashboardUrl(`/projects/v3/${projectRef}/runs/${runResult.id}`);
+
+    const content = [
+      "## Run Details",
+      formattedRun,
+      "",
+      "## Run Trace",
+      formattedTrace,
+      "",
+      `[View in dashboard](${runUrl})`,
+    ];
+
+    return {
+      content: [
+        {
+          type: "text",
+          text: content.join("\n"),
+        },
+      ],
+    };
+  }),
+};
+
+// Cancels a run, then re-fetches it so the response reflects post-cancel state.
+export const cancelRunTool = {
+  name: toolsMetadata.cancel_run.name,
+  title: toolsMetadata.cancel_run.title,
+  description: toolsMetadata.cancel_run.description,
+  inputSchema: CommonRunsInput.shape,
+  handler: toolHandler(CommonRunsInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling cancel_run", { input });
+
+    if (ctx.options.devOnly && input.environment !== "dev") {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.`
+      );
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    // Needs write scope for the cancel and read scope for the re-fetch below.
+    const apiClient = await ctx.getApiClient({
+      projectRef,
+      environment: input.environment,
+      scopes: [`write:runs:${input.runId}`, `read:runs:${input.runId}`],
+      branch: input.branch,
+    });
+
+    await apiClient.cancelRun(input.runId);
+
+    const retrieveResult = await apiClient.retrieveRun(input.runId);
+
+    const runUrl = await ctx.getDashboardUrl(
+      `/projects/v3/${projectRef}/runs/${retrieveResult.id}`
+    );
+
+    return {
+      content: [{ type: "text", text: JSON.stringify({ ...retrieveResult, runUrl }, null, 2) }],
+    };
+  }),
+};
+
+// Lists runs matching the given filters and returns a formatted summary.
+export const listRunsTool = {
+  name: toolsMetadata.list_runs.name,
+  title: toolsMetadata.list_runs.title,
+  description: toolsMetadata.list_runs.description,
+  inputSchema: ListRunsInput.shape,
+  handler: toolHandler(ListRunsInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling list_runs", { input });
+
+    if (ctx.options.devOnly && input.environment !== "dev") {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.`
+      );
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    const apiClient = await ctx.getApiClient({
+      projectRef,
+      environment: input.environment,
+      scopes: ["read:runs"],
+      branch: input.branch,
+    });
+
+    // The API expects Date objects; the tool input carries ISO 8601 strings.
+    const $from = typeof input.from === "string" ? new Date(input.from) : undefined;
+    const $to = typeof input.to === "string" ? new Date(input.to) : undefined;
+
+    const result = await apiClient.listRuns({
+      after: input.cursor,
+      limit: input.limit,
+      status: input.status,
+      taskIdentifier: input.taskIdentifier,
+      version: input.version,
+      tag: input.tag,
+      from: $from,
+      to: $to,
+      period: input.period,
+      machine: input.machine,
+    });
+
+    const formattedRuns = formatRunList(result);
+
+    return {
+      content: [{ type: "text", text: formattedRuns }],
+    };
+  }),
+};
diff --git a/packages/cli-v3/src/mcp/tools/tasks.ts b/packages/cli-v3/src/mcp/tools/tasks.ts
new file mode 100644
index 0000000000..15e8d40295
--- /dev/null
+++ b/packages/cli-v3/src/mcp/tools/tasks.ts
@@ -0,0 +1,158 @@
+import { toolsMetadata } from "../config.js";
+import { CommonProjectsInput, TriggerTaskInput } from "../schemas.js";
+import { ToolMeta } from "../types.js";
+import { respondWithError, toolHandler } from "../utils.js";
+
+// Reports the "current" worker for an environment: its version, registered
+// tasks (with payload schemas when available), useful URLs, and a warning on
+// SDK/CLI version mismatch.
+export const getCurrentWorker = {
+  name: toolsMetadata.get_current_worker.name,
+  title: toolsMetadata.get_current_worker.title,
+  description: toolsMetadata.get_current_worker.description,
+  inputSchema: CommonProjectsInput.shape,
+  handler: toolHandler(CommonProjectsInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling get_current_worker", { input });
+
+    if (ctx.options.devOnly && input.environment !== "dev") {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.`
+      );
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    const cliApiClient = await ctx.getCliApiClient(input.branch);
+
+    // "current" is the worker tag for the latest deployed/running worker.
+    const workerResult = await cliApiClient.getWorkerByTag(
+      projectRef,
+      input.environment,
+      "current"
+    );
+
+    if (!workerResult.success) {
+      return respondWithError(workerResult.error);
+    }
+
+    const { worker, urls } = workerResult.data;
+
+    const contents = [
+      `Current worker for ${input.environment} is ${worker.version} using ${worker.sdkVersion} of the SDK.`,
+    ];
+
+    if (worker.tasks.length > 0) {
+      contents.push(`The worker has ${worker.tasks.length} tasks registered:`);
+
+      for (const task of worker.tasks) {
+        if (task.payloadSchema) {
+          contents.push(
+            `- ${task.slug} in ${task.filePath} (payload schema: ${JSON.stringify(
+              task.payloadSchema
+            )})`
+          );
+        } else {
+          contents.push(`- ${task.slug} in ${task.filePath}`);
+        }
+      }
+    } else {
+      contents.push(`The worker has no tasks registered.`);
+    }
+
+    contents.push(`\n`);
+    contents.push(`URLs:`);
+    contents.push(`- Runs: ${urls.runs}`);
+    contents.push(`\n`);
+    contents.push(
+      `You can use the list_runs tool with the version ${worker.version} to get the list of runs for this worker.`
+    );
+
+    if (
+      typeof worker.sdkVersion === "string" &&
+      typeof worker.cliVersion === "string" &&
+      worker.sdkVersion !== worker.cliVersion
+    ) {
+      contents.push(
+        `WARNING: The SDK version (${worker.sdkVersion}) is different from the CLI version (${worker.cliVersion}). This might cause issues with the task execution. Make sure to pin the CLI and the SDK versions to ${worker.sdkVersion}.`
+      );
+    }
+
+    return {
+      content: [{ type: "text", text: contents.join("\n") }],
+    };
+  }),
+};
+
+// Triggers a task run with the given payload/options and returns the run ID
+// plus a dashboard link; warns when the dev CLI isn't connected for dev runs.
+export const triggerTaskTool = {
+  name: toolsMetadata.trigger_task.name,
+  title: toolsMetadata.trigger_task.title,
+  description: toolsMetadata.trigger_task.description,
+  inputSchema: TriggerTaskInput.shape,
+  handler: toolHandler(TriggerTaskInput.shape, async (input, { ctx }) => {
+    ctx.logger?.log("calling trigger_task", { input });
+
+    if (ctx.options.devOnly && input.environment !== "dev") {
+      return respondWithError(
+        `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.`
+      );
+    }
+
+    const projectRef = await ctx.getProjectRef({
+      projectRef: input.projectRef,
+      cwd: input.configPath,
+    });
+
+    const apiClient = await ctx.getApiClient({
+      projectRef,
+      environment: input.environment,
+      scopes: ["write:tasks"],
+      branch: input.branch,
+    });
+
+    ctx.logger?.log("triggering task", { input });
+
+    let payload = input.payload;
+
+    // Tolerate clients that send the payload as a JSON string: decode it if
+    // possible, otherwise pass the raw string through unchanged.
+    if (typeof payload === "string") {
+      try {
+        payload = JSON.parse(payload);
+      } catch {
+        ctx.logger?.log("payload is not a valid JSON string, using as is", { payload });
+      }
+    }
+
+    const result = await apiClient.triggerTask(input.taskId, {
+      payload,
+      options: input.options,
+    });
+
+    const taskRunUrl = await ctx.getDashboardUrl(`/projects/v3/${projectRef}/runs/${result.id}`);
+
+    const contents = [
+      `Task ${input.taskId} triggered and run with ID created: ${result.id}.`,
+      `View the run in the dashboard: ${taskRunUrl}`,
+      `You can also use the get_run_details tool to get the details of the run.`,
+    ];
+
+    if (input.environment === "dev") {
+      // Dev runs only execute while the dev CLI is connected — surface that.
+      const cliApiClient = await ctx.getCliApiClient(input.branch);
+      const devStatus = await cliApiClient.getDevStatus(projectRef);
+      const isConnected = devStatus.success ? devStatus.data.isConnected : false;
+      const connectionMessage = isConnected
+        ? undefined
+        : "The dev CLI is not connected to this project, because it is not currently running. Make sure to run the dev command to execute triggered tasks.";
+
+      if (connectionMessage) {
+        contents.push(connectionMessage);
+      }
+    }
+
+    return {
+      content: [
+        {
+          type: "text",
+          text: contents.join("\n"),
+        },
+      ],
+    };
+  }),
+};
diff --git a/packages/cli-v3/src/mcp/types.ts b/packages/cli-v3/src/mcp/types.ts
new file mode 100644
index 0000000000..697a2d0dfc
--- /dev/null
+++ b/packages/cli-v3/src/mcp/types.ts
@@ -0,0 +1,7 @@
+import { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js";
+import { ServerNotification, ServerRequest } from "@modelcontextprotocol/sdk/types.js";
+import { McpContext } from "./context.js";
+
+export type ToolMeta = RequestHandlerExtra<ServerRequest, ServerNotification> & {
+ ctx: McpContext;
+};
diff --git a/packages/cli-v3/src/mcp/utils.ts b/packages/cli-v3/src/mcp/utils.ts
new file mode 100644
index 0000000000..b783365e67
--- /dev/null
+++ b/packages/cli-v3/src/mcp/utils.ts
@@ -0,0 +1,134 @@
+import type { CallToolResult, ServerNotification } from "@modelcontextprotocol/sdk/types.js";
+import { z } from "zod";
+import { ToolMeta } from "./types.js";
+
+export function respondWithError(error: unknown): CallToolResult {
+ return {
+ isError: true,
+ content: [
+ {
+ type: "text",
+ text: JSON.stringify({ error: enumerateError(error) }),
+ },
+ ],
+ };
+}
+
+function enumerateError(error: unknown) {
+ if (!error) {
+ return error;
+ }
+
+ if (typeof error !== "object") {
+ return error;
+ }
+
+ const newError: Record<string, unknown> = {};
+
+ const errorProps = ["name", "message"] as const;
+
+ for (const prop of errorProps) {
+ if (prop in error) {
+ newError[prop] = (error as Record<string, unknown>)[prop];
+ }
+ }
+
+ return newError;
+}
+
+export type ToolHandlerMeta = ToolMeta & {
+ createProgressTracker: (total: number) => ProgressTracker;
+};
+
+export function toolHandler<TInputShape extends z.ZodRawShape>(
+ shape: TInputShape,
+ handler: (
+ input: z.output<z.ZodObject<TInputShape>>,
+ meta: ToolHandlerMeta
+ ) => Promise<CallToolResult>
+) {
+ return async (input: unknown, extra: ToolMeta) => {
+ const parsedInput = z.object(shape).safeParse(input);
+
+ if (!parsedInput.success) {
+ return respondWithError(parsedInput.error);
+ }
+
+ function createProgressTracker(total: number) {
+ return new ProgressTracker(total, extra.sendNotification, extra._meta?.progressToken);
+ }
+
+ return handler(parsedInput.data, { ...extra, createProgressTracker });
+ };
+}
+
+class ProgressTracker {
+ private progress: number = 0;
+ private progressToken: string | number | undefined;
+ private total: number;
+ private message: string;
+ private sendNotification: (notification: ServerNotification) => Promise<void>;
+
+ constructor(
+ total: number,
+ sendNotification: (notification: ServerNotification) => Promise<void>,
+ progressToken?: string | number
+ ) {
+ this.message = "";
+ this.progressToken = progressToken;
+ this.progress = 0;
+ this.total = total;
+ this.sendNotification = sendNotification;
+ }
+
+ async updateProgress(progress: number, message?: string) {
+ this.progress = progress;
+
+ if (message) {
+ this.message = message;
+ }
+
+ await this.#sendNotification(progress, this.message);
+ }
+
+ async incrementProgress(increment: number, message?: string) {
+ this.progress += increment;
+
+ // make sure the progress is never greater than the total
+ this.progress = Math.min(this.progress, this.total);
+
+ if (message) {
+ this.message = message;
+ }
+
+ await this.#sendNotification(this.progress, this.message);
+ }
+
+ async complete(message?: string) {
+ this.progress = this.total;
+ if (message) {
+ this.message = message;
+ }
+ await this.#sendNotification(this.progress, this.message);
+ }
+
+ getProgress() {
+ return this.progress;
+ }
+
+ async #sendNotification(progress: number, message: string) {
+ if (!this.progressToken) {
+ return;
+ }
+
+ await this.sendNotification({
+ method: "notifications/progress",
+ params: {
+ progress,
+ total: this.total,
+ message: this.message,
+ progressToken: this.progressToken,
+ },
+ });
+ }
+}
diff --git a/packages/cli-v3/src/rules/install.ts b/packages/cli-v3/src/rules/install.ts
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/packages/cli-v3/src/rules/install.ts
@@ -0,0 +1 @@
+
diff --git a/packages/cli-v3/src/rules/manifest.ts b/packages/cli-v3/src/rules/manifest.ts
new file mode 100644
index 0000000000..96b08d372d
--- /dev/null
+++ b/packages/cli-v3/src/rules/manifest.ts
@@ -0,0 +1,162 @@
+import { readFile } from "fs/promises";
+import { dirname, join } from "path";
+import { z } from "zod";
+import { RulesFileInstallStrategy } from "./types.js";
+
+const RulesManifestDataSchema = z.object({
+ name: z.string(),
+ description: z.string(),
+ currentVersion: z.string(),
+ versions: z.record(
+ z.string(),
+ z.object({
+ options: z.array(
+ z.object({
+ name: z.string(),
+ title: z.string(),
+ label: z.string(),
+ path: z.string(),
+ tokens: z.number(),
+ client: z.string().optional(),
+ installStrategy: z.string().optional(),
+ applyTo: z.string().optional(),
+ })
+ ),
+ })
+ ),
+});
+
+type RulesManifestData = z.infer<typeof RulesManifestDataSchema>;
+
+export type RulesManifestVersionOption = {
+ name: string;
+ title: string;
+ label: string;
+ contents: string;
+ tokens: number;
+ client: string | undefined;
+ installStrategy: RulesFileInstallStrategy;
+ applyTo: string | undefined;
+};
+
+export type ManifestVersion = {
+ version: string;
+ options: Array<RulesManifestVersionOption>;
+};
+
+export class RulesManifest {
+ constructor(
+ private readonly manifest: RulesManifestData,
+ private readonly loader: RulesManifestLoader
+ ) {}
+
+ get name() {
+ return this.manifest.name;
+ }
+
+ get description() {
+ return this.manifest.description;
+ }
+
+ get currentVersion() {
+ return this.manifest.currentVersion;
+ }
+
+ async getCurrentVersion(): Promise<ManifestVersion> {
+ const version = this.versions[this.manifest.currentVersion];
+
+ if (!version) {
+ throw new Error(`Version ${this.manifest.currentVersion} not found in manifest`);
+ }
+
+ const options = await Promise.all(
+ version.options.map(async (option) => {
+ const contents = await this.loader.loadRulesFile(option.path);
+
+ // Omit path
+ const { path, installStrategy, ...rest } = option;
+
+ const $installStrategy = RulesFileInstallStrategy.safeParse(installStrategy ?? "default");
+
+ // Skip variants with invalid install strategies
+ if (!$installStrategy.success) {
+ return;
+ }
+
+ return { ...rest, contents, installStrategy: $installStrategy.data };
+ })
+ );
+
+ return {
+ version: this.manifest.currentVersion,
+ options: options.filter(Boolean) as Array<RulesManifestVersionOption>,
+ };
+ }
+
+ get versions() {
+ return this.manifest.versions;
+ }
+}
+
+export async function loadRulesManifest(loader: RulesManifestLoader): Promise<RulesManifest> {
+ const content = await loader.loadManifestContent();
+
+ return new RulesManifest(RulesManifestDataSchema.parse(JSON.parse(content)), loader);
+}
+
+export interface RulesManifestLoader {
+ loadManifestContent(): Promise<string>;
+ loadRulesFile(relativePath: string): Promise<string>;
+}
+
+export class GithubRulesManifestLoader implements RulesManifestLoader {
+ constructor(private readonly branch: string = "main") {}
+
+ async loadManifestContent(): Promise<string> {
+ const response = await fetch(
+ `https://raw.githubusercontent.com/triggerdotdev/trigger.dev/refs/heads/${this.branch}/rules/manifest.json`
+ );
+
+ if (!response.ok) {
+ throw new Error(`Failed to load rules manifest: ${response.status} ${response.statusText}`);
+ }
+
+ return response.text();
+ }
+
+ async loadRulesFile(relativePath: string): Promise<string> {
+ const response = await fetch(
+ `https://raw.githubusercontent.com/triggerdotdev/trigger.dev/refs/heads/${this.branch}/${relativePath}`
+ );
+
+ if (!response.ok) {
+ throw new Error(
+ `Failed to load rules file: ${relativePath} - ${response.status} ${response.statusText}`
+ );
+ }
+
+ return response.text();
+ }
+}
+
+export class LocalRulesManifestLoader implements RulesManifestLoader {
+ constructor(private readonly path: string) {}
+
+ async loadManifestContent(): Promise<string> {
+ try {
+ return await readFile(this.path, "utf8");
+ } catch (error) {
+ throw new Error(`Failed to load rules manifest: ${this.path} - ${error}`);
+ }
+ }
+
+ async loadRulesFile(relativePath: string): Promise<string> {
+ const path = join(dirname(this.path), relativePath);
+
+ try {
+ return await readFile(path, "utf8");
+ } catch (error) {
+ throw new Error(`Failed to load rules file: ${relativePath} - ${error}`);
+ }
+ }
+}
diff --git a/packages/cli-v3/src/rules/types.ts b/packages/cli-v3/src/rules/types.ts
new file mode 100644
index 0000000000..70682c251a
--- /dev/null
+++ b/packages/cli-v3/src/rules/types.ts
@@ -0,0 +1,4 @@
+import { z } from "zod";
+
+export const RulesFileInstallStrategy = z.enum(["default", "claude-code-subagent"]);
+export type RulesFileInstallStrategy = z.infer<typeof RulesFileInstallStrategy>;
diff --git a/packages/cli-v3/src/utilities/configFiles.ts b/packages/cli-v3/src/utilities/configFiles.ts
index 6e1c9052e7..dfbdbf5df6 100644
--- a/packages/cli-v3/src/utilities/configFiles.ts
+++ b/packages/cli-v3/src/utilities/configFiles.ts
@@ -29,6 +29,13 @@ const CliConfigFile = z.object({
version: z.literal(2),
currentProfile: z.string().default(DEFFAULT_PROFILE),
profiles: z.record(CliConfigProfileSettings),
+ settings: z
+ .object({
+ hasSeenMCPInstallPrompt: z.boolean().optional(),
+ hasSeenRulesInstallPrompt: z.boolean().optional(),
+ lastRulesInstallPromptVersion: z.string().optional(),
+ })
+ .optional(),
});
type CliConfigFile = z.infer<typeof CliConfigFile>;
@@ -50,6 +57,10 @@ function getBlankConfig(): CliConfigFile {
version: 2,
currentProfile: DEFFAULT_PROFILE,
profiles: {},
+ settings: {
+ hasSeenMCPInstallPrompt: false,
+ hasSeenRulesInstallPrompt: false,
+ },
};
}
@@ -93,6 +104,52 @@ export function readAuthConfigProfile(
}
}
+export function readConfigHasSeenMCPInstallPrompt(): boolean {
+ const config = getConfig();
+ return typeof config.settings?.hasSeenMCPInstallPrompt === "boolean"
+ ? config.settings.hasSeenMCPInstallPrompt
+ : false;
+}
+
+export function writeConfigHasSeenMCPInstallPrompt(hasSeenMCPInstallPrompt: boolean) {
+ const config = getConfig();
+ config.settings = {
+ ...config.settings,
+ hasSeenMCPInstallPrompt,
+ };
+ writeAuthConfigFile(config);
+}
+
+export function readConfigHasSeenRulesInstallPrompt(): boolean {
+ const config = getConfig();
+ return typeof config.settings?.hasSeenRulesInstallPrompt === "boolean"
+ ? config.settings.hasSeenRulesInstallPrompt
+ : false;
+}
+
+export function writeConfigHasSeenRulesInstallPrompt(hasSeenRulesInstallPrompt: boolean) {
+ const config = getConfig();
+ config.settings = {
+ ...config.settings,
+ hasSeenRulesInstallPrompt,
+ };
+ writeAuthConfigFile(config);
+}
+
+export function readConfigLastRulesInstallPromptVersion(): string | undefined {
+ const config = getConfig();
+ return config.settings?.lastRulesInstallPromptVersion;
+}
+
+export function writeConfigLastRulesInstallPromptVersion(version: string) {
+ const config = getConfig();
+ config.settings = {
+ ...config.settings,
+ lastRulesInstallPromptVersion: version,
+ };
+ writeAuthConfigFile(config);
+}
+
export function deleteAuthConfigProfile(profile: string = DEFFAULT_PROFILE) {
const config = getConfig();
diff --git a/packages/cli-v3/src/utilities/fileSystem.ts b/packages/cli-v3/src/utilities/fileSystem.ts
index b3957122fb..2de037582c 100644
--- a/packages/cli-v3/src/utilities/fileSystem.ts
+++ b/packages/cli-v3/src/utilities/fileSystem.ts
@@ -1,8 +1,9 @@
import fsSync from "fs";
import fsModule, { writeFile } from "fs/promises";
import fs from "node:fs";
-import { tmpdir } from "node:os";
+import { homedir, tmpdir } from "node:os";
import pathModule from "node:path";
+import { parseJSONC, stringifyJSONC, parseTOML, stringifyTOML } from "confbox";
// Creates a file at the given path, if the directory doesn't exist it will be created
export async function createFile(
@@ -50,6 +51,22 @@ export async function readFile(path: string) {
return await fsModule.readFile(path, "utf8");
}
+export function expandTilde(filePath: string) {
+ if (typeof filePath !== "string") {
+ throw new TypeError("Path must be a string");
+ }
+
+ if (filePath === "~") {
+ return homedir();
+ }
+
+ if (filePath.startsWith("~/")) {
+ return pathModule.resolve(homedir(), filePath.slice(2));
+ }
+
+ return pathModule.resolve(filePath);
+}
+
export async function readJSONFile(path: string) {
const fileContents = await fsModule.readFile(path, "utf8");
@@ -71,7 +88,13 @@ export async function safeReadJSONFile(path: string) {
}
export async function writeJSONFile(path: string, json: any, pretty = false) {
- await writeFile(path, JSON.stringify(json, undefined, pretty ? 2 : undefined), "utf8");
+ await safeWriteFile(path, JSON.stringify(json, undefined, pretty ? 2 : undefined));
+}
+
+// Will create the directory if it doesn't exist
+export async function safeWriteFile(path: string, contents: string) {
+ await fsModule.mkdir(pathModule.dirname(path), { recursive: true });
+ await fsModule.writeFile(path, contents);
}
export function readJSONFileSync(path: string) {
@@ -98,3 +121,31 @@ export async function createTempDir(): Promise<string> {
return directory;
}
+
+export async function safeReadTomlFile(path: string) {
+ const fileExists = await pathExists(path);
+
+ if (!fileExists) return;
+
+ const fileContents = await readFile(path);
+
+ return parseTOML(fileContents.replace(/\r\n/g, "\n"));
+}
+
+export async function writeTomlFile(path: string, toml: any) {
+ await safeWriteFile(path, stringifyTOML(toml));
+}
+
+export async function safeReadJSONCFile(path: string) {
+ const fileExists = await pathExists(path);
+
+ if (!fileExists) return;
+
+ const fileContents = await readFile(path);
+
+ return parseJSONC(fileContents.replace(/\r\n/g, "\n"));
+}
+
+export async function writeJSONCFile(path: string, json: any) {
+ await safeWriteFile(path, stringifyJSONC(json));
+}
diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts
index 4eab7d0089..fdd4bfc5e5 100644
--- a/packages/core/src/v3/apiClient/index.ts
+++ b/packages/core/src/v3/apiClient/index.ts
@@ -3,6 +3,9 @@ import { VERSION } from "../../version.js";
import { generateJWT } from "../jwt.js";
import {
AddTagsRequestBody,
+ ApiDeploymentListOptions,
+ ApiDeploymentListResponseItem,
+ ApiDeploymentListSearchParams,
BatchTaskRunExecutionResult,
BatchTriggerTaskV3RequestBody,
BatchTriggerTaskV3Response,
@@ -27,6 +30,7 @@ import {
RetrieveBatchV2Response,
RetrieveQueueParam,
RetrieveRunResponse,
+ RetrieveRunTraceResponseBody,
ScheduleObject,
TaskRunExecutionResult,
TriggerTaskRequestBody,
@@ -339,6 +343,18 @@ export class ApiClient {
);
}
+ retrieveRunTrace(runId: string, requestOptions?: ZodFetchOptions) {
+ return zodfetch(
+ RetrieveRunTraceResponseBody,
+ `${this.baseUrl}/api/v1/runs/${runId}/trace`,
+ {
+ method: "GET",
+ headers: this.#getHeaders(false),
+ },
+ mergeRequestOptions(this.defaultRequestOptions, requestOptions)
+ );
+ }
+
listRuns(
query?: ListRunsQueryParams,
requestOptions?: ZodFetchOptions
@@ -960,6 +976,41 @@ export class ApiClient {
);
}
+ listDeployments(options?: ApiDeploymentListOptions, requestOptions?: ZodFetchOptions) {
+ const searchParams = new URLSearchParams();
+
+ if (options?.status) {
+ searchParams.append("status", options.status);
+ }
+
+ if (options?.period) {
+ searchParams.append("period", options.period);
+ }
+
+ if (options?.from) {
+ searchParams.append("from", options.from);
+ }
+
+ if (options?.to) {
+ searchParams.append("to", options.to);
+ }
+
+ return zodfetchCursorPage(
+ ApiDeploymentListResponseItem,
+ `${this.baseUrl}/api/v1/deployments`,
+ {
+ query: searchParams,
+ after: options?.cursor,
+ limit: options?.limit,
+ },
+ {
+ method: "GET",
+ headers: this.#getHeaders(false),
+ },
+ mergeRequestOptions(this.defaultRequestOptions, requestOptions)
+ );
+ }
+
async fetchStream<T>(
runId: string,
streamKey: string,
diff --git a/packages/core/src/v3/isomorphic/dates.ts b/packages/core/src/v3/isomorphic/dates.ts
new file mode 100644
index 0000000000..53a0542d7e
--- /dev/null
+++ b/packages/core/src/v3/isomorphic/dates.ts
@@ -0,0 +1,35 @@
+/**
+ * Attempts to parse a string into a valid Date.
+ *
+ * Supported formats:
+ * - ISO and RFC date strings (e.g. "2025-08-18", "2025-08-18T12:34:56Z")
+ * - Natural language dates supported by JS Date (e.g. "August 18, 2025")
+ * - Epoch seconds (10-digit numeric string, e.g. "1629302400")
+ * - Epoch milliseconds (13-digit numeric string, e.g. "1629302400000")
+ *
+ * @param input The string to parse.
+ * @returns A valid Date object, or undefined if parsing fails.
+ */
+export function parseDate(input: string): Date | undefined {
+ if (typeof input !== "string") return undefined;
+
+ // Handle pure numeric strings as epoch values
+ if (/^\d+$/.test(input)) {
+ const num = Number(input);
+
+ if (input.length === 10) {
+ // Epoch seconds
+ return new Date(num * 1000);
+ } else if (input.length === 13) {
+ // Epoch milliseconds
+ return new Date(num);
+ } else {
+ // Unsupported numeric length
+ return undefined;
+ }
+ }
+
+ // Handle general date strings
+ const date = new Date(input);
+ return isNaN(date.getTime()) ? undefined : date;
+}
diff --git a/packages/core/src/v3/isomorphic/index.ts b/packages/core/src/v3/isomorphic/index.ts
index 8e15c36d2a..d220acd515 100644
--- a/packages/core/src/v3/isomorphic/index.ts
+++ b/packages/core/src/v3/isomorphic/index.ts
@@ -4,3 +4,4 @@ export * from "./maxDuration.js";
export * from "./queueName.js";
export * from "./consts.js";
export * from "./traceContext.js";
+export * from "./dates.js";
diff --git a/packages/core/src/v3/schemas/api.ts b/packages/core/src/v3/schemas/api.ts
index 7fde77c41c..fff43ef7a1 100644
--- a/packages/core/src/v3/schemas/api.ts
+++ b/packages/core/src/v3/schemas/api.ts
@@ -29,7 +33,11 @@ export type WhoAmIResponse = z.infer<typeof WhoAmIResponse>;
export const GetProjectResponseBody = z.object({
id: z.string(),
- externalRef: z.string(),
+ externalRef: z
+ .string()
+ .describe(
+ "The external reference for the project, also known as the project ref, a unique identifier starting with proj_"
+ ),
name: z.string(),
slug: z.string(),
createdAt: z.coerce.date(),
@@ -47,6 +51,27 @@ export const GetProjectsResponseBody = z.array(GetProjectResponseBody);
export type GetProjectsResponseBody = z.infer<typeof GetProjectsResponseBody>;
+export const GetOrgsResponseBody = z.array(
+ z.object({
+ id: z.string(),
+ title: z.string(),
+ slug: z.string(),
+ createdAt: z.coerce.date(),
+ })
+);
+
+export type GetOrgsResponseBody = z.infer<typeof GetOrgsResponseBody>;
+
+export const CreateProjectRequestBody = z.object({
+ name: z
+ .string()
+ .trim()
+ .min(1, "Name is required")
+ .max(255, "Name must be less than 255 characters"),
+});
+
+export type CreateProjectRequestBody = z.infer<typeof CreateProjectRequestBody>;
+
export const GetProjectEnvResponse = z.object({
apiKey: z.string(),
name: z.string(),
@@ -56,6 +81,49 @@ export const GetProjectEnvResponse = z.object({
export type GetProjectEnvResponse = z.infer<typeof GetProjectEnvResponse>;
+// Zod schema for the response body type
+export const GetWorkerTaskResponse = z.object({
+ id: z.string(),
+ slug: z.string(),
+ filePath: z.string(),
+ triggerSource: z.string(),
+ createdAt: z.coerce.date(),
+ payloadSchema: z.any().nullish(),
+});
+
+export const GetWorkerByTagResponse = z.object({
+ worker: z.object({
+ id: z.string(),
+ version: z.string(),
+ engine: z.string().nullish(),
+ sdkVersion: z.string().nullish(),
+ cliVersion: z.string().nullish(),
+ tasks: z.array(GetWorkerTaskResponse),
+ }),
+ urls: z.object({
+ runs: z.string(),
+ }),
+});
+
+export type GetWorkerByTagResponse = z.infer<typeof GetWorkerByTagResponse>;
+
+export const GetJWTRequestBody = z.object({
+ claims: z
+ .object({
+ scopes: z.array(z.string()).default([]),
+ })
+ .optional(),
+ expirationTime: z.union([z.number(), z.string()]).optional(),
+});
+
+export type GetJWTRequestBody = z.infer<typeof GetJWTRequestBody>;
+
+export const GetJWTResponse = z.object({
+ token: z.string(),
+});
+
+export type GetJWTResponse = z.infer<typeof GetJWTResponse>;
+
export const CreateBackgroundWorkerRequestBody = z.object({
localOnly: z.boolean(),
metadata: BackgroundWorkerMetadata,
@@ -1078,3 +1146,123 @@ export function timeoutError(timeout: Date) {
message: `Waitpoint timed out at ${timeout.toISOString()}`,
};
}
+
+const ApiDeploymentCommonShape = {
+ from: z.string().describe("The date to start the search from, in ISO 8601 format").optional(),
+ to: z.string().describe("The date to end the search, in ISO 8601 format").optional(),
+ period: z.string().describe("The period to search within (e.g. 1d, 7d, 3h, etc.)").optional(),
+ status: z
+ .enum(["PENDING", "BUILDING", "DEPLOYING", "DEPLOYED", "FAILED", "CANCELED", "TIMED_OUT"])
+ .describe("Filter deployments that are in this status")
+ .optional(),
+};
+
+const ApiDeploymentListPaginationCursor = z
+ .string()
+ .describe("The deployment ID to start the search from, to get the next page")
+ .optional();
+
+const ApiDeploymentListPaginationLimit = z.coerce
+ .number()
+ .describe("The number of deployments to return, defaults to 20 (max 100)")
+ .min(1, "Limit must be at least 1")
+ .max(100, "Limit must be less than 100")
+ .optional();
+
+export const ApiDeploymentListParams = {
+ ...ApiDeploymentCommonShape,
+ cursor: ApiDeploymentListPaginationCursor,
+ limit: ApiDeploymentListPaginationLimit,
+};
+
+export const ApiDeploymentListOptions = z.object(ApiDeploymentListParams);
+
+export type ApiDeploymentListOptions = z.infer<typeof ApiDeploymentListOptions>;
+
+export const ApiDeploymentListSearchParams = z.object({
+ ...ApiDeploymentCommonShape,
+ "page[after]": ApiDeploymentListPaginationCursor,
+ "page[size]": ApiDeploymentListPaginationLimit,
+});
+
+export type ApiDeploymentListSearchParams = z.infer<typeof ApiDeploymentListSearchParams>;
+
+export const ApiDeploymentListResponseItem = z.object({
+ id: z.string(),
+ createdAt: z.coerce.date(),
+ shortCode: z.string(),
+ version: z.string(),
+ runtime: z.string(),
+ runtimeVersion: z.string(),
+ status: z.enum([
+ "PENDING",
+ "BUILDING",
+ "DEPLOYING",
+ "DEPLOYED",
+ "FAILED",
+ "CANCELED",
+ "TIMED_OUT",
+ ]),
+ deployedAt: z.coerce.date().optional(),
+ git: z.record(z.any()).optional(),
+ error: DeploymentErrorData.optional(),
+});
+
+export type ApiDeploymentListResponseItem = z.infer<typeof ApiDeploymentListResponseItem>;
+
+export const ApiBranchListResponseBody = z.object({
+ branches: z.array(
+ z.object({
+ id: z.string(),
+ name: z.string(),
+ createdAt: z.coerce.date(),
+ updatedAt: z.coerce.date(),
+ git: z.record(z.any()).optional(),
+ isPaused: z.boolean(),
+ })
+ ),
+});
+
+export type ApiBranchListResponseBody = z.infer<typeof ApiBranchListResponseBody>;
+
+export const RetrieveRunTraceSpanSchema = z.object({
+ id: z.string(),
+ parentId: z.string().optional(),
+ message: z.string(),
+ data: z.object({
+ runId: z.string(),
+ taskSlug: z.string().optional(),
+ taskPath: z.string().optional(),
+ events: z.array(z.any()),
+ startTime: z.coerce.date(),
+ duration: z.number(),
+ isError: z.boolean(),
+ isPartial: z.boolean(),
+ isCancelled: z.boolean(),
+ level: z.string(),
+ environmentType: z.string(),
+ workerVersion: z.string().optional(),
+ queueName: z.string().optional(),
+ machinePreset: z.string().optional(),
+ properties: z.record(z.any()).optional(),
+ output: z.record(z.any()).optional(),
+ }),
+});
+
+export type RetrieveRunTraceSpan = z.infer<typeof RetrieveRunTraceSpanSchema> & {
+ children: Array<RetrieveRunTraceSpan>;
+};
+
+export const RetrieveRunTraceSpan: z.ZodType<RetrieveRunTraceSpan> =
+ RetrieveRunTraceSpanSchema.extend({
+ children: z.lazy(() => RetrieveRunTraceSpan.array()),
+ });
+
+export const RetrieveRunTraceResponseBody = z.object({
+ trace: z.object({
+ traceId: z.string(),
+ rootSpan: RetrieveRunTraceSpan,
+ }),
+});
+
+export type RetrieveRunTraceResponseBody = z.infer<typeof RetrieveRunTraceResponseBody>;
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 6decddb309..d05d8c5a9e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1267,13 +1267,13 @@ importers:
packages/cli-v3:
dependencies:
'@clack/prompts':
- specifier: ^0.10.0
- version: 0.10.1
+ specifier: 0.11.0
+ version: 0.11.0
'@depot/cli':
specifier: 0.0.1-cli.2.80.0
version: 0.0.1-cli.2.80.0
'@modelcontextprotocol/sdk':
- specifier: ^1.6.1
+ specifier: ^1.17.0
version: 1.17.1(supports-color@10.0.0)
'@opentelemetry/api':
specifier: 1.9.0
@@ -1329,6 +1329,9 @@ importers:
commander:
specifier: ^9.4.1
version: 9.5.0
+ confbox:
+ specifier: ^0.2.2
+ version: 0.2.2
defu:
specifier: ^6.1.4
version: 6.1.4
@@ -1419,6 +1422,9 @@ importers:
std-env:
specifier: ^3.7.0
version: 3.7.0
+ strip-ansi:
+ specifier: ^7.1.0
+ version: 7.1.0
supports-color:
specifier: ^10.0.0
version: 10.0.0
@@ -5829,17 +5835,17 @@ packages:
resolution: {integrity: sha512-hBzuU5+JjB2cqNZyszkDHZgOSrUUT8V3dhgRl8Q9Gp6dAj/H5+KILGjbhDpc3Iy9qmqlm/akuOI2ut9VUtzJxQ==}
dev: true
- /@clack/core@0.4.2:
- resolution: {integrity: sha512-NYQfcEy8MWIxrT5Fj8nIVchfRFA26yYKJcvBS7WlUIlw2OmQOY9DhGGXMovyI5J5PpxrCPGkgUi207EBrjpBvg==}
+ /@clack/core@0.5.0:
+ resolution: {integrity: sha512-p3y0FIOwaYRUPRcMO7+dlmLh8PSRcrjuTndsiA0WAFbWES0mLZlrjVoBRZ9DzkPFJZG6KGkJmoEAY0ZcVWTkow==}
dependencies:
picocolors: 1.1.1
sisteransi: 1.0.5
dev: false
- /@clack/prompts@0.10.1:
- resolution: {integrity: sha512-Q0T02vx8ZM9XSv9/Yde0jTmmBQufZhPJfYAg2XrrrxWWaZgq1rr8nU8Hv710BQ1dhoP8rtY7YUdpGej2Qza/cw==}
+ /@clack/prompts@0.11.0:
+ resolution: {integrity: sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==}
dependencies:
- '@clack/core': 0.4.2
+ '@clack/core': 0.5.0
picocolors: 1.1.1
sisteransi: 1.0.5
dev: false
@@ -23373,6 +23379,10 @@ packages:
/confbox@0.1.8:
resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==}
+ /confbox@0.2.2:
+ resolution: {integrity: sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==}
+ dev: false
+
/config-chain@1.1.13:
resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==}
dependencies:
diff --git a/rules/4.0.0/advanced-tasks.md b/rules/4.0.0/advanced-tasks.md
new file mode 100644
index 0000000000..f6ecac3035
--- /dev/null
+++ b/rules/4.0.0/advanced-tasks.md
@@ -0,0 +1,451 @@
+# Trigger.dev Advanced Tasks (v4)
+
+**Advanced patterns and features for writing tasks**
+
+## Tags & Organization
+
+```ts
+import { task, tags } from "@trigger.dev/sdk";
+
+export const processUser = task({
+ id: "process-user",
+ run: async (payload: { userId: string; orgId: string }, { ctx }) => {
+ // Add tags during execution
+ await tags.add(`user_${payload.userId}`);
+ await tags.add(`org_${payload.orgId}`);
+
+ return { processed: true };
+ },
+});
+
+// Trigger with tags
+await processUser.trigger(
+ { userId: "123", orgId: "abc" },
+ { tags: ["priority", "user_123", "org_abc"] } // Max 10 tags per run
+);
+
+// Subscribe to tagged runs
+for await (const run of runs.subscribeToRunsWithTag("user_123")) {
+ console.log(`User task ${run.id}: ${run.status}`);
+}
+```
+
+**Tag Best Practices:**
+
+- Use prefixes: `user_123`, `org_abc`, `video:456`
+- Max 10 tags per run, 1-64 characters each
+- Tags don't propagate to child tasks automatically
+
+## Concurrency & Queues
+
+```ts
+import { task, queue } from "@trigger.dev/sdk";
+
+// Shared queue for related tasks
+const emailQueue = queue({
+ name: "email-processing",
+ concurrencyLimit: 5, // Max 5 emails processing simultaneously
+});
+
+// Task-level concurrency
+export const oneAtATime = task({
+ id: "sequential-task",
+ queue: { concurrencyLimit: 1 }, // Process one at a time
+ run: async (payload) => {
+ // Critical section - only one instance runs
+ },
+});
+
+// Per-user concurrency
+export const processUserData = task({
+ id: "process-user-data",
+ run: async (payload: { userId: string }) => {
+ // Override queue with user-specific concurrency
+ await childTask.trigger(payload, {
+ queue: {
+ name: `user-${payload.userId}`,
+ concurrencyLimit: 2,
+ },
+ });
+ },
+});
+
+export const emailTask = task({
+ id: "send-email",
+ queue: emailQueue, // Use shared queue
+ run: async (payload: { to: string }) => {
+ // Send email logic
+ },
+});
+```
+
+## Error Handling & Retries
+
+```ts
+import { task, retry, AbortTaskRunError } from "@trigger.dev/sdk";
+
+export const resilientTask = task({
+ id: "resilient-task",
+ retry: {
+ maxAttempts: 10,
+ factor: 1.8, // Exponential backoff multiplier
+ minTimeoutInMs: 500,
+ maxTimeoutInMs: 30_000,
+ randomize: false,
+ },
+ catchError: async ({ error, ctx }) => {
+ // Custom error handling
+ if (error.code === "FATAL_ERROR") {
+ throw new AbortTaskRunError("Cannot retry this error");
+ }
+
+ // Log error details
+ console.error(`Task ${ctx.task.id} failed:`, error);
+
+ // Allow retry by returning nothing
+ return { retryAt: new Date(Date.now() + 60000) }; // Retry in 1 minute
+ },
+ run: async (payload) => {
+ // Retry specific operations
+ const result = await retry.onThrow(
+ async () => {
+ return await unstableApiCall(payload);
+ },
+ { maxAttempts: 3 }
+ );
+
+ // Conditional HTTP retries
+ const response = await retry.fetch("https://api.example.com", {
+ retry: {
+ maxAttempts: 5,
+ condition: (response, error) => {
+ return response?.status === 429 || response?.status >= 500;
+ },
+ },
+ });
+
+ return result;
+ },
+});
+```
+
+## Machines & Performance
+
+```ts
+export const heavyTask = task({
+ id: "heavy-computation",
+ machine: { preset: "large-2x" }, // 8 vCPU, 16 GB RAM
+ maxDuration: 1800, // 30 minutes timeout
+ run: async (payload, { ctx }) => {
+ // Resource-intensive computation
+ if (ctx.machine.preset === "large-2x") {
+ // Use all available cores
+ return await parallelProcessing(payload);
+ }
+
+ return await standardProcessing(payload);
+ },
+});
+
+// Override machine when triggering
+await heavyTask.trigger(payload, {
+ machine: { preset: "medium-1x" }, // Override for this run
+});
+```
+
+**Machine Presets:**
+
+- `micro`: 0.25 vCPU, 0.25 GB RAM
+- `small-1x`: 0.5 vCPU, 0.5 GB RAM (default)
+- `small-2x`: 1 vCPU, 1 GB RAM
+- `medium-1x`: 1 vCPU, 2 GB RAM
+- `medium-2x`: 2 vCPU, 4 GB RAM
+- `large-1x`: 4 vCPU, 8 GB RAM
+- `large-2x`: 8 vCPU, 16 GB RAM
+
+## Idempotency
+
+```ts
+import { task, idempotencyKeys } from "@trigger.dev/sdk";
+
+export const paymentTask = task({
+ id: "process-payment",
+ retry: {
+ maxAttempts: 3,
+ },
+ run: async (payload: { orderId: string; amount: number }) => {
+ // Automatically scoped to this task run, so if the task is retried, the idempotency key will be the same
+ const idempotencyKey = await idempotencyKeys.create(`payment-${payload.orderId}`);
+
+ // Ensure payment is processed only once
+ await chargeCustomer.trigger(payload, {
+ idempotencyKey,
+ idempotencyKeyTTL: "24h", // Key expires in 24 hours
+ });
+ },
+});
+
+// Payload-based idempotency
+import { createHash } from "node:crypto";
+
+function createPayloadHash(payload: any): string {
+ const hash = createHash("sha256");
+ hash.update(JSON.stringify(payload));
+ return hash.digest("hex");
+}
+
+export const deduplicatedTask = task({
+ id: "deduplicated-task",
+ run: async (payload) => {
+ const payloadHash = createPayloadHash(payload);
+ const idempotencyKey = await idempotencyKeys.create(payloadHash);
+
+ await processData.trigger(payload, { idempotencyKey });
+ },
+});
+```
+
+## Metadata & Progress Tracking
+
+```ts
+import { task, metadata } from "@trigger.dev/sdk";
+
+export const batchProcessor = task({
+ id: "batch-processor",
+ run: async (payload: { items: any[] }, { ctx }) => {
+ const totalItems = payload.items.length;
+
+ // Initialize progress metadata
+ metadata
+ .set("progress", 0)
+ .set("totalItems", totalItems)
+ .set("processedItems", 0)
+ .set("status", "starting");
+
+ const results = [];
+
+ for (let i = 0; i < payload.items.length; i++) {
+ const item = payload.items[i];
+
+ // Process item
+ const result = await processItem(item);
+ results.push(result);
+
+ // Update progress
+ const progress = ((i + 1) / totalItems) * 100;
+ metadata
+ .set("progress", progress)
+ .increment("processedItems", 1)
+ .append("logs", `Processed item ${i + 1}/${totalItems}`)
+ .set("currentItem", item.id);
+ }
+
+ // Final status
+ metadata.set("status", "completed");
+
+ return { results, totalProcessed: results.length };
+ },
+});
+
+// Update parent metadata from child task
+export const childTask = task({
+ id: "child-task",
+ run: async (payload, { ctx }) => {
+ // Update parent task metadata
+ metadata.parent.set("childStatus", "processing");
+ metadata.root.increment("childrenCompleted", 1);
+
+ return { processed: true };
+ },
+});
+```
+
+## Advanced Triggering
+
+### Frontend Triggering (React)
+
+```tsx
+"use client";
+import { useTaskTrigger } from "@trigger.dev/react-hooks";
+import type { myTask } from "../trigger/tasks";
+
+function TriggerButton({ accessToken }: { accessToken: string }) {
+  const { submit, handle, isLoading } = useTaskTrigger<typeof myTask>("my-task", { accessToken });
+
+  return (
+    <button onClick={() => submit({ data: "value" })} disabled={isLoading}>
+      {isLoading ? "Triggering…" : "Trigger task"}
+    </button>
+  );
+}
+```
+
+### Large Payloads
+
+```ts
+// For payloads > 512KB (max 10MB)
+export const largeDataTask = task({
+ id: "large-data-task",
+ run: async (payload: { dataUrl: string }) => {
+ // Trigger.dev automatically handles large payloads
+ // For > 10MB, use external storage
+ const response = await fetch(payload.dataUrl);
+ const largeData = await response.json();
+
+ return { processed: largeData.length };
+ },
+});
+
+// Best practice: Use presigned URLs for very large files
+await largeDataTask.trigger({
+ dataUrl: "https://s3.amazonaws.com/bucket/large-file.json?presigned=true",
+});
+```
+
+### Advanced Options
+
+```ts
+await myTask.trigger(payload, {
+ delay: "2h30m", // Delay execution
+ ttl: "24h", // Expire if not started within 24 hours
+ priority: 100, // Higher priority (time offset in seconds)
+ tags: ["urgent", "user_123"],
+ metadata: { source: "api", version: "v2" },
+ queue: {
+ name: "priority-queue",
+ concurrencyLimit: 10,
+ },
+ idempotencyKey: "unique-operation-id",
+ idempotencyKeyTTL: "1h",
+ machine: { preset: "large-1x" },
+ maxAttempts: 5,
+});
+```
+
+## Hidden Tasks
+
+```ts
+// Hidden task - not exported, only used internally
+const internalProcessor = task({
+ id: "internal-processor",
+ run: async (payload: { data: string }) => {
+ return { processed: payload.data.toUpperCase() };
+ },
+});
+
+// Public task that uses hidden task
+export const publicWorkflow = task({
+ id: "public-workflow",
+ run: async (payload: { input: string }) => {
+ // Use hidden task internally
+ const result = await internalProcessor.triggerAndWait({
+ data: payload.input,
+ });
+
+ if (result.ok) {
+ return { output: result.output.processed };
+ }
+
+ throw new Error("Internal processing failed");
+ },
+});
+```
+
+## Logging & Tracing
+
+```ts
+import { task, logger } from "@trigger.dev/sdk";
+
+export const tracedTask = task({
+ id: "traced-task",
+ run: async (payload, { ctx }) => {
+ logger.info("Task started", { userId: payload.userId });
+
+ // Custom trace with attributes
+ const user = await logger.trace(
+ "fetch-user",
+ async (span) => {
+ span.setAttribute("user.id", payload.userId);
+ span.setAttribute("operation", "database-fetch");
+
+ const userData = await database.findUser(payload.userId);
+ span.setAttribute("user.found", !!userData);
+
+ return userData;
+ },
+ { userId: payload.userId }
+ );
+
+ logger.debug("User fetched", { user: user.id });
+
+ try {
+ const result = await processUser(user);
+ logger.info("Processing completed", { result });
+ return result;
+ } catch (error) {
+ logger.error("Processing failed", {
+ error: error.message,
+ userId: payload.userId,
+ });
+ throw error;
+ }
+ },
+});
+```
+
+## Usage Monitoring
+
+```ts
+import { task, usage } from "@trigger.dev/sdk";
+
+export const monitoredTask = task({
+ id: "monitored-task",
+ run: async (payload) => {
+ // Get current run cost
+ const currentUsage = await usage.getCurrent();
+ logger.info("Current cost", {
+ costInCents: currentUsage.costInCents,
+ durationMs: currentUsage.durationMs,
+ });
+
+ // Measure specific operation
+ const { result, compute } = await usage.measure(async () => {
+ return await expensiveOperation(payload);
+ });
+
+ logger.info("Operation cost", {
+ costInCents: compute.costInCents,
+ durationMs: compute.durationMs,
+ });
+
+ return result;
+ },
+});
+```
+
+## Run Management
+
+```ts
+// Cancel runs
+await runs.cancel("run_123");
+
+// Replay runs with same payload
+await runs.replay("run_123");
+
+// Retrieve run with cost details
+const run = await runs.retrieve("run_123");
+console.log(`Cost: ${run.costInCents} cents, Duration: ${run.durationMs}ms`);
+```
+
+## Best Practices
+
+- **Concurrency**: Use queues to prevent overwhelming external services
+- **Retries**: Configure exponential backoff for transient failures
+- **Idempotency**: Always use for payment/critical operations
+- **Metadata**: Track progress for long-running tasks
+- **Machines**: Match machine size to computational requirements
+- **Tags**: Use consistent naming patterns for filtering
+- **Large Payloads**: Use external storage for files > 10MB
+- **Error Handling**: Distinguish between retryable and fatal errors
+
+Design tasks to be stateless, idempotent, and resilient to failures. Use metadata for state tracking and queues for resource management.
diff --git a/rules/4.0.0/basic-tasks.md b/rules/4.0.0/basic-tasks.md
new file mode 100644
index 0000000000..6e30ff1c71
--- /dev/null
+++ b/rules/4.0.0/basic-tasks.md
@@ -0,0 +1,185 @@
+# Trigger.dev Basic Tasks (v4)
+
+**MUST use `@trigger.dev/sdk` (v4), NEVER `client.defineJob`**
+
+## Basic Task
+
+```ts
+import { task } from "@trigger.dev/sdk";
+
+export const processData = task({
+ id: "process-data",
+ retry: {
+ maxAttempts: 10,
+ factor: 1.8,
+ minTimeoutInMs: 500,
+ maxTimeoutInMs: 30_000,
+ randomize: false,
+ },
+ run: async (payload: { userId: string; data: any[] }) => {
+ // Task logic - runs for long time, no timeouts
+ console.log(`Processing ${payload.data.length} items for user ${payload.userId}`);
+ return { processed: payload.data.length };
+ },
+});
+```
+
+## Schema Task (with validation)
+
+```ts
+import { schemaTask } from "@trigger.dev/sdk";
+import { z } from "zod";
+
+export const validatedTask = schemaTask({
+ id: "validated-task",
+ schema: z.object({
+ name: z.string(),
+ age: z.number(),
+ email: z.string().email(),
+ }),
+ run: async (payload) => {
+ // Payload is automatically validated and typed
+ return { message: `Hello ${payload.name}, age ${payload.age}` };
+ },
+});
+```
+
+## Scheduled Task
+
+```ts
+import { schedules } from "@trigger.dev/sdk";
+
+const dailyReport = schedules.task({
+ id: "daily-report",
+ cron: "0 9 * * *", // Daily at 9:00 AM UTC
+ // or with timezone: cron: { pattern: "0 9 * * *", timezone: "America/New_York" },
+ run: async (payload) => {
+ console.log("Scheduled run at:", payload.timestamp);
+ console.log("Last run was:", payload.lastTimestamp);
+ console.log("Next 5 runs:", payload.upcoming);
+
+ // Generate daily report logic
+ return { reportGenerated: true, date: payload.timestamp };
+ },
+});
+```
+
+## Triggering Tasks
+
+### From Backend Code
+
+```ts
+import { tasks } from "@trigger.dev/sdk";
+import type { processData } from "./trigger/tasks";
+
+// Single trigger
+const handle = await tasks.trigger("process-data", {
+ userId: "123",
+ data: [{ id: 1 }, { id: 2 }],
+});
+
+// Batch trigger
+const batchHandle = await tasks.batchTrigger("process-data", [
+ { payload: { userId: "123", data: [{ id: 1 }] } },
+ { payload: { userId: "456", data: [{ id: 2 }] } },
+]);
+```
+
+### From Inside Tasks (with Result handling)
+
+```ts
+export const parentTask = task({
+ id: "parent-task",
+ run: async (payload) => {
+ // Trigger and continue
+ const handle = await childTask.trigger({ data: "value" });
+
+ // Trigger and wait - returns Result object, NOT task output
+ const result = await childTask.triggerAndWait({ data: "value" });
+ if (result.ok) {
+ console.log("Task output:", result.output); // Actual task return value
+ } else {
+ console.error("Task failed:", result.error);
+ }
+
+ // Quick unwrap (throws on error)
+ const output = await childTask.triggerAndWait({ data: "value" }).unwrap();
+
+ // Batch trigger and wait
+ const results = await childTask.batchTriggerAndWait([
+ { payload: { data: "item1" } },
+ { payload: { data: "item2" } },
+ ]);
+
+    for (const run of results.runs) {
+ if (run.ok) {
+ console.log("Success:", run.output);
+ } else {
+ console.log("Failed:", run.error);
+ }
+ }
+ },
+});
+
+export const childTask = task({
+ id: "child-task",
+ run: async (payload: { data: string }) => {
+ return { processed: payload.data };
+ },
+});
+```
+
+> Never wrap triggerAndWait or batchTriggerAndWait calls in a Promise.all or Promise.allSettled as this is not supported in Trigger.dev tasks.
+
+## Waits
+
+```ts
+import { task, wait } from "@trigger.dev/sdk";
+
+export const taskWithWaits = task({
+ id: "task-with-waits",
+ run: async (payload) => {
+ console.log("Starting task");
+
+ // Wait for specific duration
+ await wait.for({ seconds: 30 });
+ await wait.for({ minutes: 5 });
+ await wait.for({ hours: 1 });
+ await wait.for({ days: 1 });
+
+ // Wait until specific date
+ await wait.until({ date: new Date("2024-12-25") });
+
+ // Wait for token (from external system)
+ await wait.forToken({
+ token: "user-approval-token",
+ timeoutInSeconds: 3600, // 1 hour timeout
+ });
+
+ console.log("All waits completed");
+ return { status: "completed" };
+ },
+});
+```
+
+> Never wrap wait calls in a Promise.all or Promise.allSettled as this is not supported in Trigger.dev tasks.
+
+## Key Points
+
+- **Result vs Output**: `triggerAndWait()` returns a `Result` object with `ok`, `output`, `error` properties - NOT the direct task output
+- **Type safety**: Use `import type` for task references when triggering from backend
+- **Waits > 5 seconds**: Automatically checkpointed, don't count toward compute usage
+
+## NEVER Use (v2 deprecated)
+
+```ts
+// BREAKS APPLICATION
+client.defineJob({
+ id: "job-id",
+ run: async (payload, io) => {
+ /* ... */
+ },
+});
+```
+
+Use v4 SDK (`@trigger.dev/sdk`), check `result.ok` before accessing `result.output`
diff --git a/rules/4.0.0/claude-code-agent.md b/rules/4.0.0/claude-code-agent.md
new file mode 100644
index 0000000000..db3663e97e
--- /dev/null
+++ b/rules/4.0.0/claude-code-agent.md
@@ -0,0 +1,238 @@
+---
+name: trigger-dev-expert
+description: Use this agent when you need to design, implement, or optimize background jobs and workflows using Trigger.dev framework. This includes creating reliable async tasks, implementing AI workflows, setting up scheduled jobs, structuring complex task hierarchies with subtasks, configuring build extensions for tools like ffmpeg or Puppeteer/Playwright, and handling task schemas with Zod validation. The agent excels at architecting scalable background job solutions with proper error handling, retries, and monitoring.\n\nExamples:\n- \n Context: User needs to create a background job for processing video files\n user: "I need to create a task that processes uploaded videos, extracts thumbnails, and transcodes them"\n assistant: "I'll use the trigger-dev-expert agent to design a robust video processing workflow with proper task structure and ffmpeg configuration"\n \n Since this involves creating background tasks with media processing, the trigger-dev-expert agent is ideal for structuring the workflow and configuring build extensions.\n \n\n- \n Context: User wants to implement a scheduled data sync task\n user: "Create a scheduled task that runs every hour to sync data from our API to the database"\n assistant: "Let me use the trigger-dev-expert agent to create a properly structured scheduled task with error handling"\n \n The user needs a scheduled background task, which is a core Trigger.dev feature that the expert agent specializes in.\n \n\n- \n Context: User needs help with task orchestration\n user: "I have a complex workflow where I need to run multiple AI models in sequence and parallel, how should I structure this?"\n assistant: "I'll engage the trigger-dev-expert agent to architect an efficient task hierarchy using triggerAndWait and batchTriggerAndWait patterns"\n \n Complex task orchestration with subtasks is a specialty of the trigger-dev-expert agent.\n \n
+model: inherit
+color: green
+---
+
+You are an elite Trigger.dev framework expert with deep knowledge of building production-grade background job systems. You specialize in designing reliable, scalable workflows using Trigger.dev's async-first architecture. Tasks deployed to Trigger.dev generally run in Node.js 21+ and use the `@trigger.dev/sdk` package, along with the `@trigger.dev/build` package for build extensions and the `trigger.dev` CLI package to run the `dev` server and `deploy` command.
+
+> Never use `node-fetch` in your code, use the `fetch` function that's built into Node.js.
+
+## Design Principles
+
+When creating Trigger.dev solutions, you will:
+
+- Use the `@trigger.dev/sdk` package to create tasks, ideally using the `schemaTask` function and passing in a Zod or other schema validation library schema to the `schema` property so the task payload can be validated and automatically typed.
+- Break complex workflows into subtasks that can be independently retried and made idempotent, but don't overly complicate your tasks with too many subtasks. Sometimes the correct approach is to NOT use a subtask and do things like await Promise.allSettled to do work in parallel to save on costs, as each task gets its own dedicated process and is charged by the millisecond.
+- Always configure the `retry` property in the task definition to set the maximum number of retries, the delay between retries, and the backoff factor. Don't retry too much unless absolutely necessary.
+- When triggering a task from inside another task, consider whether to use the `triggerAndWait`/`batchTriggerAndWait` pattern or just the `trigger`/`batchTrigger` function. Use the "andWait" variants when the parent task needs the results of the child task.
+- When triggering a task, especially from inside another task, always consider whether to pass the `idempotencyKey` property to the `options` argument. This is especially important when inside another task and that task can be retried and you don't want to redo the work in child tasks (whether waiting for the results or not).
+- Use the `logger` system in Trigger.dev to log useful messages at key execution points.
+- Group subtasks that are only used from a single other task into the same file as the parent task, and don't export them.
+
+> Important: Never wrap triggerAndWait or batchTriggerAndWait calls in a Promise.all or Promise.allSettled as this is not supported in Trigger.dev tasks.
+
+## Triggering tasks
+
+When triggering a task from outside of a task, like for instance from an API handler in a Next.js route, you will use the `tasks.trigger` function and do a type only import of the task instance, to prevent dependencies inside the task file from leaking into the API handler and possibly causing issues with the build. An example:
+
+```ts
+import { tasks } from "@trigger.dev/sdk";
+import type { processData } from "./trigger/tasks";
+
+const handle = await tasks.trigger("process-data", {
+ userId: "123",
+ data: [{ id: 1 }, { id: 2 }],
+});
+```
+
+When triggering tasks from inside another task, if the other task is in a different file, use the pattern above. If the task is in the same file, you can use the task instance directly like so:
+
+```ts
+const handle = await processData.trigger({
+ userId: "123",
+ data: [{ id: 1 }, { id: 2 }],
+});
+```
+
+There are a bunch of options you can pass as the second argument to the `trigger` or `triggerAndWait` functions that control behavior like the idempotency key, the machine preset, the timeout, and more:
+
+```ts
+import { idempotencyKeys } from "@trigger.dev/sdk";
+
+const handle = await processData.trigger(
+ {
+ userId: "123",
+ },
+ {
+ delay: "1h", // Will delay the task by 1 hour
+ ttl: "10m", // Will automatically cancel the task if not dequeued within 10 minutes
+ idempotencyKey: await idempotencyKeys.create("my-idempotency-key"),
+ idempotencyKeyTTL: "1h",
+ queue: "my-queue",
+ machine: "small-1x",
+ maxAttempts: 3,
+ tags: ["my-tag"],
+ region: "us-east-1",
+ }
+);
+```
+
+You can also pass these options when doing a batch trigger for each item:
+
+```ts
+const batchHandle = await processData.batchTrigger([
+ {
+ payload: { userId: "123" },
+ options: {
+ idempotencyKey: await idempotencyKeys.create("my-idempotency-key-1"),
+ },
+ },
+ {
+ payload: { userId: "456" },
+ options: {
+ idempotencyKey: await idempotencyKeys.create("my-idempotency-key-2"),
+ },
+ },
+]);
+```
+
+When triggering a task without the "andWait" suffix, you will receive a `RunHandle` object that contains the `id` of the run. You can use this with various `runs` SDK functions to get the status of the run, cancel it, etc.
+
+```ts
+import { runs } from "@trigger.dev/sdk";
+
+const handle = await processData.trigger({
+ userId: "123",
+});
+
+const run = await runs.retrieve(handle.id);
+```
+
+When triggering a task with the "andWait" suffix, you will receive a Result type object that contains the result of the task and the output. Before accessing the output, you need to check the `ok` property to see if the task was successful:
+
+```ts
+const result = await processData.triggerAndWait({
+ userId: "123",
+});
+
+if (result.ok) {
+ const output = result.output;
+} else {
+ const error = result.error;
+}
+
+// Or you can unwrap the result and access the output directly, if the task was not successful, the unwrap will throw an error
+const unwrappedOutput = await processData
+ .triggerAndWait({
+ userId: "123",
+ })
+ .unwrap();
+
+const batchResult = await processData.batchTriggerAndWait([
+ { payload: { userId: "123" } },
+ { payload: { userId: "456" } },
+]);
+
+for (const run of batchResult.runs) {
+ if (run.ok) {
+ const output = run.output;
+ } else {
+ const error = run.error;
+ }
+}
+```
+
+## Idempotency keys
+
+Any time you trigger a task inside another task, you should consider passing an idempotency key to the options argument using the `idempotencyKeys.create` function. This will ensure that the task is only triggered once per task run, even if the parent task is retried. If you want the idempotency key to be scoped globally instead of per task run, you can just pass a string instead of an idempotency key object:
+
+```ts
+const idempotencyKey = await idempotencyKeys.create("my-idempotency-key");
+
+const handle = await processData.trigger(
+ {
+ userId: "123",
+ },
+ {
+ idempotencyKey, // Scoped to the current run, across retries
+ }
+);
+
+const handle = await processData.trigger(
+ {
+ userId: "123",
+ },
+ {
+ idempotencyKey: "my-idempotency-key", // Scoped across all runs
+ }
+);
+```
+
+Idempotency keys are always also scoped to the task identifier of the task being triggered. This means you can use the same idempotency key for different tasks, and they will not conflict with each other.
+
+## Machine Presets
+
+- The default machine preset is `small-1x` which is a 0.5vCPU and 0.5GB of memory.
+- The default machine preset can be overridden in the trigger.config.ts file by setting the `machine` property.
+- The machine preset for a specific task can be overridden in the task definition by setting the `machine` property.
+- You can set the machine preset at trigger time by passing in the `machine` property in the options argument to any of the trigger functions.
+
+| Preset | vCPU | Memory | Disk space |
+| :----------------- | :--- | :----- | :--------- |
+| micro | 0.25 | 0.25 | 10GB |
+| small-1x (default) | 0.5 | 0.5 | 10GB |
+| small-2x | 1 | 1 | 10GB |
+| medium-1x | 1 | 2 | 10GB |
+| medium-2x | 2 | 4 | 10GB |
+| large-1x | 4 | 8 | 10GB |
+| large-2x | 8 | 16 | 10GB |
+
+## Configuration Expertise
+
+When setting up Trigger.dev projects, you will configure the `trigger.config.ts` file with the following if needed:
+
+- Build extensions for tools like ffmpeg, Puppeteer, Playwright, and other binary dependencies. An example:
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk";
+import { playwright } from "@trigger.dev/build/extensions/playwright";
+import { ffmpeg, aptGet, additionalFiles } from "@trigger.dev/build/extensions/core";
+import { prismaExtension } from "@trigger.dev/build/extensions/prisma";
+import { pythonExtension } from "@trigger.dev/python/extension";
+import { lightpanda } from "@trigger.dev/build/extensions/lightpanda";
+import { esbuildPlugin } from "@trigger.dev/build/extensions";
+import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin";
+
+export default defineConfig({
+ project: "",
+ machine: "small-1x", // optional, default is small-1x
+ build: {
+ extensions: [
+ playwright(),
+ ffmpeg(),
+ aptGet({ packages: ["curl"] }),
+ prismaExtension({
+ version: "5.19.0", // optional, we'll automatically detect the version if not provided
+ schema: "prisma/schema.prisma",
+ }),
+ pythonExtension(),
+ lightpanda(),
+ esbuildPlugin(
+ sentryEsbuildPlugin({
+ org: process.env.SENTRY_ORG,
+ project: process.env.SENTRY_PROJECT,
+ authToken: process.env.SENTRY_AUTH_TOKEN,
+ }),
+ // optional - only runs during the deploy command, and adds the plugin to the end of the list of plugins
+ { placement: "last", target: "deploy" }
+ ),
+ ],
+ },
+});
+```
+
+- Default retry settings for tasks
+- Default machine preset
+
+## Code Quality Standards
+
+You will produce code that:
+
+- Uses modern TypeScript with strict type checking
+- When catching errors, remember that the type of the error is `unknown` and you need to check `error instanceof Error` to see if it's a real error instance
+- Follows Trigger.dev's recommended project structure
+- Avoids going overboard with error handling
+- Includes some inline documentation for complex logic
+- Uses descriptive task IDs following the pattern: 'domain.action.target'
diff --git a/rules/4.0.0/config.md b/rules/4.0.0/config.md
new file mode 100644
index 0000000000..33b68554f1
--- /dev/null
+++ b/rules/4.0.0/config.md
@@ -0,0 +1,346 @@
+# Trigger.dev Configuration (v4)
+
+**Complete guide to configuring `trigger.config.ts` with build extensions**
+
+## Basic Configuration
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk";
+
+export default defineConfig({
+ project: "", // Required: Your project reference
+ dirs: ["./trigger"], // Task directories
+ runtime: "node", // "node", "node-22", or "bun"
+ logLevel: "info", // "debug", "info", "warn", "error"
+
+ // Default retry settings
+ retries: {
+ enabledInDev: false,
+ default: {
+ maxAttempts: 3,
+ minTimeoutInMs: 1000,
+ maxTimeoutInMs: 10000,
+ factor: 2,
+ randomize: true,
+ },
+ },
+
+ // Build configuration
+ build: {
+ autoDetectExternal: true,
+ keepNames: true,
+ minify: false,
+ extensions: [], // Build extensions go here
+ },
+
+ // Global lifecycle hooks
+ onStart: async ({ payload, ctx }) => {
+ console.log("Global task start");
+ },
+ onSuccess: async ({ payload, output, ctx }) => {
+ console.log("Global task success");
+ },
+ onFailure: async ({ payload, error, ctx }) => {
+ console.log("Global task failure");
+ },
+});
+```
+
+## Build Extensions
+
+### Database & ORM
+
+#### Prisma
+
+```ts
+import { prismaExtension } from "@trigger.dev/build/extensions/prisma";
+
+extensions: [
+ prismaExtension({
+ schema: "prisma/schema.prisma",
+ version: "5.19.0", // Optional: specify version
+ migrate: true, // Run migrations during build
+ directUrlEnvVarName: "DIRECT_DATABASE_URL",
+ typedSql: true, // Enable TypedSQL support
+ }),
+];
+```
+
+#### TypeScript Decorators (for TypeORM)
+
+```ts
+import { emitDecoratorMetadata } from "@trigger.dev/build/extensions/typescript";
+
+extensions: [
+ emitDecoratorMetadata(), // Enables decorator metadata
+];
+```
+
+### Scripting Languages
+
+#### Python
+
+```ts
+import { pythonExtension } from "@trigger.dev/build/extensions/python";
+
+extensions: [
+ pythonExtension({
+ scripts: ["./python/**/*.py"], // Copy Python files
+ requirementsFile: "./requirements.txt", // Install packages
+ devPythonBinaryPath: ".venv/bin/python", // Dev mode binary
+ }),
+];
+
+// Usage in tasks
+const result = await python.runInline(`print("Hello, world!")`);
+const output = await python.runScript("./python/script.py", ["arg1"]);
+```
+
+### Browser Automation
+
+#### Playwright
+
+```ts
+import { playwright } from "@trigger.dev/build/extensions/playwright";
+
+extensions: [
+ playwright({
+ browsers: ["chromium", "firefox", "webkit"], // Default: ["chromium"]
+ headless: true, // Default: true
+ }),
+];
+```
+
+#### Puppeteer
+
+```ts
+import { puppeteer } from "@trigger.dev/build/extensions/puppeteer";
+
+extensions: [puppeteer()];
+
+// Environment variable needed:
+// PUPPETEER_EXECUTABLE_PATH: "/usr/bin/google-chrome-stable"
+```
+
+#### Lightpanda
+
+```ts
+import { lightpanda } from "@trigger.dev/build/extensions/lightpanda";
+
+extensions: [
+ lightpanda({
+ version: "latest", // or "nightly"
+ disableTelemetry: false,
+ }),
+];
+```
+
+### Media Processing
+
+#### FFmpeg
+
+```ts
+import { ffmpeg } from "@trigger.dev/build/extensions/core";
+
+extensions: [
+ ffmpeg({ version: "7" }), // Static build, or omit for Debian version
+];
+
+// Automatically sets FFMPEG_PATH and FFPROBE_PATH
+// Add fluent-ffmpeg to external packages if using
+```
+
+#### Audio Waveform
+
+```ts
+import { audioWaveform } from "@trigger.dev/build/extensions/audioWaveform";
+
+extensions: [
+ audioWaveform(), // Installs Audio Waveform 1.1.0
+];
+```
+
+### System & Package Management
+
+#### System Packages (apt-get)
+
+```ts
+import { aptGet } from "@trigger.dev/build/extensions/core";
+
+extensions: [
+ aptGet({
+ packages: ["ffmpeg", "imagemagick", "curl=7.68.0-1"], // Can specify versions
+ }),
+];
+```
+
+#### Additional NPM Packages
+
+Only use this for installing CLI tools, NOT packages you import in your code.
+
+```ts
+import { additionalPackages } from "@trigger.dev/build/extensions/core";
+
+extensions: [
+ additionalPackages({
+ packages: ["wrangler"], // CLI tools and specific versions
+ }),
+];
+```
+
+#### Additional Files
+
+```ts
+import { additionalFiles } from "@trigger.dev/build/extensions/core";
+
+extensions: [
+ additionalFiles({
+ files: ["wrangler.toml", "./assets/**", "./fonts/**"], // Glob patterns supported
+ }),
+];
+```
+
+### Environment & Build Tools
+
+#### Environment Variable Sync
+
+```ts
+import { syncEnvVars } from "@trigger.dev/build/extensions/core";
+
+extensions: [
+ syncEnvVars(async (ctx) => {
+ // ctx contains: environment, projectRef, env
+ return [
+ { name: "SECRET_KEY", value: await getSecret(ctx.environment) },
+ { name: "API_URL", value: ctx.environment === "prod" ? "api.prod.com" : "api.dev.com" },
+ ];
+ }),
+];
+```
+
+#### ESBuild Plugins
+
+```ts
+import { esbuildPlugin } from "@trigger.dev/build/extensions";
+import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin";
+
+extensions: [
+ esbuildPlugin(
+ sentryEsbuildPlugin({
+ org: process.env.SENTRY_ORG,
+ project: process.env.SENTRY_PROJECT,
+ authToken: process.env.SENTRY_AUTH_TOKEN,
+ }),
+ { placement: "last", target: "deploy" } // Optional config
+ ),
+];
+```
+
+## Custom Build Extensions
+
+```ts
+import { defineConfig } from "@trigger.dev/sdk";
+
+const customExtension = {
+ name: "my-custom-extension",
+
+ externalsForTarget: (target) => {
+ return ["some-native-module"]; // Add external dependencies
+ },
+
+ onBuildStart: async (context) => {
+ console.log(`Build starting for ${context.target}`);
+ // Register esbuild plugins, modify build context
+ },
+
+ onBuildComplete: async (context, manifest) => {
+ console.log("Build complete, adding layers");
+ // Add build layers, modify deployment
+ context.addLayer({
+ id: "my-layer",
+ files: [{ source: "./custom-file", destination: "/app/custom" }],
+ commands: ["chmod +x /app/custom"],
+ });
+ },
+};
+
+export default defineConfig({
+ project: "my-project",
+ build: {
+ extensions: [customExtension],
+ },
+});
+```
+
+## Advanced Configuration
+
+### Telemetry
+
+```ts
+import { PrismaInstrumentation } from "@prisma/instrumentation";
+import { OpenAIInstrumentation } from "@langfuse/openai";
+
+export default defineConfig({
+ // ... other config
+ telemetry: {
+ instrumentations: [new PrismaInstrumentation(), new OpenAIInstrumentation()],
+ exporters: [customExporter], // Optional custom exporters
+ },
+});
+```
+
+### Machine & Performance
+
+```ts
+export default defineConfig({
+ // ... other config
+ defaultMachine: "large-1x", // Default machine for all tasks
+ maxDuration: 300, // Default max duration (seconds)
+ enableConsoleLogging: true, // Console logging in development
+});
+```
+
+## Common Extension Combinations
+
+### Full-Stack Web App
+
+```ts
+extensions: [
+ prismaExtension({ schema: "prisma/schema.prisma", migrate: true }),
+ additionalFiles({ files: ["./public/**", "./assets/**"] }),
+ syncEnvVars(async (ctx) => [...envVars]),
+];
+```
+
+### AI/ML Processing
+
+```ts
+extensions: [
+ pythonExtension({
+ scripts: ["./ai/**/*.py"],
+ requirementsFile: "./requirements.txt",
+ }),
+ ffmpeg({ version: "7" }),
+ additionalPackages({ packages: ["wrangler"] }),
+];
+```
+
+### Web Scraping
+
+```ts
+extensions: [
+ playwright({ browsers: ["chromium"] }),
+ puppeteer(),
+ additionalFiles({ files: ["./selectors.json", "./proxies.txt"] }),
+];
+```
+
+## Best Practices
+
+- **Use specific versions**: Pin extension versions for reproducible builds
+- **External packages**: Add modules with native addons to the `build.external` array
+- **Environment sync**: Use `syncEnvVars` for dynamic secrets
+- **File paths**: Use glob patterns for flexible file inclusion
+- **Debug builds**: Use `--log-level debug --dry-run` for troubleshooting
+
+Extensions only affect deployment, not local development. Use `external` array for packages that shouldn't be bundled.
diff --git a/rules/4.0.0/realtime.md b/rules/4.0.0/realtime.md
new file mode 100644
index 0000000000..24cbb9aac0
--- /dev/null
+++ b/rules/4.0.0/realtime.md
@@ -0,0 +1,272 @@
+# Trigger.dev Realtime (v4)
+
+**Real-time monitoring and updates for runs**
+
+## Core Concepts
+
+Realtime allows you to:
+
+- Subscribe to run status changes, metadata updates, and streams
+- Build real-time dashboards and UI updates
+- Monitor task progress from frontend and backend
+
+## Authentication
+
+### Public Access Tokens
+
+```ts
+import { auth } from "@trigger.dev/sdk";
+
+// Read-only token for specific runs
+const publicToken = await auth.createPublicToken({
+ scopes: {
+ read: {
+ runs: ["run_123", "run_456"],
+ tasks: ["my-task-1", "my-task-2"],
+ },
+ },
+ expirationTime: "1h", // Default: 15 minutes
+});
+```
+
+### Trigger Tokens (Frontend only)
+
+```ts
+// Single-use token for triggering tasks
+const triggerToken = await auth.createTriggerPublicToken("my-task", {
+ expirationTime: "30m",
+});
+```
+
+## Backend Usage
+
+### Subscribe to Runs
+
+```ts
+import { runs, tasks } from "@trigger.dev/sdk";
+
+// Trigger and subscribe
+const handle = await tasks.trigger("my-task", { data: "value" });
+
+// Subscribe to specific run
+for await (const run of runs.subscribeToRun(handle.id)) {
+ console.log(`Status: ${run.status}, Progress: ${run.metadata?.progress}`);
+ if (run.status === "COMPLETED") break;
+}
+
+// Subscribe to runs with tag
+for await (const run of runs.subscribeToRunsWithTag("user-123")) {
+ console.log(`Tagged run ${run.id}: ${run.status}`);
+}
+
+// Subscribe to batch
+for await (const run of runs.subscribeToBatch(batchId)) {
+ console.log(`Batch run ${run.id}: ${run.status}`);
+}
+```
+
+### Streams
+
+```ts
+import { task, metadata } from "@trigger.dev/sdk";
+
+// Task that streams data
+export type STREAMS = {
+ openai: OpenAI.ChatCompletionChunk;
+};
+
+export const streamingTask = task({
+ id: "streaming-task",
+ run: async (payload) => {
+ const completion = await openai.chat.completions.create({
+ model: "gpt-4",
+ messages: [{ role: "user", content: payload.prompt }],
+ stream: true,
+ });
+
+ // Register stream
+ const stream = await metadata.stream("openai", completion);
+
+ let text = "";
+ for await (const chunk of stream) {
+ text += chunk.choices[0]?.delta?.content || "";
+ }
+
+ return { text };
+ },
+});
+
+// Subscribe to streams
+for await (const part of runs.subscribeToRun(runId).withStreams()) {
+ switch (part.type) {
+ case "run":
+ console.log("Run update:", part.run.status);
+ break;
+ case "openai":
+ console.log("Stream chunk:", part.chunk);
+ break;
+ }
+}
+```
+
+## React Frontend Usage
+
+### Installation
+
+```bash
+npm add @trigger.dev/react-hooks
+```
+
+### Triggering Tasks
+
+```tsx
+"use client";
+import { useTaskTrigger, useRealtimeTaskTrigger } from "@trigger.dev/react-hooks";
+import type { myTask } from "../trigger/tasks";
+
+function TriggerComponent({ accessToken }: { accessToken: string }) {
+ // Basic trigger
+ const { submit, handle, isLoading } = useTaskTrigger("my-task", {
+ accessToken,
+ });
+
+ // Trigger with realtime updates
+ const {
+ submit: realtimeSubmit,
+ run,
+ isLoading: isRealtimeLoading,
+ } = useRealtimeTaskTrigger("my-task", { accessToken });
+
+  return (
+    <div>
+      <button onClick={() => submit({ data: "value" })} disabled={isLoading}>
+        Trigger Task
+      </button>
+      <button onClick={() => realtimeSubmit({ data: "value" })} disabled={isRealtimeLoading}>
+        Trigger with Realtime
+      </button>
+      {run && <p>Status: {run.status}</p>}
+    </div>
+  );
+}
+```
+
+### Subscribing to Runs
+
+```tsx
+"use client";
+import { useRealtimeRun, useRealtimeRunsWithTag } from "@trigger.dev/react-hooks";
+import type { myTask } from "../trigger/tasks";
+
+function SubscribeComponent({ runId, accessToken }: { runId: string; accessToken: string }) {
+ // Subscribe to specific run
+ const { run, error } = useRealtimeRun(runId, {
+ accessToken,
+ onComplete: (run) => {
+ console.log("Task completed:", run.output);
+ },
+ });
+
+ // Subscribe to tagged runs
+ const { runs } = useRealtimeRunsWithTag("user-123", { accessToken });
+
+  if (error) return <div>Error: {error.message}</div>;
+  if (!run) return <div>Loading...</div>;
+
+  return (
+    <div>
+      <div>Status: {run.status}</div>
+      <div>Progress: {run.metadata?.progress || 0}%</div>
+      {run.output && <div>Result: {JSON.stringify(run.output)}</div>}
+
+      <h3>Tagged Runs:</h3>
+      {runs.map((r) => (
+        <div key={r.id}>
+          {r.id}: {r.status}
+        </div>
+      ))}
+    </div>
+  );
+}
+```
+
+### Streams with React
+
+```tsx
+"use client";
+import { useRealtimeRunWithStreams } from "@trigger.dev/react-hooks";
+import type { streamingTask, STREAMS } from "../trigger/tasks";
+
+function StreamComponent({ runId, accessToken }: { runId: string; accessToken: string }) {
+ const { run, streams } = useRealtimeRunWithStreams(runId, {
+ accessToken,
+ });
+
+ const text = streams.openai
+ .filter((chunk) => chunk.choices[0]?.delta?.content)
+ .map((chunk) => chunk.choices[0].delta.content)
+ .join("");
+
+  return (
+    <div>
+      <div>Status: {run?.status}</div>
+      <div>Streamed Text: {text}</div>
+    </div>
+  );
+ );
+}
+```
+
+### Wait Tokens
+
+```tsx
+"use client";
+import { useWaitToken } from "@trigger.dev/react-hooks";
+
+function WaitTokenComponent({ tokenId, accessToken }: { tokenId: string; accessToken: string }) {
+ const { complete } = useWaitToken(tokenId, { accessToken });
+
+  return <button onClick={() => complete({ approved: true })}>Approve</button>;
+}
+```
+
+### SWR Hooks (Fetch Once)
+
+```tsx
+"use client";
+import { useRun } from "@trigger.dev/react-hooks";
+import type { myTask } from "../trigger/tasks";
+
+function SWRComponent({ runId, accessToken }: { runId: string; accessToken: string }) {
+ const { run, error, isLoading } = useRun(runId, {
+ accessToken,
+ refreshInterval: 0, // Disable polling (recommended)
+ });
+
+  if (isLoading) return <div>Loading...</div>;
+  if (error) return <div>Error: {error.message}</div>;
+
+  return <div>Run: {run?.status}</div>;
+}
+```
+
+## Run Object Properties
+
+Key properties available in run subscriptions:
+
+- `id`: Unique run identifier
+- `status`: `QUEUED`, `EXECUTING`, `COMPLETED`, `FAILED`, `CANCELED`, etc.
+- `payload`: Task input data (typed)
+- `output`: Task result (typed, when completed)
+- `metadata`: Real-time updatable data
+- `createdAt`, `updatedAt`: Timestamps
+- `costInCents`: Execution cost
+
+## Best Practices
+
+- **Use Realtime over SWR**: Recommended for most use cases due to rate limits
+- **Scope tokens properly**: Only grant necessary read/trigger permissions
+- **Handle errors**: Always check for errors in hooks and subscriptions
+- **Type safety**: Use task types for proper payload/output typing
+- **Cleanup subscriptions**: Backend subscriptions auto-complete, frontend hooks auto-cleanup
diff --git a/rules/4.0.0/scheduled-tasks.md b/rules/4.0.0/scheduled-tasks.md
new file mode 100644
index 0000000000..7d46a45ad9
--- /dev/null
+++ b/rules/4.0.0/scheduled-tasks.md
@@ -0,0 +1,117 @@
+# Scheduled tasks (cron)
+
+Recurring tasks using cron. For one-off future runs, use the **delay** option.
+
+## Define a scheduled task
+
+```ts
+import { schedules } from "@trigger.dev/sdk";
+
+export const task = schedules.task({
+ id: "first-scheduled-task",
+ run: async (payload) => {
+ payload.timestamp; // Date (scheduled time, UTC)
+ payload.lastTimestamp; // Date | undefined
+ payload.timezone; // IANA, e.g. "America/New_York" (default "UTC")
+ payload.scheduleId; // string
+ payload.externalId; // string | undefined
+ payload.upcoming; // Date[]
+
+ payload.timestamp.toLocaleString("en-US", { timeZone: payload.timezone });
+ },
+});
+```
+
+> Scheduled tasks need at least one schedule attached to run.
+
+## Attach schedules
+
+**Declarative (sync on dev/deploy):**
+
+```ts
+schedules.task({
+ id: "every-2h",
+ cron: "0 */2 * * *", // UTC
+ run: async () => {},
+});
+
+schedules.task({
+ id: "tokyo-5am",
+ cron: { pattern: "0 5 * * *", timezone: "Asia/Tokyo", environments: ["PRODUCTION", "STAGING"] },
+ run: async () => {},
+});
+```
+
+**Imperative (SDK or dashboard):**
+
+```ts
+await schedules.create({
+ task: task.id,
+ cron: "0 0 * * *",
+ timezone: "America/New_York", // DST-aware
+ externalId: "user_123",
+ deduplicationKey: "user_123-daily", // updates if reused
+});
+```
+
+### Dynamic / multi-tenant example
+
+```ts
+// /trigger/reminder.ts
+export const reminderTask = schedules.task({
+ id: "todo-reminder",
+ run: async (p) => {
+ if (!p.externalId) throw new Error("externalId is required");
+ const user = await db.getUser(p.externalId);
+ await sendReminderEmail(user);
+ },
+});
+```
+
+```ts
+// app/reminders/route.ts
+export async function POST(req: Request) {
+ const data = await req.json();
+ return Response.json(
+ await schedules.create({
+ task: reminderTask.id,
+ cron: "0 8 * * *",
+ timezone: data.timezone,
+ externalId: data.userId,
+ deduplicationKey: `${data.userId}-reminder`,
+ })
+ );
+}
+```
+
+## Cron syntax (no seconds)
+
+```
+* * * * *
+| | | | └ day of week (0–7 or 1L–7L; 0/7=Sun; L=last)
+| | | └── month (1–12)
+| | └──── day of month (1–31 or L)
+| └────── hour (0–23)
+└──────── minute (0–59)
+```
+
+## When schedules won't trigger
+
+- **Dev:** only when the dev CLI is running.
+- **Staging/Production:** only for tasks in the **latest deployment**.
+
+## SDK management (quick refs)
+
+```ts
+await schedules.retrieve(id);
+await schedules.list();
+await schedules.update(id, { cron: "0 0 1 * *", externalId: "ext", deduplicationKey: "key" });
+await schedules.deactivate(id);
+await schedules.activate(id);
+await schedules.del(id);
+await schedules.timezones(); // list of IANA timezones
+```
+
+## Dashboard
+
+Create/attach schedules visually (Task, Cron pattern, Timezone, Optional: External ID, Dedup key, Environments). Test scheduled tasks from the **Test** page.
diff --git a/rules/manifest.json b/rules/manifest.json
new file mode 100644
index 0000000000..a5b205920d
--- /dev/null
+++ b/rules/manifest.json
@@ -0,0 +1,56 @@
+{
+ "name": "trigger.dev",
+ "description": "Trigger.dev coding agent rules",
+ "currentVersion": "4.0.0",
+ "versions": {
+ "4.0.0": {
+ "options": [
+ {
+ "name": "basic",
+ "title": "Basic tasks",
+ "label": "Only the most important rules for writing basic Trigger.dev tasks",
+ "path": "4.0.0/basic-tasks.md",
+ "tokens": 1200
+ },
+ {
+ "name": "advanced-tasks",
+ "title": "Advanced tasks",
+ "label": "Comprehensive rules to help you write advanced Trigger.dev tasks",
+ "path": "4.0.0/advanced-tasks.md",
+ "tokens": 3000
+ },
+ {
+ "name": "config",
+ "title": "Configuring Trigger.dev",
+ "label": "Configure your Trigger.dev project with a trigger.config.ts file",
+ "path": "4.0.0/config.md",
+ "tokens": 1900,
+ "applyTo": "**/trigger.config.ts"
+ },
+ {
+ "name": "scheduled-tasks",
+ "title": "Scheduled Tasks",
+ "label": "How to write and use scheduled Trigger.dev tasks",
+ "path": "4.0.0/scheduled-tasks.md",
+ "tokens": 780
+ },
+ {
+ "name": "realtime",
+ "title": "Realtime",
+ "label": "How to use realtime in your Trigger.dev tasks and your frontend",
+ "path": "4.0.0/realtime.md",
+ "tokens": 1700
+ },
+ {
+ "name": "claude-code-agent",
+ "title": "Claude Code Agent",
+ "label": "An expert Trigger.dev developer as a Claude Code subagent",
+ "path": "4.0.0/claude-code-agent.md",
+ "tokens": 2700,
+ "client": "claude-code",
+ "installStrategy": "claude-code-subagent"
+ }
+ ]
+ }
+ }
+}
\ No newline at end of file