diff --git a/.changeset/brave-forks-compare.md b/.changeset/brave-forks-compare.md new file mode 100644 index 0000000000..906b5bef05 --- /dev/null +++ b/.changeset/brave-forks-compare.md @@ -0,0 +1,7 @@ +--- +"@trigger.dev/react-hooks": minor +"@trigger.dev/sdk": minor +"@trigger.dev/core": minor +--- + +Access run status updates in realtime, from your server or from your frontend diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cc1e1f3808..66cdbc0abe 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,7 +17,7 @@ concurrency: jobs: release: name: 🦋 Changesets Release - runs-on: buildjet-8vcpu-ubuntu-2204 + runs-on: ubuntu-latest if: github.repository == 'triggerdotdev/trigger.dev' outputs: published: ${{ steps.changesets.outputs.published }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 93dd07deda..1d75f3488c 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -6,7 +6,7 @@ on: jobs: unitTests: name: "🧪 Unit Tests" - runs-on: buildjet-8vcpu-ubuntu-2204 + runs-on: buildjet-16vcpu-ubuntu-2204 steps: - name: ⬇️ Checkout repo uses: actions/checkout@v4 @@ -30,5 +30,15 @@ jobs: - name: 📀 Generate Prisma Client run: pnpm run generate - - name: 🧪 Run Unit Tests - run: pnpm run test + - name: 🧪 Run Webapp Unit Tests + run: pnpm run test --filter webapp + env: + DATABASE_URL: postgresql://postgres:postgres@localhost:5432/postgres + DIRECT_URL: postgresql://postgres:postgres@localhost:5432/postgres + SESSION_SECRET: "secret" + MAGIC_LINK_SECRET: "secret" + ENCRYPTION_KEY: "secret" + + + - name: 🧪 Run Internal Unit Tests + run: pnpm run test --filter "@internal/*" diff --git a/.npmrc b/.npmrc index 6da70cea9b..8dbd39f189 100644 --- a/.npmrc +++ b/.npmrc @@ -1,2 +1,3 @@ link-workspace-packages=false -public-hoist-pattern[]=*prisma* \ No newline at end of file +public-hoist-pattern[]=*prisma* +prefer-workspace-packages=true \ No newline at end of file diff --git a/.vscode/extensions.json b/.vscode/extensions.json index e4860ef70c..ec85d436e9 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,8 +1,4 @@ { - "recommendations": [ - "denoland.vscode-deno" - ], - "unwantedRecommendations": [ - - ] + "recommendations": ["bierner.comment-tagged-templates"], + "unwantedRecommendations": [] } diff --git a/apps/docker-provider/tsconfig.json b/apps/docker-provider/tsconfig.json index 3a866dd2b8..c09aca4c1f 100644 --- a/apps/docker-provider/tsconfig.json +++ b/apps/docker-provider/tsconfig.json @@ -6,10 +6,6 @@ "forceConsistentCasingInFileNames": true, "resolveJsonModule": true, "strict": true, - "skipLibCheck": true, - "paths": { - "@trigger.dev/core/v3": ["../../packages/core/src/v3"], - "@trigger.dev/core/v3/*": ["../../packages/core/src/v3/*"] - } + "skipLibCheck": true } } diff --git a/apps/kubernetes-provider/tsconfig.json b/apps/kubernetes-provider/tsconfig.json index 661823ef74..0c3704369d 100644 --- a/apps/kubernetes-provider/tsconfig.json +++ b/apps/kubernetes-provider/tsconfig.json @@ -6,10 +6,6 @@ "forceConsistentCasingInFileNames": true, "resolveJsonModule": true, "strict": true, - "skipLibCheck": true, - "paths": { - "@trigger.dev/core/v3": ["../../packages/core/src/v3"], - "@trigger.dev/core/v3/*": ["../../packages/core/src/v3/*"] - } + "skipLibCheck": true } } diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index e60a257257..eab78c92af 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts 
@@ -31,7 +31,7 @@ const EnvironmentSchema = z.object({ REMIX_APP_PORT: z.string().optional(), LOGIN_ORIGIN: z.string().default("http://localhost:3030"), APP_ORIGIN: z.string().default("http://localhost:3030"), - ELECTRIC_ORIGIN: z.string(), + ELECTRIC_ORIGIN: z.string().default("http://localhost:3060"), APP_ENV: z.string().default(process.env.NODE_ENV), SERVICE_NAME: z.string().default("trigger.dev webapp"), SECRET_STORE: SecretStoreOptionsSchema.default("DATABASE"), @@ -103,6 +103,25 @@ const EnvironmentSchema = z.object({ API_RATE_LIMIT_REFILL_RATE: z.coerce.number().int().default(250), // refix 250 tokens every 10 seconds API_RATE_LIMIT_REQUEST_LOGS_ENABLED: z.string().default("0"), API_RATE_LIMIT_REJECTION_LOGS_ENABLED: z.string().default("1"), + API_RATE_LIMIT_LIMITER_LOGS_ENABLED: z.string().default("0"), + + API_RATE_LIMIT_JWT_WINDOW: z.string().default("1m"), + API_RATE_LIMIT_JWT_TOKENS: z.coerce.number().int().default(60), + + //Realtime rate limiting + /** + * @example "60s" + * @example "1m" + * @example "1h" + * @example "1d" + * @example "1000ms" + * @example "1000s" + */ + REALTIME_RATE_LIMIT_WINDOW: z.string().default("1m"), + REALTIME_RATE_LIMIT_TOKENS: z.coerce.number().int().default(100), + REALTIME_RATE_LIMIT_REQUEST_LOGS_ENABLED: z.string().default("0"), + REALTIME_RATE_LIMIT_REJECTION_LOGS_ENABLED: z.string().default("1"), + REALTIME_RATE_LIMIT_LIMITER_LOGS_ENABLED: z.string().default("0"), //Ingesting event rate limit INGEST_EVENT_RATE_LIMIT_WINDOW: z.string().default("60s"), diff --git a/apps/webapp/app/models/taskRunTag.server.ts b/apps/webapp/app/models/taskRunTag.server.ts index fcb3b2076d..f676b99a0c 100644 --- a/apps/webapp/app/models/taskRunTag.server.ts +++ b/apps/webapp/app/models/taskRunTag.server.ts @@ -1,7 +1,7 @@ import { prisma } from "~/db.server"; import { generateFriendlyId } from "~/v3/friendlyIdentifiers"; -export const MAX_TAGS_PER_RUN = 5; +export const MAX_TAGS_PER_RUN = 10; export async function createTag({ tag, projectId }: { tag: string; projectId: string }) { if (tag.trim().length === 0) return; diff --git a/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts index dd9d024609..daea48cf0c 100644 --- a/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts @@ -62,8 +62,7 @@ type CommonRelatedRun = Prisma.Result< export class ApiRetrieveRunPresenter extends BasePresenter { public async call( friendlyId: string, - env: AuthenticatedEnvironment, - showSecretDetails: boolean + env: AuthenticatedEnvironment ): Promise { return this.traceWithEnv("call", env, async (span) => { const taskRun = await this._replica.taskRun.findFirst({ @@ -72,11 +71,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { runtimeEnvironmentId: env.id, }, include: { - attempts: { - orderBy: { - createdAt: "desc", - }, - }, + attempts: true, lockedToVersion: true, schedule: true, tags: true, @@ -111,50 +106,48 @@ export class ApiRetrieveRunPresenter extends BasePresenter { let $output: any; let $outputPresignedUrl: string | undefined; - if (showSecretDetails) { - const payloadPacket = await conditionallyImportPacket({ - data: taskRun.payload, - dataType: taskRun.payloadType, - }); + const payloadPacket = await conditionallyImportPacket({ + data: taskRun.payload, + dataType: taskRun.payloadType, + }); - if ( - payloadPacket.dataType === "application/store" && - typeof payloadPacket.data === "string" - ) { - 
$payloadPresignedUrl = await generatePresignedUrl( - env.project.externalRef, - env.slug, - payloadPacket.data, - "GET" - ); - } else { - $payload = await parsePacket(payloadPacket); - } + if ( + payloadPacket.dataType === "application/store" && + typeof payloadPacket.data === "string" + ) { + $payloadPresignedUrl = await generatePresignedUrl( + env.project.externalRef, + env.slug, + payloadPacket.data, + "GET" + ); + } else { + $payload = await parsePacket(payloadPacket); + } - if (taskRun.status === "COMPLETED_SUCCESSFULLY") { - const completedAttempt = taskRun.attempts.find( - (a) => a.status === "COMPLETED" && typeof a.output !== null - ); + if (taskRun.status === "COMPLETED_SUCCESSFULLY") { + const completedAttempt = taskRun.attempts.find( + (a) => a.status === "COMPLETED" && typeof a.output !== null + ); - if (completedAttempt && completedAttempt.output) { - const outputPacket = await conditionallyImportPacket({ - data: completedAttempt.output, - dataType: completedAttempt.outputType, - }); + if (completedAttempt && completedAttempt.output) { + const outputPacket = await conditionallyImportPacket({ + data: completedAttempt.output, + dataType: completedAttempt.outputType, + }); - if ( - outputPacket.dataType === "application/store" && - typeof outputPacket.data === "string" - ) { - $outputPresignedUrl = await generatePresignedUrl( - env.project.externalRef, - env.slug, - outputPacket.data, - "GET" - ); - } else { - $output = await parsePacket(outputPacket); - } + if ( + outputPacket.dataType === "application/store" && + typeof outputPacket.data === "string" + ) { + $outputPresignedUrl = await generatePresignedUrl( + env.project.externalRef, + env.slug, + outputPacket.data, + "GET" + ); + } else { + $output = await parsePacket(outputPacket); } } } @@ -165,6 +158,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { payloadPresignedUrl: $payloadPresignedUrl, output: $output, outputPresignedUrl: $outputPresignedUrl, + error: ApiRetrieveRunPresenter.apiErrorFromError(taskRun.error), schedule: taskRun.schedule ? { id: taskRun.schedule.friendlyId, @@ -179,17 +173,9 @@ export class ApiRetrieveRunPresenter extends BasePresenter { }, } : undefined, - attempts: !showSecretDetails - ? [] - : taskRun.attempts.map((a) => ({ - id: a.friendlyId, - status: ApiRetrieveRunPresenter.apiStatusFromAttemptStatus(a.status), - createdAt: a.createdAt ?? undefined, - updatedAt: a.updatedAt ?? undefined, - startedAt: a.startedAt ?? undefined, - completedAt: a.completedAt ?? undefined, - error: ApiRetrieveRunPresenter.apiErrorFromError(a.error), - })), + // We're removing attempts from the API + attemptCount: taskRun.attempts.length, + attempts: [], relatedRuns: { root: taskRun.rootTaskRun ? 
await createCommonRunStructure(taskRun.rootTaskRun) diff --git a/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts index 43400a8bb7..745db9fdf0 100644 --- a/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts @@ -29,7 +29,7 @@ const CoercedDate = z.preprocess((arg) => { return arg; }, z.date().optional()); -const SearchParamsSchema = z.object({ +export const ApiRunListSearchParams = z.object({ "page[size]": z.coerce.number().int().positive().min(1).max(100).optional(), "page[after]": z.string().optional(), "page[before]": z.string().optional(), @@ -121,45 +121,31 @@ const SearchParamsSchema = z.object({ "filter[createdAt][period]": z.string().optional(), }); -type SearchParamsSchema = z.infer; +type ApiRunListSearchParams = z.infer; export class ApiRunListPresenter extends BasePresenter { public async call( project: Project, - searchParams: URLSearchParams, + searchParams: ApiRunListSearchParams, environment?: RuntimeEnvironment ): Promise { return this.trace("call", async (span) => { - const rawSearchParams = Object.fromEntries(searchParams.entries()); - const $searchParams = SearchParamsSchema.safeParse(rawSearchParams); - - if (!$searchParams.success) { - logger.error("Invalid search params", { - searchParams: rawSearchParams, - errors: $searchParams.error.errors, - }); - - throw fromZodError($searchParams.error); - } - - logger.debug("Valid search params", { searchParams: $searchParams.data }); - const options: RunListOptions = { projectId: project.id, }; // pagination - if ($searchParams.data["page[size]"]) { - options.pageSize = $searchParams.data["page[size]"]; + if (searchParams["page[size]"]) { + options.pageSize = searchParams["page[size]"]; } - if ($searchParams.data["page[after]"]) { - options.cursor = $searchParams.data["page[after]"]; + if (searchParams["page[after]"]) { + options.cursor = searchParams["page[after]"]; options.direction = "forward"; } - if ($searchParams.data["page[before]"]) { - options.cursor = $searchParams.data["page[before]"]; + if (searchParams["page[before]"]) { + options.cursor = searchParams["page[before]"]; options.direction = "backward"; } @@ -167,12 +153,12 @@ export class ApiRunListPresenter extends BasePresenter { if (environment) { options.environments = [environment.id]; } else { - if ($searchParams.data["filter[env]"]) { + if (searchParams["filter[env]"]) { const environments = await this._prisma.runtimeEnvironment.findMany({ where: { projectId: project.id, slug: { - in: $searchParams.data["filter[env]"], + in: searchParams["filter[env]"], }, }, }); @@ -181,46 +167,46 @@ export class ApiRunListPresenter extends BasePresenter { } } - if ($searchParams.data["filter[status]"]) { - options.statuses = $searchParams.data["filter[status]"].flatMap((status) => + if (searchParams["filter[status]"]) { + options.statuses = searchParams["filter[status]"].flatMap((status) => ApiRunListPresenter.apiStatusToRunStatuses(status) ); } - if ($searchParams.data["filter[taskIdentifier]"]) { - options.tasks = $searchParams.data["filter[taskIdentifier]"]; + if (searchParams["filter[taskIdentifier]"]) { + options.tasks = searchParams["filter[taskIdentifier]"]; } - if ($searchParams.data["filter[version]"]) { - options.versions = $searchParams.data["filter[version]"]; + if (searchParams["filter[version]"]) { + options.versions = searchParams["filter[version]"]; } - if ($searchParams.data["filter[tag]"]) { - options.tags = 
$searchParams.data["filter[tag]"]; + if (searchParams["filter[tag]"]) { + options.tags = searchParams["filter[tag]"]; } - if ($searchParams.data["filter[bulkAction]"]) { - options.bulkId = $searchParams.data["filter[bulkAction]"]; + if (searchParams["filter[bulkAction]"]) { + options.bulkId = searchParams["filter[bulkAction]"]; } - if ($searchParams.data["filter[schedule]"]) { - options.scheduleId = $searchParams.data["filter[schedule]"]; + if (searchParams["filter[schedule]"]) { + options.scheduleId = searchParams["filter[schedule]"]; } - if ($searchParams.data["filter[createdAt][from]"]) { - options.from = $searchParams.data["filter[createdAt][from]"].getTime(); + if (searchParams["filter[createdAt][from]"]) { + options.from = searchParams["filter[createdAt][from]"].getTime(); } - if ($searchParams.data["filter[createdAt][to]"]) { - options.to = $searchParams.data["filter[createdAt][to]"].getTime(); + if (searchParams["filter[createdAt][to]"]) { + options.to = searchParams["filter[createdAt][to]"].getTime(); } - if ($searchParams.data["filter[createdAt][period]"]) { - options.period = $searchParams.data["filter[createdAt][period]"]; + if (searchParams["filter[createdAt][period]"]) { + options.period = searchParams["filter[createdAt][period]"]; } - if (typeof $searchParams.data["filter[isTest]"] === "boolean") { - options.isTest = $searchParams.data["filter[isTest]"]; + if (typeof searchParams["filter[isTest]"] === "boolean") { + options.isTest = searchParams["filter[isTest]"]; } const presenter = new RunListPresenter(); diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug/route.tsx index dc61a77bd1..fd7ca385e3 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug/route.tsx @@ -10,7 +10,7 @@ import { useTypedMatchesData } from "~/hooks/useTypedMatchData"; import { useUser } from "~/hooks/useUser"; import { OrganizationsPresenter } from "~/presenters/OrganizationsPresenter.server"; import { getImpersonationId } from "~/services/impersonation.server"; -import { getCurrentPlan, getUsage } from "~/services/platform.v3.server"; +import { getCachedUsage, getCurrentPlan, getUsage } from "~/services/platform.v3.server"; import { requireUserId } from "~/services/session.server"; import { telemetry } from "~/services/telemetry.server"; import { organizationPath } from "~/utils/pathBuilder"; @@ -29,6 +29,27 @@ export function useCurrentPlan(matches?: UIMatch[]) { return data?.currentPlan; } +export const shouldRevalidate: ShouldRevalidateFunction = (params) => { + const { currentParams, nextParams } = params; + + const current = ParamsSchema.safeParse(currentParams); + const next = ParamsSchema.safeParse(nextParams); + + if (current.success && next.success) { + if (current.data.organizationSlug !== next.data.organizationSlug) { + return true; + } + if (current.data.projectParam !== next.data.projectParam) { + return true; + } + } + + // This prevents revalidation when there are search params changes + // IMPORTANT: If the loader function depends on search params, this should be updated + return params.currentUrl.pathname !== params.nextUrl.pathname; +}; + +// IMPORTANT: Make sure to update shouldRevalidate if this loader depends on search params export const loader = async ({ request, params }: LoaderFunctionArgs) => { const userId = await requireUserId(request); const impersonationId = await getImpersonationId(request); @@ -50,11 +71,17 @@ export 
const loader = async ({ request, params }: LoaderFunctionArgs) => { const firstDayOfMonth = new Date(); firstDayOfMonth.setUTCDate(1); firstDayOfMonth.setUTCHours(0, 0, 0, 0); - const tomorrow = new Date(); - tomorrow.setUTCDate(tomorrow.getDate() + 1); + + // Using the 1st day of next month means we get the usage for the current month + // and the cache key for getCachedUsage is stable over the month + const firstDayOfNextMonth = new Date(); + firstDayOfNextMonth.setUTCMonth(firstDayOfNextMonth.getUTCMonth() + 1); + firstDayOfNextMonth.setUTCDate(1); + firstDayOfNextMonth.setUTCHours(0, 0, 0, 0); + const [plan, usage] = await Promise.all([ getCurrentPlan(organization.id), - getUsage(organization.id, { from: firstDayOfMonth, to: tomorrow }), + getCachedUsage(organization.id, { from: firstDayOfMonth, to: firstDayOfNextMonth }), ]); let hasExceededFreeTier = false; @@ -103,23 +130,3 @@ export function ErrorBoundary() { ); } - -export const shouldRevalidate: ShouldRevalidateFunction = ({ - defaultShouldRevalidate, - currentParams, - nextParams, -}) => { - const current = ParamsSchema.safeParse(currentParams); - const next = ParamsSchema.safeParse(nextParams); - - if (current.success && next.success) { - if (current.data.organizationSlug !== next.data.organizationSlug) { - return true; - } - if (current.data.projectParam !== next.data.projectParam) { - return true; - } - } - - return defaultShouldRevalidate; -}; diff --git a/apps/webapp/app/routes/api.v1.auth.jwt.claims.ts b/apps/webapp/app/routes/api.v1.auth.jwt.claims.ts new file mode 100644 index 0000000000..0091078dbb --- /dev/null +++ b/apps/webapp/app/routes/api.v1.auth.jwt.claims.ts @@ -0,0 +1,19 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { json } from "@remix-run/server-runtime"; +import { authenticateApiRequest } from "~/services/apiAuth.server"; + +export async function action({ request }: LoaderFunctionArgs) { + // Next authenticate the request + const authenticationResult = await authenticateApiRequest(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + const claims = { + sub: authenticationResult.environment.id, + pub: true, + }; + + return json(claims); +} diff --git a/apps/webapp/app/routes/api.v1.auth.jwt.ts b/apps/webapp/app/routes/api.v1.auth.jwt.ts new file mode 100644 index 0000000000..e495c9b368 --- /dev/null +++ b/apps/webapp/app/routes/api.v1.auth.jwt.ts @@ -0,0 +1,46 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { json } from "@remix-run/server-runtime"; +import { authenticateApiRequest } from "~/services/apiAuth.server"; +import { z } from "zod"; +import { generateJWT as internal_generateJWT } from "@trigger.dev/core/v3"; + +const RequestBodySchema = z.object({ + claims: z + .object({ + scopes: z.array(z.string()).default([]), + }) + .optional(), + expirationTime: z.union([z.number(), z.string()]).optional(), +}); + +export async function action({ request }: LoaderFunctionArgs) { + // Next authenticate the request + const authenticationResult = await authenticateApiRequest(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing API key" }, { status: 401 }); + } + + const parsedBody = RequestBodySchema.safeParse(await request.json()); + + if (!parsedBody.success) { + return json( + { error: "Invalid request body", issues: parsedBody.error.issues }, + { status: 400 } + ); + } + + const claims = { + sub: authenticationResult.environment.id, + pub: 
true, + ...parsedBody.data.claims, + }; + + const jwt = await internal_generateJWT({ + secretKey: authenticationResult.apiKey, + payload: claims, + expirationTime: parsedBody.data.expirationTime ?? "1h", + }); + + return json({ token: jwt }); +} diff --git a/apps/webapp/app/routes/api.v1.integrations.$integrationSlug.connections.$connectionId.ts b/apps/webapp/app/routes/api.v1.integrations.$integrationSlug.connections.$connectionId.ts index 4ad3ec5357..56398b1e76 100644 --- a/apps/webapp/app/routes/api.v1.integrations.$integrationSlug.connections.$connectionId.ts +++ b/apps/webapp/app/routes/api.v1.integrations.$integrationSlug.connections.$connectionId.ts @@ -33,7 +33,7 @@ export async function loader({ request, params }: LoaderFunctionArgs) { id: parsedParams.data.connectionId, integration: { slug: parsedParams.data.integrationSlug, - organization: authenticatedEnv.organization, + organizationId: authenticatedEnv.organization.id, }, }, include: { diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts index 639338c8d7..1a9d850a98 100644 --- a/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts @@ -1,62 +1,36 @@ -import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; import { z } from "zod"; -import { ValidationError } from "zod-validation-error"; import { findProjectByRef } from "~/models/project.server"; -import { ApiRunListPresenter } from "~/presenters/v3/ApiRunListPresenter.server"; -import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; -import { apiCors } from "~/utils/apiCors"; +import { + ApiRunListPresenter, + ApiRunListSearchParams, +} from "~/presenters/v3/ApiRunListPresenter.server"; +import { createLoaderPATApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; const ParamsSchema = z.object({ projectRef: z.string(), }); -export async function loader({ request, params }: LoaderFunctionArgs) { - if (request.method.toUpperCase() === "OPTIONS") { - return apiCors(request, json({})); - } - - const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); - - if (!authenticationResult) { - return apiCors(request, json({ error: "Invalid or Missing API key" }, { status: 401 })); - } - - const $params = ParamsSchema.safeParse(params); - - if (!$params.success) { - return json({ error: "Invalid params" }, { status: 400 }); - } - - const project = await findProjectByRef($params.data.projectRef, authenticationResult.userId); - - if (!project) { - return json({ error: "Project not found" }, { status: 404 }); - } - - const url = new URL(request.url); - - const presenter = new ApiRunListPresenter(); +export const loader = createLoaderPATApiRoute( + { + params: ParamsSchema, + searchParams: ApiRunListSearchParams, + corsStrategy: "all", + }, + async ({ searchParams, params, authentication }) => { + const project = await findProjectByRef(params.projectRef, authentication.userId); + + if (!project) { + return json({ error: "Project not found" }, { status: 404 }); + } - try { - const result = await presenter.call(project, url.searchParams); + const presenter = new ApiRunListPresenter(); + const result = await presenter.call(project, searchParams); if (!result) { - return apiCors(request, json({ data: [] })); + return json({ data: [] }); } - return apiCors(request, json(result)); - } catch (error) { - if 
(error instanceof ValidationError) { - return apiCors( - request, - json({ error: "Query Error", details: error.details }, { status: 400 }) - ); - } else { - return apiCors( - request, - json({ error: error instanceof Error ? error.message : String(error) }, { status: 400 }) - ); - } + return json(result); } -} +); diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts index 86bf6226d9..b65e5b53ad 100644 --- a/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts +++ b/apps/webapp/app/routes/api.v1.runs.$runId.tags.ts @@ -89,6 +89,9 @@ export async function action({ request, params }: ActionFunctionArgs) { tags: { connect: tagIds.map((id) => ({ id })), }, + runTags: { + push: newTags, + }, }, }); diff --git a/apps/webapp/app/routes/api.v1.runs.$runParam.reschedule.ts b/apps/webapp/app/routes/api.v1.runs.$runParam.reschedule.ts index 117667d016..547d92fdff 100644 --- a/apps/webapp/app/routes/api.v1.runs.$runParam.reschedule.ts +++ b/apps/webapp/app/routes/api.v1.runs.$runParam.reschedule.ts @@ -62,11 +62,7 @@ export async function action({ request, params }: ActionFunctionArgs) { } const presenter = new ApiRetrieveRunPresenter(); - const result = await presenter.call( - updatedRun.friendlyId, - authenticationResult.environment, - true - ); + const result = await presenter.call(updatedRun.friendlyId, authenticationResult.environment); if (!result) { return json({ error: "Run not found" }, { status: 404 }); diff --git a/apps/webapp/app/routes/api.v1.runs.ts b/apps/webapp/app/routes/api.v1.runs.ts index b439c7755e..43bc617e2b 100644 --- a/apps/webapp/app/routes/api.v1.runs.ts +++ b/apps/webapp/app/routes/api.v1.runs.ts @@ -1,52 +1,29 @@ -import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; -import { ValidationError } from "zod-validation-error"; -import { ApiRunListPresenter } from "~/presenters/v3/ApiRunListPresenter.server"; -import { authenticateApiRequest } from "~/services/apiAuth.server"; -import { apiCors } from "~/utils/apiCors"; - -export async function loader({ request, params }: LoaderFunctionArgs) { - if (request.method.toUpperCase() === "OPTIONS") { - return apiCors(request, json({})); - } - - const authenticationResult = await authenticateApiRequest(request, { - allowPublicKey: false, - }); - - if (!authenticationResult) { - return apiCors(request, json({ error: "Invalid or Missing API key" }, { status: 401 })); - } - - const authenticatedEnv = authenticationResult.environment; - - const url = new URL(request.url); - - const presenter = new ApiRunListPresenter(); - - try { +import { + ApiRunListPresenter, + ApiRunListSearchParams, +} from "~/presenters/v3/ApiRunListPresenter.server"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; + +export const loader = createLoaderApiRoute( + { + searchParams: ApiRunListSearchParams, + allowJWT: true, + corsStrategy: "all", + authorization: { + action: "read", + resource: (_, searchParams) => ({ tasks: searchParams["filter[taskIdentifier]"] }), + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ searchParams, authentication }) => { + const presenter = new ApiRunListPresenter(); const result = await presenter.call( - authenticatedEnv.project, - url.searchParams, - authenticatedEnv + authentication.environment.project, + searchParams, + authentication.environment ); - if (!result) { - return apiCors(request, json({ data: [] })); - } - - return apiCors(request, 
json(result)); - } catch (error) { - if (error instanceof ValidationError) { - return apiCors( - request, - json({ error: "Query Error", details: error.details }, { status: 400 }) - ); - } else { - return apiCors( - request, - json({ error: error instanceof Error ? error.message : String(error) }, { status: 400 }) - ); - } + return json(result); } -} +); diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.batch.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.batch.ts index 04f452d614..13c3f07bac 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.batch.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.batch.ts @@ -104,10 +104,20 @@ export async function action({ request, params }: ActionFunctionArgs) { return json({ error: "Task not found" }, { status: 404 }); } - return json({ - batchId: result.batch.friendlyId, - runs: result.runs, - }); + return json( + { + batchId: result.batch.friendlyId, + runs: result.runs, + }, + { + headers: { + "x-trigger-jwt-claims": JSON.stringify({ + sub: authenticationResult.environment.id, + pub: true, + }), + }, + } + ); } catch (error) { if (error instanceof Error) { return json({ error: error.message }, { status: 400 }); diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts index 18f09560a3..aae68c91d6 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts @@ -30,6 +30,8 @@ export async function action({ request, params }: ActionFunctionArgs) { return { status: 405, body: "Method Not Allowed" }; } + logger.debug("TriggerTask action", { headers: Object.fromEntries(request.headers) }); + // Next authenticate the request const authenticationResult = await authenticateApiRequest(request); @@ -105,9 +107,19 @@ export async function action({ request, params }: ActionFunctionArgs) { return json({ error: "Task not found" }, { status: 404 }); } - return json({ - id: run.friendlyId, - }); + return json( + { + id: run.friendlyId, + }, + { + headers: { + "x-trigger-jwt-claims": JSON.stringify({ + sub: authenticationResult.environment.id, + pub: true, + }), + }, + } + ); } catch (error) { if (error instanceof ServiceValidationError) { return json({ error: error.message }, { status: 422 }); diff --git a/apps/webapp/app/routes/api.v3.runs.$runId.ts b/apps/webapp/app/routes/api.v3.runs.$runId.ts index 5768045d1b..f79d8a9233 100644 --- a/apps/webapp/app/routes/api.v3.runs.$runId.ts +++ b/apps/webapp/app/routes/api.v3.runs.$runId.ts @@ -1,44 +1,31 @@ -import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; import { z } from "zod"; import { ApiRetrieveRunPresenter } from "~/presenters/v3/ApiRetrieveRunPresenter.server"; -import { authenticateApiRequest } from "~/services/apiAuth.server"; -import { apiCors } from "~/utils/apiCors"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; const ParamsSchema = z.object({ runId: z.string(), }); -export async function loader({ request, params }: LoaderFunctionArgs) { - if (request.method.toUpperCase() === "OPTIONS") { - return apiCors(request, json({})); +export const loader = createLoaderApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "all", + authorization: { + action: "read", + resource: (params) => ({ runs: params.runId }), + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ params, authentication }) => { + const presenter = new 
ApiRetrieveRunPresenter(); + const result = await presenter.call(params.runId, authentication.environment); + + if (!result) { + return json({ error: "Run not found" }, { status: 404 }); + } + + return json(result); } - - const authenticationResult = await authenticateApiRequest(request, { - allowPublicKey: true, - }); - if (!authenticationResult) { - return apiCors(request, json({ error: "Invalid or Missing API key" }, { status: 401 })); - } - - const authenticatedEnv = authenticationResult.environment; - - const parsed = ParamsSchema.safeParse(params); - - if (!parsed.success) { - return apiCors(request, json({ error: "Invalid or missing runId" }, { status: 400 })); - } - - const { runId } = parsed.data; - - const showSecretDetails = authenticationResult.type === "PRIVATE"; - - const presenter = new ApiRetrieveRunPresenter(); - const result = await presenter.call(runId, authenticatedEnv, showSecretDetails); - - if (!result) { - return apiCors(request, json({ error: "Run not found" }, { status: 404 })); - } - - return apiCors(request, json(result)); -} +); diff --git a/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts b/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts new file mode 100644 index 0000000000..b4bf1cd1e8 --- /dev/null +++ b/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts @@ -0,0 +1,36 @@ +import { json } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { $replica } from "~/db.server"; +import { realtimeClient } from "~/services/realtimeClientGlobal.server"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; + +const ParamsSchema = z.object({ + batchId: z.string(), +}); + +export const loader = createLoaderApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "all", + authorization: { + action: "read", + resource: (params) => ({ batch: params.batchId }), + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ params, authentication, request }) => { + const batchRun = await $replica.batchTaskRun.findFirst({ + where: { + friendlyId: params.batchId, + runtimeEnvironmentId: authentication.environment.id, + }, + }); + + if (!batchRun) { + return json({ error: "Batch not found" }, { status: 404 }); + } + + return realtimeClient.streamBatch(request.url, authentication.environment, batchRun.id); + } +); diff --git a/apps/webapp/app/routes/realtime.v1.runs.$runId.ts b/apps/webapp/app/routes/realtime.v1.runs.$runId.ts new file mode 100644 index 0000000000..4ce369ccf4 --- /dev/null +++ b/apps/webapp/app/routes/realtime.v1.runs.$runId.ts @@ -0,0 +1,36 @@ +import { json } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { $replica } from "~/db.server"; +import { realtimeClient } from "~/services/realtimeClientGlobal.server"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; + +const ParamsSchema = z.object({ + runId: z.string(), +}); + +export const loader = createLoaderApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "all", + authorization: { + action: "read", + resource: (params) => ({ runs: params.runId }), + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ params, authentication, request }) => { + const run = await $replica.taskRun.findFirst({ + where: { + friendlyId: params.runId, + runtimeEnvironmentId: authentication.environment.id, + }, + }); + + if (!run) { + return json({ error: "Run not found" }, { status: 404 }); + } + + return realtimeClient.streamRun(request.url, 
authentication.environment, run.id); + } +); diff --git a/apps/webapp/app/routes/realtime.v1.runs.ts b/apps/webapp/app/routes/realtime.v1.runs.ts new file mode 100644 index 0000000000..d4a0170c61 --- /dev/null +++ b/apps/webapp/app/routes/realtime.v1.runs.ts @@ -0,0 +1,28 @@ +import { z } from "zod"; +import { realtimeClient } from "~/services/realtimeClientGlobal.server"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; + +const SearchParamsSchema = z.object({ + tags: z + .string() + .optional() + .transform((value) => { + return value ? value.split(",") : undefined; + }), +}); + +export const loader = createLoaderApiRoute( + { + searchParams: SearchParamsSchema, + allowJWT: true, + corsStrategy: "all", + authorization: { + action: "read", + resource: (_, searchParams) => searchParams, + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ searchParams, authentication, request }) => { + return realtimeClient.streamRuns(request.url, authentication.environment, searchParams); + } +); diff --git a/apps/webapp/app/services/apiAuth.server.ts b/apps/webapp/app/services/apiAuth.server.ts index 3b3a8fec12..4e4917ce65 100644 --- a/apps/webapp/app/services/apiAuth.server.ts +++ b/apps/webapp/app/services/apiAuth.server.ts @@ -1,52 +1,56 @@ +import { json } from "@remix-run/server-runtime"; import { Prettify } from "@trigger.dev/core"; +import { SignJWT, errors, jwtVerify } from "jose"; import { z } from "zod"; +import { prisma } from "~/db.server"; +import { env } from "~/env.server"; +import { findProjectByRef } from "~/models/project.server"; import { RuntimeEnvironment, findEnvironmentByApiKey, findEnvironmentByPublicApiKey, } from "~/models/runtimeEnvironment.server"; +import { logger } from "./logger.server"; import { PersonalAccessTokenAuthenticationResult, authenticateApiRequestWithPersonalAccessToken, isPersonalAccessToken, } from "./personalAccessToken.server"; -import { prisma } from "~/db.server"; -import { json } from "@remix-run/server-runtime"; -import { findProjectByRef } from "~/models/project.server"; -import { SignJWT, jwtVerify, errors } from "jose"; -import { env } from "~/env.server"; -import { logger } from "./logger.server"; +import { isPublicJWT, validatePublicJwtKey } from "./realtime/jwtAuth.server"; -type Optional = Prettify & Partial>>; +const ClaimsSchema = z.object({ + scopes: z.array(z.string()).optional(), +}); -const AuthorizationHeaderSchema = z.string().regex(/^Bearer .+$/); +type Optional = Prettify & Partial>>; export type AuthenticatedEnvironment = Optional< NonNullable>>, "orgMember" >; -type ApiAuthenticationResult = { +export type ApiAuthenticationResult = { apiKey: string; - type: "PUBLIC" | "PRIVATE"; + type: "PUBLIC" | "PRIVATE" | "PUBLIC_JWT"; environment: AuthenticatedEnvironment; + scopes?: string[]; }; export async function authenticateApiRequest( request: Request, - { allowPublicKey = false }: { allowPublicKey?: boolean } = {} + options: { allowPublicKey?: boolean; allowJWT?: boolean } = {} ): Promise { const apiKey = getApiKeyFromRequest(request); if (!apiKey) { return; } - return authenticateApiKey(apiKey, { allowPublicKey }); + return authenticateApiKey(apiKey, options); } export async function authenticateApiKey( apiKey: string, - { allowPublicKey = false }: { allowPublicKey?: boolean } = {} + options: { allowPublicKey?: boolean; allowJWT?: boolean } = {} ): Promise { const result = getApiKeyResult(apiKey); @@ -54,14 +58,12 @@ export async function authenticateApiKey( return; } - //if it's a 
public API key and we don't allow public keys, return - if (!allowPublicKey) { - const environment = await findEnvironmentByApiKey(result.apiKey); - if (!environment) return; - return { - ...result, - environment, - }; + if (!options.allowPublicKey && result.type === "PUBLIC") { + return; + } + + if (!options.allowJWT && result.type === "PUBLIC_JWT") { + return; } switch (result.type) { @@ -81,27 +83,72 @@ export async function authenticateApiKey( environment, }; } + case "PUBLIC_JWT": { + const validationResults = await validatePublicJwtKey(result.apiKey); + + if (!validationResults) { + return; + } + + const parsedClaims = ClaimsSchema.safeParse(validationResults.claims); + + return { + ...result, + environment: validationResults.environment, + scopes: parsedClaims.success ? parsedClaims.data.scopes : [], + }; + } } } +export async function authenticateAuthorizationHeader( + authorization: string, + { + allowPublicKey = false, + allowJWT = false, + }: { allowPublicKey?: boolean; allowJWT?: boolean } = {} +): Promise { + const apiKey = getApiKeyFromHeader(authorization); + + if (!apiKey) { + return; + } + + return authenticateApiKey(apiKey, { allowPublicKey, allowJWT }); +} + export function isPublicApiKey(key: string) { return key.startsWith("pk_"); } +export function isSecretApiKey(key: string) { + return key.startsWith("tr_"); +} + export function getApiKeyFromRequest(request: Request) { - const rawAuthorization = request.headers.get("Authorization"); + return getApiKeyFromHeader(request.headers.get("Authorization")); +} - const authorization = AuthorizationHeaderSchema.safeParse(rawAuthorization); - if (!authorization.success) { +export function getApiKeyFromHeader(authorization?: string | null) { + if (typeof authorization !== "string" || !authorization) { return; } - const apiKey = authorization.data.replace(/^Bearer /, ""); + const apiKey = authorization.replace(/^Bearer /, ""); return apiKey; } -export function getApiKeyResult(apiKey: string) { - const type = isPublicApiKey(apiKey) ? ("PUBLIC" as const) : ("PRIVATE" as const); +export function getApiKeyResult(apiKey: string): { + apiKey: string; + type: "PUBLIC" | "PRIVATE" | "PUBLIC_JWT"; +} { + const type = isPublicApiKey(apiKey) + ? "PUBLIC" + : isSecretApiKey(apiKey) + ? "PRIVATE" + : isPublicJWT(apiKey) + ? 
"PUBLIC_JWT" + : "PRIVATE"; // Fallback to private key return { apiKey, type }; } diff --git a/apps/webapp/app/services/apiRateLimit.server.ts b/apps/webapp/app/services/apiRateLimit.server.ts index 1fdb3e5661..760a967bf3 100644 --- a/apps/webapp/app/services/apiRateLimit.server.ts +++ b/apps/webapp/app/services/apiRateLimit.server.ts @@ -1,150 +1,40 @@ -import { Ratelimit } from "@upstash/ratelimit"; -import { Request as ExpressRequest, Response as ExpressResponse, NextFunction } from "express"; -import { RedisOptions } from "ioredis"; -import { createHash } from "node:crypto"; import { env } from "~/env.server"; -import { logger } from "./logger.server"; -import { Duration, Limiter, RateLimiter, createRedisRateLimitClient } from "./rateLimiter.server"; +import { authenticateAuthorizationHeader } from "./apiAuth.server"; +import { authorizationRateLimitMiddleware } from "./authorizationRateLimitMiddleware.server"; +import { Duration } from "./rateLimiter.server"; -type Options = { - redis?: RedisOptions; - keyPrefix: string; - pathMatchers: (RegExp | string)[]; - pathWhiteList?: (RegExp | string)[]; - limiter: Limiter; - log?: { - requests?: boolean; - rejections?: boolean; - }; -}; - -//returns an Express middleware that rate limits using the Bearer token in the Authorization header -export function authorizationRateLimitMiddleware({ - redis, - keyPrefix, - limiter, - pathMatchers, - pathWhiteList = [], - log = { - rejections: true, - requests: true, +export const apiRateLimiter = authorizationRateLimitMiddleware({ + keyPrefix: "api", + defaultLimiter: { + type: "tokenBucket", + refillRate: env.API_RATE_LIMIT_REFILL_RATE, + interval: env.API_RATE_LIMIT_REFILL_INTERVAL as Duration, + maxTokens: env.API_RATE_LIMIT_MAX, }, -}: Options) { - const rateLimiter = new RateLimiter({ - redis, - keyPrefix, - limiter, - logSuccess: log.requests, - logFailure: log.rejections, - }); - - return async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => { - if (log.requests) { - logger.info(`RateLimiter (${keyPrefix}): request to ${req.path}`); - } - - // allow OPTIONS requests - if (req.method.toUpperCase() === "OPTIONS") { - return next(); - } - - //first check if any of the pathMatchers match the request path - const path = req.path; - if ( - !pathMatchers.some((matcher) => - matcher instanceof RegExp ? matcher.test(path) : path === matcher - ) - ) { - if (log.requests) { - logger.info(`RateLimiter (${keyPrefix}): didn't match ${req.path}`); - } - return next(); - } - - // Check if the path matches any of the whitelisted paths - if ( - pathWhiteList.some((matcher) => - matcher instanceof RegExp ? 
matcher.test(path) : path === matcher - ) - ) { - if (log.requests) { - logger.info(`RateLimiter (${keyPrefix}): whitelisted ${req.path}`); - } - return next(); - } - - if (log.requests) { - logger.info(`RateLimiter (${keyPrefix}): matched ${req.path}`); - } - - const authorizationValue = req.headers.authorization; - if (!authorizationValue) { - if (log.requests) { - logger.info(`RateLimiter (${keyPrefix}): no key`, { headers: req.headers, url: req.url }); - } - res.setHeader("Content-Type", "application/problem+json"); - return res.status(401).send( - JSON.stringify( - { - title: "Unauthorized", - status: 401, - type: "https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401", - detail: "No authorization header provided", - error: "No authorization header provided", - }, - null, - 2 - ) - ); + limiterCache: { + fresh: 60_000 * 10, // Data is fresh for 10 minutes + stale: 60_000 * 20, // Date is stale after 20 minutes + }, + limiterConfigOverride: async (authorizationValue) => { + const authenticatedEnv = await authenticateAuthorizationHeader(authorizationValue, { + allowPublicKey: true, + allowJWT: true, + }); + + if (!authenticatedEnv) { + return; } - const hash = createHash("sha256"); - hash.update(authorizationValue); - const hashedAuthorizationValue = hash.digest("hex"); - - const { success, pending, limit, reset, remaining } = await rateLimiter.limit( - hashedAuthorizationValue - ); - - const $remaining = Math.max(0, remaining); // remaining can be negative if the user has exceeded the limit, so clamp it to 0 - - res.set("x-ratelimit-limit", limit.toString()); - res.set("x-ratelimit-remaining", $remaining.toString()); - res.set("x-ratelimit-reset", reset.toString()); - - if (success) { - return next(); + if (authenticatedEnv.type === "PUBLIC_JWT") { + return { + type: "fixedWindow", + window: env.API_RATE_LIMIT_JWT_WINDOW, + tokens: env.API_RATE_LIMIT_JWT_TOKENS, + }; + } else { + return authenticatedEnv.environment.organization.apiRateLimiterConfig; } - - res.setHeader("Content-Type", "application/problem+json"); - const secondsUntilReset = Math.max(0, (reset - new Date().getTime()) / 1000); - return res.status(429).send( - JSON.stringify( - { - title: "Rate Limit Exceeded", - status: 429, - type: "https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429", - detail: `Rate limit exceeded ${$remaining}/${limit} requests remaining. Retry in ${secondsUntilReset} seconds.`, - reset, - limit, - remaining, - secondsUntilReset, - error: `Rate limit exceeded ${$remaining}/${limit} requests remaining. 
Retry in ${secondsUntilReset} seconds.`, - }, - null, - 2 - ) - ); - }; -} - -export const apiRateLimiter = authorizationRateLimitMiddleware({ - keyPrefix: "api", - limiter: Ratelimit.tokenBucket( - env.API_RATE_LIMIT_REFILL_RATE, - env.API_RATE_LIMIT_REFILL_INTERVAL as Duration, - env.API_RATE_LIMIT_MAX - ), + }, pathMatchers: [/^\/api/], // Allow /api/v1/tasks/:id/callback/:secret pathWhiteList: [ @@ -159,11 +49,13 @@ export const apiRateLimiter = authorizationRateLimitMiddleware({ /^\/api\/v1\/endpoints\/[^\/]+\/[^\/]+\/index\/[^\/]+$/, // /api/v1/endpoints/$environmentId/$endpointSlug/index/$indexHookIdentifier "/api/v1/timezones", "/api/v1/usage/ingest", + "/api/v1/auth/jwt/claims", /^\/api\/v1\/runs\/[^\/]+\/attempts$/, // /api/v1/runs/$runFriendlyId/attempts ], log: { rejections: env.API_RATE_LIMIT_REJECTION_LOGS_ENABLED === "1", requests: env.API_RATE_LIMIT_REQUEST_LOGS_ENABLED === "1", + limiter: env.API_RATE_LIMIT_LIMITER_LOGS_ENABLED === "1", }, }); diff --git a/apps/webapp/app/services/authorization.server.ts b/apps/webapp/app/services/authorization.server.ts new file mode 100644 index 0000000000..7869b5b0fe --- /dev/null +++ b/apps/webapp/app/services/authorization.server.ts @@ -0,0 +1,103 @@ +export type AuthorizationAction = "read"; // Add more actions as needed + +const ResourceTypes = ["tasks", "tags", "runs", "batch"] as const; + +export type AuthorizationResources = { + [key in (typeof ResourceTypes)[number]]?: string | string[]; +}; + +export type AuthorizationEntity = { + type: "PUBLIC" | "PRIVATE" | "PUBLIC_JWT"; + scopes?: string[]; +}; + +/** + * Checks if the given entity is authorized to perform a specific action on a resource. + * + * @param entity - The entity requesting authorization. + * @param action - The action the entity wants to perform. + * @param resource - The resource on which the action is to be performed. + * @param superScopes - An array of super scopes that can bypass the normal authorization checks. 
+ * + * @example + * + * ```typescript + * import { checkAuthorization } from "./authorization.server"; + * + * const entity = { + * type: "PUBLIC", + * scope: ["read:runs:run_1234", "read:tasks"] + * }; + * + * checkAuthorization(entity, "read", { runs: "run_1234" }); // Returns true + * checkAuthorization(entity, "read", { runs: "run_5678" }); // Returns false + * checkAuthorization(entity, "read", { tasks: "task_1234" }); // Returns true + * checkAuthorization(entity, "read", { tasks: ["task_5678"] }); // Returns true + * ``` + */ +export function checkAuthorization( + entity: AuthorizationEntity, + action: AuthorizationAction, + resource: AuthorizationResources, + superScopes?: string[] +) { + // "PRIVATE" is a secret key and has access to everything + if (entity.type === "PRIVATE") { + return true; + } + + // "PUBLIC" is a deprecated key and has no access + if (entity.type === "PUBLIC") { + return false; + } + + // If the entity has no permissions, deny access + if (!entity.scopes || entity.scopes.length === 0) { + return false; + } + + // If the resource object is empty, deny access + if (Object.keys(resource).length === 0) { + return false; + } + + // Check for any of the super scopes + if (superScopes && superScopes.length > 0) { + if (superScopes.some((permission) => entity.scopes?.includes(permission))) { + return true; + } + } + + const filteredResource = Object.keys(resource).reduce((acc, key) => { + if (ResourceTypes.includes(key)) { + acc[key as keyof AuthorizationResources] = resource[key as keyof AuthorizationResources]; + } + return acc; + }, {} as AuthorizationResources); + + // Check each resource type + for (const [resourceType, resourceValue] of Object.entries(filteredResource)) { + const resourceValues = Array.isArray(resourceValue) ? 
resourceValue : [resourceValue]; + + let resourceAuthorized = false; + for (const value of resourceValues) { + // Check for specific resource permission + const specificPermission = `${action}:${resourceType}:${value}`; + // Check for general resource type permission + const generalPermission = `${action}:${resourceType}`; + + if (entity.scopes.includes(specificPermission) || entity.scopes.includes(generalPermission)) { + resourceAuthorized = true; + break; + } + } + + // If any resource is not authorized, return false + if (!resourceAuthorized) { + return false; + } + } + + // All resources are authorized + return true; +} diff --git a/apps/webapp/app/services/authorizationRateLimitMiddleware.server.ts b/apps/webapp/app/services/authorizationRateLimitMiddleware.server.ts new file mode 100644 index 0000000000..b32f9ea1ae --- /dev/null +++ b/apps/webapp/app/services/authorizationRateLimitMiddleware.server.ts @@ -0,0 +1,301 @@ +import { createCache, DefaultStatefulContext, Namespace, Cache as UnkeyCache } from "@unkey/cache"; +import { MemoryStore } from "@unkey/cache/stores"; +import { Ratelimit } from "@upstash/ratelimit"; +import { Request as ExpressRequest, Response as ExpressResponse, NextFunction } from "express"; +import { RedisOptions } from "ioredis"; +import { createHash } from "node:crypto"; +import { z } from "zod"; +import { env } from "~/env.server"; +import { logger } from "./logger.server"; +import { createRedisRateLimitClient, Duration, RateLimiter } from "./rateLimiter.server"; +import { RedisCacheStore } from "./unkey/redisCacheStore.server"; + +const DurationSchema = z.custom((value) => { + if (typeof value !== "string") { + throw new Error("Duration must be a string"); + } + + return value as Duration; +}); + +export const RateLimitFixedWindowConfig = z.object({ + type: z.literal("fixedWindow"), + window: DurationSchema, + tokens: z.number(), +}); + +export type RateLimitFixedWindowConfig = z.infer; + +export const RateLimitSlidingWindowConfig = z.object({ + type: z.literal("slidingWindow"), + window: DurationSchema, + tokens: z.number(), +}); + +export type RateLimitSlidingWindowConfig = z.infer; + +export const RateLimitTokenBucketConfig = z.object({ + type: z.literal("tokenBucket"), + refillRate: z.number(), + interval: DurationSchema, + maxTokens: z.number(), +}); + +export type RateLimitTokenBucketConfig = z.infer; + +export const RateLimiterConfig = z.discriminatedUnion("type", [ + RateLimitFixedWindowConfig, + RateLimitSlidingWindowConfig, + RateLimitTokenBucketConfig, +]); + +export type RateLimiterConfig = z.infer; + +type LimitConfigOverrideFunction = (authorizationValue: string) => Promise; + +type Options = { + redis?: RedisOptions; + keyPrefix: string; + pathMatchers: (RegExp | string)[]; + pathWhiteList?: (RegExp | string)[]; + defaultLimiter: RateLimiterConfig; + limiterConfigOverride?: LimitConfigOverrideFunction; + limiterCache?: { + fresh: number; + stale: number; + }; + log?: { + requests?: boolean; + rejections?: boolean; + limiter?: boolean; + }; +}; + +async function resolveLimitConfig( + authorizationValue: string, + hashedAuthorizationValue: string, + defaultLimiter: RateLimiterConfig, + cache: UnkeyCache<{ limiter: RateLimiterConfig }>, + logsEnabled: boolean, + limiterConfigOverride?: LimitConfigOverrideFunction +): Promise { + if (!limiterConfigOverride) { + return defaultLimiter; + } + + if (logsEnabled) { + logger.info("RateLimiter: checking for override", { + authorizationValue: hashedAuthorizationValue, + defaultLimiter, + }); + } + + const 
cacheResult = await cache.limiter.swr(hashedAuthorizationValue, async (key) => { + const override = await limiterConfigOverride(authorizationValue); + + if (!override) { + if (logsEnabled) { + logger.info("RateLimiter: no override found", { + authorizationValue, + defaultLimiter, + }); + } + + return defaultLimiter; + } + + const parsedOverride = RateLimiterConfig.safeParse(override); + + if (!parsedOverride.success) { + logger.error("Error parsing rate limiter override", { + override, + errors: parsedOverride.error.errors, + }); + + return defaultLimiter; + } + + if (logsEnabled && parsedOverride.data) { + logger.info("RateLimiter: override found", { + authorizationValue, + defaultLimiter, + override: parsedOverride.data, + }); + } + + return parsedOverride.data; + }); + + return cacheResult.val ?? defaultLimiter; +} + +//returns an Express middleware that rate limits using the Bearer token in the Authorization header +export function authorizationRateLimitMiddleware({ + redis, + keyPrefix, + defaultLimiter, + pathMatchers, + pathWhiteList = [], + log = { + rejections: true, + requests: true, + }, + limiterCache, + limiterConfigOverride, +}: Options) { + const ctx = new DefaultStatefulContext(); + const memory = new MemoryStore({ persistentMap: new Map() }); + const redisCacheStore = new RedisCacheStore({ + connection: { + keyPrefix: `cache:${keyPrefix}:rate-limit-cache:`, + ...redis, + }, + }); + + // This cache holds the rate limit configuration for each org, so we don't have to fetch it every request + const cache = createCache({ + limiter: new Namespace(ctx, { + stores: [memory, redisCacheStore], + fresh: limiterCache?.fresh ?? 30_000, + stale: limiterCache?.stale ?? 60_000, + }), + }); + + const redisClient = createRedisRateLimitClient( + redis ?? { + port: env.REDIS_PORT, + host: env.REDIS_HOST, + username: env.REDIS_USERNAME, + password: env.REDIS_PASSWORD, + enableAutoPipelining: true, + ...(env.REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }), + } + ); + + return async (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => { + if (log.requests) { + logger.info(`RateLimiter (${keyPrefix}): request to ${req.path}`); + } + + // allow OPTIONS requests + if (req.method.toUpperCase() === "OPTIONS") { + return next(); + } + + //first check if any of the pathMatchers match the request path + const path = req.path; + if ( + !pathMatchers.some((matcher) => + matcher instanceof RegExp ? matcher.test(path) : path === matcher + ) + ) { + if (log.requests) { + logger.info(`RateLimiter (${keyPrefix}): didn't match ${req.path}`); + } + return next(); + } + + // Check if the path matches any of the whitelisted paths + if ( + pathWhiteList.some((matcher) => + matcher instanceof RegExp ? 
matcher.test(path) : path === matcher + ) + ) { + if (log.requests) { + logger.info(`RateLimiter (${keyPrefix}): whitelisted ${req.path}`); + } + return next(); + } + + if (log.requests) { + logger.info(`RateLimiter (${keyPrefix}): matched ${req.path}`); + } + + const authorizationValue = req.headers.authorization; + if (!authorizationValue) { + if (log.requests) { + logger.info(`RateLimiter (${keyPrefix}): no key`, { headers: req.headers, url: req.url }); + } + res.setHeader("Content-Type", "application/problem+json"); + return res.status(401).send( + JSON.stringify( + { + title: "Unauthorized", + status: 401, + type: "https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401", + detail: "No authorization header provided", + error: "No authorization header provided", + }, + null, + 2 + ) + ); + } + + const hash = createHash("sha256"); + hash.update(authorizationValue); + const hashedAuthorizationValue = hash.digest("hex"); + + const limiterConfig = await resolveLimitConfig( + authorizationValue, + hashedAuthorizationValue, + defaultLimiter, + cache, + typeof log.limiter === "boolean" ? log.limiter : false, + limiterConfigOverride + ); + + const limiter = + limiterConfig.type === "fixedWindow" + ? Ratelimit.fixedWindow(limiterConfig.tokens, limiterConfig.window) + : limiterConfig.type === "tokenBucket" + ? Ratelimit.tokenBucket( + limiterConfig.refillRate, + limiterConfig.interval, + limiterConfig.maxTokens + ) + : Ratelimit.slidingWindow(limiterConfig.tokens, limiterConfig.window); + + const rateLimiter = new RateLimiter({ + redisClient, + keyPrefix, + limiter, + logSuccess: log.requests, + logFailure: log.rejections, + }); + + const { success, limit, reset, remaining } = await rateLimiter.limit(hashedAuthorizationValue); + + const $remaining = Math.max(0, remaining); // remaining can be negative if the user has exceeded the limit, so clamp it to 0 + + res.set("x-ratelimit-limit", limit.toString()); + res.set("x-ratelimit-remaining", $remaining.toString()); + res.set("x-ratelimit-reset", reset.toString()); + + if (success) { + return next(); + } + + res.setHeader("Content-Type", "application/problem+json"); + const secondsUntilReset = Math.max(0, (reset - new Date().getTime()) / 1000); + return res.status(429).send( + JSON.stringify( + { + title: "Rate Limit Exceeded", + status: 429, + type: "https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429", + detail: `Rate limit exceeded ${$remaining}/${limit} requests remaining. Retry in ${secondsUntilReset} seconds.`, + reset, + limit, + remaining, + secondsUntilReset, + error: `Rate limit exceeded ${$remaining}/${limit} requests remaining. 
Retry in ${secondsUntilReset} seconds.`, + }, + null, + 2 + ) + ); + }; +} + +export type RateLimitMiddleware = ReturnType; diff --git a/apps/webapp/app/services/platform.v3.server.ts b/apps/webapp/app/services/platform.v3.server.ts index 5d504361e7..b9a348d6b6 100644 --- a/apps/webapp/app/services/platform.v3.server.ts +++ b/apps/webapp/app/services/platform.v3.server.ts @@ -1,5 +1,13 @@ -import { BillingClient, Limits, SetPlanBody, UsageSeriesParams } from "@trigger.dev/platform/v3"; import { Organization, Project } from "@trigger.dev/database"; +import { + BillingClient, + Limits, + SetPlanBody, + UsageSeriesParams, + UsageResult, +} from "@trigger.dev/platform/v3"; +import { createCache, DefaultStatefulContext, Namespace } from "@unkey/cache"; +import { MemoryStore } from "@unkey/cache/stores"; import { redirect } from "remix-typedjson"; import { $replica } from "~/db.server"; import { env } from "~/env.server"; @@ -7,10 +15,61 @@ import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/m import { createEnvironment } from "~/models/organization.server"; import { logger } from "~/services/logger.server"; import { newProjectPath, organizationBillingPath } from "~/utils/pathBuilder"; +import { singleton } from "~/utils/singleton"; +import { RedisCacheStore } from "./unkey/redisCacheStore.server"; + +function initializeClient() { + if (isCloud() && process.env.BILLING_API_URL && process.env.BILLING_API_KEY) { + const client = new BillingClient({ + url: process.env.BILLING_API_URL, + apiKey: process.env.BILLING_API_KEY, + }); + console.log(`🤑 Billing client initialized: ${process.env.BILLING_API_URL}`); + return client; + } else { + console.log(`🤑 Billing client not initialized`); + } +} + +const client = singleton("billingClient", initializeClient); + +function initializePlatformCache() { + const ctx = new DefaultStatefulContext(); + const memory = new MemoryStore({ persistentMap: new Map() }); + const redisCacheStore = new RedisCacheStore({ + connection: { + keyPrefix: "tr:cache:platform:v3", + port: env.REDIS_PORT, + host: env.REDIS_HOST, + username: env.REDIS_USERNAME, + password: env.REDIS_PASSWORD, + enableAutoPipelining: true, + ...(env.REDIS_TLS_DISABLED === "true" ? 
{} : { tls: {} }), + }, + }); + + // This cache holds the limits fetched from the platform service + const cache = createCache({ + limits: new Namespace(ctx, { + stores: [memory, redisCacheStore], + fresh: 60_000 * 5, // 5 minutes + stale: 60_000 * 10, // 10 minutes + }), + usage: new Namespace(ctx, { + stores: [memory, redisCacheStore], + fresh: 60_000 * 5, // 5 minutes + stale: 60_000 * 10, // 10 minutes + }), + }); + + return cache; +} + +const platformCache = singleton("platformCache", initializePlatformCache); export async function getCurrentPlan(orgId: string) { - const client = getClient(); if (!client) return undefined; + try { const result = await client.currentPlan(orgId); @@ -60,8 +119,8 @@ export async function getCurrentPlan(orgId: string) { } export async function getLimits(orgId: string) { - const client = getClient(); if (!client) return undefined; + try { const result = await client.currentPlan(orgId); if (!result.success) { @@ -87,9 +146,15 @@ export async function getLimit(orgId: string, limit: keyof Limits, fallback: num return fallback; } +export async function getCachedLimit(orgId: string, limit: keyof Limits, fallback: number) { + return platformCache.limits.swr(`${orgId}:${limit}`, async () => { + return getLimit(orgId, limit, fallback); + }); +} + export async function customerPortalUrl(orgId: string, orgSlug: string) { - const client = getClient(); if (!client) return undefined; + try { return client.createPortalSession(orgId, { returnUrl: `${env.APP_ORIGIN}${organizationBillingPath({ slug: orgSlug })}`, @@ -101,8 +166,8 @@ export async function customerPortalUrl(orgId: string, orgSlug: string) { } export async function getPlans() { - const client = getClient(); if (!client) return undefined; + try { const result = await client.plans(); if (!result.success) { @@ -122,7 +187,6 @@ export async function setPlan( callerPath: string, plan: SetPlanBody ) { - const client = getClient(); if (!client) { throw redirectWithErrorMessage(callerPath, request, "Error setting plan"); } @@ -178,8 +242,8 @@ export async function setPlan( } export async function getUsage(organizationId: string, { from, to }: { from: Date; to: Date }) { - const client = getClient(); if (!client) return undefined; + try { const result = await client.usage(organizationId, { from, to }); if (!result.success) { @@ -193,9 +257,27 @@ export async function getUsage(organizationId: string, { from, to }: { from: Dat } } +export async function getCachedUsage( + organizationId: string, + { from, to }: { from: Date; to: Date } +) { + if (!client) return undefined; + + const result = await platformCache.usage.swr( + `${organizationId}:${from.toISOString()}:${to.toISOString()}`, + async () => { + const usageResponse = await getUsage(organizationId, { from, to }); + + return usageResponse; + } + ); + + return result.val; +} + export async function getUsageSeries(organizationId: string, params: UsageSeriesParams) { - const client = getClient(); if (!client) return undefined; + try { const result = await client.usageSeries(organizationId, params); if (!result.success) { @@ -214,8 +296,8 @@ export async function reportInvocationUsage( costInCents: number, additionalData?: Record ) { - const client = getClient(); if (!client) return undefined; + try { const result = await client.reportInvocationUsage({ organizationId, @@ -234,8 +316,8 @@ export async function reportInvocationUsage( } export async function reportComputeUsage(request: Request) { - const client = getClient(); if (!client) return undefined; + return 
fetch(`${process.env.BILLING_API_URL}/api/v1/usage/ingest/compute`, { method: "POST", headers: request.headers, @@ -244,8 +326,8 @@ export async function reportComputeUsage(request: Request) { } export async function getEntitlement(organizationId: string) { - const client = getClient(); if (!client) return undefined; + try { const result = await client.getEntitlement(organizationId); if (!result.success) { @@ -275,19 +357,6 @@ export async function projectCreated(organization: Organization, project: Projec } } -function getClient() { - if (isCloud() && process.env.BILLING_API_URL && process.env.BILLING_API_KEY) { - const client = new BillingClient({ - url: process.env.BILLING_API_URL, - apiKey: process.env.BILLING_API_KEY, - }); - console.log(`Billing client initialized: ${process.env.BILLING_API_URL}`); - return client; - } else { - console.log(`Billing client not initialized`); - } -} - function isCloud(): boolean { const acceptableHosts = [ "https://cloud.trigger.dev", diff --git a/apps/webapp/app/services/rateLimiter.server.ts b/apps/webapp/app/services/rateLimiter.server.ts index 8d6c03a1a6..f2a494d1c6 100644 --- a/apps/webapp/app/services/rateLimiter.server.ts +++ b/apps/webapp/app/services/rateLimiter.server.ts @@ -5,6 +5,7 @@ import { logger } from "./logger.server"; type Options = { redis?: RedisOptions; + redisClient?: RateLimiterRedisClient; keyPrefix: string; limiter: Limiter; logSuccess?: boolean; @@ -14,34 +15,32 @@ type Options = { export type Limiter = ConstructorParameters[0]["limiter"]; export type Duration = Parameters[1]; export type RateLimitResponse = Awaited>; +export type RateLimiterRedisClient = ConstructorParameters[0]["redis"]; export class RateLimiter { #ratelimit: Ratelimit; constructor(private readonly options: Options) { - const { redis, keyPrefix, limiter } = options; + const { redis, redisClient, keyPrefix, limiter } = options; const prefix = `ratelimit:${keyPrefix}`; this.#ratelimit = new Ratelimit({ - redis: createRedisRateLimitClient( - redis ?? { - port: env.REDIS_PORT, - host: env.REDIS_HOST, - username: env.REDIS_USERNAME, - password: env.REDIS_PASSWORD, - enableAutoPipelining: true, - ...(env.REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }), - } - ), + redis: + redisClient ?? + createRedisRateLimitClient( + redis ?? { + port: env.REDIS_PORT, + host: env.REDIS_HOST, + username: env.REDIS_USERNAME, + password: env.REDIS_PASSWORD, + enableAutoPipelining: true, + ...(env.REDIS_TLS_DISABLED === "true" ? 
{} : { tls: {} }, + } + ), limiter, ephemeralCache: new Map(), analytics: false, prefix, }); - - logger.info(`RateLimiter (${keyPrefix}): initialized`, { - keyPrefix, - redisKeyspace: prefix, - }); } async limit(identifier: string, rate = 1): Promise<RateLimitResponse> { @@ -71,9 +70,7 @@ export class RateLimiter { } } -export function createRedisRateLimitClient( - redisOptions: RedisOptions -): ConstructorParameters<typeof Ratelimit>[0]["redis"] { +export function createRedisRateLimitClient(redisOptions: RedisOptions): RateLimiterRedisClient { const redis = new Redis(redisOptions); return { diff --git a/apps/webapp/app/services/realtime/jwtAuth.server.ts b/apps/webapp/app/services/realtime/jwtAuth.server.ts new file mode 100644 index 0000000000..490e2d2adb --- /dev/null +++ b/apps/webapp/app/services/realtime/jwtAuth.server.ts @@ -0,0 +1,85 @@ +import { validateJWT } from "@trigger.dev/core/v3/jwt"; +import { findEnvironmentById } from "~/models/runtimeEnvironment.server"; + +export async function validatePublicJwtKey(token: string) { + // Get the sub claim from the token + // Use the sub claim to find the environment + // Validate the token against the environment.apiKey + // Once that's done, return the environment and the claims + const sub = extractJWTSub(token); + + if (!sub) { + return; + } + + const environment = await findEnvironmentById(sub); + + if (!environment) { + return; + } + + const claims = await validateJWT(token, environment.apiKey); + + if (!claims) { + return; + } + + return { + environment, + claims, + }; +} + +export function isPublicJWT(token: string): boolean { + // Split the token + const parts = token.split("."); + if (parts.length !== 3) return false; + + try { + // Decode the payload (second part) + const payload = JSON.parse(decodeBase64Url(parts[1])); + + if (payload === null || typeof payload !== "object") return false; + + // Check for the pub: true claim + return "pub" in payload && payload.pub === true; + } catch (error) { + // If there's any error in decoding or parsing, it's not a valid JWT + return false; + } +} + +function extractJWTSub(token: string): string | undefined { + // Split the token + const parts = token.split("."); + if (parts.length !== 3) return; + + try { + // Decode the payload (second part) + const payload = JSON.parse(decodeBase64Url(parts[1])); + + if (payload === null || typeof payload !== "object") return; + + // Return the sub claim if it's a string + return "sub" in payload && typeof payload.sub === "string" ?
payload.sub : undefined; + } catch (error) { + // If there's any error in decoding or parsing, it's not a valid JWT + return; + } +} + +function decodeBase64Url(str: string): string { + // Replace URL-safe characters and add padding + str = str.replace(/-/g, "+").replace(/_/g, "/"); + switch (str.length % 4) { + case 2: + str += "=="; + break; + case 3: + str += "="; + break; + } + + // Decode using Node.js Buffer + return Buffer.from(str, "base64").toString("utf8"); +} diff --git a/apps/webapp/app/services/realtimeClient.server.ts b/apps/webapp/app/services/realtimeClient.server.ts new file mode 100644 index 0000000000..ad52906b45 --- /dev/null +++ b/apps/webapp/app/services/realtimeClient.server.ts @@ -0,0 +1,253 @@ +import { json } from "@remix-run/server-runtime"; +import Redis, { Callback, Result, type RedisOptions } from "ioredis"; +import { randomUUID } from "node:crypto"; +import { longPollingFetch } from "~/utils/longPollingFetch"; +import { logger } from "./logger.server"; + +export interface CachedLimitProvider { + getCachedLimit: (organizationId: string, defaultValue: number) => Promise; +} + +export type RealtimeClientOptions = { + electricOrigin: string; + redis: RedisOptions; + cachedLimitProvider: CachedLimitProvider; + keyPrefix: string; + expiryTimeInSeconds?: number; +}; + +export type RealtimeEnvironment = { + id: string; + organizationId: string; +}; + +export type RealtimeRunsParams = { + tags?: string[]; +}; + +export class RealtimeClient { + private redis: Redis; + private expiryTimeInSeconds: number; + private cachedLimitProvider: CachedLimitProvider; + + constructor(private options: RealtimeClientOptions) { + this.redis = new Redis(options.redis); + this.expiryTimeInSeconds = options.expiryTimeInSeconds ?? 60 * 5; // default to 5 minutes + this.cachedLimitProvider = options.cachedLimitProvider; + this.#registerCommands(); + } + + async streamRun(url: URL | string, environment: RealtimeEnvironment, runId: string) { + return this.#streamRunsWhere(url, environment, `id='${runId}'`); + } + + async streamBatch(url: URL | string, environment: RealtimeEnvironment, batchId: string) { + return this.#streamRunsWhere(url, environment, `"batchId"='${batchId}'`); + } + + async streamRuns( + url: URL | string, + environment: RealtimeEnvironment, + params: RealtimeRunsParams + ) { + const whereClauses: string[] = [`"runtimeEnvironmentId"='${environment.id}'`]; + + if (params.tags) { + whereClauses.push(`"runTags" @> ARRAY[${params.tags.map((t) => `'${t}'`).join(",")}]`); + } + + const whereClause = whereClauses.join(" AND "); + + return this.#streamRunsWhere(url, environment, whereClause); + } + + async #streamRunsWhere(url: URL | string, environment: RealtimeEnvironment, whereClause: string) { + const electricUrl = this.#constructElectricUrl(url, whereClause); + + return this.#performElectricRequest(electricUrl, environment); + } + + #constructElectricUrl(url: URL | string, whereClause: string): URL { + const $url = new URL(url.toString()); + + const electricUrl = new URL(`${this.options.electricOrigin}/v1/shape/public."TaskRun"`); + + // Copy over all the url search params to the electric url + $url.searchParams.forEach((value, key) => { + electricUrl.searchParams.set(key, value); + }); + + // const electricParams = ["shape_id", "live", "offset", "columns", "cursor"]; + + // electricParams.forEach((param) => { + // if ($url.searchParams.has(param) && $url.searchParams.get(param)) { + // electricUrl.searchParams.set(param, $url.searchParams.get(param)!); + // } + // }); + + 
electricUrl.searchParams.set("where", whereClause); + + return electricUrl; + } + + async #performElectricRequest(url: URL, environment: RealtimeEnvironment) { + const shapeId = extractShapeId(url); + + logger.debug("[realtimeClient] request", { + url: url.toString(), + }); + + if (!shapeId) { + // If the shapeId is not present, we're just getting the initial value + return longPollingFetch(url.toString()); + } + + const isLive = isLiveRequestUrl(url); + + if (!isLive) { + return longPollingFetch(url.toString()); + } + + const requestId = randomUUID(); + + // We now need to wrap the longPollingFetch in a concurrency tracker + const concurrencyLimit = await this.cachedLimitProvider.getCachedLimit( + environment.organizationId, + 100_000 + ); + + if (!concurrencyLimit) { + logger.error("Failed to get concurrency limit", { + organizationId: environment.organizationId, + }); + + return json({ error: "Failed to get concurrency limit" }, { status: 500 }); + } + + logger.debug("[realtimeClient] increment and check", { + concurrencyLimit, + shapeId, + requestId, + environment: { + id: environment.id, + organizationId: environment.organizationId, + }, + }); + + const canProceed = await this.#incrementAndCheck(environment.id, requestId, concurrencyLimit); + + if (!canProceed) { + logger.debug("[realtimeClient] too many concurrent requests", { + requestId, + environmentId: environment.id, + }); + + return json({ error: "Too many concurrent requests" }, { status: 429 }); + } + + try { + // ... (rest of your existing code for the long polling request) + const response = await longPollingFetch(url.toString()); + + // Decrement the counter after the long polling request is complete + await this.#decrementConcurrency(environment.id, requestId); + + return response; + } catch (error) { + // Decrement the counter if the request fails + await this.#decrementConcurrency(environment.id, requestId); + + throw error; + } + } + + async #incrementAndCheck(environmentId: string, requestId: string, limit: number) { + const key = this.#getKey(environmentId); + const now = Date.now(); + + const result = await this.redis.incrementAndCheckConcurrency( + key, + now.toString(), + requestId, + this.expiryTimeInSeconds.toString(), // expiry time + (now - this.expiryTimeInSeconds * 1000).toString(), // cutoff time + limit.toString() + ); + + return result === 1; + } + + async #decrementConcurrency(environmentId: string, requestId: string) { + logger.debug("[realtimeClient] decrement", { + requestId, + environmentId, + }); + + const key = this.#getKey(environmentId); + + await this.redis.zrem(key, requestId); + } + + #getKey(environmentId: string): string { + return `${this.options.keyPrefix}:${environmentId}`; + } + + #registerCommands() { + this.redis.defineCommand("incrementAndCheckConcurrency", { + numberOfKeys: 1, + lua: /* lua */ ` + local concurrencyKey = KEYS[1] + + local timestamp = tonumber(ARGV[1]) + local requestId = ARGV[2] + local expiryTime = tonumber(ARGV[3]) + local cutoffTime = tonumber(ARGV[4]) + local limit = tonumber(ARGV[5]) + + -- Remove expired entries + redis.call('ZREMRANGEBYSCORE', concurrencyKey, '-inf', cutoffTime) + + -- Add the new request to the sorted set + redis.call('ZADD', concurrencyKey, timestamp, requestId) + + -- Set the expiry time on the key + redis.call('EXPIRE', concurrencyKey, expiryTime) + + -- Get the total number of concurrent requests + local totalRequests = redis.call('ZCARD', concurrencyKey) + + -- Check if the limit has been exceeded + if totalRequests > limit then + -- 
Remove the request we just added + redis.call('ZREM', concurrencyKey, requestId) + return 0 + end + + -- Return 1 to indicate success + return 1 + `, + }); + } +} + +function extractShapeId(url: URL) { + return url.searchParams.get("shape_id"); +} + +function isLiveRequestUrl(url: URL) { + return url.searchParams.has("live") && url.searchParams.get("live") === "true"; +} + +declare module "ioredis" { + interface RedisCommander { + incrementAndCheckConcurrency( + key: string, + timestamp: string, + requestId: string, + expiryTime: string, + cutoffTime: string, + limit: string, + callback?: Callback + ): Result; + } +} diff --git a/apps/webapp/app/services/realtimeClientGlobal.server.ts b/apps/webapp/app/services/realtimeClientGlobal.server.ts new file mode 100644 index 0000000000..514b2fb415 --- /dev/null +++ b/apps/webapp/app/services/realtimeClientGlobal.server.ts @@ -0,0 +1,32 @@ +import { env } from "~/env.server"; +import { singleton } from "~/utils/singleton"; +import { RealtimeClient } from "./realtimeClient.server"; +import { getCachedLimit } from "./platform.v3.server"; + +function initializeRealtimeClient() { + return new RealtimeClient({ + electricOrigin: env.ELECTRIC_ORIGIN, + keyPrefix: "tr:realtime:concurrency", + redis: { + port: env.REDIS_PORT, + host: env.REDIS_HOST, + username: env.REDIS_USERNAME, + password: env.REDIS_PASSWORD, + enableAutoPipelining: true, + ...(env.REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }), + }, + cachedLimitProvider: { + async getCachedLimit(organizationId, defaultValue) { + const result = await getCachedLimit( + organizationId, + "realtimeConcurrentConnections", + defaultValue + ); + + return result.val; + }, + }, + }); +} + +export const realtimeClient = singleton("realtimeClient", initializeRealtimeClient); diff --git a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts new file mode 100644 index 0000000000..a0561cecb3 --- /dev/null +++ b/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts @@ -0,0 +1,260 @@ +import { z } from "zod"; +import { ApiAuthenticationResult, authenticateApiRequest } from "../apiAuth.server"; +import { json, LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { fromZodError } from "zod-validation-error"; +import { apiCors } from "~/utils/apiCors"; +import { + AuthorizationAction, + AuthorizationResources, + checkAuthorization, +} from "../authorization.server"; +import { logger } from "../logger.server"; +import { + authenticateApiRequestWithPersonalAccessToken, + PersonalAccessTokenAuthenticationResult, +} from "../personalAccessToken.server"; + +type ApiKeyRouteBuilderOptions< + TParamsSchema extends z.AnyZodObject | undefined = undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined +> = { + params?: TParamsSchema; + searchParams?: TSearchParamsSchema; + allowJWT?: boolean; + corsStrategy?: "all" | "none"; + authorization?: { + action: AuthorizationAction; + resource: ( + params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined, + searchParams: TSearchParamsSchema extends z.AnyZodObject + ? z.infer + : undefined + ) => AuthorizationResources; + superScopes?: string[]; + }; +}; + +type ApiKeyHandlerFunction< + TParamsSchema extends z.AnyZodObject | undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined +> = (args: { + params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined; + searchParams: TSearchParamsSchema extends z.AnyZodObject + ? 
z.infer + : undefined; + authentication: ApiAuthenticationResult; + request: Request; +}) => Promise; + +export function createLoaderApiRoute< + TParamsSchema extends z.AnyZodObject | undefined = undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined +>( + options: ApiKeyRouteBuilderOptions, + handler: ApiKeyHandlerFunction +) { + return async function loader({ request, params }: LoaderFunctionArgs) { + const { + params: paramsSchema, + searchParams: searchParamsSchema, + allowJWT = false, + corsStrategy = "none", + authorization, + } = options; + + if (corsStrategy !== "none" && request.method.toUpperCase() === "OPTIONS") { + return apiCors(request, json({})); + } + + const authenticationResult = await authenticateApiRequest(request, { allowJWT }); + + if (!authenticationResult) { + return wrapResponse( + request, + json({ error: "Invalid or Missing API key" }, { status: 401 }), + corsStrategy !== "none" + ); + } + + let parsedParams: any = undefined; + if (paramsSchema) { + const parsed = paramsSchema.safeParse(params); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Params Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedParams = parsed.data; + } + + let parsedSearchParams: any = undefined; + if (searchParamsSchema) { + const searchParams = Object.fromEntries(new URL(request.url).searchParams); + const parsed = searchParamsSchema.safeParse(searchParams); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Query Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedSearchParams = parsed.data; + } + + if (authorization) { + const { action, resource, superScopes } = authorization; + const $resource = resource(parsedParams, parsedSearchParams); + + logger.debug("Checking authorization", { + action, + resource: $resource, + superScopes, + scopes: authenticationResult.scopes, + }); + + if (!checkAuthorization(authenticationResult, action, $resource, superScopes)) { + return wrapResponse( + request, + json({ error: "Unauthorized" }, { status: 403 }), + corsStrategy !== "none" + ); + } + } + + try { + const result = await handler({ + params: parsedParams, + searchParams: parsedSearchParams, + authentication: authenticationResult, + request, + }); + return wrapResponse(request, result, corsStrategy !== "none"); + } catch (error) { + console.error("Error in API route:", error); + if (error instanceof Response) { + return wrapResponse(request, error, corsStrategy !== "none"); + } + return wrapResponse( + request, + json({ error: "Internal Server Error" }, { status: 500 }), + corsStrategy !== "none" + ); + } + }; +} + +type PATRouteBuilderOptions< + TParamsSchema extends z.AnyZodObject | undefined = undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined +> = { + params?: TParamsSchema; + searchParams?: TSearchParamsSchema; + corsStrategy?: "all" | "none"; +}; + +type PATHandlerFunction< + TParamsSchema extends z.AnyZodObject | undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined +> = (args: { + params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined; + searchParams: TSearchParamsSchema extends z.AnyZodObject + ? 
z.infer + : undefined; + authentication: PersonalAccessTokenAuthenticationResult; + request: Request; +}) => Promise; + +export function createLoaderPATApiRoute< + TParamsSchema extends z.AnyZodObject | undefined = undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined +>( + options: PATRouteBuilderOptions, + handler: PATHandlerFunction +) { + return async function loader({ request, params }: LoaderFunctionArgs) { + const { + params: paramsSchema, + searchParams: searchParamsSchema, + corsStrategy = "none", + } = options; + + if (corsStrategy !== "none" && request.method.toUpperCase() === "OPTIONS") { + return apiCors(request, json({})); + } + + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return wrapResponse( + request, + json({ error: "Invalid or Missing API key" }, { status: 401 }), + corsStrategy !== "none" + ); + } + + let parsedParams: any = undefined; + if (paramsSchema) { + const parsed = paramsSchema.safeParse(params); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Params Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedParams = parsed.data; + } + + let parsedSearchParams: any = undefined; + if (searchParamsSchema) { + const searchParams = Object.fromEntries(new URL(request.url).searchParams); + const parsed = searchParamsSchema.safeParse(searchParams); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Query Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedSearchParams = parsed.data; + } + + try { + const result = await handler({ + params: parsedParams, + searchParams: parsedSearchParams, + authentication: authenticationResult, + request, + }); + return wrapResponse(request, result, corsStrategy !== "none"); + } catch (error) { + console.error("Error in API route:", error); + if (error instanceof Response) { + return wrapResponse(request, error, corsStrategy !== "none"); + } + return wrapResponse( + request, + json({ error: "Internal Server Error" }, { status: 500 }), + corsStrategy !== "none" + ); + } + }; +} + +function wrapResponse(request: Request, response: Response, useCors: boolean) { + return useCors ? 
apiCors(request, response) : response; +} diff --git a/apps/webapp/app/services/unkey/redisCacheStore.server.ts b/apps/webapp/app/services/unkey/redisCacheStore.server.ts new file mode 100644 index 0000000000..059f9183d5 --- /dev/null +++ b/apps/webapp/app/services/unkey/redisCacheStore.server.ts @@ -0,0 +1,97 @@ +import { Err, Ok, type Result } from "@unkey/error"; +import type { Entry, Store } from "@unkey/cache/stores"; +import type { RedisOptions } from "ioredis"; +import { Redis } from "ioredis"; +import { CacheError } from "@unkey/cache"; + +export type RedisCacheStoreConfig = { + connection: RedisOptions; +}; + +export class RedisCacheStore + implements Store +{ + public readonly name = "redis"; + private readonly redis: Redis; + + constructor(config: RedisCacheStoreConfig) { + this.redis = new Redis(config.connection); + } + + private buildCacheKey(namespace: TNamespace, key: string): string { + return [namespace, key].join("::"); + } + + public async get( + namespace: TNamespace, + key: string + ): Promise | undefined, CacheError>> { + let raw: string | null; + try { + raw = await this.redis.get(this.buildCacheKey(namespace, key)); + } catch (err) { + return Err( + new CacheError({ + tier: this.name, + key, + message: (err as Error).message, + }) + ); + } + + if (!raw) { + return Promise.resolve(Ok(undefined)); + } + + try { + const superjson = await import("superjson"); + const entry = superjson.parse(raw) as Entry; + return Ok(entry); + } catch (err) { + return Err( + new CacheError({ + tier: this.name, + key, + message: (err as Error).message, + }) + ); + } + } + + public async set( + namespace: TNamespace, + key: string, + entry: Entry + ): Promise> { + const cacheKey = this.buildCacheKey(namespace, key); + try { + const superjson = await import("superjson"); + await this.redis.set(cacheKey, superjson.stringify(entry), "PXAT", entry.staleUntil); + return Ok(); + } catch (err) { + return Err( + new CacheError({ + tier: this.name, + key, + message: (err as Error).message, + }) + ); + } + } + + public async remove(namespace: TNamespace, key: string): Promise> { + try { + const cacheKey = this.buildCacheKey(namespace, key); + await this.redis.del(cacheKey); + return Promise.resolve(Ok()); + } catch (err) { + return Err( + new CacheError({ + tier: this.name, + key, + message: (err as Error).message, + }) + ); + } + } +} diff --git a/apps/webapp/app/utils/apiCors.ts b/apps/webapp/app/utils/apiCors.ts index 4280f1d96e..a75322b37a 100644 --- a/apps/webapp/app/utils/apiCors.ts +++ b/apps/webapp/app/utils/apiCors.ts @@ -10,10 +10,25 @@ type CorsOptions = { credentials?: boolean; }; -export function apiCors( +export async function apiCors( request: Request, response: Response, options: CorsOptions = { maxAge: 5 * 60 } ): Promise { + if (hasCorsHeaders(response)) { + return response; + } + return cors(request, response, options); } + +export function makeApiCors( + request: Request, + options: CorsOptions = { maxAge: 5 * 60 } +): (response: Response) => Promise { + return (response: Response) => apiCors(request, response, options); +} + +function hasCorsHeaders(response: Response) { + return response.headers.has("access-control-allow-origin"); +} diff --git a/apps/webapp/app/utils/longPollingFetch.ts b/apps/webapp/app/utils/longPollingFetch.ts index 7d183d3acb..c070beb677 100644 --- a/apps/webapp/app/utils/longPollingFetch.ts +++ b/apps/webapp/app/utils/longPollingFetch.ts @@ -10,23 +10,16 @@ export async function longPollingFetch(url: string, options?: RequestInit) { try { let response = 
await fetch(url, options); - // Check if the response is ok (status in the range 200-299) - if (!response.ok) { - const body = await response.text(); - throw new Error(`HTTP error! status: ${response.status}. ${body}`); - } - - if (response.headers.get(`content-encoding`)) { + if (response.headers.get("content-encoding")) { const headers = new Headers(response.headers); - headers.delete(`content-encoding`); - headers.delete(`content-length`); + headers.delete("content-encoding"); + headers.delete("content-length"); response = new Response(response.body, { + headers, status: response.status, statusText: response.statusText, - headers, }); } - return response; } catch (error) { if (error instanceof TypeError) { diff --git a/apps/webapp/app/v3/services/triggerTask.server.ts b/apps/webapp/app/v3/services/triggerTask.server.ts index 39c97c4cc7..9ba29a8b12 100644 --- a/apps/webapp/app/v3/services/triggerTask.server.ts +++ b/apps/webapp/app/v3/services/triggerTask.server.ts @@ -378,6 +378,7 @@ export class TriggerTaskService extends BaseService { maxDurationInSeconds: body.options?.maxDuration ? clampMaxDuration(body.options.maxDuration) : undefined, + runTags: bodyTags, }, }); diff --git a/apps/webapp/package.json b/apps/webapp/package.json index 403e19b88a..b58071ed71 100644 --- a/apps/webapp/package.json +++ b/apps/webapp/package.json @@ -23,7 +23,7 @@ "clean:sourcemaps": "run-s clean:sourcemaps:*", "clean:sourcemaps:public": "rimraf ./build/**/*.map", "clean:sourcemaps:build": "rimraf ./public/build/**/*.map", - "test": "vitest" + "test": "vitest --no-file-parallelism" }, "eslintIgnore": [ "/node_modules", @@ -97,11 +97,13 @@ "@trigger.dev/core": "workspace:*", "@trigger.dev/database": "workspace:*", "@trigger.dev/otlp-importer": "workspace:*", - "@trigger.dev/platform": "1.0.12", + "@trigger.dev/platform": "1.0.13", "@trigger.dev/sdk": "workspace:*", "@trigger.dev/yalt": "npm:@trigger.dev/yalt", "@types/pg": "8.6.6", "@uiw/react-codemirror": "^4.19.5", + "@unkey/cache": "^1.5.0", + "@unkey/error": "^0.2.0", "@upstash/ratelimit": "^1.1.3", "@whatwg-node/fetch": "^0.9.14", "assert-never": "^1.2.1", @@ -183,6 +185,7 @@ "zod-validation-error": "^1.5.0" }, "devDependencies": { + "@internal/testcontainers": "workspace:*", "@remix-run/dev": "2.1.0", "@remix-run/eslint-config": "2.1.0", "@remix-run/testing": "^2.1.0", @@ -212,6 +215,7 @@ "@types/seedrandom": "^3.0.8", "@types/simple-oauth2": "^5.0.4", "@types/slug": "^5.0.3", + "@types/supertest": "^6.0.2", "@types/tar": "^6.1.4", "@types/ws": "^8.5.3", "@typescript-eslint/eslint-plugin": "^5.59.6", @@ -236,14 +240,16 @@ "prop-types": "^15.8.1", "rimraf": "^3.0.2", "style-loader": "^3.3.4", + "supertest": "^7.0.0", "tailwind-scrollbar": "^3.0.1", "tailwindcss": "3.4.1", "ts-node": "^10.7.0", "tsconfig-paths": "^3.14.1", "typescript": "^5.1.6", + "vite-tsconfig-paths": "^4.0.5", "vitest": "^1.4.0" }, "engines": { "node": ">=16.0.0" } -} +} \ No newline at end of file diff --git a/apps/webapp/remix.config.js b/apps/webapp/remix.config.js index 06f6ccf68d..d2417a3eb5 100644 --- a/apps/webapp/remix.config.js +++ b/apps/webapp/remix.config.js @@ -15,6 +15,8 @@ module.exports = { "@trigger.dev/sdk", "@trigger.dev/platform", "@trigger.dev/yalt", + "@unkey/cache", + "@unkey/cache/stores", "emails", "highlight.run", "random-words", diff --git a/apps/webapp/server.ts b/apps/webapp/server.ts index 9af6511663..520b138d5e 100644 --- a/apps/webapp/server.ts +++ b/apps/webapp/server.ts @@ -1,16 +1,16 @@ -import path from "path"; -import express from "express"; 
-import compression from "compression"; -import morgan from "morgan"; import { createRequestHandler } from "@remix-run/express"; -import { WebSocketServer } from "ws"; import { broadcastDevReady, logDevReady } from "@remix-run/server-runtime"; -import type { Server as IoServer } from "socket.io"; +import compression from "compression"; import type { Server as EngineServer } from "engine.io"; -import { RegistryProxy } from "~/v3/registryProxy.server"; -import { RateLimitMiddleware, apiRateLimiter } from "~/services/apiRateLimit.server"; -import { type RunWithHttpContextFunction } from "~/services/httpAsyncStorage.server"; +import express from "express"; +import morgan from "morgan"; import { nanoid } from "nanoid"; +import path from "path"; +import type { Server as IoServer } from "socket.io"; +import { WebSocketServer } from "ws"; +import { RateLimitMiddleware } from "~/services/apiRateLimit.server"; +import { type RunWithHttpContextFunction } from "~/services/httpAsyncStorage.server"; +import { RegistryProxy } from "~/v3/registryProxy.server"; const app = express(); diff --git a/apps/webapp/test/authorization.test.ts b/apps/webapp/test/authorization.test.ts new file mode 100644 index 0000000000..78950222ca --- /dev/null +++ b/apps/webapp/test/authorization.test.ts @@ -0,0 +1,219 @@ +import { describe, it, expect } from "vitest"; +import { checkAuthorization, AuthorizationEntity } from "../app/services/authorization.server"; + +describe("checkAuthorization", () => { + // Test entities + const privateEntity: AuthorizationEntity = { type: "PRIVATE" }; + const publicEntity: AuthorizationEntity = { type: "PUBLIC" }; + const publicJwtEntityWithPermissions: AuthorizationEntity = { + type: "PUBLIC_JWT", + scopes: ["read:runs:run_1234", "read:tasks", "read:tags:tag_5678"], + }; + const publicJwtEntityNoPermissions: AuthorizationEntity = { type: "PUBLIC_JWT" }; + + describe("PRIVATE entity", () => { + it("should always return true regardless of action or resource", () => { + expect(checkAuthorization(privateEntity, "read", { runs: "run_1234" })).toBe(true); + expect(checkAuthorization(privateEntity, "read", { tasks: ["task_1", "task_2"] })).toBe(true); + expect(checkAuthorization(privateEntity, "read", { tags: "nonexistent_tag" })).toBe(true); + }); + }); + + describe("PUBLIC entity", () => { + it("should always return false regardless of action or resource", () => { + expect(checkAuthorization(publicEntity, "read", { runs: "run_1234" })).toBe(false); + expect(checkAuthorization(publicEntity, "read", { tasks: ["task_1", "task_2"] })).toBe(false); + expect(checkAuthorization(publicEntity, "read", { tags: "tag_5678" })).toBe(false); + }); + }); + + describe("PUBLIC_JWT entity with scope", () => { + it("should return true for specific resource scope", () => { + expect(checkAuthorization(publicJwtEntityWithPermissions, "read", { runs: "run_1234" })).toBe( + true + ); + }); + + it("should return false for unauthorized specific resources", () => { + expect(checkAuthorization(publicJwtEntityWithPermissions, "read", { runs: "run_5678" })).toBe( + false + ); + }); + + it("should return true for general resource type scope", () => { + expect( + checkAuthorization(publicJwtEntityWithPermissions, "read", { tasks: "task_1234" }) + ).toBe(true); + expect( + checkAuthorization(publicJwtEntityWithPermissions, "read", { + tasks: ["task_5678", "task_9012"], + }) + ).toBe(true); + }); + + it("should return true if any resource in an array is authorized", () => { + expect( + 
checkAuthorization(publicJwtEntityWithPermissions, "read", { + tags: ["tag_1234", "tag_5678"], + }) + ).toBe(true); + }); + + it("should return true for nonexistent resource types", () => { + expect( + // @ts-expect-error + checkAuthorization(publicJwtEntityWithPermissions, "read", { nonexistent: "resource" }) + ).toBe(true); + }); + }); + + describe("PUBLIC_JWT entity without scope", () => { + it("should always return false regardless of action or resource", () => { + expect(checkAuthorization(publicJwtEntityNoPermissions, "read", { runs: "run_1234" })).toBe( + false + ); + expect( + checkAuthorization(publicJwtEntityNoPermissions, "read", { tasks: ["task_1", "task_2"] }) + ).toBe(false); + expect(checkAuthorization(publicJwtEntityNoPermissions, "read", { tags: "tag_5678" })).toBe( + false + ); + }); + }); + + describe("Edge cases", () => { + it("should handle empty resource objects", () => { + expect(checkAuthorization(publicJwtEntityWithPermissions, "read", {})).toBe(false); + }); + + it("should handle undefined scope", () => { + const entityUndefinedPermissions: AuthorizationEntity = { type: "PUBLIC_JWT" }; + expect(checkAuthorization(entityUndefinedPermissions, "read", { runs: "run_1234" })).toBe( + false + ); + }); + + it("should handle empty scope array", () => { + const entityEmptyPermissions: AuthorizationEntity = { type: "PUBLIC_JWT", scopes: [] }; + expect(checkAuthorization(entityEmptyPermissions, "read", { runs: "run_1234" })).toBe(false); + }); + + it("should return false if any resource is not authorized", () => { + expect( + checkAuthorization(publicJwtEntityWithPermissions, "read", { + runs: "run_1234", // This is authorized + tasks: "task_5678", // This is authorized (general permission) + tags: "tag_3456", // This is not authorized + }) + ).toBe(false); + }); + + it("should return true only if all resources are authorized", () => { + expect( + checkAuthorization(publicJwtEntityWithPermissions, "read", { + runs: "run_1234", // This is authorized + tasks: "task_5678", // This is authorized (general permission) + tags: "tag_5678", // This is authorized + }) + ).toBe(true); + }); + }); + + describe("Super scope", () => { + const entityWithSuperPermissions: AuthorizationEntity = { + type: "PUBLIC_JWT", + scopes: ["read:all", "admin"], + }; + + const entityWithOneSuperPermission: AuthorizationEntity = { + type: "PUBLIC_JWT", + scopes: ["read:all"], + }; + + it("should grant access with any of the super scope", () => { + expect( + checkAuthorization(entityWithSuperPermissions, "read", { tasks: "task_1234" }, [ + "read:all", + "admin", + ]) + ).toBe(true); + expect( + checkAuthorization(entityWithSuperPermissions, "read", { tags: ["tag_1", "tag_2"] }, [ + "write:all", + "admin", + ]) + ).toBe(true); + }); + + it("should grant access with one matching super permission", () => { + expect( + checkAuthorization(entityWithOneSuperPermission, "read", { runs: "run_5678" }, [ + "read:all", + "admin", + ]) + ).toBe(true); + }); + + it("should not grant access when no super scope match", () => { + expect( + checkAuthorization(entityWithOneSuperPermission, "read", { tasks: "task_1234" }, [ + "write:all", + "admin", + ]) + ).toBe(false); + }); + + it("should grant access to multiple resources with super scope", () => { + expect( + checkAuthorization( + entityWithSuperPermissions, + "read", + { + tasks: "task_1234", + tags: ["tag_1", "tag_2"], + runs: "run_5678", + }, + ["read:all"] + ) + ).toBe(true); + }); + + it("should fall back to specific scope when super scope are not provided", () => 
{ + const entityWithSpecificPermissions: AuthorizationEntity = { + type: "PUBLIC_JWT", + scopes: ["read:tasks", "read:tags"], + }; + expect( + checkAuthorization(entityWithSpecificPermissions, "read", { tasks: "task_1234" }) + ).toBe(true); + expect(checkAuthorization(entityWithSpecificPermissions, "read", { runs: "run_5678" })).toBe( + false + ); + }); + }); + + describe("Without super scope", () => { + const entityWithoutSuperPermissions: AuthorizationEntity = { + type: "PUBLIC_JWT", + scopes: ["read:tasks"], + }; + + it("should still grant access based on specific scope", () => { + expect( + checkAuthorization(entityWithoutSuperPermissions, "read", { tasks: "task_1234" }, [ + "read:all", + "admin", + ]) + ).toBe(true); + }); + + it("should deny access to resources not in scope", () => { + expect( + checkAuthorization(entityWithoutSuperPermissions, "read", { runs: "run_5678" }, [ + "read:all", + "admin", + ]) + ).toBe(false); + }); + }); +}); diff --git a/apps/webapp/test/authorizationRateLimitMiddleware.test.ts b/apps/webapp/test/authorizationRateLimitMiddleware.test.ts new file mode 100644 index 0000000000..a089a6700c --- /dev/null +++ b/apps/webapp/test/authorizationRateLimitMiddleware.test.ts @@ -0,0 +1,416 @@ +import { redisTest } from "@internal/testcontainers"; +import { describe, expect, vi, beforeEach } from "vitest"; + +vi.setConfig({ testTimeout: 30_000 }); // 30 seconds timeout + +// Mock the logger +vi.mock("./logger.server", () => ({ + logger: { + info: vi.fn(), + error: vi.fn(), + }, +})); + +import express, { Express } from "express"; +import request from "supertest"; +import { authorizationRateLimitMiddleware } from "../app/services/authorizationRateLimitMiddleware.server.js"; + +describe("authorizationRateLimitMiddleware", () => { + let app: Express; + + beforeEach(() => { + app = express(); + }); + + redisTest("should allow requests within the rate limit", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test", + defaultLimiter: { + type: "tokenBucket", + refillRate: 10, + interval: "1m", + maxTokens: 100, + }, + pathMatchers: [/^\/api/], + log: { + rejections: false, + requests: false, + }, + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => { + res.status(200).json({ message: "Success" }); + }); + + const response = await request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + expect(response.status).toBe(200); + expect(response.body).toEqual({ message: "Success" }); + expect(response.headers["x-ratelimit-limit"]).toBeDefined(); + expect(response.headers["x-ratelimit-remaining"]).toBeDefined(); + expect(response.headers["x-ratelimit-reset"]).toBeDefined(); + }); + + redisTest("should reject requests without an Authorization header", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test", + defaultLimiter: { + type: "tokenBucket", + refillRate: 10, + interval: "1m", + maxTokens: 100, + }, + pathMatchers: [/^\/api/], + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => { + res.status(200).json({ message: "Success" }); + }); + + const response = await request(app).get("/api/test"); + + expect(response.status).toBe(401); + expect(response.body).toHaveProperty("title", "Unauthorized"); + }); + + redisTest("should reject requests that exceed the rate limit", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: 
redis.options, + keyPrefix: "test", + defaultLimiter: { + type: "tokenBucket", + refillRate: 1, + interval: "1m", + maxTokens: 1, + }, + pathMatchers: [/^\/api/], + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => { + res.status(200).json({ message: "Success" }); + }); + + // First request should succeed + await request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + // Second request should be rate limited + const response = await request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + expect(response.status).toBe(429); + expect(response.body).toHaveProperty("title", "Rate Limit Exceeded"); + }); + + redisTest("should not apply rate limiting to whitelisted paths", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test", + defaultLimiter: { + type: "tokenBucket", + refillRate: 10, + interval: "1m", + maxTokens: 100, + }, + pathMatchers: [/^\/api/], + pathWhiteList: ["/api/whitelist"], + }); + + app.use(rateLimitMiddleware); + app.get("/api/whitelist", (req, res) => { + res.status(200).json({ message: "Whitelisted" }); + }); + + const response = await request(app) + .get("/api/whitelist") + .set("Authorization", "Bearer test-token"); + + expect(response.status).toBe(200); + expect(response.body).toEqual({ message: "Whitelisted" }); + expect(response.headers["x-ratelimit-limit"]).toBeUndefined(); + }); + + redisTest( + "should apply different rate limits based on limiterConfigOverride", + async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test", + defaultLimiter: { + type: "tokenBucket", + refillRate: 1, + interval: "1m", + maxTokens: 1, + }, + pathMatchers: [/^\/api/], + limiterConfigOverride: async (authorizationValue) => { + if (authorizationValue === "Bearer premium-token") { + return { + type: "tokenBucket", + refillRate: 10, + interval: "1m", + maxTokens: 100, + }; + } + return undefined; + }, + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => { + res.status(200).json({ message: "Success" }); + }); + + // Regular user should be rate limited after 1 request + await request(app).get("/api/test").set("Authorization", "Bearer regular-token"); + const regularResponse = await request(app) + .get("/api/test") + .set("Authorization", "Bearer regular-token"); + expect(regularResponse.status).toBe(429); + + // Premium user should be able to make multiple requests + const premiumResponse1 = await request(app) + .get("/api/test") + .set("Authorization", "Bearer premium-token"); + expect(premiumResponse1.status).toBe(200); + const premiumResponse2 = await request(app) + .get("/api/test") + .set("Authorization", "Bearer premium-token"); + expect(premiumResponse2.status).toBe(200); + } + ); + + describe("Advanced Cases", () => { + // 1. 
Test different rate limit configurations + redisTest("should enforce fixed window rate limiting", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test-fixed", + defaultLimiter: { + type: "fixedWindow", + window: "10s", + tokens: 3, + }, + pathMatchers: [/^\/api/], + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => res.status(200).json({ message: "Success" })); + + const makeRequest = () => + request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + // Should allow 3 requests + for (let i = 0; i < 3; i++) { + const response = await makeRequest(); + expect(response.status).toBe(200); + } + + // 4th request should be rate limited + const limitedResponse = await makeRequest(); + expect(limitedResponse.status).toBe(429); + + // Wait for the window to reset + await new Promise((resolve) => setTimeout(resolve, 10000)); + + // Should allow requests again + const newResponse = await makeRequest(); + expect(newResponse.status).toBe(200); + }); + + redisTest("should enforce sliding window rate limiting", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test-sliding", + defaultLimiter: { + type: "slidingWindow", + window: "10s", + tokens: 3, + }, + pathMatchers: [/^\/api/], + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => res.status(200).json({ message: "Success" })); + + const makeRequest = () => + request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + // Should allow 3 requests + for (let i = 0; i < 3; i++) { + const response = await makeRequest(); + expect(response.status).toBe(200); + } + + // 4th request should be rate limited + const limitedResponse = await makeRequest(); + expect(limitedResponse.status).toBe(429); + + // Wait for part of the window to pass + await new Promise((resolve) => setTimeout(resolve, 1000)); + + // Should still be limited + const stillLimitedResponse = await makeRequest(); + expect(stillLimitedResponse.status).toBe(429); + + // Wait for the full window to pass + await new Promise((resolve) => setTimeout(resolve, 10000)); + + // Should allow requests again + const newResponse = await makeRequest(); + expect(newResponse.status).toBe(200); + }); + + // 2. 
Test edge cases around rate limit calculations + redisTest("should handle token refill correctly", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test-refill", + defaultLimiter: { + type: "tokenBucket", + refillRate: 1, + interval: "5s", + maxTokens: 3, + }, + pathMatchers: [/^\/api/], + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => res.status(200).json({ message: "Success" })); + + const makeRequest = () => + request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + // Use up all tokens + for (let i = 0; i < 3; i++) { + const response = await makeRequest(); + expect(response.status).toBe(200); + } + + // Next request should be limited + const limitedResponse = await makeRequest(); + expect(limitedResponse.status).toBe(429); + + // Wait for one token to be refilled + await new Promise((resolve) => setTimeout(resolve, 5000)); + + // Should allow one request + const newResponse = await makeRequest(); + expect(newResponse.status).toBe(200); + + // But the next one should be limited again + const limitedAgainResponse = await makeRequest(); + expect(limitedAgainResponse.status).toBe(429); + }); + + redisTest("should handle near-zero remaining tokens correctly", async ({ redis }) => { + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test-near-zero", + defaultLimiter: { + type: "tokenBucket", + refillRate: 1, // 1 token every 5 seconds + interval: "5s", + maxTokens: 1, + }, + pathMatchers: [/^\/api/], + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => res.status(200).json({ message: "Success" })); + + const makeRequest = () => + request(app).get("/api/test").set("Authorization", "Bearer test-token"); + + // First request should succeed + const firstResponse = await makeRequest(); + expect(firstResponse.status).toBe(200); + + // Immediate second request should fail + const secondResponse = await makeRequest(); + expect(secondResponse.status).toBe(429); + + // Wait for almost one token to be refilled (4.9 seconds) + await new Promise((resolve) => setTimeout(resolve, 4900)); + + // This request should still fail as we're just shy of a full token + const thirdResponse = await makeRequest(); + expect(thirdResponse.status).toBe(429); + + // Wait for the full token to be refilled (additional 200ms) + await new Promise((resolve) => setTimeout(resolve, 200)); + + // This request should now succeed + const fourthResponse = await makeRequest(); + expect(fourthResponse.status).toBe(200); + + // Immediate next request should fail again + const fifthResponse = await makeRequest(); + expect(fifthResponse.status).toBe(429); + }); + + // 3. 
Test the limiterCache functionality + redisTest("should use cached limiter configurations", async ({ redis }) => { + let configOverrideCalls = 0; + const rateLimitMiddleware = authorizationRateLimitMiddleware({ + redis: redis.options, + keyPrefix: "test-cache", + defaultLimiter: { + type: "tokenBucket", + refillRate: 1, + interval: "1m", + maxTokens: 10, + }, + pathMatchers: [/^\/api/], + limiterCache: { + fresh: 1000, // 1 second + stale: 2000, // 2 seconds + }, + limiterConfigOverride: async (authorizationValue) => { + configOverrideCalls++; + if (authorizationValue === "Bearer premium-token") { + return { + type: "tokenBucket", + refillRate: 10, + interval: "1m", + maxTokens: 100, + }; + } + return undefined; + }, + }); + + app.use(rateLimitMiddleware); + app.get("/api/test", (req, res) => res.status(200).json({ message: "Success" })); + + const makeRequest = () => + request(app).get("/api/test").set("Authorization", "Bearer premium-token"); + + // First request should call the override + await makeRequest(); + expect(configOverrideCalls).toBe(1); + + // Subsequent requests within 1 second should use the cache + await makeRequest(); + await makeRequest(); + expect(configOverrideCalls).toBe(1); + + // Wait for the cache to become stale + await new Promise((resolve) => setTimeout(resolve, 1100)); + + // This should still use the cache, but also trigger a refresh + await makeRequest(); + expect(configOverrideCalls).toBe(2); + + // Wait for the cache to expire completely + await new Promise((resolve) => setTimeout(resolve, 1000)); + + // This should trigger a new override call + await makeRequest(); + expect(configOverrideCalls).toBe(3); + }); + }); +}); diff --git a/apps/webapp/test/placeholder.test.ts b/apps/webapp/test/placeholder.test.ts deleted file mode 100644 index 361cb08e46..0000000000 --- a/apps/webapp/test/placeholder.test.ts +++ /dev/null @@ -1,5 +0,0 @@ -describe("Placeholder", () => { - it("should pass", () => { - expect(true).toBe(true); - }); -}); diff --git a/apps/webapp/test/realtimeClient.test.ts b/apps/webapp/test/realtimeClient.test.ts new file mode 100644 index 0000000000..d581070821 --- /dev/null +++ b/apps/webapp/test/realtimeClient.test.ts @@ -0,0 +1,211 @@ +import { containerWithElectricTest } from "@internal/testcontainers"; +import { expect, describe } from "vitest"; +import { RealtimeClient } from "../app/services/realtimeClient.server.js"; + +describe("RealtimeClient", () => { + containerWithElectricTest( + "Should only track concurrency for live requests", + { timeout: 30_000 }, + async ({ redis, electricOrigin, prisma }) => { + const client = new RealtimeClient({ + electricOrigin, + keyPrefix: "test:realtime", + redis: redis.options, + expiryTimeInSeconds: 5, + cachedLimitProvider: { + async getCachedLimit() { + return 1; + }, + }, + }); + + const organization = await prisma.organization.create({ + data: { + title: "test-org", + slug: "test-org", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-project", + slug: "test-project", + organizationId: organization.id, + externalRef: "test-project", + }, + }); + + const environment = await prisma.runtimeEnvironment.create({ + data: { + projectId: project.id, + organizationId: organization.id, + slug: "test", + type: "DEVELOPMENT", + shortcode: "1234", + apiKey: "tr_dev_1234", + pkApiKey: "pk_test_1234", + }, + }); + + const run = await prisma.taskRun.create({ + data: { + taskIdentifier: "test-task", + friendlyId: "run_1234", + payload: "{}", + payloadType: "application/json", + 
traceId: "trace_1234", + spanId: "span_1234", + queue: "test-queue", + projectId: project.id, + runtimeEnvironmentId: environment.id, + }, + }); + + const initialResponsePromise = client.streamRun( + "http://localhost:3000?offset=-1", + environment, + run.id + ); + + const initializeResponsePromise2 = new Promise((resolve) => { + setTimeout(async () => { + const response = await client.streamRun( + "http://localhost:3000?offset=-1", + environment, + run.id + ); + + resolve(response); + }, 1); + }); + + const [response, response2] = await Promise.all([ + initialResponsePromise, + initializeResponsePromise2, + ]); + + const headers = Object.fromEntries(response.headers.entries()); + + const shapeId = headers["electric-shape-id"]; + const chunkOffset = headers["electric-chunk-last-offset"]; + + expect(response.status).toBe(200); + expect(response2.status).toBe(200); + expect(shapeId).toBeDefined(); + expect(chunkOffset).toBe("0_0"); + + // Okay, now we will do two live requests, and the second one should fail because of the concurrency limit + const liveResponsePromise = client.streamRun( + `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`, + environment, + run.id + ); + + const liveResponsePromise2 = new Promise((resolve) => { + setTimeout(async () => { + const response = await client.streamRun( + `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`, + environment, + run.id + ); + + resolve(response); + }, 1); + }); + + const updateRunAfter1SecondPromise = new Promise((resolve) => { + setTimeout(async () => { + await prisma.taskRun.update({ + where: { id: run.id }, + data: { metadata: "{}" }, + }); + + resolve(); + }, 1000); + }); + + const [liveResponse, liveResponse2] = await Promise.all([ + liveResponsePromise, + liveResponsePromise2, + updateRunAfter1SecondPromise, + ]); + + expect(liveResponse.status).toBe(200); + expect(liveResponse2.status).toBe(429); + } + ); + + containerWithElectricTest( + "Should support subscribing to a run tag", + { timeout: 30_000 }, + async ({ redis, electricOrigin, prisma }) => { + const client = new RealtimeClient({ + electricOrigin, + keyPrefix: "test:realtime", + redis: redis.options, + expiryTimeInSeconds: 5, + cachedLimitProvider: { + async getCachedLimit() { + return 1; + }, + }, + }); + + const organization = await prisma.organization.create({ + data: { + title: "test-org", + slug: "test-org", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-project", + slug: "test-project", + organizationId: organization.id, + externalRef: "test-project", + }, + }); + + const environment = await prisma.runtimeEnvironment.create({ + data: { + projectId: project.id, + organizationId: organization.id, + slug: "test", + type: "DEVELOPMENT", + shortcode: "1234", + apiKey: "tr_dev_1234", + pkApiKey: "pk_test_1234", + }, + }); + + const run = await prisma.taskRun.create({ + data: { + taskIdentifier: "test-task", + friendlyId: "run_1234", + payload: "{}", + payloadType: "application/json", + traceId: "trace_1234", + spanId: "span_1234", + queue: "test-queue", + projectId: project.id, + runtimeEnvironmentId: environment.id, + runTags: ["test:tag:1234", "test:tag:5678"], + }, + }); + + const response = await client.streamRuns("http://localhost:3000?offset=-1", environment, { + tags: ["test:tag:1234"], + }); + + const headers = Object.fromEntries(response.headers.entries()); + + const shapeId = headers["electric-shape-id"]; + const chunkOffset = headers["electric-chunk-last-offset"]; + + 
expect(response.status).toBe(200); + expect(shapeId).toBeDefined(); + expect(chunkOffset).toBe("0_0"); + } + ); +}); diff --git a/apps/webapp/tsconfig.json b/apps/webapp/tsconfig.json index 176d59f8b2..af3d25eb48 100644 --- a/apps/webapp/tsconfig.json +++ b/apps/webapp/tsconfig.json @@ -7,6 +7,7 @@ "isolatedModules": true, "esModuleInterop": true, "jsx": "react-jsx", + "module": "esnext", "moduleResolution": "Bundler", "resolveJsonModule": true, "target": "ES2019", diff --git a/apps/webapp/vitest.config.ts b/apps/webapp/vitest.config.ts index 758c27056e..0c08af40ea 100644 --- a/apps/webapp/vitest.config.ts +++ b/apps/webapp/vitest.config.ts @@ -1,8 +1,12 @@ import { defineConfig } from "vitest/config"; +import tsconfigPaths from "vite-tsconfig-paths"; export default defineConfig({ test: { include: ["test/**/*.test.ts"], globals: true, + pool: "forks", }, + // @ts-ignore + plugins: [tsconfigPaths({ projects: ["./tsconfig.json"] })], }); diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 8595f2ae13..6653e931ce 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -60,10 +60,10 @@ services: - 6379:6379 electric: - image: electricsql/electric + image: electricsql/electric:0.7.5 restart: always environment: - DATABASE_URL: postgresql://postgres:postgres@database:5432/postgres + DATABASE_URL: postgresql://postgres:postgres@database:5432/postgres?sslmode=disable networks: - app_network ports: diff --git a/internal-packages/database/prisma/migrations/20240929115226_add_run_tags_to_task_run/migration.sql b/internal-packages/database/prisma/migrations/20240929115226_add_run_tags_to_task_run/migration.sql new file mode 100644 index 0000000000..85e95ab458 --- /dev/null +++ b/internal-packages/database/prisma/migrations/20240929115226_add_run_tags_to_task_run/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "TaskRun" ADD COLUMN "runTags" TEXT[]; diff --git a/internal-packages/database/prisma/migrations/20241001190022_add_rate_limiter_config_to_orgs/migration.sql b/internal-packages/database/prisma/migrations/20241001190022_add_rate_limiter_config_to_orgs/migration.sql new file mode 100644 index 0000000000..488a0144ad --- /dev/null +++ b/internal-packages/database/prisma/migrations/20241001190022_add_rate_limiter_config_to_orgs/migration.sql @@ -0,0 +1,3 @@ +-- AlterTable +ALTER TABLE "Organization" ADD COLUMN "apiRateLimiterConfig" JSONB, +ADD COLUMN "realtimeRateLimiterConfig" JSONB; diff --git a/internal-packages/database/prisma/schema.prisma b/internal-packages/database/prisma/schema.prisma index d346d4584c..9db032f752 100644 --- a/internal-packages/database/prisma/schema.prisma +++ b/internal-packages/database/prisma/schema.prisma @@ -138,6 +138,9 @@ model Organization { events EventRecord[] jobRuns JobRun[] + apiRateLimiterConfig Json? + realtimeRateLimiterConfig Json? + projects Project[] members OrgMember[] invites OrgMemberInvite[] @@ -1683,6 +1686,9 @@ model TaskRun { attempts TaskRunAttempt[] @relation("attempts") tags TaskRunTag[] + /// Denormized column that holds the raw tags + runTags String[] + checkpoints Checkpoint[] startedAt DateTime? 
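Note: the migrations above add a denormalized `runTags` column to `TaskRun` (the column the tag subscription test earlier filters on via `client.streamRuns(..., { tags: [...] })`) and per-organization `apiRateLimiterConfig` / `realtimeRateLimiterConfig` JSON columns. The webapp code that reads those columns is not part of this diff, so the stored shape below is an assumption: a minimal sketch that mirrors the token-bucket options accepted by `authorizationRateLimitMiddleware` in the limiter cache test above (`type`, `refillRate`, `interval`, `maxTokens`).

```ts
// Sketch only (assumed shape): store a per-org API rate limit override in the new
// Organization.apiRateLimiterConfig column, mirroring the middleware's limiter options.
import { PrismaClient } from "@trigger.dev/database";

const prisma = new PrismaClient();

export async function setOrgApiRateLimit(organizationId: string) {
  await prisma.organization.update({
    where: { id: organizationId },
    data: {
      apiRateLimiterConfig: {
        type: "tokenBucket",
        refillRate: 10, // tokens added back per interval
        interval: "1m",
        maxTokens: 100, // bucket capacity
      },
    },
  });
}
```

In the middleware test above, `limiterConfigOverride` returning `undefined` falls back to `defaultLimiter`; presumably the new organization columns feed that override, though that wiring is outside this diff.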
diff --git a/internal-packages/testcontainers/package.json b/internal-packages/testcontainers/package.json index dc147eec3e..d64add0cab 100644 --- a/internal-packages/testcontainers/package.json +++ b/internal-packages/testcontainers/package.json @@ -14,6 +14,7 @@ "@testcontainers/postgresql": "^10.13.1", "@testcontainers/redis": "^10.13.1", "testcontainers": "^10.13.1", + "tinyexec": "^0.3.0", "vitest": "^1.4.0" }, "scripts": { diff --git a/internal-packages/testcontainers/src/index.ts b/internal-packages/testcontainers/src/index.ts index 24d734c772..77e5f6294f 100644 --- a/internal-packages/testcontainers/src/index.ts +++ b/internal-packages/testcontainers/src/index.ts @@ -3,20 +3,37 @@ import { StartedRedisContainer } from "@testcontainers/redis"; import { Redis } from "ioredis"; import { test } from "vitest"; import { PrismaClient } from "@trigger.dev/database"; -import { createPostgresContainer, createRedisContainer } from "./utils"; +import { createPostgresContainer, createRedisContainer, createElectricContainer } from "./utils"; +import { Network, type StartedNetwork, type StartedTestContainer } from "testcontainers"; -type PostgresContext = { +type NetworkContext = { network: StartedNetwork }; + +type PostgresContext = NetworkContext & { postgresContainer: StartedPostgreSqlContainer; prisma: PrismaClient; }; type RedisContext = { redisContainer: StartedRedisContainer; redis: Redis }; -type ContainerContext = PostgresContext & RedisContext; + +type ElectricContext = { + electricOrigin: string; +}; + +type ContainerContext = NetworkContext & PostgresContext & RedisContext; +type ContainerWithElectricContext = ContainerContext & ElectricContext; type Use = (value: T) => Promise; -const postgresContainer = async ({}, use: Use) => { - const { container } = await createPostgresContainer(); +const network = async ({}, use: Use) => { + const network = await new Network().start(); + await use(network); +}; + +const postgresContainer = async ( + { network }: { network: StartedNetwork }, + use: Use +) => { + const { container } = await createPostgresContainer(network); await use(container); await container.stop(); }; @@ -36,7 +53,7 @@ const prisma = async ( await prisma.$disconnect(); }; -export const postgresTest = test.extend({ postgresContainer, prisma }); +export const postgresTest = test.extend({ network, postgresContainer, prisma }); const redisContainer = async ({}, use: Use) => { const { container } = await createRedisContainer(); @@ -59,9 +76,31 @@ const redis = async ( export const redisTest = test.extend({ redisContainer, redis }); +const electricOrigin = async ( + { + postgresContainer, + network, + }: { postgresContainer: StartedPostgreSqlContainer; network: StartedNetwork }, + use: Use +) => { + const { origin, container } = await createElectricContainer(postgresContainer, network); + await use(origin); + await container.stop(); +}; + export const containerTest = test.extend({ + network, + postgresContainer, + prisma, + redisContainer, + redis, +}); + +export const containerWithElectricTest = test.extend({ + network, postgresContainer, prisma, redisContainer, redis, + electricOrigin, }); diff --git a/internal-packages/testcontainers/src/utils.ts b/internal-packages/testcontainers/src/utils.ts index 67fd022525..343c538754 100644 --- a/internal-packages/testcontainers/src/utils.ts +++ b/internal-packages/testcontainers/src/utils.ts @@ -1,35 +1,70 @@ -import { PostgreSqlContainer } from "@testcontainers/postgresql"; +import { PostgreSqlContainer, StartedPostgreSqlContainer } from 
"@testcontainers/postgresql"; import { RedisContainer } from "@testcontainers/redis"; -import { execSync } from "child_process"; import path from "path"; +import { GenericContainer, StartedNetwork } from "testcontainers"; +import { x } from "tinyexec"; -export async function createPostgresContainer() { - const container = await new PostgreSqlContainer().start(); +export async function createPostgresContainer(network: StartedNetwork) { + const container = await new PostgreSqlContainer("docker.io/postgres:14") + .withNetwork(network) + .withNetworkAliases("database") + .withCommand(["-c", "listen_addresses=*", "-c", "wal_level=logical"]) + .start(); // Run migrations const databasePath = path.resolve(__dirname, "../../database"); - execSync(`npx prisma@5.4.1 db push --schema ${databasePath}/prisma/schema.prisma`, { - env: { - ...process.env, - DATABASE_URL: container.getConnectionUri(), - DIRECT_URL: container.getConnectionUri(), - }, - }); + await x( + `${databasePath}/node_modules/.bin/prisma`, + [ + "db", + "push", + "--force-reset", + "--accept-data-loss", + "--skip-generate", + "--schema", + `${databasePath}/prisma/schema.prisma`, + ], + { + nodeOptions: { + env: { + ...process.env, + DATABASE_URL: container.getConnectionUri(), + DIRECT_URL: container.getConnectionUri(), + }, + }, + } + ); - // console.log(container.getConnectionUri()); - - return { url: container.getConnectionUri(), container }; + return { url: container.getConnectionUri(), container, network }; } export async function createRedisContainer() { const container = await new RedisContainer().start(); - try { - return { - container, - }; - } catch (e) { - console.error(e); - throw e; - } + + return { + container, + }; +} + +export async function createElectricContainer( + postgresContainer: StartedPostgreSqlContainer, + network: StartedNetwork +) { + const databaseUrl = `postgresql://${postgresContainer.getUsername()}:${postgresContainer.getPassword()}@${postgresContainer.getIpAddress( + network.getName() + )}:5432/${postgresContainer.getDatabase()}?sslmode=disable`; + + const container = await new GenericContainer("electricsql/electric:0.7.5") + .withExposedPorts(3000) + .withNetwork(network) + .withEnvironment({ + DATABASE_URL: databaseUrl, + }) + .start(); + + return { + container, + origin: `http://${container.getHost()}:${container.getMappedPort(3000)}`, + }; } diff --git a/packages/cli-v3/src/entryPoints/deploy-run-worker.ts b/packages/cli-v3/src/entryPoints/deploy-run-worker.ts index a30e1c3725..7944a4b9ba 100644 --- a/packages/cli-v3/src/entryPoints/deploy-run-worker.ts +++ b/packages/cli-v3/src/entryPoints/deploy-run-worker.ts @@ -11,17 +11,19 @@ import { TaskRunExecution, WorkerToExecutorMessageCatalog, TriggerConfig, - TriggerTracer, WorkerManifest, ExecutorToWorkerMessageCatalog, timeout, + runMetadata, } from "@trigger.dev/core/v3"; +import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { ProdRuntimeManager } from "@trigger.dev/core/v3/prod"; import { ConsoleInterceptor, DevUsageManager, DurableClock, getEnvVar, + getNumberEnvVar, logLevels, OtelTaskLogger, ProdUsageManager, @@ -303,6 +305,10 @@ const zodIpc = new ZodIpcConnection({ _execution = execution; _isRunning = true; + runMetadata.startPeriodicFlush( + getNumberEnvVar("TRIGGER_RUN_METADATA_FLUSH_INTERVAL", 1000) + ); + const measurement = usage.start(); // This lives outside of the executor because this will eventually be moved to the controller level @@ -397,7 +403,11 @@ const zodIpc = new ZodIpcConnection({ async function 
flushAll(timeoutInMs: number = 10_000) { const now = performance.now(); - await Promise.all([flushUsage(timeoutInMs), flushTracingSDK(timeoutInMs)]); + await Promise.all([ + flushUsage(timeoutInMs), + flushTracingSDK(timeoutInMs), + flushMetadata(timeoutInMs), + ]); const duration = performance.now() - now; @@ -424,6 +434,16 @@ async function flushTracingSDK(timeoutInMs: number = 10_000) { console.log(`Flushed tracingSDK in ${duration}ms`); } +async function flushMetadata(timeoutInMs: number = 10_000) { + const now = performance.now(); + + await Promise.race([runMetadata.flush(), setTimeout(timeoutInMs)]); + + const duration = performance.now() - now; + + console.log(`Flushed runMetadata in ${duration}ms`); +} + const prodRuntimeManager = new ProdRuntimeManager(zodIpc, { waitThresholdInMs: parseInt(env.TRIGGER_RUNTIME_WAIT_THRESHOLD_IN_MS ?? "30000", 10), }); diff --git a/packages/cli-v3/src/entryPoints/dev-run-worker.ts b/packages/cli-v3/src/entryPoints/dev-run-worker.ts index 05f7ffe493..1298860b9f 100644 --- a/packages/cli-v3/src/entryPoints/dev-run-worker.ts +++ b/packages/cli-v3/src/entryPoints/dev-run-worker.ts @@ -11,11 +11,12 @@ import { TaskRunExecution, WorkerToExecutorMessageCatalog, TriggerConfig, - TriggerTracer, WorkerManifest, ExecutorToWorkerMessageCatalog, timeout, + runMetadata, } from "@trigger.dev/core/v3"; +import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { DevRuntimeManager } from "@trigger.dev/core/v3/dev"; import { ConsoleInterceptor, @@ -30,6 +31,7 @@ import { TracingDiagnosticLogLevel, TracingSDK, usage, + getNumberEnvVar, } from "@trigger.dev/core/v3/workers"; import { ZodIpcConnection } from "@trigger.dev/core/v3/zodIpc"; import { readFile } from "node:fs/promises"; @@ -273,6 +275,9 @@ const zodIpc = new ZodIpcConnection({ _execution = execution; _isRunning = true; + runMetadata.startPeriodicFlush( + getNumberEnvVar("TRIGGER_RUN_METADATA_FLUSH_INTERVAL", 1000) + ); const measurement = usage.start(); // This lives outside of the executor because this will eventually be moved to the controller level @@ -345,7 +350,7 @@ const zodIpc = new ZodIpcConnection({ } }, FLUSH: async ({ timeoutInMs }, sender) => { - await _tracingSDK?.flush(); + await Promise.allSettled([_tracingSDK?.flush(), runMetadata.flush()]); }, }, }); diff --git a/packages/core/package.json b/packages/core/package.json index e03701e372..6b87560fde 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -33,8 +33,10 @@ "./types": "./src/types.ts", "./versions": "./src/versions.ts", "./v3": "./src/v3/index.ts", + "./v3/tracer": "./src/v3/tracer.ts", "./v3/build": "./src/v3/build/index.ts", "./v3/apps": "./src/v3/apps/index.ts", + "./v3/jwt": "./src/v3/jwt.ts", "./v3/errors": "./src/v3/errors.ts", "./v3/logger-api": "./src/v3/logger-api.ts", "./v3/otel": "./src/v3/otel/index.ts", @@ -95,6 +97,9 @@ "v3": [ "dist/commonjs/v3/index.d.ts" ], + "v3/tracer": [ + "dist/commonjs/v3/tracer.d.ts" + ], "v3/build": [ "dist/commonjs/v3/build/index.d.ts" ], @@ -160,6 +165,9 @@ ], "v3/schemas": [ "dist/commonjs/v3/schemas/index.d.ts" + ], + "v3/jwt": [ + "dist/commonjs/v3/jwt.d.ts" ] } }, @@ -174,7 +182,9 @@ "check-exports": "attw --pack ." 
}, "dependencies": { + "@electric-sql/client": "0.6.3", "@google-cloud/precise-date": "^4.0.0", + "@jsonhero/path": "^1.0.21", "@opentelemetry/api": "1.9.0", "@opentelemetry/api-logs": "0.52.1", "@opentelemetry/exporter-logs-otlp-http": "0.52.1", @@ -186,8 +196,10 @@ "@opentelemetry/sdk-trace-base": "1.25.1", "@opentelemetry/sdk-trace-node": "1.25.1", "@opentelemetry/semantic-conventions": "1.25.1", + "dequal": "^2.0.3", "execa": "^8.0.1", "humanize-duration": "^3.27.3", + "jose": "^5.4.0", "nanoid": "^3.3.4", "socket.io-client": "4.7.5", "superjson": "^2.2.1", @@ -347,6 +359,17 @@ "default": "./dist/commonjs/v3/index.js" } }, + "./v3/tracer": { + "import": { + "@triggerdotdev/source": "./src/v3/tracer.ts", + "types": "./dist/esm/v3/tracer.d.ts", + "default": "./dist/esm/v3/tracer.js" + }, + "require": { + "types": "./dist/commonjs/v3/tracer.d.ts", + "default": "./dist/commonjs/v3/tracer.js" + } + }, "./v3/build": { "import": { "@triggerdotdev/source": "./src/v3/build/index.ts", @@ -369,6 +392,17 @@ "default": "./dist/commonjs/v3/apps/index.js" } }, + "./v3/jwt": { + "import": { + "@triggerdotdev/source": "./src/v3/jwt.ts", + "types": "./dist/esm/v3/jwt.d.ts", + "default": "./dist/esm/v3/jwt.js" + }, + "require": { + "types": "./dist/commonjs/v3/jwt.d.ts", + "default": "./dist/commonjs/v3/jwt.js" + } + }, "./v3/errors": { "import": { "@triggerdotdev/source": "./src/v3/errors.ts", diff --git a/packages/core/src/v3/apiClient/core.ts b/packages/core/src/v3/apiClient/core.ts index e1c88c051b..c607f3ec4e 100644 --- a/packages/core/src/v3/apiClient/core.ts +++ b/packages/core/src/v3/apiClient/core.ts @@ -6,7 +6,7 @@ import { ApiConnectionError, ApiError, ApiSchemaValidationError } from "./errors import { Attributes, Span, context, propagation } from "@opentelemetry/api"; import { SemanticInternalAttributes } from "../semanticInternalAttributes.js"; -import { TriggerTracer } from "../tracer.js"; +import type { TriggerTracer } from "../tracer.js"; import { accessoryAttributes } from "../utils/styleAttributes.js"; import { CursorPage, @@ -16,6 +16,7 @@ import { OffsetLimitPageParams, OffsetLimitPageResponse, } from "./pagination.js"; +import { TriggerJwtOptions } from "../types/tasks.js"; export const defaultRetryOptions = { maxAttempts: 3, @@ -35,6 +36,7 @@ export type ZodFetchOptions = { }; export type ApiRequestOptions = Pick; + type KeysEnum = { [P in keyof Required]: true }; // This is required so that we can determine if a given object matches the ApiRequestOptions diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts index 7b117102ae..9b46c1f1fa 100644 --- a/packages/core/src/v3/apiClient/index.ts +++ b/packages/core/src/v3/apiClient/index.ts @@ -1,4 +1,5 @@ import { z } from "zod"; +import { VERSION } from "../../version.js"; import { AddTagsRequestBody, BatchTaskRunExecutionResult, @@ -37,25 +38,44 @@ import { zodfetchOffsetLimitPage, } from "./core.js"; import { ApiError } from "./errors.js"; +import { + RunShape, + AnyRunShape, + runShapeStream, + RunStreamCallback, + RunSubscription, + TaskRunShape, +} from "./runStream.js"; import { CreateEnvironmentVariableParams, ImportEnvironmentVariablesParams, ListProjectRunsQueryParams, ListRunsQueryParams, + SubscribeToRunsQueryParams, UpdateEnvironmentVariableParams, } from "./types.js"; -import { VERSION } from "../../version.js"; +import { generateJWT } from "../jwt.js"; +import { AnyRunTypes, TriggerJwtOptions } from "../types/tasks.js"; export type { CreateEnvironmentVariableParams, 
ImportEnvironmentVariablesParams, UpdateEnvironmentVariableParams, + SubscribeToRunsQueryParams, }; export type TriggerOptions = { spanParentAsLink?: boolean; }; +export type TriggerRequestOptions = ZodFetchOptions & { + publicAccessToken?: TriggerJwtOptions; +}; + +export type TriggerApiRequestOptions = ApiRequestOptions & { + publicAccessToken?: TriggerJwtOptions; +}; + const DEFAULT_ZOD_FETCH_OPTIONS: ZodFetchOptions = { retry: { maxAttempts: 3, @@ -68,23 +88,40 @@ const DEFAULT_ZOD_FETCH_OPTIONS: ZodFetchOptions = { export { isRequestOptions }; export type { ApiRequestOptions }; +export type { RunShape, AnyRunShape, TaskRunShape, RunStreamCallback, RunSubscription }; /** * Trigger.dev v3 API client */ export class ApiClient { - private readonly baseUrl: string; + public readonly baseUrl: string; + public readonly accessToken: string; private readonly defaultRequestOptions: ZodFetchOptions; - constructor( - baseUrl: string, - private readonly accessToken: string, - requestOptions: ApiRequestOptions = {} - ) { + constructor(baseUrl: string, accessToken: string, requestOptions: ApiRequestOptions = {}) { + this.accessToken = accessToken; this.baseUrl = baseUrl.replace(/\/$/, ""); this.defaultRequestOptions = mergeRequestOptions(DEFAULT_ZOD_FETCH_OPTIONS, requestOptions); } + get fetchClient(): typeof fetch { + const headers = this.#getHeaders(false); + + const fetchClient: typeof fetch = (input, requestInit) => { + const $requestInit: RequestInit = { + ...requestInit, + headers: { + ...requestInit?.headers, + ...headers, + }, + }; + + return fetch(input, $requestInit); + }; + + return fetchClient; + } + async getRunResult( runId: string, requestOptions?: ZodFetchOptions @@ -129,7 +166,7 @@ export class ApiClient { taskId: string, body: TriggerTaskRequestBody, options?: TriggerOptions, - requestOptions?: ZodFetchOptions + requestOptions?: TriggerRequestOptions ) { const encodedTaskId = encodeURIComponent(taskId); @@ -142,14 +179,35 @@ export class ApiClient { body: JSON.stringify(body), }, mergeRequestOptions(this.defaultRequestOptions, requestOptions) - ); + ) + .withResponse() + .then(async ({ response, data }) => { + const claimsHeader = response.headers.get("x-trigger-jwt-claims"); + const claims = claimsHeader ? JSON.parse(claimsHeader) : undefined; + + const jwt = await generateJWT({ + secretKey: this.accessToken, + payload: { + ...claims, + scopes: [`read:runs:${data.id}`].concat( + body.options?.tags ? Array.from(body.options?.tags).map((t) => `read:tags:${t}`) : [] + ), + }, + expirationTime: requestOptions?.publicAccessToken?.expirationTime ?? "1h", + }); + + return { + ...data, + publicAccessToken: jwt, + }; + }); } batchTriggerTask( taskId: string, body: BatchTriggerTaskRequestBody, options?: TriggerOptions, - requestOptions?: ZodFetchOptions + requestOptions?: TriggerRequestOptions ) { const encodedTaskId = encodeURIComponent(taskId); @@ -162,7 +220,26 @@ export class ApiClient { body: JSON.stringify(body), }, mergeRequestOptions(this.defaultRequestOptions, requestOptions) - ); + ) + .withResponse() + .then(async ({ response, data }) => { + const claimsHeader = response.headers.get("x-trigger-jwt-claims"); + const claims = claimsHeader ? JSON.parse(claimsHeader) : undefined; + + const jwt = await generateJWT({ + secretKey: this.accessToken, + payload: { + ...claims, + scopes: [`read:batch:${data.batchId}`], + }, + expirationTime: requestOptions?.publicAccessToken?.expirationTime ?? 
"1h", + }); + + return { + ...data, + publicAccessToken: jwt, + }; + }); } createUploadPayloadUrl(filename: string, requestOptions?: ZodFetchOptions) { @@ -517,6 +594,46 @@ export class ApiClient { ); } + subscribeToRun(runId: string) { + return runShapeStream(`${this.baseUrl}/realtime/v1/runs/${runId}`, { + closeOnComplete: true, + headers: this.#getRealtimeHeaders(), + }); + } + + subscribeToRunsWithTag(tag: string | string[]) { + const searchParams = createSearchQueryForSubscribeToRuns({ + tags: tag, + }); + + return runShapeStream( + `${this.baseUrl}/realtime/v1/runs${searchParams ? `?${searchParams}` : ""}`, + { + closeOnComplete: false, + headers: this.#getRealtimeHeaders(), + } + ); + } + + subscribeToBatch(batchId: string) { + return runShapeStream(`${this.baseUrl}/realtime/v1/batches/${batchId}`, { + closeOnComplete: false, + headers: this.#getRealtimeHeaders(), + }); + } + + async generateJWTClaims(requestOptions?: ZodFetchOptions): Promise> { + return zodfetch( + z.record(z.any()), + `${this.baseUrl}/api/v1/auth/jwt/claims`, + { + method: "POST", + headers: this.#getHeaders(false), + }, + mergeRequestOptions(this.defaultRequestOptions, requestOptions) + ); + } + #getHeaders(spanParentAsLink: boolean) { const headers: Record = { "Content-Type": "application/json", @@ -535,6 +652,34 @@ export class ApiClient { return headers; } + + #getRealtimeHeaders() { + const headers: Record = { + Authorization: `Bearer ${this.accessToken}`, + "trigger-version": VERSION, + }; + + return headers; + } +} + +function createSearchQueryForSubscribeToRuns(query?: SubscribeToRunsQueryParams): URLSearchParams { + const searchParams = new URLSearchParams(); + + if (query) { + if (query.tasks) { + searchParams.append( + "tasks", + Array.isArray(query.tasks) ? query.tasks.join(",") : query.tasks + ); + } + + if (query.tags) { + searchParams.append("tags", Array.isArray(query.tags) ? query.tags.join(",") : query.tags); + } + } + + return searchParams; } function createSearchQueryForListRuns(query?: ListRunsQueryParams): URLSearchParams { diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts new file mode 100644 index 0000000000..64de6eb153 --- /dev/null +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -0,0 +1,232 @@ +import { DeserializedJson } from "../../schemas/json.js"; +import { RunStatus, SubscribeRunRawShape } from "../schemas/api.js"; +import { SerializedError } from "../schemas/common.js"; +import { AnyRunTypes, AnyTask, InferRunTypes } from "../types/tasks.js"; +import { + conditionallyImportAndParsePacket, + IOPacket, + parsePacket, +} from "../utils/ioSerialization.js"; +import { AsyncIterableStream, createAsyncIterableStream, zodShapeStream } from "./stream.js"; + +export type RunShape = TRunTypes extends AnyRunTypes + ? 
{ + id: string; + taskIdentifier: TRunTypes["taskIdentifier"]; + payload: TRunTypes["payload"]; + output?: TRunTypes["output"]; + createdAt: Date; + updatedAt: Date; + number: number; + status: RunStatus; + durationMs: number; + costInCents: number; + baseCostInCents: number; + tags: string[]; + idempotencyKey?: string; + expiredAt?: Date; + ttl?: string; + finishedAt?: Date; + startedAt?: Date; + delayedUntil?: Date; + queuedAt?: Date; + metadata?: Record; + error?: SerializedError; + isTest: boolean; + } + : never; + +export type AnyRunShape = RunShape; + +export type TaskRunShape = RunShape>; + +export type RunStreamCallback = ( + run: RunShape +) => void | Promise; + +export type RunShapeStreamOptions = { + headers?: Record; + fetchClient?: typeof fetch; + closeOnComplete?: boolean; +}; + +export function runShapeStream( + url: string, + options?: RunShapeStreamOptions +): RunSubscription { + return new RunSubscription(url, options); +} + +export class RunSubscription { + private abortController: AbortController; + private unsubscribeShape?: () => void; + private stream: AsyncIterableStream>; + private packetCache = new Map(); + + constructor( + private url: string, + private options?: RunShapeStreamOptions + ) { + this.abortController = new AbortController(); + + const source = new ReadableStream({ + start: async (controller) => { + this.unsubscribeShape = await zodShapeStream( + SubscribeRunRawShape, + this.url, + async (shape) => { + controller.enqueue(shape); + if ( + this.options?.closeOnComplete && + shape.completedAt && + !this.abortController.signal.aborted + ) { + controller.close(); + this.abortController.abort(); + } + }, + { + signal: this.abortController.signal, + fetchClient: this.options?.fetchClient, + headers: this.options?.headers, + } + ); + }, + cancel: () => { + this.unsubscribe(); + }, + }); + + this.stream = createAsyncIterableStream(source, { + transform: async (chunk, controller) => { + const run = await this.transformRunShape(chunk); + + controller.enqueue(run); + }, + }); + } + + unsubscribe(): void { + if (!this.abortController.signal.aborted) { + this.abortController.abort(); + } + this.unsubscribeShape?.(); + } + + [Symbol.asyncIterator](): AsyncIterator> { + return this.stream[Symbol.asyncIterator](); + } + + getReader(): ReadableStreamDefaultReader> { + return this.stream.getReader(); + } + + private async transformRunShape(row: SubscribeRunRawShape): Promise> { + const payloadPacket = row.payloadType + ? ({ data: row.payload ?? undefined, dataType: row.payloadType } satisfies IOPacket) + : undefined; + + const outputPacket = row.outputType + ? ({ data: row.output ?? undefined, dataType: row.outputType } satisfies IOPacket) + : undefined; + + const [payload, output] = await Promise.all( + [ + { packet: payloadPacket, key: "payload" }, + { packet: outputPacket, key: "output" }, + ].map(async ({ packet, key }) => { + if (!packet) { + return; + } + + const cachedResult = this.packetCache.get(`${row.friendlyId}/${key}`); + + if (typeof cachedResult !== "undefined") { + return cachedResult; + } + + const result = await conditionallyImportAndParsePacket(packet); + this.packetCache.set(`${row.friendlyId}/${key}`, result); + + return result; + }) + ); + + const metadata = + row.metadata && row.metadataType + ? 
await parsePacket({ data: row.metadata, dataType: row.metadataType }) + : undefined; + + return { + id: row.friendlyId, + payload, + output, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + taskIdentifier: row.taskIdentifier, + number: row.number, + status: apiStatusFromRunStatus(row.status), + durationMs: row.usageDurationMs, + costInCents: row.costInCents, + baseCostInCents: row.baseCostInCents, + tags: row.runTags ?? [], + idempotencyKey: row.idempotencyKey ?? undefined, + expiredAt: row.expiredAt ?? undefined, + finishedAt: row.completedAt ?? undefined, + startedAt: row.startedAt ?? undefined, + delayedUntil: row.delayUntil ?? undefined, + queuedAt: row.queuedAt ?? undefined, + error: row.error ?? undefined, + isTest: row.isTest, + metadata, + } as RunShape; + } +} + +function apiStatusFromRunStatus(status: string): RunStatus { + switch (status) { + case "DELAYED": { + return "DELAYED"; + } + case "WAITING_FOR_DEPLOY": { + return "WAITING_FOR_DEPLOY"; + } + case "PENDING": { + return "QUEUED"; + } + case "PAUSED": + case "WAITING_TO_RESUME": { + return "FROZEN"; + } + case "RETRYING_AFTER_FAILURE": { + return "REATTEMPTING"; + } + case "EXECUTING": { + return "EXECUTING"; + } + case "CANCELED": { + return "CANCELED"; + } + case "COMPLETED_SUCCESSFULLY": { + return "COMPLETED"; + } + case "SYSTEM_FAILURE": { + return "SYSTEM_FAILURE"; + } + case "INTERRUPTED": { + return "INTERRUPTED"; + } + case "CRASHED": { + return "CRASHED"; + } + case "COMPLETED_WITH_ERRORS": { + return "FAILED"; + } + case "EXPIRED": { + return "EXPIRED"; + } + default: { + throw new Error(`Unknown status: ${status}`); + } + } +} diff --git a/packages/core/src/v3/apiClient/stream.ts b/packages/core/src/v3/apiClient/stream.ts new file mode 100644 index 0000000000..87b703a1bd --- /dev/null +++ b/packages/core/src/v3/apiClient/stream.ts @@ -0,0 +1,58 @@ +import { z } from "zod"; + +export type ZodShapeStreamOptions = { + headers?: Record; + fetchClient?: typeof fetch; + signal?: AbortSignal; +}; + +export async function zodShapeStream( + schema: TShapeSchema, + url: string, + callback: (shape: z.output) => void | Promise, + options?: ZodShapeStreamOptions +) { + const { ShapeStream, Shape } = await import("@electric-sql/client"); + + const stream = new ShapeStream>({ + url, + headers: options?.headers, + fetchClient: options?.fetchClient, + signal: options?.signal, + }); + + const shape = new Shape(stream); + + const initialValue = await shape.value; + + for (const shapeRow of initialValue.values()) { + await callback(schema.parse(shapeRow)); + } + + return shape.subscribe(async (newShape) => { + for (const shapeRow of newShape.values()) { + await callback(schema.parse(shapeRow)); + } + }); +} + +export type AsyncIterableStream = AsyncIterable & ReadableStream; + +export function createAsyncIterableStream( + source: ReadableStream, + transformer: Transformer +): AsyncIterableStream { + const transformedStream: any = source.pipeThrough(new TransformStream(transformer)); + + transformedStream[Symbol.asyncIterator] = () => { + const reader = transformedStream.getReader(); + return { + async next(): Promise> { + const { done, value } = await reader.read(); + return done ? 
{ done: true, value: undefined } : { done: false, value }; + }, + }; + }; + + return transformedStream; +} diff --git a/packages/core/src/v3/apiClient/types.ts b/packages/core/src/v3/apiClient/types.ts index b38da63fd7..e7c51a1553 100644 --- a/packages/core/src/v3/apiClient/types.ts +++ b/packages/core/src/v3/apiClient/types.ts @@ -36,3 +36,8 @@ export interface ListRunsQueryParams extends CursorPageParams { export interface ListProjectRunsQueryParams extends CursorPageParams, ListRunsQueryParams { env?: Array<"dev" | "staging" | "prod"> | "dev" | "staging" | "prod"; } + +export interface SubscribeToRunsQueryParams { + tasks?: Array | string; + tags?: Array | string; +} diff --git a/packages/core/src/v3/apiClientManager/index.ts b/packages/core/src/v3/apiClientManager/index.ts index a0aa0402a1..e2f47c9261 100644 --- a/packages/core/src/v3/apiClientManager/index.ts +++ b/packages/core/src/v3/apiClientManager/index.ts @@ -29,18 +29,19 @@ export class APIClientManagerAPI { unregisterGlobal(API_NAME); } - public setGlobalAPIClientConfiguration(config: ApiClientConfiguration): boolean { - return registerGlobal(API_NAME, config); - } - get baseURL(): string | undefined { - const store = this.#getConfig(); - return store?.baseURL ?? getEnvVar("TRIGGER_API_URL") ?? "https://api.trigger.dev"; + const config = this.#getConfig(); + return config?.baseURL ?? getEnvVar("TRIGGER_API_URL") ?? "https://api.trigger.dev"; } get accessToken(): string | undefined { - const store = this.#getConfig(); - return store?.secretKey ?? getEnvVar("TRIGGER_SECRET_KEY") ?? getEnvVar("TRIGGER_ACCESS_TOKEN"); + const config = this.#getConfig(); + return ( + config?.secretKey ?? + config?.accessToken ?? + getEnvVar("TRIGGER_SECRET_KEY") ?? + getEnvVar("TRIGGER_ACCESS_TOKEN") + ); } get client(): ApiClient | undefined { @@ -59,6 +60,23 @@ export class APIClientManagerAPI { return new ApiClient(this.baseURL, this.accessToken); } + runWithConfig Promise>( + config: ApiClientConfiguration, + fn: R + ): Promise> { + const originalConfig = this.#getConfig(); + const $config = { ...originalConfig, ...config }; + registerGlobal(API_NAME, $config, true); + + return fn().finally(() => { + registerGlobal(API_NAME, originalConfig, true); + }); + } + + public setGlobalAPIClientConfiguration(config: ApiClientConfiguration): boolean { + return registerGlobal(API_NAME, config); + } + #getConfig(): ApiClientConfiguration | undefined { return getGlobal(API_NAME); } diff --git a/packages/core/src/v3/apiClientManager/types.ts b/packages/core/src/v3/apiClientManager/types.ts index a1049bf832..b0e3da624c 100644 --- a/packages/core/src/v3/apiClientManager/types.ts +++ b/packages/core/src/v3/apiClientManager/types.ts @@ -2,6 +2,13 @@ import { type ApiRequestOptions } from "../apiClient/index.js"; export type ApiClientConfiguration = { baseURL?: string; + /** + * @deprecated Use `accessToken` instead. + */ secretKey?: string; + /** + * The access token to authenticate with the Trigger API. + */ + accessToken?: string; requestOptions?: ApiRequestOptions; }; diff --git a/packages/core/src/v3/errors.ts b/packages/core/src/v3/errors.ts index bc6fc20e47..039942f3b2 100644 --- a/packages/core/src/v3/errors.ts +++ b/packages/core/src/v3/errors.ts @@ -18,6 +18,18 @@ export class AbortTaskRunError extends Error { } } +export class TaskPayloadParsedError extends Error { + public readonly cause: unknown; + + constructor(cause: unknown) { + const causeMessage = cause instanceof Error ? 
cause.message : String(cause); + + super("Parsing payload with schema failed: " + causeMessage); + this.name = "TaskPayloadParsedError"; + this.cause = cause; + } +} + export function parseError(error: unknown): TaskRunError { if (error instanceof Error) { return { diff --git a/packages/core/src/v3/index.ts b/packages/core/src/v3/index.ts index 388e66d539..cb94f42c84 100644 --- a/packages/core/src/v3/index.ts +++ b/packages/core/src/v3/index.ts @@ -18,6 +18,7 @@ export { SemanticInternalAttributes } from "./semanticInternalAttributes.js"; export * from "./task-catalog-api.js"; export * from "./types/index.js"; export { links } from "./links.js"; +export * from "./jwt.js"; export { formatDuration, formatDurationInDays, @@ -27,8 +28,6 @@ export { nanosecondsToMilliseconds, } from "./utils/durations.js"; -export { TriggerTracer } from "./tracer.js"; - export type { LogLevel } from "./logger/taskLogger.js"; export { eventFilterMatches } from "../eventFilterMatches.js"; @@ -60,6 +59,7 @@ export { } from "./utils/ioSerialization.js"; export * from "./config.js"; +export { getSchemaParseFn, type AnySchemaParseFn, type SchemaParseFn } from "./types/schemas.js"; import { VERSION } from "../version.js"; diff --git a/packages/core/src/v3/jwt.ts b/packages/core/src/v3/jwt.ts new file mode 100644 index 0000000000..4426d2cfc2 --- /dev/null +++ b/packages/core/src/v3/jwt.ts @@ -0,0 +1,40 @@ +export type GenerateJWTOptions = { + secretKey: string; + payload: Record; + expirationTime?: number | Date | string; +}; + +export const JWT_ALGORITHM = "HS256"; +export const JWT_ISSUER = "https://id.trigger.dev"; +export const JWT_AUDIENCE = "https://api.trigger.dev"; + +export async function generateJWT(options: GenerateJWTOptions): Promise { + const { SignJWT } = await import("jose"); + + const secret = new TextEncoder().encode(options.secretKey); + + return new SignJWT(options.payload) + .setIssuer(JWT_ISSUER) + .setAudience(JWT_AUDIENCE) + .setProtectedHeader({ alg: JWT_ALGORITHM }) + .setIssuedAt() + .setExpirationTime(options.expirationTime ?? 
"15m") + .sign(secret); +} + +export async function validateJWT(token: string, apiKey: string) { + const { jwtVerify } = await import("jose"); + + const secret = new TextEncoder().encode(apiKey); + + try { + const { payload } = await jwtVerify(token, secret, { + issuer: JWT_ISSUER, + audience: JWT_AUDIENCE, + }); + + return payload; + } catch (e) { + return; + } +} diff --git a/packages/core/src/v3/otel/index.ts b/packages/core/src/v3/otel/index.ts index 689c7193b3..a62813d360 100644 --- a/packages/core/src/v3/otel/index.ts +++ b/packages/core/src/v3/otel/index.ts @@ -1,24 +1,3 @@ -import { Span, SpanStatusCode } from "@opentelemetry/api"; +export { TracingSDK, type TracingDiagnosticLogLevel, type TracingSDKConfig } from "./tracingSDK.js"; -export { TracingSDK, type TracingSDKConfig, type TracingDiagnosticLogLevel } from "./tracingSDK.js"; - -export function recordSpanException(span: Span, error: unknown) { - if (error instanceof Error) { - span.recordException(sanitizeSpanError(error)); - } else if (typeof error === "string") { - span.recordException(error.replace(/\0/g, "")); - } else { - span.recordException(JSON.stringify(error).replace(/\0/g, "")); - } - - span.setStatus({ code: SpanStatusCode.ERROR }); -} - -function sanitizeSpanError(error: Error) { - // Create a new error object with the same name, message and stack trace - const sanitizedError = new Error(error.message.replace(/\0/g, "")); - sanitizedError.name = error.name.replace(/\0/g, ""); - sanitizedError.stack = error.stack?.replace(/\0/g, ""); - - return sanitizedError; -} +export * from "./utils.js"; diff --git a/packages/core/src/v3/otel/utils.ts b/packages/core/src/v3/otel/utils.ts new file mode 100644 index 0000000000..b5ba36aea5 --- /dev/null +++ b/packages/core/src/v3/otel/utils.ts @@ -0,0 +1,22 @@ +import { type Span, SpanStatusCode } from "@opentelemetry/api"; + +export function recordSpanException(span: Span, error: unknown) { + if (error instanceof Error) { + span.recordException(sanitizeSpanError(error)); + } else if (typeof error === "string") { + span.recordException(error.replace(/\0/g, "")); + } else { + span.recordException(JSON.stringify(error).replace(/\0/g, "")); + } + + span.setStatus({ code: SpanStatusCode.ERROR }); +} + +function sanitizeSpanError(error: Error) { + // Create a new error object with the same name, message and stack trace + const sanitizedError = new Error(error.message.replace(/\0/g, "")); + sanitizedError.name = error.name.replace(/\0/g, ""); + sanitizedError.stack = error.stack?.replace(/\0/g, ""); + + return sanitizedError; +} diff --git a/packages/core/src/v3/runMetadata/index.ts b/packages/core/src/v3/runMetadata/index.ts index 178497245b..0ea9cd2ea6 100644 --- a/packages/core/src/v3/runMetadata/index.ts +++ b/packages/core/src/v3/runMetadata/index.ts @@ -1,13 +1,17 @@ +import { dequal } from "dequal/lite"; import { DeserializedJson } from "../../schemas/json.js"; import { apiClientManager } from "../apiClientManager-api.js"; import { taskContext } from "../task-context-api.js"; import { getGlobal, registerGlobal } from "../utils/globals.js"; import { ApiRequestOptions } from "../zodfetch.js"; +import { JSONHeroPath } from "@jsonhero/path"; const API_NAME = "run-metadata"; export class RunMetadataAPI { private static _instance?: RunMetadataAPI; + private flushTimeoutId: NodeJS.Timeout | null = null; + private hasChanges: boolean = false; private constructor() {} @@ -39,68 +43,118 @@ export class RunMetadataAPI { return this.store?.[key]; } - public async setKey( - key: string, - 
value: DeserializedJson, - requestOptions?: ApiRequestOptions - ): Promise { + public setKey(key: string, value: DeserializedJson) { const runId = taskContext.ctx?.run.id; if (!runId) { return; } - const apiClient = apiClientManager.clientOrThrow(); + let nextStore: Record | undefined = this.store + ? structuredClone(this.store) + : undefined; + + if (key.startsWith("$.")) { + const path = new JSONHeroPath(key); + path.set(nextStore, value); + } else { + nextStore = { + ...(nextStore ?? {}), + [key]: value, + }; + } - const nextStore = { - ...(this.store ?? {}), - [key]: value, - }; + if (!nextStore) { + return; + } - const response = await apiClient.updateRunMetadata( - runId, - { metadata: nextStore }, - requestOptions - ); + if (!dequal(this.store, nextStore)) { + this.hasChanges = true; + } - this.store = response.metadata; + this.store = nextStore; } - public async deleteKey(key: string, requestOptions?: ApiRequestOptions): Promise { + public deleteKey(key: string) { const runId = taskContext.ctx?.run.id; if (!runId) { return; } - const apiClient = apiClientManager.clientOrThrow(); - const nextStore = { ...(this.store ?? {}) }; delete nextStore[key]; - const response = await apiClient.updateRunMetadata( - runId, - { metadata: nextStore }, - requestOptions - ); + if (!dequal(this.store, nextStore)) { + this.hasChanges = true; + } + + this.store = nextStore; + } + + public update(metadata: Record): void { + const runId = taskContext.ctx?.run.id; + + if (!runId) { + return; + } + + if (!dequal(this.store, metadata)) { + this.hasChanges = true; + } - this.store = response.metadata; + this.store = metadata; } - public async update( - metadata: Record, - requestOptions?: ApiRequestOptions - ): Promise { + public async flush(requestOptions?: ApiRequestOptions): Promise { const runId = taskContext.ctx?.run.id; if (!runId) { return; } + if (!this.store) { + return; + } + + if (!this.hasChanges) { + return; + } + const apiClient = apiClientManager.clientOrThrow(); - const response = await apiClient.updateRunMetadata(runId, { metadata }, requestOptions); + try { + this.hasChanges = false; + await apiClient.updateRunMetadata(runId, { metadata: this.store }, requestOptions); + } catch (error) { + this.hasChanges = true; + throw error; + } + } + + public startPeriodicFlush(intervalMs: number = 1000) { + const periodicFlush = async (intervalMs: number) => { + try { + await this.flush(); + } catch (error) { + console.error("Failed to flush metadata", error); + throw error; + } finally { + scheduleNext(); + } + }; + + const scheduleNext = () => { + this.flushTimeoutId = setTimeout(() => periodicFlush(intervalMs), intervalMs); + }; + + scheduleNext(); + } - this.store = response.metadata; + stopPeriodicFlush(): void { + if (this.flushTimeoutId) { + clearTimeout(this.flushTimeoutId); + this.flushTimeoutId = null; + } } } diff --git a/packages/core/src/v3/schemas/api.ts b/packages/core/src/v3/schemas/api.ts index 1958aacf83..2e325c9138 100644 --- a/packages/core/src/v3/schemas/api.ts +++ b/packages/core/src/v3/schemas/api.ts @@ -1,8 +1,8 @@ import { z } from "zod"; +import { DeserializedJsonSchema } from "../../schemas/json.js"; +import { SerializedError } from "./common.js"; import { BackgroundWorkerMetadata } from "./resources.js"; import { QueueOptions } from "./schemas.js"; -import { SerializedError } from "./common.js"; -import { DeserializedJsonSchema, SerializableJsonSchema } from "../../schemas/json.js"; export const WhoAmIResponseSchema = z.object({ userId: z.string(), @@ -58,7 +58,7 @@ export 
const CreateBackgroundWorkerResponse = z.object({ export type CreateBackgroundWorkerResponse = z.infer; //an array of 1, 2, or 3 strings -const RunTag = z.string().max(64, "Tags must be less than 64 characters"); +const RunTag = z.string().max(128, "Tags must be less than 128 characters"); export const RunTags = z.union([RunTag, RunTag.array()]); export type RunTags = z.infer; @@ -285,8 +285,8 @@ export const ScheduledTaskPayload = z.object({ type: ScheduleType, /** When the task was scheduled to run. * Note this will be slightly different from `new Date()` because it takes a few ms to run the task. - * - * This date is UTC. To output it as a string with a timezone you would do this: + * + * This date is UTC. To output it as a string with a timezone you would do this: * ```ts * const formatted = payload.timestamp.toLocaleString("en-US", { timeZone: payload.timezone, @@ -314,7 +314,7 @@ export const CreateScheduleOptions = z.object({ /** The id of the task you want to attach to. */ task: z.string(), /** The schedule in CRON format. - * + * * ```txt * * * * * * ┬ ┬ ┬ ┬ ┬ @@ -529,6 +529,7 @@ export const RetrieveRunResponse = z.object({ payloadPresignedUrl: z.string().optional(), output: z.any().optional(), outputPresignedUrl: z.string().optional(), + error: SerializedError.optional(), schedule: RunScheduleDetails.optional(), relatedRuns: z.object({ root: RelatedRunDetails.optional(), @@ -548,6 +549,7 @@ export const RetrieveRunResponse = z.object({ }) .optional() ), + attemptCount: z.number().default(0), }); export type RetrieveRunResponse = z.infer; @@ -628,3 +630,34 @@ export const UpdateMetadataResponseBody = z.object({ }); export type UpdateMetadataResponseBody = z.infer; + +export const SubscribeRunRawShape = z.object({ + id: z.string(), + idempotencyKey: z.string().nullish(), + createdAt: z.coerce.date(), + updatedAt: z.coerce.date(), + startedAt: z.coerce.date().nullish(), + delayUntil: z.coerce.date().nullish(), + queuedAt: z.coerce.date().nullish(), + expiredAt: z.coerce.date().nullish(), + completedAt: z.coerce.date().nullish(), + taskIdentifier: z.string(), + friendlyId: z.string(), + number: z.number(), + isTest: z.boolean(), + status: z.string(), + usageDurationMs: z.number(), + costInCents: z.number(), + baseCostInCents: z.number(), + ttl: z.string().nullish(), + payload: z.string().nullish(), + payloadType: z.string().nullish(), + metadata: z.string().nullish(), + metadataType: z.string().nullish(), + output: z.string().nullish(), + outputType: z.string().nullish(), + runTags: z.array(z.string()).nullish().default([]), + error: SerializedError.nullish(), +}); + +export type SubscribeRunRawShape = z.infer; diff --git a/packages/core/src/v3/tracer.ts b/packages/core/src/v3/tracer.ts index e1325ba443..13ab06310e 100644 --- a/packages/core/src/v3/tracer.ts +++ b/packages/core/src/v3/tracer.ts @@ -13,7 +13,7 @@ import { SemanticInternalAttributes } from "./semanticInternalAttributes.js"; import { clock } from "./clock-api.js"; import { usage } from "./usage-api.js"; import { taskContext } from "./task-context-api.js"; -import { recordSpanException } from "./otel/index.js"; +import { recordSpanException } from "./otel/utils.js"; export type TriggerTracerConfig = | { diff --git a/packages/core/src/v3/types/idempotencyKeys.ts b/packages/core/src/v3/types/idempotencyKeys.ts new file mode 100644 index 0000000000..920f1b30db --- /dev/null +++ b/packages/core/src/v3/types/idempotencyKeys.ts @@ -0,0 +1,5 @@ +declare const __brand: unique symbol; +type Brand = { [__brand]: B }; +type 
Branded = T & Brand; + +export type IdempotencyKey = Branded; diff --git a/packages/core/src/v3/types/index.ts b/packages/core/src/v3/types/index.ts index b90d7da0ce..2aebb98db2 100644 --- a/packages/core/src/v3/types/index.ts +++ b/packages/core/src/v3/types/index.ts @@ -1,79 +1,10 @@ -import { RetryOptions, TaskMetadata, TaskManifest, TaskRunContext } from "../schemas/index.js"; +import { RetrieveRunResponse } from "../schemas/api.js"; +import { AnyRunTypes, InferRunTypes } from "./tasks.js"; import { Prettify } from "./utils.js"; export * from "./utils.js"; - -export type InitOutput = Record | void | undefined; - -export type RunFnParams = Prettify<{ - /** Metadata about the task, run, attempt, queue, environment, organization, project and batch. */ - ctx: Context; - /** If you use the `init` function, this will be whatever you returned. */ - init?: TInitOutput; - /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ - signal?: AbortSignal; -}>; - -export type MiddlewareFnParams = Prettify<{ - ctx: Context; - next: () => Promise; - /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ - signal?: AbortSignal; -}>; - -export type InitFnParams = Prettify<{ - ctx: Context; - /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ - signal?: AbortSignal; -}>; - -export type StartFnParams = Prettify<{ - ctx: Context; - /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ - signal?: AbortSignal; -}>; - -export type Context = TaskRunContext; - -export type SuccessFnParams = RunFnParams; - -export type FailureFnParams = RunFnParams; - -export type HandleErrorFnParams = RunFnParams & - Prettify<{ - retry?: RetryOptions; - retryAt?: Date; - retryDelayInMs?: number; - }>; - -export type HandleErrorModificationOptions = { - skipRetrying?: boolean | undefined; - retryAt?: Date | undefined; - retryDelayInMs?: number | undefined; - retry?: RetryOptions | undefined; - error?: unknown; -}; - -export type HandleErrorResult = - | undefined - | void - | HandleErrorModificationOptions - | Promise; - -export type HandleErrorArgs = { - ctx: Context; - retry?: RetryOptions; - retryAt?: Date; - retryDelayInMs?: number; - /** Abort signal that is aborted when a task run exceeds it's maxDuration. 
Can be used to automatically cancel downstream requests */ - signal?: AbortSignal; -}; - -export type HandleErrorFunction = ( - payload: any, - error: unknown, - params: HandleErrorArgs -) => HandleErrorResult; +export * from "./tasks.js"; +export * from "./idempotencyKeys.js"; type ResolveEnvironmentVariablesOptions = { variables: Record | Array<{ name: string; value: string }>; @@ -96,19 +27,11 @@ export type ResolveEnvironmentVariablesFunction = ( params: ResolveEnvironmentVariablesParams ) => ResolveEnvironmentVariablesResult; -export type TaskMetadataWithFunctions = TaskMetadata & { - fns: { - run: (payload: any, params: RunFnParams) => Promise; - init?: (payload: any, params: InitFnParams) => Promise; - cleanup?: (payload: any, params: RunFnParams) => Promise; - middleware?: (payload: any, params: MiddlewareFnParams) => Promise; - handleError?: ( - payload: any, - error: unknown, - params: HandleErrorFnParams - ) => HandleErrorResult; - onSuccess?: (payload: any, output: any, params: SuccessFnParams) => Promise; - onFailure?: (payload: any, error: unknown, params: FailureFnParams) => Promise; - onStart?: (payload: any, params: StartFnParams) => Promise; - }; -}; +export type RetrieveRunResult = Prettify< + Omit & { + output?: InferRunTypes["output"]; + payload?: InferRunTypes["payload"]; + } +>; + +export type AnyRetrieveRunResult = RetrieveRunResult; diff --git a/packages/core/src/v3/types/schemas.ts b/packages/core/src/v3/types/schemas.ts new file mode 100644 index 0000000000..f1b368330a --- /dev/null +++ b/packages/core/src/v3/types/schemas.ts @@ -0,0 +1,124 @@ +export type SchemaZodEsque = { + _input: TInput; + _output: TParsedInput; +}; + +export type SchemaValibotEsque = { + schema: { + _types?: { + input: TInput; + output: TParsedInput; + }; + }; +}; + +export type SchemaArkTypeEsque = { + inferIn: TInput; + infer: TParsedInput; +}; + +export type SchemaMyZodEsque = { + parse: (input: any) => TInput; +}; + +export type SchemaSuperstructEsque = { + create: (input: unknown) => TInput; +}; + +export type SchemaCustomValidatorEsque = (input: unknown) => Promise | TInput; + +export type SchemaYupEsque = { + validateSync: (input: unknown) => TInput; +}; + +export type SchemaScaleEsque = { + assert(value: unknown): asserts value is TInput; +}; + +export type SchemaWithoutInput = + | SchemaCustomValidatorEsque + | SchemaMyZodEsque + | SchemaScaleEsque + | SchemaSuperstructEsque + | SchemaYupEsque; + +export type SchemaWithInputOutput = + | SchemaZodEsque + | SchemaValibotEsque + | SchemaArkTypeEsque; + +export type Schema = SchemaWithInputOutput | SchemaWithoutInput; + +export type inferSchema = TSchema extends SchemaWithInputOutput< + infer $TIn, + infer $TOut +> + ? { + in: $TIn; + out: $TOut; + } + : TSchema extends SchemaWithoutInput + ? { + in: $InOut; + out: $InOut; + } + : never; + +export type inferSchemaIn< + TSchema extends Schema | undefined, + TDefault = unknown, +> = TSchema extends Schema ? inferSchema["in"] : TDefault; + +export type inferSchemaOut< + TSchema extends Schema | undefined, + TDefault = unknown, +> = TSchema extends Schema ? 
inferSchema["out"] : TDefault; + +export type SchemaParseFn = (value: unknown) => Promise | TType; +export type AnySchemaParseFn = SchemaParseFn; + +export function getSchemaParseFn(procedureParser: Schema): SchemaParseFn { + const parser = procedureParser as any; + + if (typeof parser === "function" && typeof parser.assert === "function") { + // ParserArkTypeEsque - arktype schemas shouldn't be called as a function because they return a union type instead of throwing + return parser.assert.bind(parser); + } + + if (typeof parser === "function") { + // ParserValibotEsque (>= v0.31.0) + // ParserCustomValidatorEsque + return parser; + } + + if (typeof parser.parseAsync === "function") { + // ParserZodEsque + return parser.parseAsync.bind(parser); + } + + if (typeof parser.parse === "function") { + // ParserZodEsque + // ParserValibotEsque (< v0.13.0) + return parser.parse.bind(parser); + } + + if (typeof parser.validateSync === "function") { + // ParserYupEsque + return parser.validateSync.bind(parser); + } + + if (typeof parser.create === "function") { + // ParserSuperstructEsque + return parser.create.bind(parser); + } + + if (typeof parser.assert === "function") { + // ParserScaleEsque + return (value) => { + parser.assert(value); + return value as TType; + }; + } + + throw new Error("Could not find a validator fn"); +} diff --git a/packages/core/src/v3/types/tasks.ts b/packages/core/src/v3/types/tasks.ts new file mode 100644 index 0000000000..394dde4458 --- /dev/null +++ b/packages/core/src/v3/types/tasks.ts @@ -0,0 +1,675 @@ +import { SerializableJson } from "../../schemas/json.js"; +import { RunTags } from "../schemas/api.js"; +import { QueueOptions } from "../schemas/schemas.js"; +import { IdempotencyKey } from "./idempotencyKeys.js"; +import { + MachineCpu, + MachineMemory, + RetryOptions, + TaskMetadata, + TaskRunContext, +} from "../schemas/index.js"; +import { Prettify } from "./utils.js"; +import { AnySchemaParseFn, inferSchemaOut, Schema } from "./schemas.js"; +import { TriggerApiRequestOptions } from "../apiClient/index.js"; + +type RequireOne = { + [X in Exclude]?: T[X]; +} & { + [P in K]-?: T[P]; +}; + +export type Queue = RequireOne; +export type TaskSchema = Schema; +export type { inferSchemaIn } from "./schemas.js"; + +type TaskRunConcurrencyOptions = Queue; + +export class SubtaskUnwrapError extends Error { + public readonly taskId: string; + public readonly runId: string; + public readonly cause?: unknown; + + constructor(taskId: string, runId: string, subtaskError: unknown) { + if (subtaskError instanceof Error) { + super(`Error in ${taskId}: ${subtaskError.message}`); + this.cause = subtaskError; + this.name = "SubtaskUnwrapError"; + } else { + super(`Error in ${taskId}`); + this.name = "SubtaskUnwrapError"; + this.cause = subtaskError; + } + + this.taskId = taskId; + this.runId = runId; + } +} + +export class TaskRunPromise extends Promise> { + constructor( + executor: ( + resolve: (value: TaskRunResult | PromiseLike>) => void, + reject: (reason?: any) => void + ) => void, + private readonly taskId: string + ) { + super(executor); + } + + unwrap(): Promise { + return this.then((result) => { + if (result.ok) { + return result.output; + } else { + throw new SubtaskUnwrapError(this.taskId, result.id, result.error); + } + }); + } +} + +export type InitOutput = Record | void | undefined; + +export type RunFnParams = Prettify<{ + /** Metadata about the task, run, attempt, queue, environment, organization, project and batch. 
*/ + ctx: Context; + /** If you use the `init` function, this will be whatever you returned. */ + init?: TInitOutput; + /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ + signal?: AbortSignal; +}>; + +export type MiddlewareFnParams = Prettify<{ + ctx: Context; + next: () => Promise; + /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ + signal?: AbortSignal; +}>; + +export type InitFnParams = Prettify<{ + ctx: Context; + /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ + signal?: AbortSignal; +}>; + +export type StartFnParams = Prettify<{ + ctx: Context; + /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ + signal?: AbortSignal; +}>; + +export type Context = TaskRunContext; + +export type SuccessFnParams = RunFnParams; + +export type FailureFnParams = RunFnParams; + +export type HandleErrorFnParams = RunFnParams & + Prettify<{ + retry?: RetryOptions; + retryAt?: Date; + retryDelayInMs?: number; + }>; + +export type HandleErrorModificationOptions = { + skipRetrying?: boolean | undefined; + retryAt?: Date | undefined; + retryDelayInMs?: number | undefined; + retry?: RetryOptions | undefined; + error?: unknown; +}; + +export type HandleErrorResult = + | undefined + | void + | HandleErrorModificationOptions + | Promise; + +export type HandleErrorArgs = { + ctx: Context; + retry?: RetryOptions; + retryAt?: Date; + retryDelayInMs?: number; + /** Abort signal that is aborted when a task run exceeds it's maxDuration. Can be used to automatically cancel downstream requests */ + signal?: AbortSignal; +}; + +export type HandleErrorFunction = ( + payload: any, + error: unknown, + params: HandleErrorArgs +) => HandleErrorResult; + +type CommonTaskOptions< + TIdentifier extends string, + TPayload = void, + TOutput = unknown, + TInitOutput extends InitOutput = any, +> = { + /** An id for your task. This must be unique inside your project and not change between versions. */ + id: TIdentifier; + + /** The retry settings when an uncaught error is thrown. + * + * If omitted it will use the values in your `trigger.config.ts` file. + * + * @example + * + * ``` + * export const taskWithRetries = task({ + id: "task-with-retries", + retry: { + maxAttempts: 10, + factor: 1.8, + minTimeoutInMs: 500, + maxTimeoutInMs: 30_000, + randomize: false, + }, + run: async ({ payload, ctx }) => { + //... + }, + }); + * ``` + * */ + retry?: RetryOptions; + + /** Used to configure what should happen when more than one run is triggered at the same time. + * + * @example + * one at a time execution + * + * ```ts + * export const oneAtATime = task({ + id: "one-at-a-time", + queue: { + concurrencyLimit: 1, + }, + run: async ({ payload, ctx }) => { + //... + }, + }); + * ``` + */ + queue?: QueueOptions; + /** Configure the spec of the machine you want your task to run on. + * + * @example + * + * ```ts + * export const heavyTask = task({ + id: "heavy-task", + machine: { + cpu: 2, + memory: 4, + }, + run: async ({ payload, ctx }) => { + //... + }, + }); + * ``` + */ + machine?: { + /** vCPUs. The default is 0.5. + * + * Possible values: + * - 0.25 + * - 0.5 + * - 1 + * - 2 + * - 4 + * @deprecated use preset instead + */ + cpu?: MachineCpu; + /** In GBs of RAM. The default is 1. 
+ * + * Possible values: + * - 0.25 + * - 0.5 + * - 1 + * - 2 + * - 4 + * - 8 + * * @deprecated use preset instead + */ + memory?: MachineMemory; + + /** Preset to use for the machine. Defaults to small-1x */ + preset?: + | "micro" + | "small-1x" + | "small-2x" + | "medium-1x" + | "medium-2x" + | "large-1x" + | "large-2x"; + }; + + /** + * The maximum duration in compute-time seconds that a task run is allowed to run. If the task run exceeds this duration, it will be stopped. + * + * Minimum value is 5 seconds + */ + maxDuration?: number; + + /** This gets called when a task is triggered. It's where you put the code you want to execute. + * + * @param payload - The payload that is passed to your task when it's triggered. This must be JSON serializable. + * @param params - Metadata about the run. + */ + run: (payload: TPayload, params: RunFnParams) => Promise; + + /** + * init is called before the run function is called. It's useful for setting up any global state. + */ + init?: (payload: TPayload, params: InitFnParams) => Promise; + + /** + * cleanup is called after the run function has completed. + */ + cleanup?: (payload: TPayload, params: RunFnParams) => Promise; + + /** + * handleError is called when the run function throws an error. It can be used to modify the error or return new retry options. + */ + handleError?: ( + payload: TPayload, + error: unknown, + params: HandleErrorFnParams + ) => HandleErrorResult; + + /** + * middleware allows you to run code "around" the run function. This can be useful for logging, metrics, or other cross-cutting concerns. + * + * When writing middleware, you should always call `next()` to continue the execution of the task: + * + * ```ts + * export const middlewareTask = task({ + * id: "middleware-task", + * middleware: async (payload, { ctx, next }) => { + * console.log("Before run"); + * await next(); + * console.log("After run"); + * }, + * run: async (payload, { ctx }) => {} + * }); + * ``` + */ + middleware?: (payload: TPayload, params: MiddlewareFnParams) => Promise; + + /** + * onStart is called the first time a task is executed in a run (not before every retry) + */ + onStart?: (payload: TPayload, params: StartFnParams) => Promise; + + /** + * onSuccess is called after the run function has successfully completed. 
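A compact sketch showing several of the lifecycle hooks described above used together; the task id, payload shape and log messages are illustrative:

```ts
import { task } from "@trigger.dev/sdk/v3";

export const lifecycleTask = task({
  id: "lifecycle-task",
  init: async () => {
    // Whatever init returns is passed to run, cleanup, onSuccess etc. as `init`.
    return { startedAt: Date.now() };
  },
  onStart: async (payload: any) => {
    // Called the first time a task is executed in a run, not before every retry.
    console.log("starting", payload);
  },
  run: async (payload: any, { init }) => {
    return { elapsedMs: Date.now() - (init?.startedAt ?? 0) };
  },
  onSuccess: async (_payload: any, output) => {
    console.log("succeeded after", output.elapsedMs, "ms");
  },
  onFailure: async (_payload: any, error) => {
    console.log("failed and will not be retried", error);
  },
  cleanup: async () => {
    // Runs after the run function has completed.
  },
});
```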
+ */ + onSuccess?: ( + payload: TPayload, + output: TOutput, + params: SuccessFnParams + ) => Promise; + + /** + * onFailure is called after a task run has failed (meaning the run function threw an error and won't be retried anymore) + */ + onFailure?: ( + payload: TPayload, + error: unknown, + params: FailureFnParams + ) => Promise; +}; + +export type TaskOptions< + TIdentifier extends string, + TPayload = void, + TOutput = unknown, + TInitOutput extends InitOutput = any, +> = CommonTaskOptions; + +export type TaskWithSchemaOptions< + TIdentifier extends string, + TSchema extends TaskSchema | undefined = undefined, + TOutput = unknown, + TInitOutput extends InitOutput = any, +> = CommonTaskOptions, TOutput, TInitOutput> & { + schema?: TSchema; +}; + +declare const __output: unique symbol; +declare const __payload: unique symbol; +type BrandRun = { [__output]: O; [__payload]: P }; +export type BrandedRun = T & BrandRun; + +export type RunHandle = BrandedRun< + { + id: string; + /** + * An auto-generated JWT that can be used to access the run + */ + publicAccessToken: string; + taskIdentifier: TTaskIdentifier; + }, + TPayload, + TOutput +>; + +export type AnyRunHandle = RunHandle; + +/** + * A BatchRunHandle can be used to retrieve the runs of a batch trigger in a typesafe manner. + */ +export type BatchRunHandle = BrandedRun< + { + batchId: string; + runs: Array>; + publicAccessToken: string; + taskIdentifier: TTaskIdentifier; + }, + TOutput, + TPayload +>; + +export type RunHandleOutput = TRunHandle extends RunHandle + ? TOutput + : never; + +export type RunHandlePayload = TRunHandle extends RunHandle + ? TPayload + : never; + +export type RunHandleTaskIdentifier = TRunHandle extends RunHandle< + infer TTaskIdentifier, + any, + any +> + ? TTaskIdentifier + : never; + +export type TaskRunResult = + | { + ok: true; + id: string; + output: TOutput; + } + | { + ok: false; + id: string; + error: unknown; + }; + +export type BatchResult = { + id: string; + runs: TaskRunResult[]; +}; + +export type BatchItem = TInput extends void + ? { payload?: TInput; options?: TaskRunOptions } + : { payload: TInput; options?: TaskRunOptions }; + +export interface Task { + /** + * The id of the task. + */ + id: TIdentifier; + /** + * Trigger a task with the given payload, and continue without waiting for the result. If you want to wait for the result, use `triggerAndWait`. Returns the id of the triggered task run. + * @param payload + * @param options + * @returns RunHandle + * - `id` - The id of the triggered task run. + */ + trigger: ( + payload: TInput, + options?: TaskRunOptions, + requestOptions?: TriggerApiRequestOptions + ) => Promise>; + + /** + * Batch trigger multiple task runs with the given payloads, and continue without waiting for the results. If you want to wait for the results, use `batchTriggerAndWait`. Returns the id of the triggered batch. + * @param items + * @returns InvokeBatchHandle + * - `batchId` - The id of the triggered batch. + * - `runs` - The ids of the triggered task runs. + */ + batchTrigger: ( + items: Array>, + requestOptions?: TriggerApiRequestOptions + ) => Promise>; + + /** + * Trigger a task with the given payload, and wait for the result. 
Returns the result of the task run + * @param payload + * @param options - Options for the task run + * @returns TaskRunResult + * @example + * ``` + * const result = await task.triggerAndWait({ foo: "bar" }); + * + * if (result.ok) { + * console.log(result.output); + * } else { + * console.error(result.error); + * } + * ``` + */ + triggerAndWait: (payload: TInput, options?: TaskRunOptions) => TaskRunPromise; + + /** + * Batch trigger multiple task runs with the given payloads, and wait for the results. Returns the results of the task runs. + * @param items + * @returns BatchResult + * @example + * ``` + * const result = await task.batchTriggerAndWait([ + * { payload: { foo: "bar" } }, + * { payload: { foo: "baz" } }, + * ]); + * + * for (const run of result.runs) { + * if (run.ok) { + * console.log(run.output); + * } else { + * console.error(run.error); + * } + * } + * ``` + */ + batchTriggerAndWait: (items: Array>) => Promise>; +} + +export type AnyTask = Task; + +export type TaskPayload = TTask extends Task + ? TInput + : never; + +export type TaskOutput = TTask extends Task + ? TOutput + : never; + +export type TaskOutputHandle = TTask extends Task< + infer TIdentifier, + infer TInput, + infer TOutput +> + ? RunHandle + : never; + +export type TaskBatchOutputHandle = TTask extends Task< + infer TIdentifier, + infer TInput, + infer TOutput +> + ? BatchRunHandle + : never; + +export type TaskIdentifier = TTask extends Task + ? TIdentifier + : never; + +export type TriggerJwtOptions = { + /** + * The expiration time of the JWT. This can be a string like "1h" or a Date object. + * + * Defaults to 1 hour. + */ + expirationTime?: number | Date | string; +}; + +export type TaskRunOptions = { + /** + * A unique key that can be used to ensure that a task is only triggered once per key. + * + * You can use `idempotencyKeys.create` to create an idempotency key first, and then pass it to the task options. + * + * @example + * + * ```typescript + * import { idempotencyKeys, task } from "@trigger.dev/sdk/v3"; + * + * export const myTask = task({ + * id: "my-task", + * run: async (payload: any) => { + * // scoped to the task run by default + * const idempotencyKey = await idempotencyKeys.create("my-task-key"); + * + * // Use the idempotency key when triggering child tasks + * await childTask.triggerAndWait(payload, { idempotencyKey }); + * + * // scoped globally, does not include the task run ID + * const globalIdempotencyKey = await idempotencyKeys.create("my-task-key", { scope: "global" }); + * + * await childTask.triggerAndWait(payload, { idempotencyKey: globalIdempotencyKey }); + * + * // You can also pass a string directly, which is the same as a global idempotency key + * await childTask.triggerAndWait(payload, { idempotencyKey: "my-very-unique-key" }); + * } + * }); + * ``` + * + * When triggering a task inside another task, we automatically inject the run ID into the key material. + * + * If you are triggering a task from your backend, ensure you include some sufficiently unique key material to prevent collisions. 
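Besides checking `result.ok` as in the example above, the `TaskRunPromise` returned by `triggerAndWait` can be unwrapped straight to the output; a short sketch, with `childTask` standing in for any task in your project:

```ts
import { task } from "@trigger.dev/sdk/v3";

export const childTask = task({
  id: "child-task",
  run: async (payload: { value: number }) => {
    return { doubled: payload.value * 2 };
  },
});

export const parentTask = task({
  id: "parent-task",
  run: async (payload: any) => {
    try {
      // unwrap() resolves directly to the child task's output, or throws a
      // SubtaskUnwrapError carrying the taskId, runId and the original error as `cause`.
      const { doubled } = await childTask.triggerAndWait({ value: 21 }).unwrap();
      return { doubled };
    } catch (err) {
      if (err instanceof Error && err.name === "SubtaskUnwrapError") {
        console.error("child task failed:", err);
      }
      throw err;
    }
  },
});
```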
+ * + * @example + * + * ```typescript + * import { idempotencyKeys, tasks } from "@trigger.dev/sdk/v3"; + * + * // Somewhere in your backend + * const idempotencyKey = await idempotenceKeys.create(["my-task-trigger", "user-123"]); + * await tasks.trigger("my-task", { foo: "bar" }, { idempotencyKey }); + * ``` + * + */ + idempotencyKey?: IdempotencyKey | string | string[]; + maxAttempts?: number; + queue?: TaskRunConcurrencyOptions; + concurrencyKey?: string; + /** + * The delay before the task is executed. This can be a string like "1h" or a Date object. + * + * @example + * "1h" - 1 hour + * "30d" - 30 days + * "15m" - 15 minutes + * "2w" - 2 weeks + * "60s" - 60 seconds + * new Date("2025-01-01T00:00:00Z") + */ + delay?: string | Date; + + /** + * Set a time-to-live for this run. If the run is not executed within this time, it will be removed from the queue and never execute. + * + * @example + * + * ```ts + * await myTask.trigger({ foo: "bar" }, { ttl: "1h" }); + * await myTask.trigger({ foo: "bar" }, { ttl: 60 * 60 }); // 1 hour + * ``` + * + * The minimum value is 1 second. Setting the `ttl` to `0` will disable the TTL and the run will never expire. + * + * **Note:** Runs in development have a default `ttl` of 10 minutes. You can override this by setting the `ttl` option. + */ + ttl?: string | number; + + /** + * Tags to attach to the run. Tags can be used to filter runs in the dashboard and using the SDK. + * + * You can set up to 10 tags per run, they must be less than 128 characters each. + * + * We recommend prefixing tags with a namespace using an underscore or colon, like `user_1234567` or `org:9876543`. + * + * @example + * + * ```ts + * await myTask.trigger({ foo: "bar" }, { tags: ["user:1234567", "org:9876543"] }); + * ``` + */ + tags?: RunTags; + + /** + * Metadata to attach to the run. Metadata can be used to store additional information about the run. Limited to 4KB. + */ + metadata?: Record; + + /** + * The maximum duration in compute-time seconds that a task run is allowed to run. If the task run exceeds this duration, it will be stopped. + * + * This will override the task's maxDuration. + * + * Minimum value is 5 seconds + */ + maxDuration?: number; +}; + +export type TaskMetadataWithFunctions = TaskMetadata & { + fns: { + run: (payload: any, params: RunFnParams) => Promise; + init?: (payload: any, params: InitFnParams) => Promise; + cleanup?: (payload: any, params: RunFnParams) => Promise; + middleware?: (payload: any, params: MiddlewareFnParams) => Promise; + handleError?: ( + payload: any, + error: unknown, + params: HandleErrorFnParams + ) => HandleErrorResult; + onSuccess?: (payload: any, output: any, params: SuccessFnParams) => Promise; + onFailure?: (payload: any, error: unknown, params: FailureFnParams) => Promise; + onStart?: (payload: any, params: StartFnParams) => Promise; + parsePayload?: AnySchemaParseFn; + }; +}; + +export type RunTypes = { + output: TOutput; + payload: TPayload; + taskIdentifier: TTaskIdentifier; +}; + +export type AnyRunTypes = RunTypes; + +export type InferRunTypes = T extends RunHandle< + infer TTaskIdentifier, + infer TPayload, + infer TOutput +> + ? RunTypes + : T extends Task + ? 
RunTypes + : AnyRunTypes; + +export type RunHandleFromTypes = RunHandle< + TRunTypes["taskIdentifier"], + TRunTypes["payload"], + TRunTypes["output"] +>; + +export type BatchRunHandleFromTypes = BatchRunHandle< + TRunTypes["taskIdentifier"], + TRunTypes["payload"], + TRunTypes["output"] +>; diff --git a/packages/core/src/v3/utils/getEnv.ts b/packages/core/src/v3/utils/getEnv.ts index ff0ced58a1..65ddee39df 100644 --- a/packages/core/src/v3/utils/getEnv.ts +++ b/packages/core/src/v3/utils/getEnv.ts @@ -6,3 +6,19 @@ export function getEnvVar(name: string): string | undefined { return; } + +export function getNumberEnvVar(name: string, defaultValue?: number): number | undefined { + const value = getEnvVar(name); + + if (value === undefined) { + return defaultValue; + } + + const parsed = Number(value); + + if (isNaN(parsed)) { + return defaultValue; + } + + return parsed; +} diff --git a/packages/core/src/v3/utils/ioSerialization.ts b/packages/core/src/v3/utils/ioSerialization.ts index 830a6fb405..e34ecc90e3 100644 --- a/packages/core/src/v3/utils/ioSerialization.ts +++ b/packages/core/src/v3/utils/ioSerialization.ts @@ -33,6 +33,12 @@ export async function parsePacket(value: IOPacket): Promise { } } +export async function conditionallyImportAndParsePacket(value: IOPacket): Promise { + const importedPacket = await conditionallyImportPacket(value); + + return await parsePacket(importedPacket); +} + export async function stringifyIO(value: any): Promise { if (value === undefined) { return { dataType: "application/json" }; diff --git a/packages/core/src/v3/utils/safeAsyncLocalStorage.ts b/packages/core/src/v3/utils/safeAsyncLocalStorage.ts index 1fba2e646c..60c01dcca5 100644 --- a/packages/core/src/v3/utils/safeAsyncLocalStorage.ts +++ b/packages/core/src/v3/utils/safeAsyncLocalStorage.ts @@ -7,6 +7,10 @@ export class SafeAsyncLocalStorage { this.storage = new AsyncLocalStorage(); } + enterWith(context: T): void { + this.storage.enterWith(context); + } + runWith Promise>(context: T, fn: R): Promise> { return this.storage.run(context, fn); } diff --git a/packages/core/src/v3/workers/index.ts b/packages/core/src/v3/workers/index.ts index a91de70db0..61a62aa55f 100644 --- a/packages/core/src/v3/workers/index.ts +++ b/packages/core/src/v3/workers/index.ts @@ -1,7 +1,7 @@ export { TaskExecutor, type TaskExecutorOptions } from "./taskExecutor.js"; export type { RuntimeManager } from "../runtime/manager.js"; export { PreciseWallClock as DurableClock } from "../clock/preciseWallClock.js"; -export { getEnvVar } from "../utils/getEnv.js"; +export { getEnvVar, getNumberEnvVar } from "../utils/getEnv.js"; export { OtelTaskLogger, logLevels } from "../logger/taskLogger.js"; export { ConsoleInterceptor } from "../consoleInterceptor.js"; export { TracingSDK, type TracingDiagnosticLogLevel, recordSpanException } from "../otel/index.js"; diff --git a/packages/core/src/v3/workers/taskExecutor.ts b/packages/core/src/v3/workers/taskExecutor.ts index 7139e35225..017c15c395 100644 --- a/packages/core/src/v3/workers/taskExecutor.ts +++ b/packages/core/src/v3/workers/taskExecutor.ts @@ -2,7 +2,7 @@ import { SpanKind } from "@opentelemetry/api"; import { VERSION } from "../../version.js"; import { ApiError, RateLimitError } from "../apiClient/errors.js"; import { ConsoleInterceptor } from "../consoleInterceptor.js"; -import { parseError, sanitizeError } from "../errors.js"; +import { parseError, sanitizeError, TaskPayloadParsedError } from "../errors.js"; import { runMetadata, TriggerConfig } from "../index.js"; import 
{ recordSpanException, TracingSDK } from "../otel/index.js"; import { @@ -95,6 +95,8 @@ export class TaskExecutor { parsedPayload = await parsePacket(payloadPacket); + parsedPayload = await this.#parsePayload(parsedPayload); + if (execution.attempt.number === 1) { await this.#callOnStartFunctions(parsedPayload, ctx, signal); } @@ -394,6 +396,18 @@ export class TaskExecutor { } } + async #parsePayload(payload: unknown) { + if (!this.task.fns.parsePayload) { + return payload; + } + + try { + return await this.task.fns.parsePayload(payload); + } catch (e) { + throw new TaskPayloadParsedError(e); + } + } + async #callOnStartFunctions(payload: unknown, ctx: TaskRunContext, signal?: AbortSignal) { await this.#callOnStartFunction( this._importedConfig?.onStart, @@ -479,7 +493,10 @@ export class TaskExecutor { return { status: "noop" }; } - if (error instanceof Error && error.name === "AbortTaskRunError") { + if ( + error instanceof Error && + (error.name === "AbortTaskRunError" || error.name === "TaskPayloadParsedError") + ) { return { status: "skipped" }; } diff --git a/packages/react-hooks/README.md b/packages/react-hooks/README.md new file mode 100644 index 0000000000..be08a8d893 --- /dev/null +++ b/packages/react-hooks/README.md @@ -0,0 +1 @@ +## trigger.dev react hooks diff --git a/packages/react-hooks/package.json b/packages/react-hooks/package.json new file mode 100644 index 0000000000..538e7eaecd --- /dev/null +++ b/packages/react-hooks/package.json @@ -0,0 +1,77 @@ +{ + "name": "@trigger.dev/react-hooks", + "version": "3.0.11", + "description": "trigger.dev react hooks", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/triggerdotdev/trigger.dev", + "directory": "packages/react-hooks" + }, + "type": "module", + "files": [ + "dist" + ], + "tshy": { + "selfLink": false, + "main": true, + "module": true, + "project": "./tsconfig.json", + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "sourceDialects": [ + "@triggerdotdev/source" + ] + }, + "scripts": { + "clean": "rimraf dist", + "build": "tshy && pnpm run update-version", + "dev": "tshy --watch", + "typecheck": "tsc --noEmit", + "update-version": "tsx ../../scripts/updateVersion.ts", + "check-exports": "attw --pack ." 
+ }, + "dependencies": { + "@trigger.dev/core": "workspace:^3.0.12", + "swr": "^2.2.5" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.15.4", + "@types/node": "^20.14.14", + "@types/react": "*", + "@types/react-dom": "*", + "rimraf": "^3.0.2", + "tshy": "^3.0.2", + "tsx": "4.17.0", + "typescript": "^5.5.4" + }, + "peerDependencies": { + "react": ">=18 || >=19.0.0-beta", + "react-dom": ">=18 || >=19.0.0-beta" + }, + "engines": { + "node": ">=18.20.0" + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "@triggerdotdev/source": "./src/index.ts", + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "module": "./dist/esm/index.js" +} diff --git a/packages/react-hooks/src/contexts.tsx b/packages/react-hooks/src/contexts.tsx new file mode 100644 index 0000000000..c4e14e68a2 --- /dev/null +++ b/packages/react-hooks/src/contexts.tsx @@ -0,0 +1,10 @@ +"use client"; + +import React from "react"; +import { createContextAndHook } from "./utils/createContextAndHook.js"; +import type { ApiClientConfiguration } from "@trigger.dev/core/v3"; + +const [TriggerAuthContext, useTriggerAuthContext] = + createContextAndHook("TriggerAuthContext"); + +export { TriggerAuthContext, useTriggerAuthContext }; diff --git a/packages/react-hooks/src/hooks/useApiClient.ts b/packages/react-hooks/src/hooks/useApiClient.ts new file mode 100644 index 0000000000..d37c4293e2 --- /dev/null +++ b/packages/react-hooks/src/hooks/useApiClient.ts @@ -0,0 +1,14 @@ +"use client"; + +import { ApiClient } from "@trigger.dev/core/v3"; +import { useTriggerAuthContext } from "../contexts.js"; + +export function useApiClient() { + const auth = useTriggerAuthContext(); + + if (!auth.baseURL || !auth.accessToken) { + throw new Error("Missing baseURL or accessToken in TriggerAuthContext"); + } + + return new ApiClient(auth.baseURL, auth.accessToken, auth.requestOptions); +} diff --git a/packages/react-hooks/src/hooks/useRealtimeBatch.ts b/packages/react-hooks/src/hooks/useRealtimeBatch.ts new file mode 100644 index 0000000000..678c65a1bc --- /dev/null +++ b/packages/react-hooks/src/hooks/useRealtimeBatch.ts @@ -0,0 +1,52 @@ +"use client"; + +import { AnyTask, InferRunTypes, TaskRunShape } from "@trigger.dev/core/v3"; +import { useEffect, useState } from "react"; +import { useApiClient } from "./useApiClient.js"; + +export function useBatch(batchId: string) { + const [runShapes, setRunShapes] = useState[]>([]); + const [error, setError] = useState(null); + const apiClient = useApiClient(); + + useEffect(() => { + const subscription = apiClient.subscribeToBatch>(batchId); + + async function iterateUpdates() { + for await (const run of subscription) { + setRunShapes((prevRuns) => { + return insertRunShapeInOrder(prevRuns, run); + }); + } + } + + iterateUpdates().catch((err) => { + setError(err); + }); + + return () => { + subscription.unsubscribe(); + }; + }, [batchId]); + + return { runs: runShapes, error }; +} + +// Inserts and then orders by the run number, and ensures that the run is not duplicated +function insertRunShapeInOrder( + previousRuns: TaskRunShape[], + run: TaskRunShape +) { + const existingRun = previousRuns.find((r) => r.id === run.id); + if (existingRun) { + return previousRuns.map((r) => (r.id === run.id ? 
run : r)); + } + + const runNumber = run.number; + const index = previousRuns.findIndex((r) => r.number > runNumber); + if (index === -1) { + return [...previousRuns, run]; + } + + return [...previousRuns.slice(0, index), run, ...previousRuns.slice(index)]; +} diff --git a/packages/react-hooks/src/hooks/useRealtimeRun.ts b/packages/react-hooks/src/hooks/useRealtimeRun.ts new file mode 100644 index 0000000000..6aa8fe9d9d --- /dev/null +++ b/packages/react-hooks/src/hooks/useRealtimeRun.ts @@ -0,0 +1,46 @@ +"use client"; + +import { AnyTask, InferRunTypes, TaskRunShape } from "@trigger.dev/core/v3"; +import { useEffect, useState } from "react"; +import { useApiClient } from "./useApiClient.js"; + +/** + * hook to subscribe to realtime updates of a task run. + * + * @template TTask - The type of the task. + * @param {string} runId - The unique identifier of the run to subscribe to. + * @returns {{ run: TaskRunShape | undefined, error: Error | null }} An object containing the current state of the run and any error encountered. + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { run, error } = useRealtimeRun('run-id-123'); + * ``` + */ +export function useRealtimeRun( + runId: string +): { run: TaskRunShape | undefined; error: Error | null } { + const [runShape, setRunShape] = useState | undefined>(undefined); + const [error, setError] = useState(null); + const apiClient = useApiClient(); + + useEffect(() => { + const subscription = apiClient.subscribeToRun>(runId); + + async function iterateUpdates() { + for await (const run of subscription) { + setRunShape(run); + } + } + + iterateUpdates().catch((err) => { + setError(err); + }); + + return () => { + subscription.unsubscribe(); + }; + }, [runId]); + + return { run: runShape, error }; +} diff --git a/packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts b/packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts new file mode 100644 index 0000000000..32b15e7924 --- /dev/null +++ b/packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts @@ -0,0 +1,58 @@ +"use client"; + +import { AnyTask, InferRunTypes, TaskRunShape } from "@trigger.dev/core/v3"; +import { useEffect, useState } from "react"; +import { useApiClient } from "./useApiClient.js"; + +export function useRealtimeRunsWithTag(tag: string | string[]) { + const [runShapes, setRunShapes] = useState[]>([]); + const [error, setError] = useState(null); + const apiClient = useApiClient(); + + useEffect(() => { + const subscription = apiClient.subscribeToRunsWithTag>(tag); + + async function iterateUpdates() { + for await (const run of subscription) { + setRunShapes((prevRuns) => { + return insertRunShape(prevRuns, run); + }); + } + } + + iterateUpdates().catch((err) => { + setError(err); + }); + + return () => { + subscription.unsubscribe(); + }; + }, [tag]); + + return { runs: runShapes, error }; +} + +function stableSortTags(tag: string | string[]) { + return Array.isArray(tag) ? tag.slice().sort() : [tag]; +} + +// Replaces or inserts a run shape, ordered by the createdAt timestamp +function insertRunShape( + previousRuns: TaskRunShape[], + run: TaskRunShape +) { + const existingRun = previousRuns.find((r) => r.id === run.id); + if (existingRun) { + return previousRuns.map((r) => (r.id === run.id ? 
run : r)); + } + + const createdAt = run.createdAt; + + const index = previousRuns.findIndex((r) => r.createdAt > createdAt); + + if (index === -1) { + return [...previousRuns, run]; + } + + return [...previousRuns.slice(0, index), run, ...previousRuns.slice(index)]; +} diff --git a/packages/react-hooks/src/hooks/useRun.ts b/packages/react-hooks/src/hooks/useRun.ts new file mode 100644 index 0000000000..b054e7a212 --- /dev/null +++ b/packages/react-hooks/src/hooks/useRun.ts @@ -0,0 +1,49 @@ +"use client"; + +import { AnyTask, RetrieveRunResult } from "@trigger.dev/core/v3"; +import { CommonTriggerHookOptions, useSWR } from "../utils/trigger-swr.js"; +import { useApiClient } from "./useApiClient.js"; + +/** + * Custom hook to retrieve and manage the state of a run by its ID. + * + * @template TTask - The type of the task associated with the run. + * @param {string} runId - The unique identifier of the run to retrieve. + * @param {CommonTriggerHookOptions} [options] - Optional configuration for the hook's behavior. + * @returns {Object} An object containing the run data, error, loading state, validation state, and error state. + * @returns {RetrieveRunResult | undefined} run - The retrieved run data. + * @returns {Error | undefined} error - The error object if an error occurred. + * @returns {boolean} isLoading - Indicates if the run data is currently being loaded. + * @returns {boolean} isValidating - Indicates if the run data is currently being validated. + * @returns {boolean} isError - Indicates if an error occurred during the retrieval of the run data. + */ +export function useRun( + runId: string, + options?: CommonTriggerHookOptions +): { + run: RetrieveRunResult | undefined; + error: Error | undefined; + isLoading: boolean; + isValidating: boolean; + isError: boolean; +} { + const apiClient = useApiClient(); + const { + data: run, + error, + isLoading, + isValidating, + } = useSWR>(runId, () => apiClient.retrieveRun(runId), { + revalidateOnReconnect: options?.revalidateOnReconnect, + refreshInterval: (run) => { + if (!run) return options?.refreshInterval ?? 0; + + if (run.isCompleted) return 0; + + return options?.refreshInterval ?? 0; + }, + revalidateOnFocus: options?.revalidateOnFocus, + }); + + return { run, error, isLoading, isValidating, isError: !!error }; +} diff --git a/packages/react-hooks/src/index.ts b/packages/react-hooks/src/index.ts new file mode 100644 index 0000000000..7a0d0043de --- /dev/null +++ b/packages/react-hooks/src/index.ts @@ -0,0 +1,6 @@ +export * from "./contexts.js"; +export * from "./hooks/useApiClient.js"; +export * from "./hooks/useRun.js"; +export * from "./hooks/useRealtimeRun.js"; +export * from "./hooks/useRealtimeRunsWithTag.js"; +export * from "./hooks/useRealtimeBatch.js"; diff --git a/packages/react-hooks/src/utils/createContextAndHook.ts b/packages/react-hooks/src/utils/createContextAndHook.ts new file mode 100644 index 0000000000..c48bddd54a --- /dev/null +++ b/packages/react-hooks/src/utils/createContextAndHook.ts @@ -0,0 +1,45 @@ +"use client"; +import React from "react"; + +export function assertContextExists( + contextVal: unknown, + msgOrCtx: string | React.Context +): asserts contextVal { + if (!contextVal) { + throw typeof msgOrCtx === "string" + ? 
new Error(msgOrCtx) + : new Error(`${msgOrCtx.displayName} not found`); + } +} + +type Options = { assertCtxFn?: (v: unknown, msg: string) => void }; +type ContextOf = React.Context; +type UseCtxFn = () => T; + +/** + * Creates and returns a Context and two hooks that return the context value. + * The Context type is derived from the type passed in by the user. + * The first hook returned guarantees that the context exists so the returned value is always CtxValue + * The second hook makes no guarantees, so the returned value can be CtxValue | undefined + */ +export const createContextAndHook = ( + displayName: string, + options?: Options +): [ContextOf, UseCtxFn, UseCtxFn>] => { + const { assertCtxFn = assertContextExists } = options || {}; + const Ctx = React.createContext(undefined); + Ctx.displayName = displayName; + + const useCtx = () => { + const ctx = React.useContext(Ctx); + assertCtxFn(ctx, `${displayName} not found`); + return ctx as CtxVal; + }; + + const useCtxWithoutGuarantee = () => { + const ctx = React.useContext(Ctx); + return ctx ? ctx : {}; + }; + + return [Ctx, useCtx, useCtxWithoutGuarantee]; +}; diff --git a/packages/react-hooks/src/utils/trigger-swr.ts b/packages/react-hooks/src/utils/trigger-swr.ts new file mode 100644 index 0000000000..991c3932bf --- /dev/null +++ b/packages/react-hooks/src/utils/trigger-swr.ts @@ -0,0 +1,11 @@ +"use client"; +// eslint-disable-next-line import/export +export * from "swr"; +// eslint-disable-next-line import/export +export { default as useSWR, SWRConfig } from "swr"; + +export type CommonTriggerHookOptions = { + refreshInterval?: number; + revalidateOnReconnect?: boolean; + revalidateOnFocus?: boolean; +}; diff --git a/packages/react-hooks/tsconfig.json b/packages/react-hooks/tsconfig.json new file mode 100644 index 0000000000..f73f5bea4b --- /dev/null +++ b/packages/react-hooks/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../.configs/tsconfig.base.json", + "compilerOptions": { + "isolatedDeclarations": false, + "composite": true, + "sourceMap": true, + "stripInternal": true + }, + "include": ["./src/**/*.ts", "./src/**/*.tsx"] +} diff --git a/packages/trigger-sdk/package.json b/packages/trigger-sdk/package.json index 7f48380c80..4564c34431 100644 --- a/packages/trigger-sdk/package.json +++ b/packages/trigger-sdk/package.json @@ -27,6 +27,9 @@ }, "sourceDialects": [ "@triggerdotdev/source" + ], + "esmDialects": [ + "browser" ] }, "typesVersions": { @@ -80,6 +83,10 @@ "exports": { "./package.json": "./package.json", ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, "import": { "@triggerdotdev/source": "./src/index.ts", "types": "./dist/esm/index.d.ts", @@ -91,6 +98,10 @@ } }, "./v3": { + "browser": { + "types": "./dist/browser/v3/index.d.ts", + "default": "./dist/browser/v3/index.js" + }, "import": { "@triggerdotdev/source": "./src/v3/index.ts", "types": "./dist/esm/v3/index.d.ts", diff --git a/packages/trigger-sdk/src/v3/auth.ts b/packages/trigger-sdk/src/v3/auth.ts new file mode 100644 index 0000000000..687d64c563 --- /dev/null +++ b/packages/trigger-sdk/src/v3/auth.ts @@ -0,0 +1,160 @@ +import { type ApiClientConfiguration, apiClientManager } from "@trigger.dev/core/v3"; +import { generateJWT as internal_generateJWT } from "@trigger.dev/core/v3"; + +/** + * Register the global API client configuration. Alternatively, you can set the `TRIGGER_SECRET_KEY` and `TRIGGER_API_URL` environment variables. + * @param options The API client configuration. 
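A sketch of wiring these hooks up in a client component. The provider value mirrors what `useApiClient` reads (`accessToken` plus `baseURL`), the token would come from `auth.createPublicToken()` on the server, and the `myTask` import path is hypothetical:

```tsx
"use client";

import { TriggerAuthContext, useRealtimeRun } from "@trigger.dev/react-hooks";
import type { myTask } from "@/trigger/tasks"; // hypothetical task module

export function RunStatus(props: { runId: string; publicAccessToken: string }) {
  return (
    <TriggerAuthContext.Provider
      value={{ accessToken: props.publicAccessToken, baseURL: "https://api.trigger.dev" }}
    >
      <RunDetails runId={props.runId} />
    </TriggerAuthContext.Provider>
  );
}

function RunDetails({ runId }: { runId: string }) {
  const { run, error } = useRealtimeRun<typeof myTask>(runId);

  if (error) return <p>Something went wrong: {error.message}</p>;
  if (!run) return <p>Waiting for updates…</p>;

  return (
    <p>
      Run #{run.number} ({run.id}) is streaming updates in realtime
    </p>
  );
}
```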
+ * @param options.baseURL The base URL of the Trigger API. (default: `https://api.trigger.dev`) + * @param options.accessToken The accessToken to authenticate with the Trigger API. (default: `process.env.TRIGGER_SECRET_KEY`) This can be found in your Trigger.dev project "API Keys" settings. + * + * @example + * + * ```typescript + * import { configure } from "@trigger.dev/sdk/v3"; + * + * configure({ + * baseURL: "https://api.trigger.dev", + * accessToken: "tr_dev_1234567890" + * }); + * ``` + */ +export function configure(options: ApiClientConfiguration) { + apiClientManager.setGlobalAPIClientConfiguration(options); +} + +export const auth = { + configure, + createPublicToken, + withAuth, +}; + +type PublicTokenPermissionAction = "read"; // Add more actions as needed + +type PublicTokenPermissionProperties = { + /** + * Grant access to specific tasks + */ + tasks?: string | string[]; + + /** + * Grant access to specific run tags + */ + tags?: string | string[]; + + /** + * Grant access to specific runs + */ + runs?: string | string[] | true; + + /** + * Grant access to specific batch runs + */ + batch?: string | string[]; +}; + +export type PublicTokenPermissions = { + [key in PublicTokenPermissionAction]?: PublicTokenPermissionProperties; +}; + +export type CreatePublicTokenOptions = { + /** + * A collection of permission scopes to be granted to the token. + * + * @example + * + * ```typescript + * scopes: { + * read: { + * tags: ["file:1234"] + * } + * } + * ``` + */ + scopes?: PublicTokenPermissions; + + /** + * The expiration time for the token. This can be a number representing the time in milliseconds, a `Date` object, or a string. + * + * @example + * + * ```typescript + * expirationTime: "1h" + * ``` + */ + expirationTime?: number | Date | string; +}; + +/** + * Creates a public token using the provided options. + * + * @param options - Optional parameters for creating the public token. + * @param options.scopes - An array of permission scopes to be included in the token. + * @param options.expirationTime - The expiration time for the token. + * @returns A promise that resolves to a string representing the generated public token. + * + * @example + * + * ```typescript + * import { auth } from "@trigger.dev/sdk/v3"; + * + * const publicToken = await auth.createPublicToken({ + * scopes: { + * read: { + * tags: ["file:1234"] + * } + * }); + * ``` + */ +async function createPublicToken(options?: CreatePublicTokenOptions): Promise { + const apiClient = apiClientManager.clientOrThrow(); + + const claims = await apiClient.generateJWTClaims(); + + return await internal_generateJWT({ + secretKey: apiClient.accessToken, + payload: { + ...claims, + scopes: options?.scopes ? flattenScopes(options.scopes) : undefined, + }, + expirationTime: options?.expirationTime, + }); +} + +/** + * Executes a provided asynchronous function with a specified API client configuration. + * + * @template R - The type of the asynchronous function to be executed. + * @param {ApiClientConfiguration} config - The configuration for the API client. + * @param {R} fn - The asynchronous function to be executed. + * @returns {Promise>} A promise that resolves to the return type of the provided function. 
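Two quick sketches of the auth helpers: minting a public token scoped to a single run (scopes are flattened into `action:property:item` strings by the helper that follows), and using `withAuth` to run an SDK call under a different configuration. The environment variable name is a placeholder, and `runs.retrieve` is assumed to be available on the `runs` API:

```ts
import { auth, runs } from "@trigger.dev/sdk/v3";

// e.g. { read: { runs: ["run_123"] } } flattens to ["read:runs:run_123"].
export async function createRealtimeToken(runId: string) {
  return await auth.createPublicToken({
    scopes: {
      read: {
        runs: [runId],
      },
    },
    expirationTime: "15m",
  });
}

// withAuth resolves to whatever the wrapped function returns.
export async function retrieveWithOtherProject(runId: string) {
  return await auth.withAuth(
    { accessToken: process.env.OTHER_TRIGGER_SECRET_KEY! },
    async () => await runs.retrieve(runId)
  );
}
```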
+ */ +async function withAuth Promise>( + config: ApiClientConfiguration, + fn: R +): Promise> { + return apiClientManager.runWithConfig(config, fn); +} + +function flattenScopes(permissions: PublicTokenPermissions): string[] { + const flattenedPermissions: string[] = []; + + for (const [action, properties] of Object.entries(permissions)) { + if (properties) { + if (typeof properties === "boolean" && properties) { + flattenedPermissions.push(action); + } else if (typeof properties === "object") { + for (const [property, value] of Object.entries(properties)) { + if (Array.isArray(value)) { + for (const item of value) { + flattenedPermissions.push(`${action}:${property}:${item}`); + } + } else if (typeof value === "string") { + flattenedPermissions.push(`${action}:${property}:${value}`); + } + } + } + } + } + + return flattenedPermissions; +} diff --git a/packages/trigger-sdk/src/v3/idempotencyKeys.ts b/packages/trigger-sdk/src/v3/idempotencyKeys.ts index 8416aa5ec6..8322e620bf 100644 --- a/packages/trigger-sdk/src/v3/idempotencyKeys.ts +++ b/packages/trigger-sdk/src/v3/idempotencyKeys.ts @@ -1,14 +1,10 @@ -import { taskContext } from "@trigger.dev/core/v3"; +import { type IdempotencyKey, taskContext } from "@trigger.dev/core/v3"; export const idempotencyKeys = { create: createIdempotencyKey, }; -declare const __brand: unique symbol; -type Brand = { [__brand]: B }; -type Branded = T & Brand; - -export type IdempotencyKey = Branded; +export type { IdempotencyKey }; export function isIdempotencyKey( value: string | string[] | IdempotencyKey diff --git a/packages/trigger-sdk/src/v3/index-browser.mts b/packages/trigger-sdk/src/v3/index-browser.mts new file mode 100644 index 0000000000..450cfb3738 --- /dev/null +++ b/packages/trigger-sdk/src/v3/index-browser.mts @@ -0,0 +1,2 @@ +export { runs, type RunShape, type AnyRunShape } from "./runs.js"; +export { configure, auth } from "./auth.js"; diff --git a/packages/trigger-sdk/src/v3/index.ts b/packages/trigger-sdk/src/v3/index.ts index 22b8dac1e0..f88503218a 100644 --- a/packages/trigger-sdk/src/v3/index.ts +++ b/packages/trigger-sdk/src/v3/index.ts @@ -14,7 +14,6 @@ export type { Context }; import type { Context } from "./shared.js"; import type { ApiClientConfiguration } from "@trigger.dev/core/v3"; -import { apiClientManager } from "@trigger.dev/core/v3"; export type { ApiClientConfiguration }; @@ -33,28 +32,16 @@ export { type LogLevel, } from "@trigger.dev/core/v3"; -export { runs } from "./runs.js"; +export { + runs, + type RunShape, + type AnyRunShape, + type TaskRunShape, + type RetrieveRunResult, + type AnyRetrieveRunResult, +} from "./runs.js"; export * as schedules from "./schedules/index.js"; export * as envvars from "./envvars.js"; export type { ImportEnvironmentVariablesParams } from "./envvars.js"; -/** - * Register the global API client configuration. Alternatively, you can set the `TRIGGER_SECRET_KEY` and `TRIGGER_API_URL` environment variables. - * @param options The API client configuration. - * @param options.baseURL The base URL of the Trigger API. (default: `https://api.trigger.dev`) - * @param options.secretKey The secret key to authenticate with the Trigger API. (default: `process.env.TRIGGER_SECRET_KEY`) This can be found in your Trigger.dev project "API Keys" settings. 
- * - * @example - * - * ```typescript - * import { configure } from "@trigger.dev/sdk/v3"; - * - * configure({ - * baseURL: "https://api.trigger.dev", - * secretKey: "tr_dev_1234567890" - * }); - * ``` - */ -export function configure(options: ApiClientConfiguration) { - apiClientManager.setGlobalAPIClientConfiguration(options); -} +export { configure, auth } from "./auth.js"; diff --git a/packages/trigger-sdk/src/v3/metadata.ts b/packages/trigger-sdk/src/v3/metadata.ts index 40b091a7d2..2e081f4bf9 100644 --- a/packages/trigger-sdk/src/v3/metadata.ts +++ b/packages/trigger-sdk/src/v3/metadata.ts @@ -1,6 +1,5 @@ import { DeserializedJson } from "@trigger.dev/core"; import { - accessoryAttributes, ApiRequestOptions, flattenAttributes, mergeRequestOptions, @@ -24,6 +23,8 @@ export const metadata = { set: setMetadataKey, del: deleteMetadataKey, save: saveMetadata, + replace: replaceMetadata, + flush: flushMetadata, }; export type RunMetadata = Record; @@ -63,74 +64,24 @@ function getMetadataKey(key: string): DeserializedJson | undefined { * * @param {string} key - The key to set in the metadata. * @param {DeserializedJson} value - The value to associate with the key. - * @param {ApiRequestOptions} [requestOptions] - Optional API request options. - * @returns {Promise} A promise that resolves when the metadata is updated. * * @example - * await metadata.set("progress", 0.5); + * metadata.set("progress", 0.5); */ -async function setMetadataKey( - key: string, - value: DeserializedJson, - requestOptions?: ApiRequestOptions -): Promise { - const $requestOptions = mergeRequestOptions( - { - tracer, - name: "metadata.set()", - icon: "code-plus", - attributes: { - ...accessoryAttributes({ - items: [ - { - text: key, - variant: "normal", - }, - ], - style: "codepath", - }), - ...flattenAttributes(value, key), - }, - }, - requestOptions - ); - - await runMetadata.setKey(key, value, $requestOptions); +function setMetadataKey(key: string, value: DeserializedJson) { + runMetadata.setKey(key, value); } /** * Delete a key from the metadata of the current run if inside a task run. * * @param {string} key - The key to delete from the metadata. - * @param {ApiRequestOptions} [requestOptions] - Optional API request options. - * @returns {Promise} A promise that resolves when the key is deleted from the metadata. * * @example - * await metadata.del("progress"); + * metadata.del("progress"); */ -async function deleteMetadataKey(key: string, requestOptions?: ApiRequestOptions): Promise { - const $requestOptions = mergeRequestOptions( - { - tracer, - name: "metadata.del()", - icon: "code-minus", - attributes: { - ...accessoryAttributes({ - items: [ - { - text: key, - variant: "normal", - }, - ], - style: "codepath", - }), - key, - }, - }, - requestOptions - ); - - await runMetadata.deleteKey(key, $requestOptions); +function deleteMetadataKey(key: string) { + runMetadata.deleteKey(key); } /** @@ -138,27 +89,37 @@ async function deleteMetadataKey(key: string, requestOptions?: ApiRequestOptions * This function allows you to replace the entire metadata object with a new one. * * @param {RunMetadata} metadata - The new metadata object to set for the run. - * @param {ApiRequestOptions} [requestOptions] - Optional API request options. - * @returns {Promise} A promise that resolves when the metadata is updated. 
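With this change, `metadata.set`, `metadata.del` and `metadata.replace` update the run's metadata synchronously, and `metadata.flush` pushes the current state to the Trigger.dev instance; a small sketch inside a task (keys and values are illustrative):

```ts
import { metadata, task } from "@trigger.dev/sdk/v3";

export const importTask = task({
  id: "import-task",
  run: async (payload: { rows: string[] }) => {
    metadata.set("progress", 0);

    for (let i = 0; i < payload.rows.length; i++) {
      // ... process payload.rows[i] ...
      metadata.set("progress", (i + 1) / payload.rows.length);
    }

    // Replace the whole metadata object, then explicitly flush it.
    metadata.replace({ progress: 1, importedRows: payload.rows.length });
    await metadata.flush();
  },
});
```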
+ * @returns {void} * * @example - * await metadata.save({ progress: 0.6, user: { name: "Alice", id: "user_5678" } }); + * metadata.replace({ progress: 0.6, user: { name: "Alice", id: "user_5678" } }); + */ +function replaceMetadata(metadata: RunMetadata): void { + runMetadata.update(metadata); +} + +/** + * @deprecated Use `metadata.replace()` instead. + */ +function saveMetadata(metadata: RunMetadata): void { + runMetadata.update(metadata); +} + +/** + * Flushes metadata to the Trigger.dev instance + * + * @param {ApiRequestOptions} [requestOptions] - Optional request options to customize the API request. + * @returns {Promise} A promise that resolves when the metadata flush operation is complete. */ -async function saveMetadata( - metadata: RunMetadata, - requestOptions?: ApiRequestOptions -): Promise { +async function flushMetadata(requestOptions?: ApiRequestOptions): Promise { const $requestOptions = mergeRequestOptions( { tracer, - name: "metadata.save()", + name: "metadata.flush()", icon: "code-plus", - attributes: { - ...flattenAttributes(metadata), - }, }, requestOptions ); - await runMetadata.update(metadata, $requestOptions); + await runMetadata.flush($requestOptions); } diff --git a/packages/trigger-sdk/src/v3/runs.ts b/packages/trigger-sdk/src/v3/runs.ts index 4818a4f014..3f3d16a24b 100644 --- a/packages/trigger-sdk/src/v3/runs.ts +++ b/packages/trigger-sdk/src/v3/runs.ts @@ -1,9 +1,16 @@ import type { + AnyRetrieveRunResult, + AnyRunShape, ApiRequestOptions, + InferRunTypes, ListProjectRunsQueryParams, ListRunsQueryParams, RescheduleRunRequestBody, - TriggerTracer, + RetrieveRunResult, + RunShape, + RunSubscription, + SubscribeToRunsQueryParams, + TaskRunShape, } from "@trigger.dev/core/v3"; import { ApiPromise, @@ -18,19 +25,11 @@ import { isRequestOptions, mergeRequestOptions, } from "@trigger.dev/core/v3"; -import { AnyTask, Prettify, RunHandle, Task } from "./shared.js"; -import { tracer } from "./tracer.js"; import { resolvePresignedPacketUrl } from "@trigger.dev/core/v3/utils/ioSerialization"; +import { AnyRunHandle, AnyTask } from "./shared.js"; +import { tracer } from "./tracer.js"; -export type RetrieveRunResult = Prettify< - TRunId extends RunHandle - ? Omit & { output?: TOutput } - : TRunId extends Task - ? Omit & { output?: TTaskOutput } - : TRunId extends string - ? RetrieveRunResponse - : never ->; +export type { AnyRetrieveRunResult, AnyRunShape, RetrieveRunResult, RunShape, TaskRunShape }; export const runs = { replay: replayRun, @@ -39,6 +38,8 @@ export const runs = { list: listRuns, reschedule: rescheduleRun, poll, + subscribeToRun, + subscribeToRunsWithTag, }; export type ListRunsItem = ListRunResponseItem; @@ -142,7 +143,7 @@ function listRunsRequestOptions( } // Extract out the expected type of the id, can be either a string or a RunHandle -type RunId = TRunId extends RunHandle +type RunId = TRunId extends AnyRunHandle ? TRunId : TRunId extends AnyTask ? string @@ -150,7 +151,7 @@ type RunId = TRunId extends RunHandle ? 
TRunId : never; -function retrieveRun | AnyTask | string>( +function retrieveRun( runId: RunId, requestOptions?: ApiRequestOptions ): ApiPromise> { @@ -184,7 +185,7 @@ function retrieveRun | AnyTask | string>( }) as ApiPromise>; } -async function resolvePayloadAndOutputUrls(run: RetrieveRunResult) { +async function resolvePayloadAndOutputUrls(run: AnyRetrieveRunResult) { const resolvedRun = { ...run }; if (run.payloadPresignedUrl && run.outputPresignedUrl) { @@ -299,7 +300,7 @@ export type PollOptions = { pollIntervalMs?: number }; const MAX_POLL_ATTEMPTS = 500; -async function poll | AnyTask | string>( +async function poll( runId: RunId, options?: { pollIntervalMs?: number }, requestOptions?: ApiRequestOptions @@ -322,3 +323,21 @@ async function poll | AnyTask | string>( } did not complete after ${MAX_POLL_ATTEMPTS} attempts` ); } + +function subscribeToRun( + runId: RunId +): RunSubscription> { + const $runId = typeof runId === "string" ? runId : runId.id; + + const apiClient = apiClientManager.clientOrThrow(); + + return apiClient.subscribeToRun($runId); +} + +function subscribeToRunsWithTag( + tag: string | string[] +): RunSubscription> { + const apiClient = apiClientManager.clientOrThrow(); + + return apiClient.subscribeToRunsWithTag>(tag); +} diff --git a/packages/trigger-sdk/src/v3/shared.ts b/packages/trigger-sdk/src/v3/shared.ts index 071e8cb268..a1f1139497 100644 --- a/packages/trigger-sdk/src/v3/shared.ts +++ b/packages/trigger-sdk/src/v3/shared.ts @@ -4,540 +4,215 @@ import { SEMATTRS_MESSAGING_OPERATION, SEMATTRS_MESSAGING_SYSTEM, } from "@opentelemetry/semantic-conventions"; +import { SerializableJson } from "@trigger.dev/core"; import { - ApiRequestOptions, - BatchTaskRunExecutionResult, - FailureFnParams, - HandleErrorFnParams, - HandleErrorResult, - InitFnParams, - InitOutput, - MachineCpu, - MachineMemory, - MiddlewareFnParams, - QueueOptions, - RetryOptions, - RunFnParams, - RunTags, - SemanticInternalAttributes, - StartFnParams, - SuccessFnParams, - TaskRunContext, - TaskRunExecutionResult, accessoryAttributes, apiClientManager, + ApiRequestOptions, + BatchTaskRunExecutionResult, conditionallyImportPacket, createErrorTaskError, defaultRetryOptions, + getSchemaParseFn, + InitOutput, logger, parsePacket, + Queue, + QueueOptions, runtime, + SemanticInternalAttributes, stringifyIO, + SubtaskUnwrapError, taskCatalog, taskContext, + TaskRunContext, + TaskRunExecutionResult, + TaskRunPromise, } from "@trigger.dev/core/v3"; import { IdempotencyKey, idempotencyKeys, isIdempotencyKey } from "./idempotencyKeys.js"; -import { PollOptions, RetrieveRunResult, runs } from "./runs.js"; +import { PollOptions, runs } from "./runs.js"; import { tracer } from "./tracer.js"; -import { SerializableJson } from "@trigger.dev/core"; -export type Context = TaskRunContext; +import type { + AnyRunHandle, + AnyRunTypes, + AnyTask, + BatchItem, + BatchResult, + BatchRunHandle, + BatchRunHandleFromTypes, + InferRunTypes, + inferSchemaIn, + RetrieveRunResult, + RunHandle, + RunHandleFromTypes, + RunHandleOutput, + RunHandlePayload, + RunTypes, + SchemaParseFn, + Task, + TaskBatchOutputHandle, + TaskIdentifier, + TaskOptions, + TaskOutput, + TaskOutputHandle, + TaskPayload, + TaskRunOptions, + TaskRunResult, + TaskSchema, + TaskWithSchemaOptions, + TriggerApiRequestOptions, +} from "@trigger.dev/core/v3"; -type RequireOne = { - [X in Exclude]?: T[X]; -} & { - [P in K]-?: T[P]; +export type { + AnyRunHandle, + AnyTask, + BatchItem, + BatchResult, + BatchRunHandle, + Queue, + RunHandle, + 
RunHandleOutput, + RunHandlePayload, + SerializableJson, + Task, + TaskBatchOutputHandle, + TaskIdentifier, + TaskOptions, + TaskOutput, + TaskOutputHandle, + TaskPayload, + TaskRunOptions, + TaskRunResult, }; -export type Queue = RequireOne; +export { SubtaskUnwrapError, TaskRunPromise }; + +export type Context = TaskRunContext; export function queue(options: { name: string } & QueueOptions): Queue { return options; } -export type TaskOptions< +export function createTask< TIdentifier extends string, - TPayload = void, + TInput = void, TOutput = unknown, TInitOutput extends InitOutput = any, -> = { - /** An id for your task. This must be unique inside your project and not change between versions. */ - id: TIdentifier; - /** The retry settings when an uncaught error is thrown. - * - * If omitted it will use the values in your `trigger.config.ts` file. - * - * @example - * - * ``` - * export const taskWithRetries = task({ - id: "task-with-retries", - retry: { - maxAttempts: 10, - factor: 1.8, - minTimeoutInMs: 500, - maxTimeoutInMs: 30_000, - randomize: false, - }, - run: async ({ payload, ctx }) => { - //... - }, - }); - * ``` - * */ - retry?: RetryOptions; - /** Used to configure what should happen when more than one run is triggered at the same time. - * - * @example - * one at a time execution - * - * ```ts - * export const oneAtATime = task({ - id: "one-at-a-time", - queue: { - concurrencyLimit: 1, - }, - run: async ({ payload, ctx }) => { - //... - }, - }); - * ``` - */ - queue?: QueueOptions; - /** Configure the spec of the machine you want your task to run on. - * - * @example - * - * ```ts - * export const heavyTask = task({ - id: "heavy-task", - machine: { - cpu: 2, - memory: 4, - }, - run: async ({ payload, ctx }) => { - //... - }, - }); - * ``` - */ - machine?: { - /** vCPUs. The default is 0.5. - * - * Possible values: - * - 0.25 - * - 0.5 - * - 1 - * - 2 - * - 4 - * @deprecated use preset instead - */ - cpu?: MachineCpu; - /** In GBs of RAM. The default is 1. - * - * Possible values: - * - 0.25 - * - 0.5 - * - 1 - * - 2 - * - 4 - * - 8 - * * @deprecated use preset instead - */ - memory?: MachineMemory; - - /** Preset to use for the machine. Defaults to small-1x */ - preset?: - | "micro" - | "small-1x" - | "small-2x" - | "medium-1x" - | "medium-2x" - | "large-1x" - | "large-2x"; - }; - - /** - * The maximum duration in compute-time seconds that a task run is allowed to run. If the task run exceeds this duration, it will be stopped. - * - * Minimum value is 5 seconds - */ - maxDuration?: number; - - /** This gets called when a task is triggered. It's where you put the code you want to execute. - * - * @param payload - The payload that is passed to your task when it's triggered. This must be JSON serializable. - * @param params - Metadata about the run. - */ - run: (payload: TPayload, params: RunFnParams) => Promise; - - /** - * init is called before the run function is called. It's useful for setting up any global state. - */ - init?: (payload: TPayload, params: InitFnParams) => Promise; - - /** - * cleanup is called after the run function has completed. - */ - cleanup?: (payload: TPayload, params: RunFnParams) => Promise; - - /** - * handleError is called when the run function throws an error. It can be used to modify the error or return new retry options. - */ - handleError?: ( - payload: TPayload, - error: unknown, - params: HandleErrorFnParams - ) => HandleErrorResult; - - /** - * middleware allows you to run code "around" the run function. 
This can be useful for logging, metrics, or other cross-cutting concerns. - * - * When writing middleware, you should always call `next()` to continue the execution of the task: - * - * ```ts - * export const middlewareTask = task({ - * id: "middleware-task", - * middleware: async (payload, { ctx, next }) => { - * console.log("Before run"); - * await next(); - * console.log("After run"); - * }, - * run: async (payload, { ctx }) => {} - * }); - * ``` - */ - middleware?: (payload: TPayload, params: MiddlewareFnParams) => Promise; - - /** - * onStart is called the first time a task is executed in a run (not before every retry) - */ - onStart?: (payload: TPayload, params: StartFnParams) => Promise; - - /** - * onSuccess is called after the run function has successfully completed. - */ - onSuccess?: ( - payload: TPayload, - output: TOutput, - params: SuccessFnParams - ) => Promise; - - /** - * onFailure is called after a task run has failed (meaning the run function threw an error and won't be retried anymore) - */ - onFailure?: ( - payload: TPayload, - error: unknown, - params: FailureFnParams - ) => Promise; -}; - -declare const __output: unique symbol; -type BrandOutput = { [__output]: B }; -export type BrandedOutput = T & BrandOutput; +>( + params: TaskOptions +): Task { + const customQueue = params.queue + ? queue({ + name: params.queue?.name ?? `task/${params.id}`, + ...params.queue, + }) + : undefined; -export type RunHandle = BrandedOutput< - { - id: string; - }, - TOutput ->; + const task: Task = { + id: params.id, + trigger: async (payload, options) => { + const taskMetadata = taskCatalog.getTaskManifest(params.id); -/** - * A BatchRunHandle can be used to retrieve the runs of a batch trigger in a typesafe manner. - */ -export type BatchRunHandle = BrandedOutput< - { - batchId: string; - runs: Array>; - }, - TOutput ->; - -export type RunHandleOutput = TRunHandle extends RunHandle - ? TOutput - : never; - -export type TaskRunResult = - | { - ok: true; - id: string; - output: TOutput; - } - | { - ok: false; - id: string; - error: unknown; - }; + return await trigger_internal>( + taskMetadata && taskMetadata.exportName + ? `${taskMetadata.exportName}.trigger()` + : `trigger()`, + params.id, + payload, + undefined, + { + queue: customQueue, + ...options, + } + ); + }, + batchTrigger: async (items) => { + const taskMetadata = taskCatalog.getTaskManifest(params.id); -export class SubtaskUnwrapError extends Error { - public readonly taskId: string; - public readonly runId: string; - public readonly cause?: unknown; - - constructor(taskId: string, runId: string, subtaskError: unknown) { - if (subtaskError instanceof Error) { - super(`Error in ${taskId}: ${subtaskError.message}`, { cause: subtaskError }); - this.name = "SubtaskUnwrapError"; - } else { - super(`Error in ${taskId}`, { cause: subtaskError }); - this.name = "SubtaskUnwrapError"; - } + return await batchTrigger_internal>( + taskMetadata && taskMetadata.exportName + ? `${taskMetadata.exportName}.batchTrigger()` + : `batchTrigger()`, + params.id, + items, + undefined, + undefined, + customQueue + ); + }, + triggerAndWait: (payload, options) => { + const taskMetadata = taskCatalog.getTaskManifest(params.id); - this.taskId = taskId; - this.runId = runId; - } -} + return new TaskRunPromise((resolve, reject) => { + triggerAndWait_internal( + taskMetadata && taskMetadata.exportName + ? 
`${taskMetadata.exportName}.triggerAndWait()` + : `triggerAndWait()`, + params.id, + payload, + undefined, + { + queue: customQueue, + ...options, + } + ) + .then((result) => { + resolve(result); + }) + .catch((error) => { + reject(error); + }); + }, params.id); + }, + batchTriggerAndWait: async (items) => { + const taskMetadata = taskCatalog.getTaskManifest(params.id); -export class TaskRunPromise extends Promise> { - constructor( - executor: ( - resolve: (value: TaskRunResult | PromiseLike>) => void, - reject: (reason?: any) => void - ) => void, - private readonly taskId: string - ) { - super(executor); - } + return await batchTriggerAndWait_internal( + taskMetadata && taskMetadata.exportName + ? `${taskMetadata.exportName}.batchTriggerAndWait()` + : `batchTriggerAndWait()`, + params.id, + items, + undefined, + undefined, + customQueue + ); + }, + }; - unwrap(): Promise { - return this.then((result) => { - if (result.ok) { - return result.output; - } else { - throw new SubtaskUnwrapError(this.taskId, result.id, result.error); - } - }); - } -} + taskCatalog.registerTaskMetadata({ + id: params.id, + queue: params.queue, + retry: params.retry ? { ...defaultRetryOptions, ...params.retry } : undefined, + machine: params.machine, + maxDuration: params.maxDuration, + fns: { + run: params.run, + init: params.init, + cleanup: params.cleanup, + middleware: params.middleware, + handleError: params.handleError, + onSuccess: params.onSuccess, + onFailure: params.onFailure, + onStart: params.onStart, + }, + }); -export type BatchResult = { - id: string; - runs: TaskRunResult[]; -}; + // @ts-expect-error + task[Symbol.for("trigger.dev/task")] = true; -export type BatchItem = TInput extends void - ? { payload?: TInput; options?: TaskRunOptions } - : { payload: TInput; options?: TaskRunOptions }; - -export interface Task { - /** - * The id of the task. - */ - id: TIdentifier; - /** - * Trigger a task with the given payload, and continue without waiting for the result. If you want to wait for the result, use `triggerAndWait`. Returns the id of the triggered task run. - * @param payload - * @param options - * @returns RunHandle - * - `id` - The id of the triggered task run. - */ - trigger: (payload: TInput, options?: TaskRunOptions) => Promise>; - - /** - * Batch trigger multiple task runs with the given payloads, and continue without waiting for the results. If you want to wait for the results, use `batchTriggerAndWait`. Returns the id of the triggered batch. - * @param items - * @returns InvokeBatchHandle - * - `batchId` - The id of the triggered batch. - * - `runs` - The ids of the triggered task runs. - */ - batchTrigger: (items: Array>) => Promise>; - - /** - * Trigger a task with the given payload, and wait for the result. Returns the result of the task run - * @param payload - * @param options - Options for the task run - * @returns TaskRunResult - * @example - * ``` - * const result = await task.triggerAndWait({ foo: "bar" }); - * - * if (result.ok) { - * console.log(result.output); - * } else { - * console.error(result.error); - * } - * ``` - */ - triggerAndWait: (payload: TInput, options?: TaskRunOptions) => TaskRunPromise; - - /** - * Batch trigger multiple task runs with the given payloads, and wait for the results. Returns the results of the task runs. 
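On the server, the new `runs.subscribeToRun` API from `runs.ts` above can be consumed with `for await`, mirroring the React hooks; a sketch (the logging and the `unsubscribe` placement are illustrative):

```ts
import { runs } from "@trigger.dev/sdk/v3";

// `runId` can be a run id string or the handle returned from `trigger()`.
export async function watchRun(runId: string) {
  const subscription = runs.subscribeToRun(runId);

  try {
    for await (const run of subscription) {
      // Each iteration yields the latest shape of the run.
      console.log("update:", run.id, run.createdAt);
    }
  } finally {
    subscription.unsubscribe();
  }
}
```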
- * @param items - * @returns BatchResult - * @example - * ``` - * const result = await task.batchTriggerAndWait([ - * { payload: { foo: "bar" } }, - * { payload: { foo: "baz" } }, - * ]); - * - * for (const run of result.runs) { - * if (run.ok) { - * console.log(run.output); - * } else { - * console.error(run.error); - * } - * } - * ``` - */ - batchTriggerAndWait: (items: Array>) => Promise>; + return task; } -export type AnyTask = Task; - -export type TaskPayload = TTask extends Task - ? TInput - : never; - -export type TaskOutput = TTask extends Task - ? TOutput - : never; - -export type TaskOutputHandle = TTask extends Task - ? RunHandle - : never; - -export type TaskBatchOutputHandle = TTask extends Task< - string, - any, - infer TOutput -> - ? BatchRunHandle - : never; - -export type TaskIdentifier = TTask extends Task - ? TIdentifier - : never; - -export type TaskRunOptions = { - /** - * A unique key that can be used to ensure that a task is only triggered once per key. - * - * You can use `idempotencyKeys.create` to create an idempotency key first, and then pass it to the task options. - * - * @example - * - * ```typescript - * import { idempotencyKeys, task } from "@trigger.dev/sdk/v3"; - * - * export const myTask = task({ - * id: "my-task", - * run: async (payload: any) => { - * // scoped to the task run by default - * const idempotencyKey = await idempotencyKeys.create("my-task-key"); - * - * // Use the idempotency key when triggering child tasks - * await childTask.triggerAndWait(payload, { idempotencyKey }); - * - * // scoped globally, does not include the task run ID - * const globalIdempotencyKey = await idempotencyKeys.create("my-task-key", { scope: "global" }); - * - * await childTask.triggerAndWait(payload, { idempotencyKey: globalIdempotencyKey }); - * - * // You can also pass a string directly, which is the same as a global idempotency key - * await childTask.triggerAndWait(payload, { idempotencyKey: "my-very-unique-key" }); - * } - * }); - * ``` - * - * When triggering a task inside another task, we automatically inject the run ID into the key material. - * - * If you are triggering a task from your backend, ensure you include some sufficiently unique key material to prevent collisions. - * - * @example - * - * ```typescript - * import { idempotencyKeys, tasks } from "@trigger.dev/sdk/v3"; - * - * // Somewhere in your backend - * const idempotencyKey = await idempotenceKeys.create(["my-task-trigger", "user-123"]); - * await tasks.trigger("my-task", { foo: "bar" }, { idempotencyKey }); - * ``` - * - */ - idempotencyKey?: IdempotencyKey | string | string[]; - maxAttempts?: number; - queue?: TaskRunConcurrencyOptions; - concurrencyKey?: string; - /** - * The delay before the task is executed. This can be a string like "1h" or a Date object. - * - * @example - * "1h" - 1 hour - * "30d" - 30 days - * "15m" - 15 minutes - * "2w" - 2 weeks - * "60s" - 60 seconds - * new Date("2025-01-01T00:00:00Z") - */ - delay?: string | Date; - - /** - * Set a time-to-live for this run. If the run is not executed within this time, it will be removed from the queue and never execute. - * - * @example - * - * ```ts - * await myTask.trigger({ foo: "bar" }, { ttl: "1h" }); - * await myTask.trigger({ foo: "bar" }, { ttl: 60 * 60 }); // 1 hour - * ``` - * - * The minimum value is 1 second. Setting the `ttl` to `0` will disable the TTL and the run will never expire. - * - * **Note:** Runs in development have a default `ttl` of 10 minutes. You can override this by setting the `ttl` option. 
- */ - ttl?: string | number; - - /** - * Tags to attach to the run. Tags can be used to filter runs in the dashboard and using the SDK. - * - * You can set up to 5 tags per run, they must be less than 64 characters each. - * - * We recommend prefixing tags with a namespace using an underscore or colon, like `user_1234567` or `org:9876543`. - * - * @example - * - * ```ts - * await myTask.trigger({ foo: "bar" }, { tags: ["user:1234567", "org:9876543"] }); - * ``` - */ - tags?: RunTags; - - /** - * Metadata to attach to the run. Metadata can be used to store additional information about the run. Limited to 4KB. - */ - metadata?: Record; - - /** - * The maximum duration in compute-time seconds that a task run is allowed to run. If the task run exceeds this duration, it will be stopped. - * - * This will override the task's maxDuration. - * - * Minimum value is 5 seconds - */ - maxDuration?: number; -}; - -type TaskRunConcurrencyOptions = Queue; - -export type Prettify = { - [K in keyof T]: T[K]; -} & {}; - -export type DynamicBaseOptions = { - id: string; -}; - -export function createTask< +export function createSchemaTask< TIdentifier extends string, - TInput = void, + TSchema extends TaskSchema | undefined = undefined, TOutput = unknown, TInitOutput extends InitOutput = any, >( - params: TaskOptions -): Task { + params: TaskWithSchemaOptions +): Task, TOutput> { const customQueue = params.queue ? queue({ name: params.queue?.name ?? `task/${params.id}`, @@ -545,33 +220,40 @@ export function createTask< }) : undefined; - const task: Task = { + const parsePayload = params.schema + ? getSchemaParseFn>(params.schema) + : undefined; + + const task: Task, TOutput> = { id: params.id, - trigger: async (payload, options) => { + trigger: async (payload, options, requestOptions) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); - return await trigger_internal( + return await trigger_internal, TOutput>>( taskMetadata && taskMetadata.exportName ? `${taskMetadata.exportName}.trigger()` : `trigger()`, params.id, payload, + parsePayload, { queue: customQueue, ...options, - } + }, + requestOptions ); }, - batchTrigger: async (items) => { + batchTrigger: async (items, requestOptions) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); - return await batchTrigger_internal( + return await batchTrigger_internal, TOutput>>( taskMetadata && taskMetadata.exportName ? `${taskMetadata.exportName}.batchTrigger()` : `batchTrigger()`, params.id, items, - undefined, + parsePayload, + requestOptions, customQueue ); }, @@ -579,12 +261,13 @@ export function createTask< const taskMetadata = taskCatalog.getTaskManifest(params.id); return new TaskRunPromise((resolve, reject) => { - triggerAndWait_internal( + triggerAndWait_internal, TOutput>( taskMetadata && taskMetadata.exportName ? `${taskMetadata.exportName}.triggerAndWait()` : `triggerAndWait()`, params.id, payload, + parsePayload, { queue: customQueue, ...options, @@ -601,12 +284,13 @@ export function createTask< batchTriggerAndWait: async (items) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); - return await batchTriggerAndWait_internal( + return await batchTriggerAndWait_internal, TOutput>( taskMetadata && taskMetadata.exportName ? 
`${taskMetadata.exportName}.batchTriggerAndWait()` : `batchTriggerAndWait()`, params.id, items, + parsePayload, undefined, customQueue ); @@ -628,6 +312,7 @@ export function createTask< onSuccess: params.onSuccess, onFailure: params.onFailure, onStart: params.onStart, + parsePayload, }, }); @@ -657,12 +342,13 @@ export async function trigger( id: TaskIdentifier, payload: TaskPayload, options?: TaskRunOptions, - requestOptions?: ApiRequestOptions -): Promise>> { - return await trigger_internal, TaskOutput>( + requestOptions?: TriggerApiRequestOptions +): Promise>> { + return await trigger_internal>( "tasks.trigger()", id, payload, + undefined, options, requestOptions ); @@ -697,6 +383,7 @@ export function triggerAndWait( "tasks.triggerAndWait()", id, payload, + undefined, options, requestOptions ) @@ -742,6 +429,7 @@ export async function batchTriggerAndWait( "tasks.batchTriggerAndWait()", id, items, + undefined, requestOptions ); } @@ -765,8 +453,8 @@ export async function triggerAndPoll( id: TaskIdentifier, payload: TaskPayload, options?: TaskRunOptions & PollOptions, - requestOptions?: ApiRequestOptions -): Promise>>> { + requestOptions?: TriggerApiRequestOptions +): Promise> { const handle = await trigger(id, payload, options, requestOptions); return runs.poll(handle, options, requestOptions); @@ -775,26 +463,30 @@ export async function triggerAndPoll( export async function batchTrigger( id: TaskIdentifier, items: Array>>, - requestOptions?: ApiRequestOptions -): Promise>> { - return await batchTrigger_internal, TaskOutput>( + requestOptions?: TriggerApiRequestOptions +): Promise>> { + return await batchTrigger_internal>( "tasks.batchTrigger()", id, items, + undefined, requestOptions ); } -async function trigger_internal( +async function trigger_internal( name: string, - id: string, - payload: TPayload, + id: TRunTypes["taskIdentifier"], + payload: TRunTypes["payload"], + parsePayload?: SchemaParseFn, options?: TaskRunOptions, - requestOptions?: ApiRequestOptions -): Promise> { + requestOptions?: TriggerApiRequestOptions +): Promise> { const apiClient = apiClientManager.clientOrThrow(); - const payloadPacket = await stringifyIO(payload); + const parsedPayload = parsePayload ? await parsePayload(payload) : payload; + + const payloadPacket = await stringifyIO(parsedPayload); const handle = await apiClient.triggerTask( id, @@ -839,16 +531,17 @@ async function trigger_internal( } ); - return handle as RunHandle; + return handle as RunHandleFromTypes; } -async function batchTrigger_internal( +async function batchTrigger_internal( name: string, - id: string, - items: Array>, - requestOptions?: ApiRequestOptions, + id: TRunTypes["taskIdentifier"], + items: Array>, + parsePayload?: SchemaParseFn, + requestOptions?: TriggerApiRequestOptions, queue?: QueueOptions -): Promise> { +): Promise> { const apiClient = apiClientManager.clientOrThrow(); const response = await apiClient.batchTriggerTask( @@ -856,7 +549,9 @@ async function batchTrigger_internal( { items: await Promise.all( items.map(async (item) => { - const payloadPacket = await stringifyIO(item.payload); + const parsedPayload = parsePayload ? 
await parsePayload(item.payload) : item.payload; + + const payloadPacket = await stringifyIO(parsedPayload); return { payload: payloadPacket.data, @@ -895,15 +590,17 @@ async function batchTrigger_internal( const handle = { batchId: response.batchId, runs: response.runs.map((id) => ({ id })), + publicAccessToken: response.publicAccessToken, }; - return handle as BatchRunHandle; + return handle as BatchRunHandleFromTypes; } async function triggerAndWait_internal( name: string, id: string, payload: TPayload, + parsePayload?: SchemaParseFn, options?: TaskRunOptions, requestOptions?: ApiRequestOptions ): Promise> { @@ -915,7 +612,9 @@ async function triggerAndWait_internal( const apiClient = apiClientManager.clientOrThrow(); - const payloadPacket = await stringifyIO(payload); + const parsedPayload = parsePayload ? await parsePayload(payload) : payload; + + const payloadPacket = await stringifyIO(parsedPayload); return await tracer.startActiveSpan( name, @@ -996,6 +695,7 @@ async function batchTriggerAndWait_internal( name: string, id: string, items: Array>, + parsePayload?: SchemaParseFn, requestOptions?: ApiRequestOptions, queue?: QueueOptions ): Promise> { @@ -1015,7 +715,9 @@ async function batchTriggerAndWait_internal( { items: await Promise.all( items.map(async (item) => { - const payloadPacket = await stringifyIO(item.payload); + const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload; + + const payloadPacket = await stringifyIO(parsedPayload); return { payload: payloadPacket.data, diff --git a/packages/trigger-sdk/src/v3/tasks.ts b/packages/trigger-sdk/src/v3/tasks.ts index 53cddf8d94..391bfc5790 100644 --- a/packages/trigger-sdk/src/v3/tasks.ts +++ b/packages/trigger-sdk/src/v3/tasks.ts @@ -1,46 +1,46 @@ -import { InitOutput } from "@trigger.dev/core/v3"; import { batchTrigger, batchTriggerAndWait, createTask, + createSchemaTask, + SubtaskUnwrapError, trigger, triggerAndPoll, triggerAndWait, - SubtaskUnwrapError, } from "./shared.js"; export { SubtaskUnwrapError }; import type { - TaskOptions, - Task, + AnyTask, + BatchItem, + BatchResult, + BatchRunHandle, Queue, RunHandle, - BatchRunHandle, - TaskRunResult, - BatchResult, - BatchItem, - TaskPayload, - TaskOutput, + Task, TaskIdentifier, + TaskOptions, + TaskOutput, + TaskPayload, TaskRunOptions, - AnyTask, + TaskRunResult, } from "./shared.js"; export type { - TaskOptions, - Task, + AnyTask, + BatchItem, + BatchResult, + BatchRunHandle, Queue, RunHandle, - BatchRunHandle, - TaskRunResult, - BatchResult, - BatchItem, - TaskPayload, - TaskOutput, + Task, TaskIdentifier, + TaskOptions, + TaskOutput, + TaskPayload, TaskRunOptions, - AnyTask, + TaskRunResult, }; /** Creates a task that can be triggered @@ -61,16 +61,9 @@ export type { * * @returns A task that can be triggered */ -export function task< - TIdentifier extends string, - TInput = void, - TOutput = unknown, - TInitOutput extends InitOutput = any, ->( - options: TaskOptions -): Task { - return createTask(options); -} +export const task = createTask; + +export const schemaTask = createSchemaTask; export const tasks = { trigger, diff --git a/packages/trigger-sdk/src/v3/tracer.ts b/packages/trigger-sdk/src/v3/tracer.ts index 13d71a298e..a30c086e91 100644 --- a/packages/trigger-sdk/src/v3/tracer.ts +++ b/packages/trigger-sdk/src/v3/tracer.ts @@ -1,4 +1,4 @@ -import { TriggerTracer } from "@trigger.dev/core/v3"; +import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { VERSION } from "../version.js"; export const tracer = new TriggerTracer({ 
name: "@trigger.dev/sdk", version: VERSION }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index bd9921c820..ac3cf1d433 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -385,8 +385,8 @@ importers: specifier: workspace:* version: link:../../internal-packages/otlp-importer '@trigger.dev/platform': - specifier: 1.0.12 - version: 1.0.12 + specifier: 1.0.13 + version: 1.0.13 '@trigger.dev/sdk': specifier: workspace:* version: link:../../packages/trigger-sdk @@ -399,6 +399,12 @@ importers: '@uiw/react-codemirror': specifier: ^4.19.5 version: 4.19.5(@babel/runtime@7.24.5)(@codemirror/autocomplete@6.4.0)(@codemirror/language@6.3.2)(@codemirror/lint@6.4.2)(@codemirror/search@6.2.3)(@codemirror/state@6.2.0)(@codemirror/theme-one-dark@6.1.0)(@codemirror/view@6.7.2)(codemirror@6.0.1)(react-dom@18.2.0)(react@18.2.0) + '@unkey/cache': + specifier: ^1.5.0 + version: 1.5.0 + '@unkey/error': + specifier: ^0.2.0 + version: 0.2.0 '@upstash/ratelimit': specifier: ^1.1.3 version: 1.1.3 @@ -637,6 +643,9 @@ importers: specifier: ^1.5.0 version: 1.5.0(zod@3.22.3) devDependencies: + '@internal/testcontainers': + specifier: workspace:* + version: link:../../internal-packages/testcontainers '@remix-run/dev': specifier: 2.1.0 version: 2.1.0(@remix-run/serve@2.1.0)(@types/node@18.11.18)(ts-node@10.9.1)(typescript@5.2.2) @@ -724,6 +733,9 @@ importers: '@types/slug': specifier: ^5.0.3 version: 5.0.3 + '@types/supertest': + specifier: ^6.0.2 + version: 6.0.2 '@types/tar': specifier: ^6.1.4 version: 6.1.4 @@ -796,6 +808,9 @@ importers: style-loader: specifier: ^3.3.4 version: 3.3.4(webpack@5.88.2) + supertest: + specifier: ^7.0.0 + version: 7.0.0 tailwind-scrollbar: specifier: ^3.0.1 version: 3.0.1(tailwindcss@3.4.1) @@ -811,6 +826,9 @@ importers: typescript: specifier: ^5.1.6 version: 5.2.2 + vite-tsconfig-paths: + specifier: ^4.0.5 + version: 4.0.5(typescript@5.2.2) vitest: specifier: ^1.4.0 version: 1.4.0(@types/node@18.11.18) @@ -944,6 +962,9 @@ importers: testcontainers: specifier: ^10.13.1 version: 10.13.1 + tinyexec: + specifier: ^0.3.0 + version: 0.3.0 vitest: specifier: ^1.4.0 version: 1.6.0(@types/node@20.14.14) @@ -1232,9 +1253,15 @@ importers: packages/core: dependencies: + '@electric-sql/client': + specifier: 0.6.3 + version: 0.6.3 '@google-cloud/precise-date': specifier: ^4.0.0 version: 4.0.0 + '@jsonhero/path': + specifier: ^1.0.21 + version: 1.0.21 '@opentelemetry/api': specifier: 1.9.0 version: 1.9.0 @@ -1268,12 +1295,18 @@ importers: '@opentelemetry/semantic-conventions': specifier: 1.25.1 version: 1.25.1 + dequal: + specifier: ^2.0.3 + version: 2.0.3 execa: specifier: ^8.0.1 version: 8.0.1 humanize-duration: specifier: ^3.27.3 version: 3.27.3 + jose: + specifier: ^5.4.0 + version: 5.4.0 nanoid: specifier: ^3.3.4 version: 3.3.7 @@ -1333,6 +1366,46 @@ importers: specifier: ^1.6.0 version: 1.6.0(@types/node@20.14.14) + packages/react-hooks: + dependencies: + '@trigger.dev/core': + specifier: workspace:^3.0.12 + version: link:../core + react: + specifier: '>=18 || >=19.0.0-beta' + version: 18.3.1 + react-dom: + specifier: '>=18 || >=19.0.0-beta' + version: 18.2.0(react@18.3.1) + swr: + specifier: ^2.2.5 + version: 2.2.5(react@18.3.1) + devDependencies: + '@arethetypeswrong/cli': + specifier: ^0.15.4 + version: 0.15.4 + '@types/node': + specifier: ^20.14.14 + version: 20.14.14 + '@types/react': + specifier: '*' + version: 18.3.1 + '@types/react-dom': + specifier: '*' + version: 18.2.7 + rimraf: + specifier: ^3.0.2 + version: 3.0.2 + tshy: + specifier: ^3.0.2 + version: 3.0.2 + tsx: + specifier: 4.17.0 + 
version: 4.17.0 + typescript: + specifier: ^5.5.4 + version: 5.5.4 + packages/trigger-sdk: dependencies: '@opentelemetry/api': @@ -1459,6 +1532,82 @@ importers: specifier: workspace:* version: link:../../packages/cli-v3 + references/nextjs-realtime: + dependencies: + '@fal-ai/serverless-client': + specifier: ^0.15.0 + version: 0.15.0 + '@radix-ui/react-icons': + specifier: ^1.3.0 + version: 1.3.0(react@18.3.1) + '@radix-ui/react-scroll-area': + specifier: ^1.2.0 + version: 1.2.0(@types/react-dom@18.2.7)(@types/react@18.3.1)(react-dom@18.2.0)(react@18.3.1) + '@radix-ui/react-slot': + specifier: ^1.1.0 + version: 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@trigger.dev/react-hooks': + specifier: workspace:^3 + version: link:../../packages/react-hooks + '@trigger.dev/sdk': + specifier: workspace:^3 + version: link:../../packages/trigger-sdk + '@uploadthing/react': + specifier: ^7.0.3 + version: 7.0.3(next@14.2.15)(react@18.3.1)(uploadthing@7.1.0) + class-variance-authority: + specifier: ^0.7.0 + version: 0.7.0 + clsx: + specifier: ^2.1.1 + version: 2.1.1 + lucide-react: + specifier: ^0.451.0 + version: 0.451.0(react@18.3.1) + next: + specifier: 14.2.15 + version: 14.2.15(@playwright/test@1.37.0)(react-dom@18.2.0)(react@18.3.1) + react: + specifier: ^18 + version: 18.3.1 + react-dom: + specifier: ^18 + version: 18.2.0(react@18.3.1) + tailwind-merge: + specifier: ^2.5.3 + version: 2.5.3 + tailwindcss-animate: + specifier: ^1.0.7 + version: 1.0.7(tailwindcss@3.4.1) + uploadthing: + specifier: ^7.1.0 + version: 7.1.0(next@14.2.15)(tailwindcss@3.4.1) + zod: + specifier: 3.22.3 + version: 3.22.3 + devDependencies: + '@types/node': + specifier: ^20 + version: 20.14.14 + '@types/react': + specifier: ^18 + version: 18.3.1 + '@types/react-dom': + specifier: ^18 + version: 18.2.7 + postcss: + specifier: ^8 + version: 8.4.44 + tailwindcss: + specifier: ^3.4.1 + version: 3.4.1(ts-node@10.9.1) + trigger.dev: + specifier: workspace:^3 + version: link:../../packages/cli-v3 + typescript: + specifier: ^5 + version: 5.5.4 + references/prisma-catalog: dependencies: '@prisma/client': @@ -4597,12 +4746,39 @@ packages: '@connectrpc/connect-node': 1.4.0(@bufbuild/protobuf@1.10.0)(@connectrpc/connect@1.4.0) dev: false + /@effect/platform@0.63.2(@effect/schema@0.72.2)(effect@3.7.2): + resolution: {integrity: sha512-b39pVFw0NGo/tXjGShW7Yg0M+kG7bRrFR6+dQ3aIu99ePTkTp6bGb/kDB7n+dXsFFdIqHsQGYESeYcOQngxdFQ==} + peerDependencies: + '@effect/schema': ^0.72.2 + effect: ^3.7.2 + dependencies: + '@effect/schema': 0.72.2(effect@3.7.2) + effect: 3.7.2 + find-my-way-ts: 0.1.5 + multipasta: 0.2.5 + dev: false + + /@effect/schema@0.72.2(effect@3.7.2): + resolution: {integrity: sha512-/x1BIA2pqcUidNrOMmwYe6Z58KtSgHSc5iJu7bNwIxi2LHMVuUao1BvpI5x6i7T/zkoi4dd1S6qasZzJIYDjdw==} + peerDependencies: + effect: ^3.7.2 + dependencies: + effect: 3.7.2 + fast-check: 3.22.0 + dev: false + /@electric-sql/client@0.4.0: resolution: {integrity: sha512-YVYSqHitqVIDC1RBTfmHMfAfqDNAKMK9/AFVTDFQQxN3Q85dIQS49zThAuJVecYiuYRJvTiqf40c4n39jZSNrQ==} optionalDependencies: '@rollup/rollup-darwin-arm64': 4.21.3 dev: false + /@electric-sql/client@0.6.3: + resolution: {integrity: sha512-/AYkRrEASKIGcjtNp8IVJ3sAUm+IQ2l0NrGgDvvAG/n1+ifOl7kD1E4dRyg1qdY/b+HdKhGNYlNgsPuwMKO2Mg==} + optionalDependencies: + '@rollup/rollup-darwin-arm64': 4.21.3 + dev: false + /@electric-sql/react@0.3.5(react@18.2.0): resolution: {integrity: sha512-qPrlF3BsRg5L8zAn1sLGzc3pkswfEHyQI3lNOu7Xllv1DBx85RvHR1zgGGPAUfC8iwyWupQu9pFPE63GdbeuhA==} peerDependencies: @@ -6117,6 +6293,15 @@ packages: 
resolution: {integrity: sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + /@fal-ai/serverless-client@0.15.0: + resolution: {integrity: sha512-4Vuocu0342OijAN6xO/lwohDV7h90LbkTnOAEwH+pYvMFVC6RYmHS4GILc/wnOWBTw+iFlZFEKlljEVolkjVfg==} + engines: {node: '>=18.0.0'} + dependencies: + '@msgpack/msgpack': 3.0.0-beta2 + eventsource-parser: 1.1.2 + robot3: 0.4.1 + dev: false + /@fastify/busboy@2.0.0: resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} engines: {node: '>=14'} @@ -6641,6 +6826,10 @@ packages: '@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.5.0 + /@jsonhero/path@1.0.21: + resolution: {integrity: sha512-gVUDj/92acpVoJwsVJ/RuWOaHyG4oFzn898WNGQItLCTQ+hOaVlEaImhwE1WqOTf+l3dGOUkbSiVKlb3q1hd1Q==} + dev: false + /@jspm/core@2.0.1: resolution: {integrity: sha512-Lg3PnLp0QXpxwLIAuuJboLeRaIhrgJjeuh797QADg3xz8wGLugQOS5DpsE8A6i6Adgzf+bacllkKZG3J0tGfDw==} dev: true @@ -6769,6 +6958,11 @@ packages: lilconfig: 2.1.0 dev: true + /@msgpack/msgpack@3.0.0-beta2: + resolution: {integrity: sha512-y+l1PNV0XDyY8sM3YtuMLK5vE3/hkfId+Do8pLo/OPxfxuFAUwcGz3oiiUuV46/aBpwTzZ+mRWVMtlSKbradhw==} + engines: {node: '>= 14'} + dev: false + /@mswjs/interceptors@0.29.1: resolution: {integrity: sha512-3rDakgJZ77+RiQUuSK69t1F0m8BQKA8Vh5DCS5V0DWvNY67zob2JhhQrhCO0AKLGINTRSFd1tBaHcJTkhefoSw==} engines: {node: '>=18'} @@ -6785,6 +6979,10 @@ packages: resolution: {integrity: sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw==} dev: false + /@next/env@14.2.15: + resolution: {integrity: sha512-S1qaj25Wru2dUpcIZMjxeMVSwkt8BK4dmWHHiBuRstcIyOsMapqT4A4jSB6onvqeygkSSmOkyny9VVx8JIGamQ==} + dev: false + /@next/env@14.2.3: resolution: {integrity: sha512-W7fd7IbkfmeeY2gXrzJYDx8D2lWKbVoTIj1o1ScPHNzvp30s1AuoEFSdr39bC5sjxJaxTtq3OTCZboNp0lNWHA==} dev: false @@ -6798,6 +6996,15 @@ packages: dev: false optional: true + /@next/swc-darwin-arm64@14.2.15: + resolution: {integrity: sha512-Rvh7KU9hOUBnZ9TJ28n2Oa7dD9cvDBKua9IKx7cfQQ0GoYUwg9ig31O2oMwH3wm+pE3IkAQ67ZobPfEgurPZIA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + /@next/swc-darwin-arm64@14.2.3: resolution: {integrity: sha512-3pEYo/RaGqPP0YzwnlmPN2puaF2WMLM3apt5jLW2fFdXD9+pqcoTzRk+iZsf8ta7+quAe4Q6Ms0nR0SFGFdS1A==} engines: {node: '>= 10'} @@ -6816,6 +7023,15 @@ packages: dev: false optional: true + /@next/swc-darwin-x64@14.2.15: + resolution: {integrity: sha512-5TGyjFcf8ampZP3e+FyCax5zFVHi+Oe7sZyaKOngsqyaNEpOgkKB3sqmymkZfowy3ufGA/tUgDPPxpQx931lHg==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + /@next/swc-darwin-x64@14.2.3: resolution: {integrity: sha512-6adp7waE6P1TYFSXpY366xwsOnEXM+y1kgRpjSRVI2CBDOcbRjsJ67Z6EgKIqWIue52d2q/Mx8g9MszARj8IEA==} engines: {node: '>= 10'} @@ -6834,6 +7050,15 @@ packages: dev: false optional: true + /@next/swc-linux-arm64-gnu@14.2.15: + resolution: {integrity: sha512-3Bwv4oc08ONiQ3FiOLKT72Q+ndEMyLNsc/D3qnLMbtUYTQAmkx9E/JRu0DBpHxNddBmNT5hxz1mYBphJ3mfrrw==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + /@next/swc-linux-arm64-gnu@14.2.3: resolution: {integrity: sha512-cuzCE/1G0ZSnTAHJPUT1rPgQx1w5tzSX7POXSLaS7w2nIUJUD+e25QoXD/hMfxbsT9rslEXugWypJMILBj/QsA==} engines: {node: '>= 10'} @@ -6852,6 +7077,15 @@ packages: dev: false optional: true + 
/@next/swc-linux-arm64-musl@14.2.15: + resolution: {integrity: sha512-k5xf/tg1FBv/M4CMd8S+JL3uV9BnnRmoe7F+GWC3DxkTCD9aewFRH1s5rJ1zkzDa+Do4zyN8qD0N8c84Hu96FQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + /@next/swc-linux-arm64-musl@14.2.3: resolution: {integrity: sha512-0D4/oMM2Y9Ta3nGuCcQN8jjJjmDPYpHX9OJzqk42NZGJocU2MqhBq5tWkJrUQOQY9N+In9xOdymzapM09GeiZw==} engines: {node: '>= 10'} @@ -6870,6 +7104,15 @@ packages: dev: false optional: true + /@next/swc-linux-x64-gnu@14.2.15: + resolution: {integrity: sha512-kE6q38hbrRbKEkkVn62reLXhThLRh6/TvgSP56GkFNhU22TbIrQDEMrO7j0IcQHcew2wfykq8lZyHFabz0oBrA==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + /@next/swc-linux-x64-gnu@14.2.3: resolution: {integrity: sha512-ENPiNnBNDInBLyUU5ii8PMQh+4XLr4pG51tOp6aJ9xqFQ2iRI6IH0Ds2yJkAzNV1CfyagcyzPfROMViS2wOZ9w==} engines: {node: '>= 10'} @@ -6888,6 +7131,15 @@ packages: dev: false optional: true + /@next/swc-linux-x64-musl@14.2.15: + resolution: {integrity: sha512-PZ5YE9ouy/IdO7QVJeIcyLn/Rc4ml9M2G4y3kCM9MNf1YKvFY4heg3pVa/jQbMro+tP6yc4G2o9LjAz1zxD7tQ==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + /@next/swc-linux-x64-musl@14.2.3: resolution: {integrity: sha512-BTAbq0LnCbF5MtoM7I/9UeUu/8ZBY0i8SFjUMCbPDOLv+un67e2JgyN4pmgfXBwy/I+RHu8q+k+MCkDN6P9ViQ==} engines: {node: '>= 10'} @@ -6906,6 +7158,15 @@ packages: dev: false optional: true + /@next/swc-win32-arm64-msvc@14.2.15: + resolution: {integrity: sha512-2raR16703kBvYEQD9HNLyb0/394yfqzmIeyp2nDzcPV4yPjqNUG3ohX6jX00WryXz6s1FXpVhsCo3i+g4RUX+g==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: false + optional: true + /@next/swc-win32-arm64-msvc@14.2.3: resolution: {integrity: sha512-AEHIw/dhAMLNFJFJIJIyOFDzrzI5bAjI9J26gbO5xhAKHYTZ9Or04BesFPXiAYXDNdrwTP2dQceYA4dL1geu8A==} engines: {node: '>= 10'} @@ -6924,6 +7185,15 @@ packages: dev: false optional: true + /@next/swc-win32-ia32-msvc@14.2.15: + resolution: {integrity: sha512-fyTE8cklgkyR1p03kJa5zXEaZ9El+kDNM5A+66+8evQS5e/6v0Gk28LqA0Jet8gKSOyP+OTm/tJHzMlGdQerdQ==} + engines: {node: '>= 10'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: false + optional: true + /@next/swc-win32-ia32-msvc@14.2.3: resolution: {integrity: sha512-vga40n1q6aYb0CLrM+eEmisfKCR45ixQYXuBXxOOmmoV8sYST9k7E3US32FsY+CkkF7NtzdcebiFT4CHuMSyZw==} engines: {node: '>= 10'} @@ -6942,6 +7212,15 @@ packages: dev: false optional: true + /@next/swc-win32-x64-msvc@14.2.15: + resolution: {integrity: sha512-SzqGbsLsP9OwKNUG9nekShTwhj6JSB9ZLMWQ8g1gG6hdE5gQLncbnbymrwy2yVmH9nikSLYRYxYMFu78Ggp7/g==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: false + optional: true + /@next/swc-win32-x64-msvc@14.2.3: resolution: {integrity: sha512-Q1/zm43RWynxrO7lW4ehciQVj+5ePBhOK+/K2P7pLFX3JaJ/IZVC69SHidrmZSOkqz7ECIOhhy7XhAFG4JYyHA==} engines: {node: '>= 10'} @@ -8218,6 +8497,10 @@ packages: '@babel/runtime': 7.24.5 dev: false + /@radix-ui/number@1.1.0: + resolution: {integrity: sha512-V3gRzhVNU1ldS5XhAPTom1fOIo4ccrjjJgmE+LI2h/WaFpHmx0MQApT+KZHnx8abG6Avtfcz4WoEciMnpFT3HQ==} + dev: false + /@radix-ui/primitive@1.0.0: resolution: {integrity: sha512-3e7rn8FDMin4CgeL7Z/49smCA3rFYY3Ha2rUQ7HRWFadS5iCRw08ZgVT1LaNTCNqgvrUiyczLflrVrF0SRQtNA==} dependencies: @@ -8230,6 +8513,10 @@ packages: '@babel/runtime': 7.24.5 dev: false + /@radix-ui/primitive@1.1.0: + resolution: {integrity: 
sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==} + dev: false + /@radix-ui/react-alert-dialog@1.0.4(@types/react-dom@18.2.7)(@types/react@18.2.69)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-jbfBCRlKYlhbitueOAv7z74PXYeIQmWpKwm3jllsdkw7fGWNkxqP3v0nY9WmOzcPqpQuoorNtvViBgL46n5gVg==} peerDependencies: @@ -8444,6 +8731,19 @@ packages: react: 18.2.0 dev: false + /@radix-ui/react-compose-refs@1.1.0(@types/react@18.3.1)(react@18.3.1): + resolution: {integrity: sha512-b4inOtiaOnYf9KWyO3jAeeCG6FeyfY6ldiEPanbUjWd+xIk5wZeHa8yVwmrJ2vderhu/BQvzCrJI0lHd+wIiqw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 18.3.1 + react: 18.3.1 + dev: false + /@radix-ui/react-context@1.0.0(react@18.2.0): resolution: {integrity: sha512-1pVM9RfOQ+n/N5PJK33kRSKsr1glNxomxONs5c49MliinBY6Yw2Q995qfBUUo0/Mbg05B/sGA0gkgPI7kmSHBg==} peerDependencies: @@ -8481,6 +8781,19 @@ packages: react: 18.3.1 dev: false + /@radix-ui/react-context@1.1.1(@types/react@18.3.1)(react@18.3.1): + resolution: {integrity: sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 18.3.1 + react: 18.3.1 + dev: false + /@radix-ui/react-dialog@1.0.3(@types/react@18.2.69)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-owNhq36kNPqC2/a+zJRioPg6HHnTn5B/sh/NjTY8r4W9g1L5VJlrzZIVcBr7R9Mg8iLjVmh6MGgMlfoVf/WO/A==} peerDependencies: @@ -8579,6 +8892,19 @@ packages: react: 18.3.1 dev: false + /@radix-ui/react-direction@1.1.0(@types/react@18.3.1)(react@18.3.1): + resolution: {integrity: sha512-BUuBvgThEiAXh2DWu93XsT+a3aWrGqolGlqqw5VU1kG7p/ZH2cuDlM1sRLNnY3QcBS69UIz2mcKhMxDsdewhjg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 18.3.1 + react: 18.3.1 + dev: false + /@radix-ui/react-dismissable-layer@1.0.3(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-nXZOvFjOuHS1ovumntGV7NNoLaEp9JEvTht3MBjP44NSW5hUKj/8OnfN3+8WmB+CEhN44XaGhpHoSsUIEl5P7Q==} peerDependencies: @@ -8767,6 +9093,14 @@ packages: react-dom: 18.2.0(react@18.3.1) dev: false + /@radix-ui/react-icons@1.3.0(react@18.3.1): + resolution: {integrity: sha512-jQxj/0LKgp+j9BiTXz3O3sgs26RNet2iLWmsPyRz2SIcR4q/4SbazXfnYwbAr+vLYKSfc7qxzyGQA1HLlYiuNw==} + peerDependencies: + react: ^16.x || ^17.x || ^18.x + dependencies: + react: 18.3.1 + dev: false + /@radix-ui/react-id@1.0.0(react@18.2.0): resolution: {integrity: sha512-Q6iAB/U7Tq3NTolBBQbHTgclPmGWE3OlktGGqrClPozSw4vkQ1DfQAOtzgRPecKsMdJINE05iaoDUG8tRzCBjw==} peerDependencies: @@ -9097,6 +9431,27 @@ packages: react-dom: 18.2.0(react@18.3.1) dev: false + /@radix-ui/react-presence@1.1.1(@types/react-dom@18.2.7)(@types/react@18.3.1)(react-dom@18.2.0)(react@18.3.1): + resolution: {integrity: sha512-IeFXVi4YS1K0wVZzXNrbaaUvIJ3qdY+/Ih4eHFhWA9SwGR9UDX7Ck8abvL57C4cv3wwMvUE0OG69Qc3NCcTe/A==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + 
dependencies: + '@radix-ui/react-compose-refs': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@types/react': 18.3.1 + '@types/react-dom': 18.2.7 + react: 18.3.1 + react-dom: 18.2.0(react@18.3.1) + dev: false + /@radix-ui/react-primitive@1.0.2(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-zY6G5Qq4R8diFPNwtyoLRZBxzu1Z+SXMlfYpChN7Dv8gvmx9X3qhDqiLWvKseKVJMuedFeU/Sa0Sy/Ia+t06Dw==} peerDependencies: @@ -9151,6 +9506,26 @@ packages: react-dom: 18.2.0(react@18.3.1) dev: false + /@radix-ui/react-primitive@2.0.0(@types/react-dom@18.2.7)(@types/react@18.3.1)(react-dom@18.2.0)(react@18.3.1): + resolution: {integrity: sha512-ZSpFm0/uHa8zTvKBDjLFWLo8dkr4MBsiDLz0g3gMUwqgLHz9rTaRRGYDgvZPtBJgYCBKXkS9fzmoySgr8CO6Cw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + dependencies: + '@radix-ui/react-slot': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@types/react': 18.3.1 + '@types/react-dom': 18.2.7 + react: 18.3.1 + react-dom: 18.2.0(react@18.3.1) + dev: false + /@radix-ui/react-radio-group@1.1.3(@types/react-dom@18.2.7)(@types/react@18.2.69)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-x+yELayyefNeKeTx4fjK6j99Fs6c4qKm3aY38G3swQVTN6xMpsrbigC0uHs2L//g8q4qR7qOcww8430jJmi2ag==} peerDependencies: @@ -9259,6 +9634,34 @@ packages: react-dom: 18.2.0(react@18.3.1) dev: false + /@radix-ui/react-scroll-area@1.2.0(@types/react-dom@18.2.7)(@types/react@18.3.1)(react-dom@18.2.0)(react@18.3.1): + resolution: {integrity: sha512-q2jMBdsJ9zB7QG6ngQNzNwlvxLQqONyL58QbEGwuyRZZb/ARQwk3uQVbCF7GvQVOtV6EU/pDxAw3zRzJZI3rpQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + dependencies: + '@radix-ui/number': 1.1.0 + '@radix-ui/primitive': 1.1.0 + '@radix-ui/react-compose-refs': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@radix-ui/react-context': 1.1.1(@types/react@18.3.1)(react@18.3.1) + '@radix-ui/react-direction': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.1(@types/react-dom@18.2.7)(@types/react@18.3.1)(react-dom@18.2.0)(react@18.3.1) + '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.2.7)(@types/react@18.3.1)(react-dom@18.2.0)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@types/react': 18.3.1 + '@types/react-dom': 18.2.7 + react: 18.3.1 + react-dom: 18.2.0(react@18.3.1) + dev: false + /@radix-ui/react-select@1.2.1(@types/react@18.2.69)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-GULRMITaOHNj79BZvQs3iZO0+f2IgI8g5HDhMi7Bnc13t7IlG86NFtOCfTLme4PNZdEtU+no+oGgcl6IFiphpQ==} peerDependencies: @@ -9379,6 +9782,20 @@ packages: react: 18.3.1 dev: false + /@radix-ui/react-slot@1.1.0(@types/react@18.3.1)(react@18.3.1): + resolution: {integrity: sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + 
optional: true + dependencies: + '@radix-ui/react-compose-refs': 1.1.0(@types/react@18.3.1)(react@18.3.1) + '@types/react': 18.3.1 + react: 18.3.1 + dev: false + /@radix-ui/react-switch@1.0.3(@types/react-dom@18.2.7)(@types/react@18.2.69)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-mxm87F88HyHztsI7N+ZUmEoARGkC22YVW5CaC+Byc+HRpuvCrOBPTAnXgf+tZ/7i0Sg/eOePGdMhUKhPaQEqow==} peerDependencies: @@ -9569,6 +9986,19 @@ packages: react: 18.3.1 dev: false + /@radix-ui/react-use-callback-ref@1.1.0(@types/react@18.3.1)(react@18.3.1): + resolution: {integrity: sha512-CasTfvsy+frcFkbXtSJ2Zu9JHpN8TYKxkgJGWbjiZhFivxaeW7rMeZt7QELGVLaYVfFMsKHjb7Ak0nMEe+2Vfw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 18.3.1 + react: 18.3.1 + dev: false + /@radix-ui/react-use-controllable-state@1.0.0(react@18.2.0): resolution: {integrity: sha512-FohDoZvk3mEXh9AWAVyRTYR4Sq7/gavuofglmiXB2g1aKyboUD4YtgWxKj8O5n+Uak52gXQ4wKz5IFST4vtJHg==} peerDependencies: @@ -9686,6 +10116,19 @@ packages: react: 18.3.1 dev: false + /@radix-ui/react-use-layout-effect@1.1.0(@types/react@18.3.1)(react@18.3.1): + resolution: {integrity: sha512-+FPE0rOdziWSrH9athwI1R0HDVbWlEhd+FR+aSDk4uWGmSJ9Z54sdZVDQPZAinJhJXwfT+qnj969mCsT2gfm5w==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 18.3.1 + react: 18.3.1 + dev: false + /@radix-ui/react-use-previous@1.0.0(react@18.2.0): resolution: {integrity: sha512-RG2K8z/K7InnOKpq6YLDmT49HGjNmrK+fr82UCVKT2sW0GYfVnYp4wZWBooT/EYfQ5faA9uIjvsuMMhH61rheg==} peerDependencies: @@ -10141,7 +10584,7 @@ packages: '@react-aria/utils': 3.23.0(react@18.2.0) '@react-types/shared': 3.22.0(react@18.2.0) '@swc/helpers': 0.5.2 - clsx: 2.0.0 + clsx: 2.1.1 react: 18.2.0 dev: false @@ -10154,7 +10597,7 @@ packages: '@react-aria/utils': 3.25.2(react@18.2.0) '@react-types/shared': 3.24.1(react@18.2.0) '@swc/helpers': 0.5.5 - clsx: 2.1.0 + clsx: 2.1.1 react: 18.2.0 dev: false @@ -11121,7 +11564,7 @@ packages: '@react-stately/utils': 3.9.0(react@18.2.0) '@react-types/shared': 3.22.0(react@18.2.0) '@swc/helpers': 0.5.2 - clsx: 2.0.0 + clsx: 2.1.1 react: 18.2.0 dev: false @@ -11134,7 +11577,7 @@ packages: '@react-stately/utils': 3.10.3(react@18.2.0) '@react-types/shared': 3.24.1(react@18.2.0) '@swc/helpers': 0.5.5 - clsx: 2.1.0 + clsx: 2.1.1 react: 18.2.0 dev: false @@ -14322,8 +14765,8 @@ packages: react-dom: 18.2.0(react@18.2.0) dev: false - /@trigger.dev/platform@1.0.12: - resolution: {integrity: sha512-pOB9S2XG+TmTdlYtF9xuQoFz1aRlRGWSVCXuv3Vj2ArzU6Zz3ukvK24qlTd0PJB1otX71Ho/NfViGKEzCyRb5Q==} + /@trigger.dev/platform@1.0.13: + resolution: {integrity: sha512-T2NoZrHpt3T8gLVDLjdx14A3RQfZTLGsDazvxnvwFYmFHtj13Dl5i6J2WVrd58CN3FLO8a154kdrNEv71jIIxg==} dependencies: zod: 3.22.3 dev: false @@ -14416,6 +14859,10 @@ packages: /@types/cookie@0.6.0: resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==} + /@types/cookiejar@2.1.5: + resolution: {integrity: sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q==} + dev: true + /@types/cors@2.8.17: resolution: {integrity: sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==} dependencies: @@ -14649,6 +15096,10 @@ packages: resolution: {integrity: 
sha512-76CqzuD6Q7LC+AtbPqrvD9AqsN0k8bsYo2bM2J8pmNldP1aIPAbzUQ7QbobyXL4eLr1wK5x8FZFe8eF/ubRuBg==} dev: true + /@types/methods@1.1.4: + resolution: {integrity: sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ==} + dev: true + /@types/mime@3.0.1: resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} dev: true @@ -14796,7 +15247,7 @@ packages: resolution: {integrity: sha512-V0kuGBX3+prX+DQ/7r2qsv1NsdfnCLnTgnRJ1pYnxykBhGMz+qj+box5lq7XsO5mtZsBqpjwwTu/7wszPfMBcw==} dependencies: '@types/prop-types': 15.7.5 - csstype: 3.1.1 + csstype: 3.1.3 /@types/readable-stream@4.0.14: resolution: {integrity: sha512-xZn/AuUbCMShGsqH/ehZtGDwQtbx00M9rZ2ENLe4tOjFZ/JFeWMhEZkk2fEe1jAUqqEAURIkFJ7Az/go8mM1/w==} @@ -14903,6 +15354,22 @@ packages: resolution: {integrity: sha512-eqNDvZsCNY49OAXB0Firg/Sc2BgoWsntsLUdybGFOhAfCD6QJ2n9HXUIHGqt5qjrxmMv4wS8WLAw43ZkKcJ8Pw==} dev: false + /@types/superagent@8.1.9: + resolution: {integrity: sha512-pTVjI73witn+9ILmoJdajHGW2jkSaOzhiFYF1Rd3EQ94kymLqB9PjD9ISg7WaALC7+dCHT0FGe9T2LktLq/3GQ==} + dependencies: + '@types/cookiejar': 2.1.5 + '@types/methods': 1.1.4 + '@types/node': 18.19.20 + form-data: 4.0.0 + dev: true + + /@types/supertest@6.0.2: + resolution: {integrity: sha512-137ypx2lk/wTQbW6An6safu9hXmajAifU/s7szAHLN/FeIm5w7yR0Wkl9fdJMRSHwOn4HLAI0DaB2TOORuhPDg==} + dependencies: + '@types/methods': 1.1.4 + '@types/superagent': 8.1.9 + dev: true + /@types/tar@6.1.4: resolution: {integrity: sha512-Cp4oxpfIzWt7mr2pbhHT2OTXGMAL0szYCzuf8lRWyIMCgsx6/Hfc3ubztuhvzXHXgraTQxyOCmmg7TDGIMIJJQ==} dependencies: @@ -15144,6 +15611,49 @@ packages: - '@codemirror/search' dev: false + /@unkey/cache@1.5.0: + resolution: {integrity: sha512-vqGk9qH3Hn1Nva3hGAaib6G3IcrPkRQ1VgyMFG7asuANZ0RtohONd9RwxXwlaDopJJ66Dsx5rWrV4s/3U1GADg==} + dependencies: + '@opentelemetry/api': 1.8.0 + '@unkey/error': 0.2.0 + superjson: 2.2.1 + dev: false + + /@unkey/error@0.2.0: + resolution: {integrity: sha512-DFGb4A7SrusZPP0FYuRIF0CO+Gi4etLUAEJ6EKc+TKYmscL0nEJ2Pr38FyX9MvjI4Wx5l35Wc9KsBjMm9Ybh7w==} + dependencies: + zod: 3.23.8 + dev: false + + /@uploadthing/mime-types@0.3.0: + resolution: {integrity: sha512-jN/XFvpKTzcd3MXT/9D9oxx05scnYiSYxAXF/e6hyg377zFducRxivU/kHyYTkpUZPTmOL5q9EQbOkUsXMlSMg==} + dev: false + + /@uploadthing/react@7.0.3(next@14.2.15)(react@18.3.1)(uploadthing@7.1.0): + resolution: {integrity: sha512-dPRO45H1UaXmbrmelU2uZjSmWz0h15F0+SbfAJ+ehxLEU34EBRpOJF55G7yK5X7syFRnedlG4E1+iEcEmKRynw==} + peerDependencies: + next: '*' + react: ^17.0.2 || ^18.0.0 + uploadthing: 7.1.0 + peerDependenciesMeta: + next: + optional: true + dependencies: + '@uploadthing/shared': 7.0.3 + file-selector: 0.6.0 + next: 14.2.15(@playwright/test@1.37.0)(react-dom@18.2.0)(react@18.3.1) + react: 18.3.1 + uploadthing: 7.1.0(next@14.2.15)(tailwindcss@3.4.1) + dev: false + + /@uploadthing/shared@7.0.3: + resolution: {integrity: sha512-PAT5Jl6bfuVp37PBvaw7bwQYhLeDfIBuGr37mbPBPhtiqm8zf8ip8zubkdm5rXEhqRWfdI64SQpl+7Q+dLoM2Q==} + dependencies: + '@uploadthing/mime-types': 0.3.0 + effect: 3.7.2 + sqids: 0.3.0 + dev: false + /@upstash/core-analytics@0.0.8: resolution: {integrity: sha512-MCJoF+Y8fkzq4NRLG7kEHjtGyMsZ2DICBdmEdwoK9umoSrfkzgBlYdZiHTIaewyt9PGaMZCHOasz0LAuMpxwxQ==} engines: {node: '>=16.0.0'} @@ -16138,6 +16648,10 @@ packages: dependencies: printable-characters: 1.0.42 + /asap@2.0.6: + resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + dev: true + /asn1@0.2.6: 
resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} dependencies: @@ -17019,6 +17533,12 @@ packages: typescript: 5.2.2 dev: false + /class-variance-authority@0.7.0: + resolution: {integrity: sha512-jFI8IQw4hczaL4ALINxqLEXQbWcNjoSkloa4IaufXCJr6QawJyw7tuRysRsrE8w2p/4gGaxKIt/hX3qz/IbD1A==} + dependencies: + clsx: 2.0.0 + dev: false + /clean-stack@2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} @@ -17143,6 +17663,11 @@ packages: engines: {node: '>=6'} dev: false + /clsx@2.1.1: + resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} + engines: {node: '>=6'} + dev: false + /cluster-key-slot@1.1.2: resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==} engines: {node: '>=0.10.0'} @@ -17247,6 +17772,10 @@ packages: resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} dev: true + /component-emitter@1.3.1: + resolution: {integrity: sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==} + dev: true + /compress-commons@4.1.2: resolution: {integrity: sha512-D3uMHtGc/fcO1Gt1/L7i1e33VOvD4A9hfQLP+6ewd+BvG/gQ84Yh4oftEhAdjSMgBgwGL+jsppT7JYNpo6MHHg==} engines: {node: '>= 10'} @@ -17346,6 +17875,10 @@ packages: engines: {node: '>= 0.6'} dev: false + /cookiejar@2.1.4: + resolution: {integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==} + dev: true + /copy-anything@3.0.5: resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} engines: {node: '>=12.13'} @@ -17656,6 +18189,7 @@ packages: /csstype@3.1.1: resolution: {integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==} + dev: true /csstype@3.1.3: resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} @@ -18081,6 +18615,13 @@ packages: resolution: {integrity: sha512-75fMas7PkYNDTmDyb6PRJCH7ILmHLp+BhrZGeMsa4bCh40DTxgCz2NRy5UDzII4C5KuD0oBMZ9vXKhEl6UD/3w==} dev: false + /dezalgo@1.0.4: + resolution: {integrity: sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} + dependencies: + asap: 2.0.6 + wrappy: 1.0.2 + dev: true + /didyoumean@1.2.2: resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==} @@ -18264,6 +18805,10 @@ packages: /ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + /effect@3.7.2: + resolution: {integrity: sha512-pV7l1+LSZFvVObj4zuy4nYiBaC7qZOfrKV6s/Ef4p3KueiQwZFgamazklwyZ+x7Nyj2etRDFvHE/xkThTfQD1w==} + dev: false + /electron-to-chromium@1.4.433: resolution: {integrity: sha512-MGO1k0w1RgrfdbLVwmXcDhHHuxCn2qRgR7dYsJvWFKDttvYPx6FNzCGG0c/fBBvzK2LDh3UV7Tt9awnHnvAAUQ==} dev: true @@ -19738,6 +20283,13 @@ packages: engines: {'0': node >=0.6.0} dev: false + /fast-check@3.22.0: + resolution: {integrity: sha512-8HKz3qXqnHYp/VCNn2qfjHdAdcI8zcSqOyX64GOMukp7SL2bfzfeDKjSd+UyECtejccaZv3LcvZTm9YDD22iCQ==} + engines: {node: '>=8.0.0'} + dependencies: + pure-rand: 6.1.0 + dev: false + /fast-decode-uri-component@1.0.1: resolution: 
{integrity: sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==} dev: false @@ -19787,6 +20339,10 @@ packages: fast-decode-uri-component: 1.0.1 dev: false + /fast-safe-stringify@2.1.1: + resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + dev: true + /fast-shallow-equal@1.0.0: resolution: {integrity: sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw==} dev: false @@ -19865,6 +20421,13 @@ packages: dependencies: flat-cache: 3.0.4 + /file-selector@0.6.0: + resolution: {integrity: sha512-QlZ5yJC0VxHxQQsQhXvBaC7VRJ2uaxTf+Tfpu4Z/OcVQJVpZO+DGU0rkoVW5ce2SccxugvpBJoMvUs59iILYdw==} + engines: {node: '>= 12'} + dependencies: + tslib: 2.6.2 + dev: false + /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} @@ -19902,6 +20465,10 @@ packages: pkg-dir: 7.0.0 dev: true + /find-my-way-ts@0.1.5: + resolution: {integrity: sha512-4GOTMrpGQVzsCH2ruUn2vmwzV/02zF4q+ybhCIrw/Rkt3L8KWcycdC6aJMctJzwN4fXD4SD5F/4B9Sksh5rE0A==} + dev: false + /find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} @@ -20021,7 +20588,6 @@ packages: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 - dev: false /format@0.2.2: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} @@ -20043,6 +20609,14 @@ packages: fetch-blob: 3.2.0 dev: false + /formidable@3.5.1: + resolution: {integrity: sha512-WJWKelbRHN41m5dumb0/k8TeAx7Id/y3a+Z7QfhxP/htI9Js5zYaEDtG8uMgG0vM0lOlqnmjE99/kfpOYi/0Og==} + dependencies: + dezalgo: 1.0.4 + hexoid: 1.0.0 + once: 1.4.0 + dev: true + /forwarded@0.2.0: resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} engines: {node: '>= 0.6'} @@ -20718,6 +21292,11 @@ packages: resolution: {integrity: sha512-EWGTfnTqAO2L/j5HZgoM/3z82L7necsJ0pO9Tp0X1wil3PDLrkypTBRgVO2ExehEEvUycejZD3FuRaXpZZc3kw==} dev: false + /hexoid@1.0.0: + resolution: {integrity: sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==} + engines: {node: '>=8'} + dev: true + /highlight.js@10.7.3: resolution: {integrity: sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==} @@ -21172,6 +21751,7 @@ packages: resolution: {integrity: sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==} dependencies: has: 1.0.3 + dev: true /is-core-module@2.14.0: resolution: {integrity: sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==} @@ -21498,11 +22078,11 @@ packages: /jiti@1.21.0: resolution: {integrity: sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==} hasBin: true + dev: true /jiti@1.21.6: resolution: {integrity: sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==} hasBin: true - dev: false /joi@17.7.0: resolution: {integrity: sha512-1/ugc8djfn93rTE3WRKdCzGGt/EtiYKxITMO4Wiv6q5JL1gl9ePt4kBsl1S499nbosspfctIQTpYIhSmHA3WAg==} @@ -21982,6 +22562,14 @@ packages: react: 18.2.0 dev: false + /lucide-react@0.451.0(react@18.3.1): + resolution: {integrity: 
sha512-OwQ3uljZLp2cerj8sboy5rnhtGTCl9UCJIhT1J85/yOuGVlEH+xaUPR7tvNdddPvmV5M5VLdr7cQuWE3hzA4jw==} + peerDependencies: + react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc + dependencies: + react: 18.3.1 + dev: false + /luxon@3.2.1: resolution: {integrity: sha512-QrwPArQCNLAKGO/C+ZIilgIuDnEnKx5QYODdDtbFaxzsbZcc/a7WFq7MhsVYgRlwawLtvOUESTlfJ+hc/USqPg==} engines: {node: '>=12'} @@ -22599,6 +23187,12 @@ packages: engines: {node: '>=4'} hasBin: true + /mime@2.6.0: + resolution: {integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==} + engines: {node: '>=4.0.0'} + hasBin: true + dev: true + /mime@3.0.0: resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} engines: {node: '>=10.0.0'} @@ -22914,6 +23508,10 @@ packages: yargs: 17.7.2 dev: false + /multipasta@0.2.5: + resolution: {integrity: sha512-c8eMDb1WwZcE02WVjHoOmUVk7fnKU/RmUcosHACglrWAuPQsEJv+E8430sXj6jNc1jHw0zrS16aCjQh4BcEb4A==} + dev: false + /mustache@4.2.0: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} hasBin: true @@ -22940,7 +23538,7 @@ packages: react-dom: '*' dependencies: css-tree: 1.1.3 - csstype: 3.1.1 + csstype: 3.1.3 fastest-stable-stringify: 2.0.2 inline-style-prefixer: 6.0.4 react: 18.2.0 @@ -23047,6 +23645,49 @@ packages: - babel-plugin-macros dev: false + /next@14.2.15(@playwright/test@1.37.0)(react-dom@18.2.0)(react@18.3.1): + resolution: {integrity: sha512-h9ctmOokpoDphRvMGnwOJAedT6zKhwqyZML9mDtspgf4Rh3Pn7UTYKqePNoDvhsWBAO5GoPNYshnAUGIazVGmw==} + engines: {node: '>=18.17.0'} + hasBin: true + peerDependencies: + '@opentelemetry/api': ^1.1.0 + '@playwright/test': ^1.41.2 + react: ^18.2.0 + react-dom: ^18.2.0 + sass: ^1.3.0 + peerDependenciesMeta: + '@opentelemetry/api': + optional: true + '@playwright/test': + optional: true + sass: + optional: true + dependencies: + '@next/env': 14.2.15 + '@playwright/test': 1.37.0 + '@swc/helpers': 0.5.5 + busboy: 1.6.0 + caniuse-lite: 1.0.30001655 + graceful-fs: 4.2.11 + postcss: 8.4.31 + react: 18.3.1 + react-dom: 18.2.0(react@18.3.1) + styled-jsx: 5.1.1(react@18.3.1) + optionalDependencies: + '@next/swc-darwin-arm64': 14.2.15 + '@next/swc-darwin-x64': 14.2.15 + '@next/swc-linux-arm64-gnu': 14.2.15 + '@next/swc-linux-arm64-musl': 14.2.15 + '@next/swc-linux-x64-gnu': 14.2.15 + '@next/swc-linux-x64-musl': 14.2.15 + '@next/swc-win32-arm64-msvc': 14.2.15 + '@next/swc-win32-ia32-msvc': 14.2.15 + '@next/swc-win32-x64-msvc': 14.2.15 + transitivePeerDependencies: + - '@babel/core' + - babel-plugin-macros + dev: false + /next@14.2.3(@babel/core@7.24.5)(@opentelemetry/api@1.4.1)(@playwright/test@1.37.0)(react-dom@18.2.0)(react@19.0.0-rc.0): resolution: {integrity: sha512-dowFkFTR8v79NPJO4QsBUtxv0g9BrS/phluVpMAt2ku7H+cbcBJlopXjkWlwxrk/xGqMemr7JkGPGemPrLLX7A==} engines: {node: '>=18.17.0'} @@ -24245,17 +24886,6 @@ packages: postcss-value-parser: 3.3.1 dev: false - /postcss-import@15.1.0(postcss@8.4.31): - resolution: {integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==} - engines: {node: '>=14.0.0'} - peerDependencies: - postcss: ^8.0.0 - dependencies: - postcss: 8.4.31 - postcss-value-parser: 4.2.0 - read-cache: 1.0.0 - resolve: 1.22.8 - /postcss-import@15.1.0(postcss@8.4.44): resolution: {integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==} engines: {node: '>=14.0.0'} @@ -24266,7 +24896,6 @@ 
packages: postcss-value-parser: 4.2.0 read-cache: 1.0.0 resolve: 1.22.8 - dev: false /postcss-import@16.0.1(postcss@8.4.44): resolution: {integrity: sha512-i2Pci0310NaLHr/5JUFSw1j/8hf1CzwMY13g6ZDxgOavmRHQi2ba3PmUHoihO+sjaum+KmCNzskNsw7JDrg03g==} @@ -24287,15 +24916,6 @@ packages: postcss: 7.0.39 dev: false - /postcss-js@4.0.1(postcss@8.4.31): - resolution: {integrity: sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==} - engines: {node: ^12 || ^14 || >= 16} - peerDependencies: - postcss: ^8.4.21 - dependencies: - camelcase-css: 2.0.1 - postcss: 8.4.31 - /postcss-js@4.0.1(postcss@8.4.44): resolution: {integrity: sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==} engines: {node: ^12 || ^14 || >= 16} @@ -24304,7 +24924,6 @@ packages: dependencies: camelcase-css: 2.0.1 postcss: 8.4.44 - dev: false /postcss-load-config@4.0.1(postcss@8.4.29)(ts-node@10.9.1): resolution: {integrity: sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==} @@ -24324,24 +24943,7 @@ packages: yaml: 2.3.1 dev: true - /postcss-load-config@4.0.1(postcss@8.4.31)(ts-node@10.9.1): - resolution: {integrity: sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==} - engines: {node: '>= 14'} - peerDependencies: - postcss: '>=8.0.9' - ts-node: '>=9.0.0' - peerDependenciesMeta: - postcss: - optional: true - ts-node: - optional: true - dependencies: - lilconfig: 2.1.0 - postcss: 8.4.31 - ts-node: 10.9.1(@swc/core@1.3.26)(@types/node@18.11.18)(typescript@5.2.2) - yaml: 2.3.1 - - /postcss-load-config@4.0.1(postcss@8.4.44): + /postcss-load-config@4.0.1(postcss@8.4.44)(ts-node@10.9.1): resolution: {integrity: sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==} engines: {node: '>= 14'} peerDependencies: @@ -24355,8 +24957,8 @@ packages: dependencies: lilconfig: 2.1.0 postcss: 8.4.44 + ts-node: 10.9.1(@swc/core@1.3.26)(@types/node@18.11.18)(typescript@5.2.2) yaml: 2.3.1 - dev: false /postcss-loader@8.1.1(postcss@8.4.44)(typescript@5.2.2)(webpack@5.88.2): resolution: {integrity: sha512-0IeqyAsG6tYiDRCYKQJLAmgQr47DX6N7sFSWvQxt6AcupX8DIdmykuk/o/tx0Lze3ErGHJEp5OSRxrelC6+NdQ==} @@ -24485,15 +25087,6 @@ packages: postcss-selector-parser: 6.0.11 dev: false - /postcss-nested@6.0.1(postcss@8.4.31): - resolution: {integrity: sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==} - engines: {node: '>=12.0'} - peerDependencies: - postcss: ^8.2.14 - dependencies: - postcss: 8.4.31 - postcss-selector-parser: 6.0.11 - /postcss-nested@6.0.1(postcss@8.4.44): resolution: {integrity: sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==} engines: {node: '>=12.0'} @@ -24502,7 +25095,6 @@ packages: dependencies: postcss: 8.4.44 postcss-selector-parser: 6.0.11 - dev: false /postcss-selector-parser@6.0.10: resolution: {integrity: sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==} @@ -24557,8 +25149,8 @@ packages: engines: {node: ^10 || ^12 || >=14} dependencies: nanoid: 3.3.7 - picocolors: 1.0.0 - source-map-js: 1.0.2 + picocolors: 1.0.1 + source-map-js: 1.2.0 dev: true /postcss@8.4.31: @@ -24566,8 +25158,9 @@ packages: engines: {node: ^10 || ^12 || >=14} dependencies: nanoid: 3.3.7 - picocolors: 1.0.0 - source-map-js: 1.0.2 + picocolors: 1.0.1 + source-map-js: 1.2.0 + dev: false /postcss@8.4.35: resolution: 
{integrity: sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==} @@ -25037,6 +25630,10 @@ packages: - utf-8-validate dev: false + /pure-rand@6.1.0: + resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==} + dev: false + /purgecss@2.3.0: resolution: {integrity: sha512-BE5CROfVGsx2XIhxGuZAT7rTH9lLeQx/6M0P7DTXQH4IUc3BBzs9JUzt4yzGf3JrH9enkeq6YJBe9CTtkm1WmQ==} hasBin: true @@ -26188,6 +26785,7 @@ packages: is-core-module: 2.13.0 path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + dev: true /resolve@1.22.8: resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} @@ -26259,6 +26857,10 @@ packages: package-json-from-dist: 1.0.0 dev: true + /robot3@0.4.1: + resolution: {integrity: sha512-hzjy826lrxzx8eRgv80idkf8ua1JAepRc9Efdtj03N3KNJuznQCPlyCJ7gnUmDFwZCLQjxy567mQVKmdv2BsXQ==} + dev: false + /rollup-plugin-inject@3.0.2: resolution: {integrity: sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==} deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-inject. @@ -26690,7 +27292,7 @@ packages: /socket.io-adapter@2.5.4: resolution: {integrity: sha512-wDNHGXGewWAjQPt3pyeYBtpWSq9cLE5UW1ZUPL/2eGK9jtse/FpXib7epSTsz0Q0m+6sg6Y4KtcFTlah1bdOVg==} dependencies: - debug: 4.3.4 + debug: 4.3.7 ws: 8.11.0 transitivePeerDependencies: - bufferutil @@ -26730,7 +27332,7 @@ packages: engines: {node: '>=10.0.0'} dependencies: '@socket.io/component-emitter': 3.1.0 - debug: 4.3.4 + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -26845,6 +27447,7 @@ packages: /source-map-js@1.0.2: resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} engines: {node: '>=0.10.0'} + dev: false /source-map-js@1.2.0: resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} @@ -26917,6 +27520,10 @@ packages: resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==} dev: false + /sqids@0.3.0: + resolution: {integrity: sha512-lOQK1ucVg+W6n3FhRwwSeUijxe93b51Bfz5PMRMihVf1iVkl82ePQG7V5vwrhzB11v0NtsR25PSZRGiSomJaJw==} + dev: false + /sqs-consumer@7.5.0(@aws-sdk/client-sqs@3.454.0): resolution: {integrity: sha512-aY3akgMjuK1aj4E7ZVAURUUnC8aNgUBES+b4SN+6ccMmJhi37MamWl7g1JbPow8sjIp1fBPz1bXCCDJmtjOTAg==} engines: {node: '>=18.0.0'} @@ -27308,6 +27915,23 @@ packages: pirates: 4.0.5 ts-interface-checker: 0.1.13 + /superagent@9.0.2: + resolution: {integrity: sha512-xuW7dzkUpcJq7QnhOsnNUgtYp3xRwpt2F7abdRYIpCsAt0hhUqia0EdxyXZQQpNmGtsCzYHryaKSV3q3GJnq7w==} + engines: {node: '>=14.18.0'} + dependencies: + component-emitter: 1.3.1 + cookiejar: 2.1.4 + debug: 4.3.7 + fast-safe-stringify: 2.1.1 + form-data: 4.0.0 + formidable: 3.5.1 + methods: 1.1.2 + mime: 2.6.0 + qs: 6.11.0 + transitivePeerDependencies: + - supports-color + dev: true + /superjson@2.2.1: resolution: {integrity: sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==} engines: {node: '>=16'} @@ -27315,6 +27939,16 @@ packages: copy-anything: 3.0.5 dev: false + /supertest@7.0.0: + resolution: {integrity: sha512-qlsr7fIC0lSddmA3tzojvzubYxvlGtzumcdHgPwbFWMISQwL22MhM2Y3LNt+6w9Yyx7559VW5ab70dgphm8qQA==} + engines: {node: '>=14.18.0'} + dependencies: + methods: 1.1.2 + superagent: 9.0.2 + 
transitivePeerDependencies: + - supports-color + dev: true + /supports-color@5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} engines: {node: '>=4'} @@ -27379,6 +28013,16 @@ packages: periscopic: 3.1.0 dev: false + /swr@2.2.5(react@18.3.1): + resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} + peerDependencies: + react: ^16.11.0 || ^17.0.0 || ^18.0.0 + dependencies: + client-only: 0.0.1 + react: 18.3.1 + use-sync-external-store: 1.2.2(react@18.3.1) + dev: false + /swr@2.2.5(react@19.0.0-rc.0): resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} peerDependencies: @@ -27431,6 +28075,10 @@ packages: '@babel/runtime': 7.24.5 dev: false + /tailwind-merge@2.5.3: + resolution: {integrity: sha512-d9ZolCAIzom1nf/5p4LdD5zvjmgSxY0BGgdSvmXIoMYAiPdAW/dSpP7joCDYFY7r/HkEa2qmPtkgsu0xjQeQtw==} + dev: false + /tailwind-scrollbar-hide@1.1.7: resolution: {integrity: sha512-X324n9OtpTmOMqEgDUEA/RgLrNfBF/jwJdctaPZDzB3mppxJk7TLIDmOreEDm1Bq4R9LSPu4Epf8VSdovNU+iA==} dev: false @@ -27452,6 +28100,14 @@ packages: tailwindcss: 3.4.1(ts-node@10.9.1) dev: false + /tailwindcss-animate@1.0.7(tailwindcss@3.4.1): + resolution: {integrity: sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==} + peerDependencies: + tailwindcss: '>=3.0.0 || insiders' + dependencies: + tailwindcss: 3.4.1(ts-node@10.9.1) + dev: false + /tailwindcss-textshadow@2.1.3: resolution: {integrity: sha512-FGVHfK+xnV879VSQDeRvY61Aa+b0GDiGaFBPwCOKvqIrK57GyepWJL1GydjtGOLHE9qqphFucRNj9fHramCzNg==} dependencies: @@ -27509,7 +28165,7 @@ packages: postcss: 8.4.44 postcss-import: 15.1.0(postcss@8.4.44) postcss-js: 4.0.1(postcss@8.4.44) - postcss-load-config: 4.0.1(postcss@8.4.44) + postcss-load-config: 4.0.1(postcss@8.4.44)(ts-node@10.9.1) postcss-nested: 6.0.1(postcss@8.4.44) postcss-selector-parser: 6.0.11 resolve: 1.22.8 @@ -27525,25 +28181,25 @@ packages: dependencies: '@alloc/quick-lru': 5.2.0 arg: 5.0.2 - chokidar: 3.5.3 + chokidar: 3.6.0 didyoumean: 1.2.2 dlv: 1.1.3 fast-glob: 3.3.1 glob-parent: 6.0.2 is-glob: 4.0.3 - jiti: 1.21.0 + jiti: 1.21.6 lilconfig: 2.1.0 micromatch: 4.0.5 normalize-path: 3.0.0 object-hash: 3.0.0 - picocolors: 1.0.0 - postcss: 8.4.31 - postcss-import: 15.1.0(postcss@8.4.31) - postcss-js: 4.0.1(postcss@8.4.31) - postcss-load-config: 4.0.1(postcss@8.4.31)(ts-node@10.9.1) - postcss-nested: 6.0.1(postcss@8.4.31) + picocolors: 1.0.1 + postcss: 8.4.44 + postcss-import: 15.1.0(postcss@8.4.44) + postcss-js: 4.0.1(postcss@8.4.44) + postcss-load-config: 4.0.1(postcss@8.4.44)(ts-node@10.9.1) + postcss-nested: 6.0.1(postcss@8.4.44) postcss-selector-parser: 6.0.11 - resolve: 1.22.4 + resolve: 1.22.8 sucrase: 3.32.0 transitivePeerDependencies: - ts-node @@ -27815,7 +28471,6 @@ packages: /tinyexec@0.3.0: resolution: {integrity: sha512-tVGE0mVJPGb0chKhqmsoosjsS+qUnJVGJpZgsHYQcGoPlG3B51R3PouqTgEGH2Dc9jjFyOqOpix6ZHNMXp1FZg==} - dev: false /tinyglobby@0.2.2: resolution: {integrity: sha512-mZ2sDMaySvi1PkTp4lTo1In2zjU+cY8OvZsfwrDrx3YGRbXPX1/cbPwCR9zkm3O/Fz9Jo0F1HNgIQ1b8BepqyQ==} @@ -28061,6 +28716,19 @@ packages: resolution: {integrity: sha512-3IDBalvf6SyvHFS14UiwCWzqdSdo+Q0k2J7DZyJYaHW/iraW9DJpaBKDJpry3yQs3o/t/A+oGaRW3iVt2lKxzA==} dev: false + /tsconfck@2.1.2(typescript@5.2.2): + resolution: {integrity: 
sha512-ghqN1b0puy3MhhviwO2kGF8SeMDNhEbnKxjK7h6+fvY9JAxqvXi8y5NAHSQv687OVboS2uZIByzGd45/YxrRHg==} + engines: {node: ^14.13.1 || ^16 || >=18} + hasBin: true + peerDependencies: + typescript: ^4.3.5 || ^5.0.0 + peerDependenciesMeta: + typescript: + optional: true + dependencies: + typescript: 5.2.2 + dev: true + /tsconfck@2.1.2(typescript@5.5.4): resolution: {integrity: sha512-ghqN1b0puy3MhhviwO2kGF8SeMDNhEbnKxjK7h6+fvY9JAxqvXi8y5NAHSQv687OVboS2uZIByzGd45/YxrRHg==} engines: {node: ^14.13.1 || ^16 || >=18} @@ -28725,6 +29393,36 @@ packages: escalade: 3.2.0 picocolors: 1.0.1 + /uploadthing@7.1.0(next@14.2.15)(tailwindcss@3.4.1): + resolution: {integrity: sha512-l1bRHs+q/YLx3XwBav98t4Bl1wLWaskhPEwopxtYgiRrxX5nW3uUuSP0RJ9eKwx0+6ZhHWxHDvShf7ZLledqmQ==} + engines: {node: '>=18.13.0'} + peerDependencies: + express: '*' + fastify: '*' + h3: '*' + next: '*' + tailwindcss: '*' + peerDependenciesMeta: + express: + optional: true + fastify: + optional: true + h3: + optional: true + next: + optional: true + tailwindcss: + optional: true + dependencies: + '@effect/platform': 0.63.2(@effect/schema@0.72.2)(effect@3.7.2) + '@effect/schema': 0.72.2(effect@3.7.2) + '@uploadthing/mime-types': 0.3.0 + '@uploadthing/shared': 7.0.3 + effect: 3.7.2 + next: 14.2.15(@playwright/test@1.37.0)(react-dom@18.2.0)(react@18.3.1) + tailwindcss: 3.4.1(ts-node@10.9.1) + dev: false + /uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} dependencies: @@ -28835,6 +29533,14 @@ packages: react: 18.2.0 dev: false + /use-sync-external-store@1.2.2(react@18.3.1): + resolution: {integrity: sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + react: 18.3.1 + dev: false + /use-sync-external-store@1.2.2(react@19.0.0-rc.0): resolution: {integrity: sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==} peerDependencies: @@ -29023,7 +29729,7 @@ packages: cac: 6.7.14 debug: 4.3.7 pathe: 1.1.2 - picocolors: 1.0.0 + picocolors: 1.0.1 vite: 5.2.7(@types/node@18.11.18) transitivePeerDependencies: - '@types/node' @@ -29078,10 +29784,21 @@ packages: - terser dev: true + /vite-tsconfig-paths@4.0.5(typescript@5.2.2): + resolution: {integrity: sha512-/L/eHwySFYjwxoYt1WRJniuK/jPv+WGwgRGBYx3leciR5wBeqntQpUE6Js6+TJemChc+ter7fDBKieyEWDx4yQ==} + dependencies: + debug: 4.3.7 + globrex: 0.1.2 + tsconfck: 2.1.2(typescript@5.2.2) + transitivePeerDependencies: + - supports-color + - typescript + dev: true + /vite-tsconfig-paths@4.0.5(typescript@5.5.4): resolution: {integrity: sha512-/L/eHwySFYjwxoYt1WRJniuK/jPv+WGwgRGBYx3leciR5wBeqntQpUE6Js6+TJemChc+ter7fDBKieyEWDx4yQ==} dependencies: - debug: 4.3.4 + debug: 4.3.7 globrex: 0.1.2 tsconfck: 2.1.2(typescript@5.5.4) transitivePeerDependencies: @@ -29116,7 +29833,7 @@ packages: dependencies: '@types/node': 18.19.20 esbuild: 0.16.17 - postcss: 8.4.29 + postcss: 8.4.44 resolve: 1.22.8 rollup: 3.10.0 optionalDependencies: @@ -29150,7 +29867,7 @@ packages: dependencies: '@types/node': 20.14.14 esbuild: 0.16.17 - postcss: 8.4.29 + postcss: 8.4.44 resolve: 1.22.8 rollup: 3.10.0 optionalDependencies: diff --git a/references/hello-world/trigger.config.ts b/references/hello-world/trigger.config.ts index a3a5755074..ca95e06685 100644 --- a/references/hello-world/trigger.config.ts +++ b/references/hello-world/trigger.config.ts @@ -14,4 +14,25 @@ export default defineConfig({ randomize: 
true, }, }, + build: { + extensions: [ + { + name: "npm-token", + onBuildComplete: async (context, manifest) => { + if (context.target === "dev") { + return; + } + + context.addLayer({ + id: "npm-token", + build: { + env: { + NPM_TOKEN: manifest.deploy.env?.NPM_TOKEN, + }, + }, + }); + }, + }, + ], + }, }); diff --git a/references/nextjs-realtime/.gitignore b/references/nextjs-realtime/.gitignore new file mode 100644 index 0000000000..fd3dbb571a --- /dev/null +++ b/references/nextjs-realtime/.gitignore @@ -0,0 +1,36 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/references/nextjs-realtime/README.md b/references/nextjs-realtime/README.md new file mode 100644 index 0000000000..e215bc4ccf --- /dev/null +++ b/references/nextjs-realtime/README.md @@ -0,0 +1,36 @@ +This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app). + +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +# or +pnpm dev +# or +bun dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. + +This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome! + +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details. 
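The `references/hello-world/trigger.config.ts` hunk above registers an inline build extension that forwards `NPM_TOKEN` from the deploy environment into a build layer while skipping dev builds. For readers skimming the diff, here is the same pattern as a self-contained config sketch; the project ref and `dirs` values are placeholders borrowed from the other reference projects in this PR, so treat it as an illustration of the `onBuildComplete`/`addLayer` hook rather than a canonical file.

```ts
import { defineConfig } from "@trigger.dev/sdk/v3";

export default defineConfig({
  project: "proj_placeholder", // hypothetical project ref
  dirs: ["./src/trigger"],
  build: {
    extensions: [
      {
        name: "npm-token",
        // Called once the build has finished; `manifest` describes the deploy.
        onBuildComplete: async (context, manifest) => {
          // Local dev builds don't need the token layer.
          if (context.target === "dev") {
            return;
          }

          // Expose NPM_TOKEN to the deployed build via an extra layer.
          context.addLayer({
            id: "npm-token",
            build: {
              env: {
                NPM_TOKEN: manifest.deploy.env?.NPM_TOKEN,
              },
            },
          });
        },
      },
    ],
  },
});
```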
diff --git a/references/nextjs-realtime/components.json b/references/nextjs-realtime/components.json new file mode 100644 index 0000000000..42f059b56b --- /dev/null +++ b/references/nextjs-realtime/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "tailwind.config.ts", + "css": "src/app/globals.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} \ No newline at end of file diff --git a/references/nextjs-realtime/next.config.mjs b/references/nextjs-realtime/next.config.mjs new file mode 100644 index 0000000000..1df123ef7b --- /dev/null +++ b/references/nextjs-realtime/next.config.mjs @@ -0,0 +1,30 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = { + images: { + remotePatterns: [ + { + protocol: "https", + hostname: "utfs.io", + pathname: "/a/ze1ekrd9t9/*", + }, + { + protocol: "https", + hostname: "v2.fal.media", + }, + { + protocol: "https", + hostname: "v3.fal.media", + }, + { + protocol: "https", + hostname: "fal.media", + }, + { + protocol: "https", + hostname: "storage.googleapis.com", + }, + ], + }, +}; + +export default nextConfig; diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json new file mode 100644 index 0000000000..22896709c7 --- /dev/null +++ b/references/nextjs-realtime/package.json @@ -0,0 +1,40 @@ +{ + "name": "references-nextjs-realtime", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint", + "dev:trigger": "trigger dev" + }, + "dependencies": { + "@fal-ai/serverless-client": "^0.15.0", + "@radix-ui/react-icons": "^1.3.0", + "@radix-ui/react-scroll-area": "^1.2.0", + "@radix-ui/react-slot": "^1.1.0", + "@trigger.dev/react-hooks": "workspace:^3", + "@trigger.dev/sdk": "workspace:^3", + "@uploadthing/react": "^7.0.3", + "class-variance-authority": "^0.7.0", + "clsx": "^2.1.1", + "lucide-react": "^0.451.0", + "next": "14.2.15", + "react": "^18", + "react-dom": "^18", + "tailwind-merge": "^2.5.3", + "tailwindcss-animate": "^1.0.7", + "uploadthing": "^7.1.0", + "zod": "3.22.3" + }, + "devDependencies": { + "@types/node": "^20", + "@types/react": "^18", + "@types/react-dom": "^18", + "postcss": "^8", + "tailwindcss": "^3.4.1", + "trigger.dev": "workspace:^3", + "typescript": "^5" + } +} \ No newline at end of file diff --git a/references/nextjs-realtime/postcss.config.mjs b/references/nextjs-realtime/postcss.config.mjs new file mode 100644 index 0000000000..1a69fd2a45 --- /dev/null +++ b/references/nextjs-realtime/postcss.config.mjs @@ -0,0 +1,8 @@ +/** @type {import('postcss-load-config').Config} */ +const config = { + plugins: { + tailwindcss: {}, + }, +}; + +export default config; diff --git a/references/nextjs-realtime/src/app/actions.ts b/references/nextjs-realtime/src/app/actions.ts new file mode 100644 index 0000000000..1411ae8e30 --- /dev/null +++ b/references/nextjs-realtime/src/app/actions.ts @@ -0,0 +1,44 @@ +"use server"; + +import type { exampleTask } from "@/trigger/example"; +import { tasks } from "@trigger.dev/sdk/v3"; +import { cookies } from "next/headers"; +import { redirect } from "next/navigation"; +import { randomUUID } from "node:crypto"; + +export async function triggerExampleTask() { + const handle = await 
tasks.trigger("example", { + id: randomUUID(), + }); + + console.log("Setting the run JWT in a cookie", handle.publicAccessToken); + + // Set JWT in a secure, HTTP-only cookie + cookies().set("run_token", handle.publicAccessToken); + + // Redirect to the details page + redirect(`/runs/${handle.id}`); +} + +export async function batchTriggerExampleTask() { + console.log("Batch trigger example task"); + + const handle = await tasks.batchTrigger("example", [ + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + { payload: { id: randomUUID() } }, + ]); + + console.log("Setting the run JWT in a cookie", handle.publicAccessToken); + + // Set JWT in a secure, HTTP-only cookie + cookies().set("run_token", handle.publicAccessToken); + + // Redirect to the details page + redirect(`/batches/${handle.batchId}`); +} diff --git a/references/nextjs-realtime/src/app/api/uploadthing/core.ts b/references/nextjs-realtime/src/app/api/uploadthing/core.ts new file mode 100644 index 0000000000..7f09693f8b --- /dev/null +++ b/references/nextjs-realtime/src/app/api/uploadthing/core.ts @@ -0,0 +1,51 @@ +import { randomUUID } from "crypto"; +import { createUploadthing, type FileRouter } from "uploadthing/next"; +import { UploadThingError } from "uploadthing/server"; +import type { handleUpload } from "@/trigger/images"; +import { auth, tasks } from "@trigger.dev/sdk/v3"; + +const f = createUploadthing(); + +const mockAuth = (req: Request) => ({ id: randomUUID() }); // Fake auth function + +// FileRouter for your app, can contain multiple FileRoutes +export const ourFileRouter = { + // Define as many FileRoutes as you like, each with a unique routeSlug + imageUploader: f({ image: { maxFileSize: "4MB" } }) + // Set permissions and file types for this FileRoute + .middleware(async ({ req }) => { + // This code runs on your server before upload + const user = await mockAuth(req); + + // If you throw, the user will not be able to upload + if (!user) throw new UploadThingError("Unauthorized"); + + // Whatever is returned here is accessible in onUploadComplete as `metadata` + return { userId: user.id }; + }) + .onUploadComplete(async ({ metadata, file }) => { + // This code RUNS ON YOUR SERVER after upload + console.log("Upload complete for userId:", metadata.userId); + + console.log("file", file); + + const fileTag = `file:${file.key}`; + + await tasks.trigger("handle-upload", file, { + tags: [`user:${metadata.userId}`, fileTag], + }); + + const publicAccessToken = await auth.createPublicToken({ + scopes: { + read: { tags: fileTag }, + }, + }); + + console.log("Generated access token:", publicAccessToken); + + // !!! 
Whatever is returned here is sent to the clientside `onClientUploadComplete` callback + return { uploadedBy: metadata.userId, publicAccessToken, fileId: file.key }; + }), +} satisfies FileRouter; + +export type OurFileRouter = typeof ourFileRouter; diff --git a/references/nextjs-realtime/src/app/api/uploadthing/route.ts b/references/nextjs-realtime/src/app/api/uploadthing/route.ts new file mode 100644 index 0000000000..f8f1912788 --- /dev/null +++ b/references/nextjs-realtime/src/app/api/uploadthing/route.ts @@ -0,0 +1,6 @@ +import { createRouteHandler } from "uploadthing/next"; +import { ourFileRouter } from "./core"; + +export const { GET, POST } = createRouteHandler({ + router: ourFileRouter, +}); diff --git a/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx b/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx new file mode 100644 index 0000000000..8d11cb641d --- /dev/null +++ b/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx @@ -0,0 +1,149 @@ +"use client"; + +import { Card, CardContent } from "@/components/ui/card"; +import { TriggerAuthContext, useBatch } from "@trigger.dev/react-hooks"; +import type { exampleTask } from "@/trigger/example"; + +import { Badge } from "@/components/ui/badge"; +import { + Table, + TableBody, + TableCaption, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { TaskRunShape, AnyRunShape } from "@trigger.dev/sdk/v3"; +import { z } from "zod"; + +const MetadataSchema = z.object({ + status: z.object({ + type: z.string(), + progress: z.number(), + }), +}); + +const ProgressBar = ({ run }: { run: AnyRunShape }) => { + const metadata = run.metadata ? MetadataSchema.parse(run.metadata) : undefined; + const progress = metadata?.status.progress || 0; + + return ( +
+
+ {metadata ? metadata.status.type : "waiting..."} +
+
+
+
+
+ ); +}; + +const StatusBadge = ({ run }: { run: AnyRunShape }) => { + switch (run.status) { + case "WAITING_FOR_DEPLOY": { + return {run.status}; + } + case "DELAYED": { + return {run.status}; + } + case "EXPIRED": { + return {run.status}; + } + case "QUEUED": { + return {run.status}; + } + case "FROZEN": + case "REATTEMPTING": + case "EXECUTING": { + return {run.status}; + } + case "COMPLETED": { + return {run.status}; + } + case "TIMED_OUT": + case "SYSTEM_FAILURE": + case "INTERRUPTED": + case "CRASHED": + case "FAILED": { + return {run.status}; + } + case "CANCELED": { + return {run.status}; + } + default: { + return {run.status}; + } + } +}; + +export function BackgroundRunsTable({ runs }: { runs: TaskRunShape[] }) { + return ( + + A list of your recent background runs. + + + Run ID / Task + Status + Payload ID + Progress + + + + {runs.map((run) => ( + + +
{run.id}
+
{run.taskIdentifier}
+
+ + + + {run.payload.id} + + + +
+ ))} +
+
+ ); +} + +function BatchRunTableWrapper({ batchId }: { batchId: string }) { + const { runs, error } = useBatch(batchId); + + console.log(runs); + + if (error) { + return ( +
+ + +

Error: {error.message}

+
+
+
+ ); + } + + return ( +
+ +
+ ); +} + +export default function ClientBatchRunDetails({ batchId, jwt }: { batchId: string; jwt: string }) { + return ( + + + + ); +} diff --git a/references/nextjs-realtime/src/app/batches/[id]/page.tsx b/references/nextjs-realtime/src/app/batches/[id]/page.tsx new file mode 100644 index 0000000000..8a3f3c61d9 --- /dev/null +++ b/references/nextjs-realtime/src/app/batches/[id]/page.tsx @@ -0,0 +1,18 @@ +import { cookies } from "next/headers"; +import { notFound } from "next/navigation"; +import ClientBatchRunDetails from "./ClientBatchRunDetails"; + +export default async function DetailsPage({ params }: { params: { id: string } }) { + const cookieStore = cookies(); + const jwt = cookieStore.get("run_token"); + + if (!jwt) { + notFound(); + } + + return ( +
+ +
+ ); +} diff --git a/references/nextjs-realtime/src/app/favicon.ico b/references/nextjs-realtime/src/app/favicon.ico new file mode 100644 index 0000000000..718d6fea48 Binary files /dev/null and b/references/nextjs-realtime/src/app/favicon.ico differ diff --git a/references/nextjs-realtime/src/app/fonts/GeistMonoVF.woff b/references/nextjs-realtime/src/app/fonts/GeistMonoVF.woff new file mode 100644 index 0000000000..f2ae185cbf Binary files /dev/null and b/references/nextjs-realtime/src/app/fonts/GeistMonoVF.woff differ diff --git a/references/nextjs-realtime/src/app/fonts/GeistVF.woff b/references/nextjs-realtime/src/app/fonts/GeistVF.woff new file mode 100644 index 0000000000..1b62daacff Binary files /dev/null and b/references/nextjs-realtime/src/app/fonts/GeistVF.woff differ diff --git a/references/nextjs-realtime/src/app/globals.css b/references/nextjs-realtime/src/app/globals.css new file mode 100644 index 0000000000..1dcb0fc6d6 --- /dev/null +++ b/references/nextjs-realtime/src/app/globals.css @@ -0,0 +1,78 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +body { + font-family: Arial, Helvetica, sans-serif; +} + +@layer utilities { + .text-balance { + text-wrap: balance; + } +} + +@layer base { + :root { + --background: 0 0% 100%; + --foreground: 0 0% 3.9%; + --card: 0 0% 100%; + --card-foreground: 0 0% 3.9%; + --popover: 0 0% 100%; + --popover-foreground: 0 0% 3.9%; + --primary: 0 0% 9%; + --primary-foreground: 0 0% 98%; + --secondary: 0 0% 96.1%; + --secondary-foreground: 0 0% 9%; + --muted: 0 0% 96.1%; + --muted-foreground: 0 0% 45.1%; + --accent: 0 0% 96.1%; + --accent-foreground: 0 0% 9%; + --destructive: 0 84.2% 60.2%; + --destructive-foreground: 0 0% 98%; + --border: 0 0% 89.8%; + --input: 0 0% 89.8%; + --ring: 0 0% 3.9%; + --chart-1: 12 76% 61%; + --chart-2: 173 58% 39%; + --chart-3: 197 37% 24%; + --chart-4: 43 74% 66%; + --chart-5: 27 87% 67%; + --radius: 0.5rem; + } + .dark { + --background: 0 0% 3.9%; + --foreground: 0 0% 98%; + --card: 0 0% 3.9%; + --card-foreground: 0 0% 98%; + --popover: 0 0% 3.9%; + --popover-foreground: 0 0% 98%; + --primary: 0 0% 98%; + --primary-foreground: 0 0% 9%; + --secondary: 0 0% 14.9%; + --secondary-foreground: 0 0% 98%; + --muted: 0 0% 14.9%; + --muted-foreground: 0 0% 63.9%; + --accent: 0 0% 14.9%; + --accent-foreground: 0 0% 98%; + --destructive: 0 62.8% 30.6%; + --destructive-foreground: 0 0% 98%; + --border: 0 0% 14.9%; + --input: 0 0% 14.9%; + --ring: 0 0% 83.1%; + --chart-1: 220 70% 50%; + --chart-2: 160 60% 45%; + --chart-3: 30 80% 55%; + --chart-4: 280 65% 60%; + --chart-5: 340 75% 55%; + } +} + +@layer base { + * { + @apply border-border; + } + body { + @apply bg-background text-foreground; + } +} diff --git a/references/nextjs-realtime/src/app/layout.tsx b/references/nextjs-realtime/src/app/layout.tsx new file mode 100644 index 0000000000..04e352d9c9 --- /dev/null +++ b/references/nextjs-realtime/src/app/layout.tsx @@ -0,0 +1,45 @@ +import type { Metadata } from "next"; +import localFont from "next/font/local"; +import "./globals.css"; +import { NextSSRPlugin } from "@uploadthing/react/next-ssr-plugin"; +import { extractRouterConfig } from "uploadthing/server"; +import { ourFileRouter } from "@/app/api/uploadthing/core"; + +const geistSans = localFont({ + src: "./fonts/GeistVF.woff", + variable: "--font-geist-sans", + weight: "100 900", +}); +const geistMono = localFont({ + src: "./fonts/GeistMonoVF.woff", + variable: "--font-geist-mono", + weight: "100 900", +}); + +export const metadata: Metadata = { + title: 
"Trigger.dev Next.js Realtime Demo", + description: "Generated by create next app", +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + + + {children} + + + ); +} diff --git a/references/nextjs-realtime/src/app/page.tsx b/references/nextjs-realtime/src/app/page.tsx new file mode 100644 index 0000000000..d23f0bc285 --- /dev/null +++ b/references/nextjs-realtime/src/app/page.tsx @@ -0,0 +1,15 @@ +import RunButton from "@/components/RunButton"; +import BatchRunButton from "@/components/BatchRunButton"; +import { ImageUploadDropzone } from "@/components/ImageUploadButton"; + +export default function Home() { + return ( +
+
+ + + +
+
+ ); +} diff --git a/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx b/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx new file mode 100644 index 0000000000..5f2ada7c31 --- /dev/null +++ b/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx @@ -0,0 +1,48 @@ +"use client"; + +import RunDetails from "@/components/RunDetails"; +import { Card, CardContent } from "@/components/ui/card"; +import { TriggerAuthContext, useRun } from "@trigger.dev/react-hooks"; +import type { exampleTask } from "@/trigger/example"; + +function RunDetailsWrapper({ runId }: { runId: string }) { + const { run, error } = useRun(runId, { refreshInterval: 1000 }); + + if (error) { + return ( +
+ + +

Error: {error.message}

+
+
+
+ ); + } + + if (!run) { + return ( +
+ + +

Loading run details...

+
+
+
+ ); + } + + return ( +
+ +
+ ); +} + +export default function ClientRunDetails({ runId, jwt }: { runId: string; jwt: string }) { + return ( + + + + ); +} diff --git a/references/nextjs-realtime/src/app/runs/[id]/page.tsx b/references/nextjs-realtime/src/app/runs/[id]/page.tsx new file mode 100644 index 0000000000..e74e709202 --- /dev/null +++ b/references/nextjs-realtime/src/app/runs/[id]/page.tsx @@ -0,0 +1,18 @@ +import { cookies } from "next/headers"; +import { notFound } from "next/navigation"; +import ClientRunDetails from "./ClientRunDetails"; + +export default async function DetailsPage({ params }: { params: { id: string } }) { + const cookieStore = cookies(); + const jwt = cookieStore.get("run_token"); + + if (!jwt) { + notFound(); + } + + return ( +
+ +
+ ); +} diff --git a/references/nextjs-realtime/src/app/uploads/[id]/ClientUploadDetails.tsx b/references/nextjs-realtime/src/app/uploads/[id]/ClientUploadDetails.tsx new file mode 100644 index 0000000000..71a249d62a --- /dev/null +++ b/references/nextjs-realtime/src/app/uploads/[id]/ClientUploadDetails.tsx @@ -0,0 +1,77 @@ +"use client"; + +import { HandleUploadFooter } from "@/components/HandleUploadFooter"; +import { Card, CardContent } from "@/components/ui/card"; +import ImageDisplay from "@/components/UploadImageDisplay"; +import { useHandleUploadRun } from "@/hooks/useHandleUploadRun"; +import { TriggerAuthContext } from "@trigger.dev/react-hooks"; + +function UploadDetailsWrapper({ fileId }: { fileId: string }) { + const { run, error, images } = useHandleUploadRun(fileId); + + if (error) { + return ( +
+ + +

Error: {error.message}

+
+
+
+ ); + } + + if (!run) { + return ( +
+ + +

Loading run details...

+
+
+
+ ); + } + + const gridImages = images.map((image) => + image.data.status === "COMPLETED" && image.data.image + ? { + status: "completed" as const, + src: image.data.image.url, + caption: image.data.image.file_name, + message: image.model, + } + : { status: "pending" as const, message: image.model } + ); + + return ( +
+ + + +
+ ); +} + +export default function ClientUploadDetails({ + fileId, + publicAccessToken, +}: { + fileId: string; + publicAccessToken: string; +}) { + return ( + + + + ); +} diff --git a/references/nextjs-realtime/src/app/uploads/[id]/page.tsx b/references/nextjs-realtime/src/app/uploads/[id]/page.tsx new file mode 100644 index 0000000000..4897ecce7a --- /dev/null +++ b/references/nextjs-realtime/src/app/uploads/[id]/page.tsx @@ -0,0 +1,22 @@ +import { notFound } from "next/navigation"; +import ClientUploadDetails from "./ClientUploadDetails"; + +export default async function UploadPage({ + params, + searchParams, +}: { + params: { id: string }; + searchParams: { [key: string]: string | string[] | undefined }; +}) { + const publicAccessToken = searchParams.publicAccessToken; + + if (typeof publicAccessToken !== "string") { + notFound(); + } + + return ( +
+ +
+ ); +} diff --git a/references/nextjs-realtime/src/components/BatchRunButton.tsx b/references/nextjs-realtime/src/components/BatchRunButton.tsx new file mode 100644 index 0000000000..4679d0ac2c --- /dev/null +++ b/references/nextjs-realtime/src/components/BatchRunButton.tsx @@ -0,0 +1,24 @@ +"use client"; + +// @ts-ignore +import { useFormStatus } from "react-dom"; +import { batchTriggerExampleTask } from "@/app/actions"; +import { Button } from "@/components/ui/button"; + +function SubmitButton() { + const { pending } = useFormStatus(); + + return ( + + ); +} + +export default function BatchRunTaskForm() { + return ( +
+ + + ); +} diff --git a/references/nextjs-realtime/src/components/HandleUploadFooter.tsx b/references/nextjs-realtime/src/components/HandleUploadFooter.tsx new file mode 100644 index 0000000000..bbd884a203 --- /dev/null +++ b/references/nextjs-realtime/src/components/HandleUploadFooter.tsx @@ -0,0 +1,52 @@ +"use client"; + +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { AnyRunShape, TaskRunShape } from "@trigger.dev/sdk/v3"; +import { ExternalLink } from "lucide-react"; +import type { handleUpload } from "@/trigger/images"; + +interface HandleUploadFooterProps { + run: TaskRunShape; + viewRunUrl: string; +} + +export function HandleUploadFooter({ run, viewRunUrl }: HandleUploadFooterProps) { + const getStatusColor = (status: AnyRunShape["status"]) => { + switch (status) { + case "EXECUTING": + return "bg-blue-500"; + case "COMPLETED": + return "bg-green-500"; + case "FAILED": + return "bg-red-500"; + default: + return "bg-gray-500"; + } + }; + + return ( +
+
+
+ Run ID: {run.id} + Processing {run.payload.name} + + {run.status} + +
+ +
+
+ ); +} diff --git a/references/nextjs-realtime/src/components/ImageUploadButton.tsx b/references/nextjs-realtime/src/components/ImageUploadButton.tsx new file mode 100644 index 0000000000..6020f8740c --- /dev/null +++ b/references/nextjs-realtime/src/components/ImageUploadButton.tsx @@ -0,0 +1,52 @@ +"use client"; + +import { UploadButton, UploadDropzone } from "@/utils/uploadthing"; +import { useRouter } from "next/navigation"; + +export function ImageUploadButton() { + const router = useRouter(); + + return ( + { + // Do something with the response + console.log("Files: ", res); + + const firstFile = res[0]; + + router.push( + `/uploads/${firstFile.serverData.fileId}?publicAccessToken=${firstFile.serverData.publicAccessToken}` + ); + }} + onUploadError={(error: Error) => { + // Do something with the error. + console.error(`ERROR! ${error.message}`); + }} + /> + ); +} + +export function ImageUploadDropzone() { + const router = useRouter(); + + return ( + { + // Do something with the response + console.log("Files: ", res); + + const firstFile = res[0]; + + router.push( + `/uploads/${firstFile.serverData.fileId}?publicAccessToken=${firstFile.serverData.publicAccessToken}` + ); + }} + onUploadError={(error: Error) => { + // Do something with the error. + console.error(`ERROR! ${error.message}`); + }} + /> + ); +} diff --git a/references/nextjs-realtime/src/components/RunButton.tsx b/references/nextjs-realtime/src/components/RunButton.tsx new file mode 100644 index 0000000000..b33c934c97 --- /dev/null +++ b/references/nextjs-realtime/src/components/RunButton.tsx @@ -0,0 +1,24 @@ +"use client"; + +// @ts-ignore +import { useFormStatus } from "react-dom"; +import { triggerExampleTask } from "@/app/actions"; +import { Button } from "@/components/ui/button"; + +function SubmitButton() { + const { pending } = useFormStatus(); + + return ( + + ); +} + +export default function RunTaskForm() { + return ( +
+ + + ); +} diff --git a/references/nextjs-realtime/src/components/RunDetails.tsx b/references/nextjs-realtime/src/components/RunDetails.tsx new file mode 100644 index 0000000000..7e1d4fc42c --- /dev/null +++ b/references/nextjs-realtime/src/components/RunDetails.tsx @@ -0,0 +1,141 @@ +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import type { RetrieveRunResult } from "@trigger.dev/sdk/v3"; +import { exampleTask } from "@/trigger/example"; + +function formatDate(date: Date | undefined) { + return date ? new Date(date).toLocaleString() : "N/A"; +} + +function JsonDisplay({ data }: { data: any }) { + return ( + +
{JSON.stringify(data, null, 2)}
+
+ ); +} + +export default function RunDetails({ record }: { record: RetrieveRunResult }) { + return ( + + + Run Details + + +
+
+

ID

+

{record.id}

+
+
+

Task Identifier

+

{record.taskIdentifier}

+
+
+

Status

+ + {record.status} + +
+
+

Is Test

+ + {record.isTest ? "Yes" : "No"} + +
+ {record.idempotencyKey && ( +
+

Idempotency Key

+

{record.idempotencyKey}

+
+ )} + {record.ttl && ( +
+

TTL

+

{record.ttl}

+
+ )} +
+ +
+

Tags

+
+ {record.tags.length > 0 ? ( + record.tags.map((tag, index) => ( + + {tag} + + )) + ) : ( + No tags + )} +
+
+ +
+
+

Created At

+

{formatDate(record.createdAt)}

+
+
+

Updated At

+

{formatDate(record.updatedAt)}

+
+
+

Started At

+

{formatDate(record.startedAt)}

+
+
+

Finished At

+

{formatDate(record.finishedAt)}

+
+
+

Delayed Until

+

{formatDate(record.delayedUntil)}

+
+
+

Expired At

+

{formatDate(record.expiredAt)}

+
+
+ +
+

Payload

+ +
+ + {record.output && ( +
+

Output

+

{record.output.message}

+
+ )} + + {record.metadata && ( +
+

Metadata

+ +
+ )} + + {record.error && ( +
+

Error

+ + +

{record.error.name}

+

{record.error.message}

+ {record.error.stackTrace && ( + +
{record.error.stackTrace}
+
+ )} +
+
+
+ )} +
+
+ ); +} diff --git a/references/nextjs-realtime/src/components/UploadImageDisplay.tsx b/references/nextjs-realtime/src/components/UploadImageDisplay.tsx new file mode 100644 index 0000000000..252164ce9b --- /dev/null +++ b/references/nextjs-realtime/src/components/UploadImageDisplay.tsx @@ -0,0 +1,81 @@ +"use client"; + +import { useState } from "react"; +import Image from "next/image"; +import { Card, CardContent } from "@/components/ui/card"; +import { LoaderPinwheel } from "lucide-react"; + +type PendingGridImage = { + status: "pending"; + src?: undefined; + caption?: undefined; + message: string; +}; + +type CompletedGridImage = { + status: "completed"; + src: string; + caption: string; + message: string; +}; + +type GridImage = PendingGridImage | CompletedGridImage; + +export default function ImageDisplay({ + uploadedImage, + uploadedCaption, + gridImages = [], +}: { + uploadedImage: string; + uploadedCaption: string; + gridImages: GridImage[]; +}) { + const [isUploadedImageLoaded, setIsUploadedImageLoaded] = useState(false); + + return ( +
+ {/* Main uploaded image */} +
+
+ {uploadedCaption} setIsUploadedImageLoaded(true)} + /> +
+

{uploadedCaption}

+
+ + {/* Grid of smaller images */} +
+ {gridImages.map((image, index) => ( + + +
+ {image.status === "completed" ? ( + {image.caption} + ) : ( +
+ +

{image.message}

+
+ )} +
+ {image.status === "completed" && ( +

{image.caption}

+ )} +
+
+ ))} +
+
+ ); +} diff --git a/references/nextjs-realtime/src/components/ui/badge.tsx b/references/nextjs-realtime/src/components/ui/badge.tsx new file mode 100644 index 0000000000..e87d62bf1a --- /dev/null +++ b/references/nextjs-realtime/src/components/ui/badge.tsx @@ -0,0 +1,36 @@ +import * as React from "react" +import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" + +const badgeVariants = cva( + "inline-flex items-center rounded-md border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", + { + variants: { + variant: { + default: + "border-transparent bg-primary text-primary-foreground shadow hover:bg-primary/80", + secondary: + "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", + destructive: + "border-transparent bg-destructive text-destructive-foreground shadow hover:bg-destructive/80", + outline: "text-foreground", + }, + }, + defaultVariants: { + variant: "default", + }, + } +) + +export interface BadgeProps + extends React.HTMLAttributes, + VariantProps {} + +function Badge({ className, variant, ...props }: BadgeProps) { + return ( +
+ ) +} + +export { Badge, badgeVariants } diff --git a/references/nextjs-realtime/src/components/ui/button.tsx b/references/nextjs-realtime/src/components/ui/button.tsx new file mode 100644 index 0000000000..0270f644a8 --- /dev/null +++ b/references/nextjs-realtime/src/components/ui/button.tsx @@ -0,0 +1,57 @@ +import * as React from "react" +import { Slot } from "@radix-ui/react-slot" +import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" + +const buttonVariants = cva( + "inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50", + { + variants: { + variant: { + default: + "bg-primary text-primary-foreground shadow hover:bg-primary/90", + destructive: + "bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90", + outline: + "border border-input bg-background shadow-sm hover:bg-accent hover:text-accent-foreground", + secondary: + "bg-secondary text-secondary-foreground shadow-sm hover:bg-secondary/80", + ghost: "hover:bg-accent hover:text-accent-foreground", + link: "text-primary underline-offset-4 hover:underline", + }, + size: { + default: "h-9 px-4 py-2", + sm: "h-8 rounded-md px-3 text-xs", + lg: "h-10 rounded-md px-8", + icon: "h-9 w-9", + }, + }, + defaultVariants: { + variant: "default", + size: "default", + }, + } +) + +export interface ButtonProps + extends React.ButtonHTMLAttributes, + VariantProps { + asChild?: boolean +} + +const Button = React.forwardRef( + ({ className, variant, size, asChild = false, ...props }, ref) => { + const Comp = asChild ? Slot : "button" + return ( + + ) + } +) +Button.displayName = "Button" + +export { Button, buttonVariants } diff --git a/references/nextjs-realtime/src/components/ui/card.tsx b/references/nextjs-realtime/src/components/ui/card.tsx new file mode 100644 index 0000000000..77e9fb789b --- /dev/null +++ b/references/nextjs-realtime/src/components/ui/card.tsx @@ -0,0 +1,76 @@ +import * as React from "react" + +import { cn } from "@/lib/utils" + +const Card = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+)) +Card.displayName = "Card" + +const CardHeader = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+)) +CardHeader.displayName = "CardHeader" + +const CardTitle = React.forwardRef< + HTMLParagraphElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +

+)) +CardTitle.displayName = "CardTitle" + +const CardDescription = React.forwardRef< + HTMLParagraphElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +

+)) +CardDescription.displayName = "CardDescription" + +const CardContent = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +

+)) +CardContent.displayName = "CardContent" + +const CardFooter = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+)) +CardFooter.displayName = "CardFooter" + +export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent } diff --git a/references/nextjs-realtime/src/components/ui/scroll-area.tsx b/references/nextjs-realtime/src/components/ui/scroll-area.tsx new file mode 100644 index 0000000000..0b4a48d87f --- /dev/null +++ b/references/nextjs-realtime/src/components/ui/scroll-area.tsx @@ -0,0 +1,48 @@ +"use client" + +import * as React from "react" +import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area" + +import { cn } from "@/lib/utils" + +const ScrollArea = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + {children} + + + + +)) +ScrollArea.displayName = ScrollAreaPrimitive.Root.displayName + +const ScrollBar = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, orientation = "vertical", ...props }, ref) => ( + + + +)) +ScrollBar.displayName = ScrollAreaPrimitive.ScrollAreaScrollbar.displayName + +export { ScrollArea, ScrollBar } diff --git a/references/nextjs-realtime/src/components/ui/table.tsx b/references/nextjs-realtime/src/components/ui/table.tsx new file mode 100644 index 0000000000..c0df655c0b --- /dev/null +++ b/references/nextjs-realtime/src/components/ui/table.tsx @@ -0,0 +1,120 @@ +import * as React from "react" + +import { cn } from "@/lib/utils" + +const Table = React.forwardRef< + HTMLTableElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+ + +)) +Table.displayName = "Table" + +const TableHeader = React.forwardRef< + HTMLTableSectionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +TableHeader.displayName = "TableHeader" + +const TableBody = React.forwardRef< + HTMLTableSectionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +TableBody.displayName = "TableBody" + +const TableFooter = React.forwardRef< + HTMLTableSectionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + tr]:last:border-b-0", + className + )} + {...props} + /> +)) +TableFooter.displayName = "TableFooter" + +const TableRow = React.forwardRef< + HTMLTableRowElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +TableRow.displayName = "TableRow" + +const TableHead = React.forwardRef< + HTMLTableCellElement, + React.ThHTMLAttributes +>(({ className, ...props }, ref) => ( +
[role=checkbox]]:translate-y-[2px]", + className + )} + {...props} + /> +)) +TableHead.displayName = "TableHead" + +const TableCell = React.forwardRef< + HTMLTableCellElement, + React.TdHTMLAttributes +>(({ className, ...props }, ref) => ( + [role=checkbox]]:translate-y-[2px]", + className + )} + {...props} + /> +)) +TableCell.displayName = "TableCell" + +const TableCaption = React.forwardRef< + HTMLTableCaptionElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( +
+)) +TableCaption.displayName = "TableCaption" + +export { + Table, + TableHeader, + TableBody, + TableFooter, + TableHead, + TableRow, + TableCell, + TableCaption, +} diff --git a/references/nextjs-realtime/src/hooks/useHandleUploadRun.ts b/references/nextjs-realtime/src/hooks/useHandleUploadRun.ts new file mode 100644 index 0000000000..b560d89665 --- /dev/null +++ b/references/nextjs-realtime/src/hooks/useHandleUploadRun.ts @@ -0,0 +1,26 @@ +import type { handleUpload, runFalModel } from "@/trigger/images"; +import { RunFalMetadata } from "@/utils/schemas"; +import { useRealtimeRunsWithTag } from "@trigger.dev/react-hooks"; + +export function useHandleUploadRun(fileId: string) { + const { runs, error } = useRealtimeRunsWithTag( + `file:${fileId}` + ); + + const images = runs + .filter((run) => run.taskIdentifier === "run-fal-model") + .map((run) => { + const metadata = RunFalMetadata.default({ result: { status: "IN_PROGRESS" } }).parse( + run.metadata + ); + + return { + model: run.payload.model, + data: metadata?.result, + }; + }); + + const run = runs.find((run) => run.taskIdentifier === "handle-upload"); + + return { run, error, images }; +} diff --git a/references/nextjs-realtime/src/lib/utils.ts b/references/nextjs-realtime/src/lib/utils.ts new file mode 100644 index 0000000000..bd0c391ddd --- /dev/null +++ b/references/nextjs-realtime/src/lib/utils.ts @@ -0,0 +1,6 @@ +import { clsx, type ClassValue } from "clsx" +import { twMerge } from "tailwind-merge" + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)) +} diff --git a/references/nextjs-realtime/src/trigger/example.ts b/references/nextjs-realtime/src/trigger/example.ts new file mode 100644 index 0000000000..90b8590570 --- /dev/null +++ b/references/nextjs-realtime/src/trigger/example.ts @@ -0,0 +1,28 @@ +import { logger, metadata, schemaTask } from "@trigger.dev/sdk/v3"; +import { setTimeout } from "timers/promises"; +import { z } from "zod"; + +export const ExampleTaskPayload = z.object({ + id: z.string(), + isAdmin: z.boolean().default(false), +}); + +export const exampleTask = schemaTask({ + id: "example", + schema: ExampleTaskPayload, + run: async (payload, { ctx }) => { + logger.log("Running example task with payload", { payload }); + + metadata.set("status", { type: "started", progress: 0.1 }); + + await setTimeout(2000); + + metadata.set("status", { type: "processing", progress: 0.5 }); + + await setTimeout(2000); + + metadata.set("status", { type: "finished", progress: 1.0 }); + + return { message: "All good here!" 
}; + }, +}); diff --git a/references/nextjs-realtime/src/trigger/images.ts b/references/nextjs-realtime/src/trigger/images.ts new file mode 100644 index 0000000000..8e97f3dca5 --- /dev/null +++ b/references/nextjs-realtime/src/trigger/images.ts @@ -0,0 +1,85 @@ +import { idempotencyKeys, logger, metadata, schemaTask } from "@trigger.dev/sdk/v3"; +import { FalResult, GridImage, UploadedFileData } from "@/utils/schemas"; +import { z } from "zod"; + +import * as fal from "@fal-ai/serverless-client"; + +fal.config({ + credentials: process.env.FAL_KEY, +}); + +export const handleUpload = schemaTask({ + id: "handle-upload", + schema: UploadedFileData, + run: async (file, { ctx }) => { + logger.info("Handling uploaded file", { file }); + + const results = await runFalModel.batchTriggerAndWait([ + { + payload: { + model: "fal-ai/image-preprocessors/canny", + url: file.url, + input: { + low_threshold: 100, + high_threshold: 200, + }, + }, + options: { + tags: ctx.run.tags, + }, + }, + { + payload: { + model: "fal-ai/aura-sr", + url: file.url, + input: {}, + }, + options: { tags: ctx.run.tags }, + }, + { + payload: { + model: "fal-ai/imageutils/depth", + url: file.url, + input: {}, + }, + options: { tags: ctx.run.tags }, + }, + ]); + + return results; + }, +}); + +const RunFalModelInput = z.object({ + model: z.string(), + url: z.string(), + input: z.record(z.any()), +}); + +export const runFalModel = schemaTask({ + id: "run-fal-model", + schema: RunFalModelInput, + run: async (payload) => { + return await internal_runFalModel(payload.model, payload.url, payload.input); + }, +}); + +async function internal_runFalModel(model: string, url: string, input: any) { + const result = await fal.subscribe(model, { + input: { + image_url: url, + ...input, + }, + onQueueUpdate: (update) => { + logger.info(model, { update }); + + metadata.set("result", GridImage.parse(update)); + }, + }); + + const parsedResult = FalResult.parse(result); + + metadata.set("$.result.image", parsedResult.image); + + return parsedResult.image; +} diff --git a/references/nextjs-realtime/src/utils/schemas.ts b/references/nextjs-realtime/src/utils/schemas.ts new file mode 100644 index 0000000000..a5a6e4315c --- /dev/null +++ b/references/nextjs-realtime/src/utils/schemas.ts @@ -0,0 +1,60 @@ +import { z } from "zod"; + +export const EnqueuedQueueStatus = z.object({ + status: z.literal("IN_QUEUE"), + queue_position: z.number(), +}); + +export type EnqueuedQueueStatus = z.infer; + +export const InProgressGridImage = z.object({ + status: z.literal("IN_PROGRESS"), +}); + +export type InProgressGridImage = z.infer; + +export const ImageDetails = z.object({ + url: z.string(), + file_name: z.string(), +}); + +export type ImageDetails = z.infer; + +export const CompletedGridImage = z.object({ + status: z.literal("COMPLETED"), + metrics: z.object({ + inference_time: z.number().nullable(), + }), + image: ImageDetails.optional(), +}); + +export type CompletedGridImage = z.infer; + +export const GridImage = z.union([InProgressGridImage, CompletedGridImage, EnqueuedQueueStatus]); + +export type GridImage = z.infer; + +export const HandleUploadMetadata = z.record(GridImage); +export type HandleUploadMetadata = z.infer; + +export const RunFalMetadata = z.object({ result: GridImage }); +export type RunFalMetadata = z.infer; + +export const UploadedFileData = z.object({ + name: z.string(), + size: z.number(), + type: z.string(), + key: z.string(), + url: z.string(), + appUrl: z.string(), + fileHash: z.string(), + customId: z.string().nullable(), +}); + 
+export type UploadedFileData = z.infer; + +export const FalResult = z.object({ + image: ImageDetails, +}); + +export type FalResult = z.infer; diff --git a/references/nextjs-realtime/src/utils/uploadthing.ts b/references/nextjs-realtime/src/utils/uploadthing.ts new file mode 100644 index 0000000000..fab04f539d --- /dev/null +++ b/references/nextjs-realtime/src/utils/uploadthing.ts @@ -0,0 +1,6 @@ +import { generateUploadButton, generateUploadDropzone } from "@uploadthing/react"; + +import type { OurFileRouter } from "@/app/api/uploadthing/core"; + +export const UploadButton = generateUploadButton(); +export const UploadDropzone = generateUploadDropzone(); diff --git a/references/nextjs-realtime/tailwind.config.ts b/references/nextjs-realtime/tailwind.config.ts new file mode 100644 index 0000000000..7ba9baf7e2 --- /dev/null +++ b/references/nextjs-realtime/tailwind.config.ts @@ -0,0 +1,64 @@ +import type { Config } from "tailwindcss"; +import { withUt } from "uploadthing/tw"; + +const config: Config = withUt({ + darkMode: ["class"], + content: [ + "./src/pages/**/*.{js,ts,jsx,tsx,mdx}", + "./src/components/**/*.{js,ts,jsx,tsx,mdx}", + "./src/app/**/*.{js,ts,jsx,tsx,mdx}", + ], + theme: { + extend: { + colors: { + background: "hsl(var(--background))", + foreground: "hsl(var(--foreground))", + card: { + DEFAULT: "hsl(var(--card))", + foreground: "hsl(var(--card-foreground))", + }, + popover: { + DEFAULT: "hsl(var(--popover))", + foreground: "hsl(var(--popover-foreground))", + }, + primary: { + DEFAULT: "hsl(var(--primary))", + foreground: "hsl(var(--primary-foreground))", + }, + secondary: { + DEFAULT: "hsl(var(--secondary))", + foreground: "hsl(var(--secondary-foreground))", + }, + muted: { + DEFAULT: "hsl(var(--muted))", + foreground: "hsl(var(--muted-foreground))", + }, + accent: { + DEFAULT: "hsl(var(--accent))", + foreground: "hsl(var(--accent-foreground))", + }, + destructive: { + DEFAULT: "hsl(var(--destructive))", + foreground: "hsl(var(--destructive-foreground))", + }, + border: "hsl(var(--border))", + input: "hsl(var(--input))", + ring: "hsl(var(--ring))", + chart: { + "1": "hsl(var(--chart-1))", + "2": "hsl(var(--chart-2))", + "3": "hsl(var(--chart-3))", + "4": "hsl(var(--chart-4))", + "5": "hsl(var(--chart-5))", + }, + }, + borderRadius: { + lg: "var(--radius)", + md: "calc(var(--radius) - 2px)", + sm: "calc(var(--radius) - 4px)", + }, + }, + }, + plugins: [require("tailwindcss-animate")], +}); +export default config; diff --git a/references/nextjs-realtime/trigger.config.ts b/references/nextjs-realtime/trigger.config.ts new file mode 100644 index 0000000000..9820fb2223 --- /dev/null +++ b/references/nextjs-realtime/trigger.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "@trigger.dev/sdk/v3"; + +export default defineConfig({ + project: "proj_bzhdaqhlymtuhlrcgbqy", + dirs: ["./src/trigger"], +}); diff --git a/references/nextjs-realtime/tsconfig.json b/references/nextjs-realtime/tsconfig.json new file mode 100644 index 0000000000..7b28589304 --- /dev/null +++ b/references/nextjs-realtime/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", 
"**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/references/v3-catalog/package.json b/references/v3-catalog/package.json index 35fc7899ca..ceedc89f3f 100644 --- a/references/v3-catalog/package.json +++ b/references/v3-catalog/package.json @@ -8,7 +8,7 @@ "scripts": { "dev:trigger": "trigger dev", "deploy": "trigger deploy --self-hosted --load-image", - "management": "ts-node -r dotenv/config -r tsconfig-paths/register ./src/management.ts", + "management": "tsx -r dotenv/config ./src/management.ts", "queues": "ts-node -r dotenv/config -r tsconfig-paths/register ./src/queues.ts", "build:client": "tsup-node ./src/clientUsage.ts --format esm,cjs", "client": "tsx -r dotenv/config ./src/clientUsage.ts", diff --git a/references/v3-catalog/src/clientUsage.ts b/references/v3-catalog/src/clientUsage.ts index 0535af0e85..936583524a 100644 --- a/references/v3-catalog/src/clientUsage.ts +++ b/references/v3-catalog/src/clientUsage.ts @@ -1,31 +1,47 @@ -import { tasks } from "@trigger.dev/sdk/v3"; +import { auth, runs, tasks } from "@trigger.dev/sdk/v3"; +import type { task1, task2 } from "./trigger/taskTypes.js"; +import { randomUUID } from "crypto"; async function main() { - await tasks.trigger( - "create-jsonhero-doc", + const userId = randomUUID(); + + const anyHandle = await tasks.trigger( + "types/task-1", { - title: "Hello World", - content: { - message: "Hello, World!", - }, + foo: "baz", }, { - ttl: "1m", - } - ); - - await tasks.trigger( - "create-jsonhero-doc", - { - title: "Hello World", - content: { - message: "Hello, World!", - }, + tags: [`user:${userId}`], }, { - ttl: "1m", + publicAccessToken: { + expirationTime: "24hr", + }, } ); + + console.log("Auto JWT", anyHandle.publicAccessToken); + + await auth.withAuth({ accessToken: anyHandle.publicAccessToken }, async () => { + const subscription = runs.subscribeToRunsWithTag(`user:${userId}`); + + for await (const run of subscription) { + switch (run.taskIdentifier) { + case "types/task-1": { + console.log("Run update:", run); + console.log("Output:", run.output); + console.log("Payload:", run.payload); + break; + } + case "types/task-2": { + console.log("Run update:", run); + console.log("Output:", run.output); + console.log("Payload:", run.payload); + break; + } + } + } + }); } main().catch(console.error); diff --git a/references/v3-catalog/src/trigger/runMetadata.ts b/references/v3-catalog/src/trigger/runMetadata.ts index 690935da13..4a12c6009c 100644 --- a/references/v3-catalog/src/trigger/runMetadata.ts +++ b/references/v3-catalog/src/trigger/runMetadata.ts @@ -21,19 +21,19 @@ export const runMetadataChildTask = task({ run: async (payload: any, { ctx }) => { logger.info("metadata", { metadata: metadata.current() }); - await metadata.set("child", "task"); + metadata.set("child", "task"); logger.info("metadata", { metadata: metadata.current() }); - await metadata.set("child-2", "task-2"); + metadata.set("child-2", "task-2"); logger.info("metadata", { current: metadata.current() }); - await metadata.del("hello"); + metadata.del("hello"); logger.info("metadata", { metadata: metadata.current() }); - await metadata.save({ + metadata.replace({ there: { is: { something: "here", diff --git a/references/v3-catalog/src/trigger/subtasks.ts b/references/v3-catalog/src/trigger/subtasks.ts index 9624ef2ad0..dfff628620 100644 --- a/references/v3-catalog/src/trigger/subtasks.ts +++ b/references/v3-catalog/src/trigger/subtasks.ts @@ -1,5 +1,5 @@ import { logger, task, wait, tasks, tags } from "@trigger.dev/sdk/v3"; -import 
{ taskWithRetries } from "./retries"; +import { taskWithRetries } from "./retries.js"; export const simpleParentTask = task({ id: "simple-parent-task", diff --git a/references/v3-catalog/src/trigger/tags.ts b/references/v3-catalog/src/trigger/tags.ts index bd513dc014..88440430d0 100644 --- a/references/v3-catalog/src/trigger/tags.ts +++ b/references/v3-catalog/src/trigger/tags.ts @@ -11,7 +11,7 @@ export const triggerRunsWithTags = task({ logger.info(`${ctx.run.version}`); const { id } = await simpleChildTask.trigger( - { message: "trigger from triggerRunsWithTags" }, + { message: "trigger from triggerRunsWithTags foobar" }, { tags: payload.tags } ); @@ -86,5 +86,9 @@ export const triggerRunsWithTags = task({ baseCostInCents: run.baseCostInCents, durationMs: run.durationMs, }); + + return { + tags: run.tags, + }; }, }); diff --git a/references/v3-catalog/src/trigger/taskTypes.ts b/references/v3-catalog/src/trigger/taskTypes.ts new file mode 100644 index 0000000000..28aafa068d --- /dev/null +++ b/references/v3-catalog/src/trigger/taskTypes.ts @@ -0,0 +1,23 @@ +import { task, schemaTask } from "@trigger.dev/sdk/v3"; +import { z } from "zod"; + +export const task1 = task({ + id: "types/task-1", + run: async (payload: { foo: string }) => { + return { hello: "world" }; + }, +}); + +const Task2Payload = z.object({ + bar: z.string(), +}); + +export const task2 = schemaTask({ + id: "types/task-2", + schema: Task2Payload, + run: async (payload, { ctx }) => { + console.log(ctx.run.idempotencyKey); + + return { goodbye: "world" as const }; + }, +});
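Taken together, the new `taskTypes.ts` and the reworked `clientUsage.ts` above demonstrate the Realtime flow this PR adds: trigger a task with a user tag plus a scoped public access token, then subscribe to every run carrying that tag. The sketch below restates that flow in one place. The generic type parameters (`typeof task1 | typeof task2`) are an assumption added for clarity (the diff shows the plain calls), and the task ids, tags, and outputs mirror the catalog code, so read it as an illustration of the pattern rather than a copy of the file.

```ts
import { auth, runs, tasks } from "@trigger.dev/sdk/v3";
import type { task1, task2 } from "./trigger/taskTypes.js";
import { randomUUID } from "crypto";

async function main() {
  const userId = randomUUID();

  // Trigger a run, tag it with the user, and request a scoped public token.
  const handle = await tasks.trigger<typeof task1>(
    "types/task-1",
    { foo: "baz" },
    { tags: [`user:${userId}`] },
    { publicAccessToken: { expirationTime: "24hr" } }
  );

  // Everything inside this callback authenticates with the public token.
  await auth.withAuth({ accessToken: handle.publicAccessToken }, async () => {
    // Assumed generic: narrows run.payload / run.output per taskIdentifier.
    const subscription = runs.subscribeToRunsWithTag<typeof task1 | typeof task2>(
      `user:${userId}`
    );

    for await (const run of subscription) {
      if (run.taskIdentifier === "types/task-1") {
        console.log("task-1 update:", run.status, run.output); // { hello: "world" } once completed
      } else if (run.taskIdentifier === "types/task-2") {
        console.log("task-2 update:", run.status, run.output); // { goodbye: "world" } once completed
      }
    }
  });
}

main().catch(console.error);
```

As with `clientUsage.ts`, a script like this would be run with `tsx -r dotenv/config` (see the `client` script in `references/v3-catalog/package.json`).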