Skip to content

Commit a64875a

Browse files
authored
fix: regression affecting replayed runs with superjson payloads (#2266)
* Fix regression that broke superjson behavior for the replay flow
* Ignore undefined type metadata when overriding superjson payloads (addresses #1968)
* Add a few unit tests for replaceSuperJsonPayload
1 parent 8690878 commit a64875a

File tree

6 files changed

+277
-14
lines changed

6 files changed

+277
-14
lines changed

apps/webapp/app/v3/replayTask.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,12 @@ export const ReplayRunData = z
99
.optional()
1010
.transform((val, ctx) => {
1111
if (!val) {
12-
return {};
12+
return "{}";
1313
}
1414

1515
try {
16-
return JSON.parse(val);
16+
JSON.parse(val);
17+
return val;
1718
} catch {
1819
ctx.addIssue({
1920
code: z.ZodIssueCode.custom,

apps/webapp/app/v3/services/replayTaskRun.server.ts

Lines changed: 29 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -2,18 +2,19 @@ import {
22
type MachinePresetName,
33
conditionallyImportPacket,
44
parsePacket,
5+
stringifyIO,
56
} from "@trigger.dev/core/v3";
67
import { type TaskRun } from "@trigger.dev/database";
78
import { findEnvironmentById } from "~/models/runtimeEnvironment.server";
8-
import { getTagsForRunId } from "~/models/taskRunTag.server";
99
import { logger } from "~/services/logger.server";
1010
import { BaseService } from "./baseService.server";
1111
import { OutOfEntitlementError, TriggerTaskService } from "./triggerTask.server";
1212
import { type RunOptionsData } from "../testTask";
13+
import { replaceSuperJsonPayload } from "@trigger.dev/core/v3/utils/ioSerialization";
1314

1415
type OverrideOptions = {
1516
environmentId?: string;
16-
payload?: unknown;
17+
payload?: string;
1718
metadata?: unknown;
1819
bulkActionId?: string;
1920
} & RunOptionsData;
@@ -36,7 +37,15 @@ export class ReplayTaskRunService extends BaseService {
3637
taskRunFriendlyId: existingTaskRun.friendlyId,
3738
});
3839

39-
const payload = overrideOptions.payload ?? (await this.getExistingPayload(existingTaskRun));
40+
const payloadPacket = await this.overrideExistingPayloadPacket(
41+
existingTaskRun,
42+
overrideOptions.payload
43+
);
44+
const parsedPayload =
45+
payloadPacket.dataType === "application/json"
46+
? await parsePacket(payloadPacket)
47+
: payloadPacket.data;
48+
const payloadType = payloadPacket.dataType;
4049
const metadata = overrideOptions.metadata ?? (await this.getExistingMetadata(existingTaskRun));
4150
const tags = overrideOptions.tags ?? existingTaskRun.runTags;
4251

@@ -53,8 +62,9 @@ export class ReplayTaskRunService extends BaseService {
5362
existingTaskRun.taskIdentifier,
5463
authenticatedEnvironment,
5564
{
56-
payload,
65+
payload: parsedPayload,
5766
options: {
67+
payloadType,
5868
queue: taskQueue
5969
? {
6070
name: taskQueue.name,
@@ -108,15 +118,23 @@ export class ReplayTaskRunService extends BaseService {
108118
}
109119
}
110120

111-
private async getExistingPayload(existingTaskRun: TaskRun) {
112-
const existingPayloadPacket = await conditionallyImportPacket({
113-
data: existingTaskRun.payload,
121+
private async overrideExistingPayloadPacket(
122+
existingTaskRun: TaskRun,
123+
stringifiedPayloadOverride: string | undefined
124+
) {
125+
if (stringifiedPayloadOverride && existingTaskRun.payloadType === "application/super+json") {
126+
const newPayload = await replaceSuperJsonPayload(
127+
existingTaskRun.payload,
128+
stringifiedPayloadOverride
129+
);
130+
131+
return stringifyIO(newPayload);
132+
}
133+
134+
return conditionallyImportPacket({
135+
data: stringifiedPayloadOverride ?? existingTaskRun.payload,
114136
dataType: existingTaskRun.payloadType,
115137
});
116-
117-
return existingPayloadPacket.dataType === "application/json"
118-
? await parsePacket(existingPayloadPacket)
119-
: existingPayloadPacket.data;
120138
}
121139

122140
private async getExistingMetadata(existingTaskRun: TaskRun) {

packages/core/package.json

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -188,6 +188,7 @@
188188
"execa": "^8.0.1",
189189
"humanize-duration": "^3.27.3",
190190
"jose": "^5.4.0",
191+
"lodash.get": "^4.4.2",
191192
"nanoid": "3.3.8",
192193
"prom-client": "^15.1.0",
193194
"socket.io": "4.7.4",
@@ -206,6 +207,7 @@
206207
"@epic-web/test-server": "^0.1.0",
207208
"@trigger.dev/database": "workspace:*",
208209
"@types/humanize-duration": "^3.27.1",
210+
"@types/lodash.get": "^4.4.9",
209211
"@types/readable-stream": "^4.0.14",
210212
"ai": "^3.4.33",
211213
"defu": "^6.1.4",

packages/core/src/v3/utils/ioSerialization.ts

Lines changed: 35 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ import { SemanticInternalAttributes } from "../semanticInternalAttributes.js";
1212
import { TriggerTracer } from "../tracer.js";
1313
import { zodfetch } from "../zodfetch.js";
1414
import { flattenAttributes } from "./flattenAttributes.js";
15+
import get from "lodash.get";
1516

1617
export type IOPacket = {
1718
data?: string | undefined;
@@ -505,13 +506,46 @@ function safeJsonParse(value: string): any {
505506
}
506507
}
507508

509+
/**
510+
* Replaces the data in a SuperJSON-serialized string with new payload data while preserving
511+
* the original type metadata (Dates, BigInts, Sets, Maps, etc.).
512+
*
513+
* It is primarily useful for our run replay functionality where we want to preserve the original
514+
* type metadata for the new payload.
515+
*
516+
* Note that `undefined` type metadata is ignored when the corresponding field is overriden in the
517+
* new payload, i.e., fields which were previously undefined in the original payload are restored into
518+
 * new payload, i.e., fields which were previously undefined in the original payload are restored to
519+
* This is a workaround for https://github.com/triggerdotdev/trigger.dev/issues/1968.
520+
*
521+
* @param original - A SuperJSON-serialized string containing the original data with type metadata
522+
* @param newPayload - A JSON string containing the new data to replace the original payload
523+
* @returns The deserialized object with new data but original type metadata preserved
524+
*
525+
* @throws {Error} If the newPayload is not valid JSON
526+
*/
508527
export async function replaceSuperJsonPayload(original: string, newPayload: string) {
509528
const superjson = await loadSuperJSON();
510529
const originalObject = superjson.parse(original);
530+
const newPayloadObject = JSON.parse(newPayload);
511531
const { meta } = superjson.serialize(originalObject);
512532

533+
if (meta?.values) {
534+
const originalUndefinedKeys = Object.entries(meta.values)
535+
.filter(([, value]) => Array.isArray(value) && value.at(0) === "undefined")
536+
.map(([key]) => key);
537+
538+
const overridenUndefinedKeys = originalUndefinedKeys.filter(
539+
(key) => get(newPayloadObject, key) !== undefined
540+
);
541+
542+
overridenUndefinedKeys.forEach((key) => {
543+
delete (meta.values as Record<string, any>)[key];
544+
});
545+
}
546+
513547
const newSuperJson = {
514-
json: JSON.parse(newPayload) as any,
548+
json: newPayloadObject,
515549
meta,
516550
};
517551

Lines changed: 191 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,191 @@
1+
import { replaceSuperJsonPayload } from "../src/v3/utils/ioSerialization.js";
2+
3+
describe("ioSerialization", () => {
4+
describe("replaceSuperJsonPayload", () => {
5+
it("should replace simple JSON payload while preserving SuperJSON metadata", async () => {
6+
const originalData = {
7+
name: "John",
8+
age: 30,
9+
date: new Date("2023-01-01"),
10+
};
11+
12+
const superjson = await import("superjson");
13+
const originalSerialized = superjson.stringify(originalData);
14+
15+
const newPayloadJson = JSON.stringify({
16+
name: "Jane",
17+
surname: "Doe",
18+
age: 25,
19+
date: "2023-02-01T00:00:00.000Z",
20+
});
21+
22+
const result = (await replaceSuperJsonPayload(originalSerialized, newPayloadJson)) as any;
23+
24+
expect(result.name).toBe("Jane");
25+
expect(result.surname).toBe("Doe");
26+
expect(result.age).toBe(25);
27+
expect(result.date).toBeInstanceOf(Date);
28+
expect(result.date.toISOString()).toBe("2023-02-01T00:00:00.000Z");
29+
});
30+
31+
// related to issue https://github.com/triggerdotdev/trigger.dev/issues/1968
32+
it("should ignore original undefined type metadata for overriden fields", async () => {
33+
const originalData = {
34+
name: "John",
35+
age: 30,
36+
date: new Date("2023-01-01"),
37+
country: undefined,
38+
settings: {
39+
theme: undefined,
40+
},
41+
};
42+
43+
const superjson = await import("superjson");
44+
const originalSerialized = superjson.stringify(originalData);
45+
46+
const newPayloadJson = JSON.stringify({
47+
name: "Jane",
48+
surname: "Doe",
49+
age: 25,
50+
date: "2023-02-01T00:00:00.000Z",
51+
country: "US",
52+
settings: {
53+
theme: "dark",
54+
},
55+
});
56+
57+
const result = (await replaceSuperJsonPayload(originalSerialized, newPayloadJson)) as any;
58+
59+
expect(result.name).toBe("Jane");
60+
expect(result.surname).toBe("Doe");
61+
expect(result.country).toBe("US");
62+
expect(result.settings.theme).toBe("dark");
63+
expect(result.age).toBe(25);
64+
expect(result.date).toBeInstanceOf(Date);
65+
expect(result.date.toISOString()).toBe("2023-02-01T00:00:00.000Z");
66+
});
67+
68+
it("should preserve BigInt type metadata", async () => {
69+
const originalData = {
70+
id: BigInt(123456789),
71+
count: 42,
72+
};
73+
74+
const superjson = await import("superjson");
75+
const originalSerialized = superjson.stringify(originalData);
76+
77+
const newPayloadJson = JSON.stringify({
78+
id: "987654321",
79+
count: 100,
80+
});
81+
82+
const result = (await replaceSuperJsonPayload(originalSerialized, newPayloadJson)) as any;
83+
84+
expect(result.id).toBe(BigInt(987654321));
85+
expect(typeof result.id).toBe("bigint");
86+
expect(result.count).toBe(100);
87+
});
88+
89+
it("should preserve nested type metadata", async () => {
90+
const originalData = {
91+
user: {
92+
id: BigInt(123),
93+
createdAt: new Date("2023-01-01"),
94+
settings: {
95+
theme: "dark",
96+
updatedAt: new Date("2023-01-01"),
97+
},
98+
},
99+
metadata: {
100+
version: 1,
101+
},
102+
};
103+
104+
const superjson = await import("superjson");
105+
const originalSerialized = superjson.stringify(originalData);
106+
107+
const newPayloadJson = JSON.stringify({
108+
user: {
109+
id: "456",
110+
createdAt: "2023-06-01T00:00:00.000Z",
111+
settings: {
112+
theme: "light",
113+
updatedAt: "2023-06-01T00:00:00.000Z",
114+
},
115+
},
116+
metadata: {
117+
version: 2,
118+
},
119+
});
120+
121+
const result = (await replaceSuperJsonPayload(originalSerialized, newPayloadJson)) as any;
122+
123+
expect(result.user.id).toBe(BigInt(456));
124+
expect(result.user.createdAt).toBeInstanceOf(Date);
125+
expect(result.user.createdAt.toISOString()).toBe("2023-06-01T00:00:00.000Z");
126+
expect(result.user.settings.theme).toBe("light");
127+
expect(result.user.settings.updatedAt).toBeInstanceOf(Date);
128+
expect(result.user.settings.updatedAt.toISOString()).toBe("2023-06-01T00:00:00.000Z");
129+
expect(result.metadata.version).toBe(2);
130+
});
131+
132+
it("should preserve Set type metadata", async () => {
133+
const originalData = {
134+
tags: new Set(["tag1", "tag2"]),
135+
name: "test",
136+
};
137+
138+
const superjson = await import("superjson");
139+
const originalSerialized = superjson.stringify(originalData);
140+
141+
const newPayloadJson = JSON.stringify({
142+
tags: ["tag3", "tag4", "tag5"],
143+
name: "updated",
144+
});
145+
146+
const result = (await replaceSuperJsonPayload(originalSerialized, newPayloadJson)) as any;
147+
148+
expect(result.tags).toBeInstanceOf(Set);
149+
expect(Array.from(result.tags)).toEqual(["tag3", "tag4", "tag5"]);
150+
expect(result.name).toBe("updated");
151+
});
152+
153+
it("should preserve Map type metadata", async () => {
154+
const originalData = {
155+
mapping: new Map([
156+
["key1", "value1"],
157+
["key2", "value2"],
158+
]),
159+
name: "test",
160+
};
161+
162+
const superjson = await import("superjson");
163+
const originalSerialized = superjson.stringify(originalData);
164+
165+
const newPayloadJson = JSON.stringify({
166+
mapping: [
167+
["key3", "value3"],
168+
["key4", "value4"],
169+
],
170+
name: "updated",
171+
});
172+
173+
const result = (await replaceSuperJsonPayload(originalSerialized, newPayloadJson)) as any;
174+
175+
expect(result.mapping).toBeInstanceOf(Map);
176+
expect(result.mapping.get("key3")).toBe("value3");
177+
expect(result.mapping.get("key4")).toBe("value4");
178+
expect(result.name).toBe("updated");
179+
});
180+
181+
it("should throw error for invalid JSON payload", async () => {
182+
const originalData = { name: "test" };
183+
184+
const superjson = await import("superjson");
185+
const originalSerialized = superjson.stringify(originalData);
186+
const invalidPayload = "{ invalid json }";
187+
188+
await expect(replaceSuperJsonPayload(originalSerialized, invalidPayload)).rejects.toThrow();
189+
});
190+
});
191+
});

0 commit comments

Comments
 (0)