Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion platform/dist/axios.d.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { AxiosRequestConfig } from "./index";
export declare function transformConfigForOauth(config: AxiosRequestConfig): {
method: string;
method: (string & {}) | import("axios").Method;
url: string;
};
declare function callAxios(step: any, config: AxiosRequestConfig, signConfig?: any): Promise<any>;
Expand Down
4 changes: 2 additions & 2 deletions platform/dist/axios.js
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ async function callAxios(step, config, signConfig) {
if (config.debug) {
stepExport(step, config, "debug_config");
}
const response = await axios_1.default(config);
const response = await (0, axios_1.default)(config);
if (config.debug) {
stepExport(step, response.data, "debug_response");
}
Expand All @@ -124,7 +124,7 @@ async function callAxios(step, config, signConfig) {
}
}
function stepExport(step, message, key) {
message = utils_1.cloneSafe(message);
message = (0, utils_1.cloneSafe)(message);
if (step) {
if (step.export) {
step.export(key, message);
Expand Down
68 changes: 57 additions & 11 deletions platform/dist/file-stream.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,10 @@ const mime = require("mime-types");
* @returns a Readable stream of the file content
*/
async function getFileStream(pathOrUrl) {
if (isUrl(pathOrUrl)) {
if (isDataUrl(pathOrUrl)) {
return getDataUrlStream(pathOrUrl);
}
else if (isUrl(pathOrUrl)) {
const response = await fetch(pathOrUrl);
if (!response.ok || !response.body) {
throw new Error(`Failed to fetch ${pathOrUrl}: ${response.status} ${response.statusText}`);
Expand All @@ -22,7 +25,7 @@ async function getFileStream(pathOrUrl) {
}
else {
await safeStat(pathOrUrl);
return fs_1.createReadStream(pathOrUrl);
return (0, fs_1.createReadStream)(pathOrUrl);
}
}
exports.getFileStream = getFileStream;
Expand All @@ -31,7 +34,10 @@ exports.getFileStream = getFileStream;
* @returns a Readable stream of the file content and its metadata
*/
async function getFileStreamAndMetadata(pathOrUrl) {
if (isUrl(pathOrUrl)) {
if (isDataUrl(pathOrUrl)) {
return getDataUrlStreamAndMetadata(pathOrUrl);
}
else if (isUrl(pathOrUrl)) {
return await getRemoteFileStreamAndMetadata(pathOrUrl);
}
else {
Expand All @@ -48,6 +54,46 @@ function isUrl(pathOrUrl) {
return false;
}
}
function isDataUrl(pathOrUrl) {
    // A data URL is identified purely by its "data:" scheme prefix.
    return pathOrUrl.slice(0, "data:".length) === "data:";
}
/**
 * Parses an RFC 2397 data URL into its media type, encoding flag, and payload.
 *
 * Format: data:[<mediatype>][;<param>=<value>]*[;base64],<data>
 *
 * Splits on the first comma instead of a single regex so that media types with
 * parameters (e.g. "text/plain;charset=UTF-8") are accepted, and applies the
 * RFC 2397 default media type when none is given (e.g. "data:,hi" or
 * "data:;base64,...").
 *
 * @param dataUrl the full data URL string
 * @returns { mediaType, isBase64, data } — data is still percent/base64 encoded
 * @throws Error when the string is not a well-formed data URL
 */
function parseDataUrl(dataUrl) {
    const commaIndex = dataUrl.indexOf(",");
    if (!dataUrl.startsWith("data:") || commaIndex === -1) {
        throw new Error("Invalid data URL format");
    }
    // Everything between "data:" and the first comma: media type + parameters.
    const meta = dataUrl.slice("data:".length, commaIndex);
    const data = dataUrl.slice(commaIndex + 1);
    // ";base64" is only meaningful as the final token before the comma.
    const isBase64 = /;base64$/i.test(meta);
    const mediaType = (isBase64
        ? meta.replace(/;base64$/i, "")
        : meta) || "text/plain;charset=US-ASCII"; // RFC 2397 default
    return {
        mediaType,
        isBase64,
        data,
    };
}
function getDataUrlStream(dataUrl) {
    // Decode the data URL payload into bytes, then expose it as a stream.
    const parsed = parseDataUrl(dataUrl);
    let buffer;
    if (parsed.isBase64) {
        buffer = Buffer.from(parsed.data, "base64");
    }
    else {
        buffer = Buffer.from(decodeURIComponent(parsed.data), "utf-8");
    }
    return stream_1.Readable.from(buffer);
}
function getDataUrlStreamAndMetadata(dataUrl) {
    const { mediaType, isBase64, data } = parseDataUrl(dataUrl);
    // Materialize the payload so we can report an exact byte size.
    let buffer;
    if (isBase64) {
        buffer = Buffer.from(data, "base64");
    }
    else {
        buffer = Buffer.from(decodeURIComponent(data), "utf-8");
    }
    // Derive a generic file name from the media type's extension, if known.
    const ext = mime.extension(mediaType);
    const name = ext
        ? `file.${ext}`
        : "file";
    return {
        stream: stream_1.Readable.from(buffer),
        metadata: {
            size: buffer.length,
            contentType: mediaType || undefined,
            name,
        },
    };
}
async function safeStat(path) {
try {
return await fs_1.promises.stat(path);
Expand All @@ -62,10 +108,10 @@ async function getLocalFileStreamAndMetadata(filePath) {
const metadata = {
size: stats.size,
lastModified: stats.mtime,
name: path_1.basename(filePath),
name: (0, path_1.basename)(filePath),
contentType,
};
const stream = fs_1.createReadStream(filePath);
const stream = (0, fs_1.createReadStream)(filePath);
return {
stream,
metadata,
Expand All @@ -83,7 +129,7 @@ async function getRemoteFileStreamAndMetadata(url) {
: undefined;
const etag = headers.get("etag") || undefined;
const urlObj = new URL(url);
const name = path_1.basename(urlObj.pathname);
const name = (0, path_1.basename)(urlObj.pathname);
const contentType = headers.get("content-type") || mime.lookup(urlObj.pathname) || undefined;
const baseMetadata = {
contentType,
Expand All @@ -108,19 +154,19 @@ async function getRemoteFileStreamAndMetadata(url) {
}
async function downloadToTemporaryFile(response, baseMetadata) {
// Generate unique temporary file path
const tempFileName = `file-stream-${uuid_1.v4()}`;
const tempFilePath = path_1.join(os_1.tmpdir(), tempFileName);
const tempFileName = `file-stream-${(0, uuid_1.v4)()}`;
const tempFilePath = (0, path_1.join)((0, os_1.tmpdir)(), tempFileName);
// Download to temporary file
const fileStream = fs_1.createWriteStream(tempFilePath);
const fileStream = (0, fs_1.createWriteStream)(tempFilePath);
const webStream = stream_1.Readable.fromWeb(response.body);
try {
await promises_1.pipeline(webStream, fileStream);
await (0, promises_1.pipeline)(webStream, fileStream);
const stats = await fs_1.promises.stat(tempFilePath);
const metadata = {
...baseMetadata,
size: stats.size,
};
const stream = fs_1.createReadStream(tempFilePath);
const stream = (0, fs_1.createReadStream)(tempFilePath);
const cleanup = async () => {
try {
await fs_1.promises.unlink(tempFilePath);
Expand Down
14 changes: 7 additions & 7 deletions platform/dist/index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ export declare const SendConfigEmail: t.PartialC<{
subject: t.StringC;
text: t.StringC;
}>;
export declare type SendConfigEmail = t.TypeOf<typeof SendConfigEmail>;
export type SendConfigEmail = t.TypeOf<typeof SendConfigEmail>;
export declare const SendConfigEmit_required: t.ExactC<t.TypeC<{
raw_event: t.ObjectC;
}>>;
Expand All @@ -27,7 +27,7 @@ export declare const SendConfigEmit: t.IntersectionC<[t.ExactC<t.TypeC<{
}>>, t.PartialC<{
event: t.ObjectC;
}>]>;
export declare type SendConfigEmit = t.TypeOf<typeof SendConfigEmit>;
export type SendConfigEmit = t.TypeOf<typeof SendConfigEmit>;
export declare const HTTP_METHODS: string[];
export declare const SendConfigHTTP: t.IntersectionC<[t.ExactC<t.TypeC<{
method: t.KeyofC<{}>;
Expand All @@ -41,18 +41,18 @@ export declare const SendConfigHTTP: t.IntersectionC<[t.ExactC<t.TypeC<{
headers: t.ObjectC;
params: t.ObjectC;
}>]>;
export declare type SendConfigHTTP = t.TypeOf<typeof SendConfigHTTP>;
export type SendConfigHTTP = t.TypeOf<typeof SendConfigHTTP>;
export declare const SendConfigS3: t.ExactC<t.TypeC<{
bucket: t.StringC;
payload: t.UnionC<[t.StringC, t.ObjectC]>;
prefix: t.StringC;
}>>;
export declare type SendConfigS3 = t.TypeOf<typeof SendConfigS3>;
export type SendConfigS3 = t.TypeOf<typeof SendConfigS3>;
export declare const SendConfigSQL: t.ExactC<t.TypeC<{
payload: t.UnionC<[t.StringC, t.ObjectC]>;
table: t.StringC;
}>>;
export declare type SendConfigSQL = t.TypeOf<typeof SendConfigSQL>;
export type SendConfigSQL = t.TypeOf<typeof SendConfigSQL>;
export declare const SendConfigSnowflake: t.ExactC<t.TypeC<{
account: t.StringC;
database: t.StringC;
Expand All @@ -64,12 +64,12 @@ export declare const SendConfigSnowflake: t.ExactC<t.TypeC<{
stage_name: t.StringC;
user: t.StringC;
}>>;
export declare type SendConfigSnowflake = t.TypeOf<typeof SendConfigSnowflake>;
export type SendConfigSnowflake = t.TypeOf<typeof SendConfigSnowflake>;
export declare const SendConfigSSE: t.ExactC<t.TypeC<{
channel: t.StringC;
payload: t.UnionC<[t.StringC, t.ObjectC]>;
}>>;
export declare type SendConfigSSE = t.TypeOf<typeof SendConfigSSE>;
export type SendConfigSSE = t.TypeOf<typeof SendConfigSSE>;
interface SendFunctionsWrapper {
email: (config: SendConfigEmail) => void;
emit: (config: SendConfigEmit) => void;
Expand Down
2 changes: 1 addition & 1 deletion platform/dist/index.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.$sendConfigRuntimeTypeChecker = exports.$send = exports.$end = exports.END_NEEDLE = exports.$event = exports.sendTypeMap = exports.SendConfigSSE = exports.SendConfigSnowflake = exports.SendConfigSQL = exports.SendConfigS3 = exports.SendConfigHTTP = exports.HTTP_METHODS = exports.SendConfigEmit = exports.SendConfigEmit_optional = exports.SendConfigEmit_required = exports.SendConfigEmail = exports.transformConfigForOauth = exports.axios = void 0;
exports.$sendConfigRuntimeTypeChecker = exports.$send = exports.$end = exports.END_NEEDLE = exports.$event = exports.sendTypeMap = exports.SendConfigSSE = exports.SendConfigSnowflake = exports.SendConfigSQL = exports.SendConfigS3 = exports.SendConfigHTTP = exports.HTTP_METHODS = exports.SendConfigEmit = exports.SendConfigEmit_optional = exports.SendConfigEmit_required = exports.SendConfigEmail = exports.PD_OFFICIAL_GMAIL_OAUTH_CLIENT_ID = exports.DEFAULT_POLLING_SOURCE_TIMER_INTERVAL = exports.sqlProxy = exports.sqlProp = exports.ConfigurationError = exports.getFileStream = exports.getFileStreamAndMetadata = exports.jsonStringifySafe = exports.cloneSafe = exports.transformConfigForOauth = exports.axios = void 0;
const t = require("io-ts");
const axios_1 = require("./axios");
exports.axios = axios_1.default;
Expand Down
12 changes: 6 additions & 6 deletions platform/dist/sql-prop.d.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,25 @@
import { JsonPrimitive } from "type-fest";
import { ExecuteQueryArgs } from "./sql";
export declare type ColumnSchema = {
export type ColumnSchema = {
columnDefault: JsonPrimitive;
dataType: string;
isNullable: boolean;
tableSchema?: string;
};
export declare type TableMetadata = {
export type TableMetadata = {
rowCount?: number;
};
export declare type TableSchema = {
export type TableSchema = {
[columnName: string]: ColumnSchema;
};
export declare type TableInfo = {
export type TableInfo = {
metadata: TableMetadata;
schema: TableSchema;
};
export declare type DbInfo = {
export type DbInfo = {
[tableName: string]: TableInfo;
};
export declare type SqlProp = {
export type SqlProp = {
query: string;
params?: string[];
};
Expand Down
6 changes: 3 additions & 3 deletions platform/dist/sql-proxy.d.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { ExecuteQueryArgs } from "./sql";
export declare type ClientConfiguration = object;
export declare type ProxyArgs = {
export type ClientConfiguration = object;
export type ProxyArgs = {
query: string;
params?: unknown[];
};
export declare type Row = object;
export type Row = object;
declare const _default: {
methods: {
/**
Expand Down
2 changes: 1 addition & 1 deletion platform/dist/sql.d.ts
Original file line number Diff line number Diff line change
@@ -1 +1 @@
export declare type ExecuteQueryArgs = object | string;
export type ExecuteQueryArgs = object | string;
4 changes: 2 additions & 2 deletions platform/lib/axios.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import axios from "axios";
import axios, { AxiosHeaders } from "axios";
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🏁 Script executed:

find . -type f -name "axios.ts" | head -20

Repository: PipedreamHQ/pipedream

Length of output: 88


🏁 Script executed:

cat platform/lib/axios.ts | head -50

Repository: PipedreamHQ/pipedream

Length of output: 1650


🏁 Script executed:

sed -n '160,175p' platform/lib/axios.ts

Repository: PipedreamHQ/pipedream

Length of output: 565


🌐 Web query:

AxiosHeaders set method axios 1.x documentation

💡 Result:

In Axios 1.x, AxiosHeaders#set is documented as the primary way to set/merge headers on an AxiosHeaders instance. (axios.rest)

Signature (overloads)

set(headerName, value, rewrite?: boolean);
set(headerName, value, rewrite?: (this: AxiosHeaders, value: string, name: string, headers: RawAxiosHeaders) => boolean);
set(headers?: RawAxiosHeaders | AxiosHeaders | string, rewrite?: boolean);

(axios.rest)

rewrite behavior

  • false: don’t overwrite if the header already has a value (not undefined)
  • undefined (default): overwrite unless the existing value is false
  • true: always overwrite
  • can also be a function to decide per header/value whether to overwrite (github.com)

Notes

  • Accepts a single header, an object of headers, another AxiosHeaders, or a raw header string (newline-separated).
  • Returns this for chaining. (axios.rest)

Citations:


Use AxiosHeaders.set() for Authorization header

In Axios 1.x, AxiosHeaders.set() is the documented primary way to set headers and properly handles header normalization and merge semantics. Direct property assignment bypasses this logic.

♻️ Proposed change
-      if (!config.headers) config.headers = new AxiosHeaders();
-      config.headers.Authorization = oauthSignature;
+      if (!config.headers) config.headers = new AxiosHeaders();
+      config.headers.set("Authorization", oauthSignature);

Also applies to: 165-168

🤖 Prompt for AI Agents
In `@platform/lib/axios.ts` at line 1, The code currently assigns the
Authorization header directly (bypassing Axios header normalization); change
those assignments to use AxiosHeaders.set() instead: import/keep AxiosHeaders
(already imported) and in the request-building code or request interceptor where
you currently do something like headers.Authorization = `Bearer ${token}` or
config.headers['Authorization'] = value, replace it with new
AxiosHeaders(config.headers).set('Authorization', value) or call
AxiosHeaders.set on the existing AxiosHeaders instance (e.g., const headers =
new AxiosHeaders(config.headers); headers.set('Authorization', `Bearer
${token}`); config.headers = headers). Update all occurrences (including the
spots around the current Authorization assignments referenced) to use
AxiosHeaders.set for proper normalization and merging.

import { AxiosRequestConfig } from "./index";
import * as querystring from "querystring";
import { cloneSafe } from "./utils";
Expand Down Expand Up @@ -164,7 +164,7 @@ function create(config?: AxiosRequestConfig, signConfig?: any) {
axiosInstance.interceptors.request.use(async (config) => {
if (signConfig) {
const oauthSignature = await getOauthSignature(config, signConfig);
if (!config.headers) config.headers = {};
if (!config.headers) config.headers = new AxiosHeaders();
config.headers.Authorization = oauthSignature;
}

Expand Down
68 changes: 66 additions & 2 deletions platform/lib/file-stream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@ export interface FileMetadata {
* @returns a Readable stream of the file content
*/
export async function getFileStream(pathOrUrl: string): Promise<Readable> {
if (isUrl(pathOrUrl)) {
if (isDataUrl(pathOrUrl)) {
return getDataUrlStream(pathOrUrl);
} else if (isUrl(pathOrUrl)) {
const response = await fetch(pathOrUrl);
if (!response.ok || !response.body) {
throw new Error(`Failed to fetch ${pathOrUrl}: ${response.status} ${response.statusText}`);
Expand All @@ -41,7 +43,9 @@ export async function getFileStream(pathOrUrl: string): Promise<Readable> {
* @returns a Readable stream of the file content and its metadata
*/
export async function getFileStreamAndMetadata(pathOrUrl: string): Promise<{ stream: Readable; metadata: FileMetadata }> {
if (isUrl(pathOrUrl)) {
if (isDataUrl(pathOrUrl)) {
return getDataUrlStreamAndMetadata(pathOrUrl);
} else if (isUrl(pathOrUrl)) {
return await getRemoteFileStreamAndMetadata(pathOrUrl);
} else {
return await getLocalFileStreamAndMetadata(pathOrUrl);
Expand All @@ -57,6 +61,66 @@ function isUrl(pathOrUrl: string): boolean {
}
}

function isDataUrl(pathOrUrl: string): boolean {
  // Only the scheme matters here; data URLs always begin with "data:".
  return pathOrUrl.slice(0, "data:".length) === "data:";
}

// Decomposed pieces of an RFC 2397 data URL (data:[<mediatype>][;base64],<data>).
interface ParsedDataUrl {
// Media type portion, e.g. "image/png"; may carry parameters such as a charset.
mediaType: string;
// True when the payload is base64-encoded (";base64" flag was present).
isBase64: boolean;
// Raw payload after the comma — still base64/percent-encoded at this point.
data: string;
}

/**
 * Parses an RFC 2397 data URL into its media type, encoding flag, and payload.
 *
 * Format: data:[<mediatype>][;<param>=<value>]*[;base64],<data>
 *
 * Splits on the first comma rather than using a single regex so that media
 * types with parameters (e.g. "text/plain;charset=UTF-8") are accepted, and
 * applies the RFC 2397 default media type when none is given (e.g. "data:,hi"
 * or "data:;base64,...").
 *
 * @param dataUrl the full data URL string
 * @returns the parsed media type, base64 flag, and still-encoded payload
 * @throws Error when the string is not a well-formed data URL
 */
function parseDataUrl(dataUrl: string): ParsedDataUrl {
  const commaIndex = dataUrl.indexOf(",");
  if (!dataUrl.startsWith("data:") || commaIndex === -1) {
    throw new Error("Invalid data URL format");
  }
  // Everything between "data:" and the first comma: media type + parameters.
  const meta = dataUrl.slice("data:".length, commaIndex);
  const data = dataUrl.slice(commaIndex + 1);
  // ";base64" is only meaningful as the final token before the comma.
  const isBase64 = /;base64$/i.test(meta);
  const mediaType = (isBase64
    ? meta.replace(/;base64$/i, "")
    : meta) || "text/plain;charset=US-ASCII"; // RFC 2397 default
  return {
    mediaType,
    isBase64,
    data,
  };
}

function getDataUrlStream(dataUrl: string): Readable {
const parsed = parseDataUrl(dataUrl);
const buffer = parsed.isBase64
? Buffer.from(parsed.data, "base64")
: Buffer.from(decodeURIComponent(parsed.data), "utf-8");
return Readable.from(buffer);
}

function getDataUrlStreamAndMetadata(dataUrl: string): { stream: Readable; metadata: FileMetadata } {
const parsed = parseDataUrl(dataUrl);
const buffer = parsed.isBase64
? Buffer.from(parsed.data, "base64")
: Buffer.from(decodeURIComponent(parsed.data), "utf-8");

const ext = mime.extension(parsed.mediaType);
const name = ext
? `file.${ext}`
: "file";

const metadata: FileMetadata = {
size: buffer.length,
contentType: parsed.mediaType || undefined,
name,
};
Comment on lines +74 to +116
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Data URL parser rejects valid media-type parameters and loses default type.

The current regex only allows ;base64 and fails for valid URLs like data:text/plain;charset=UTF-8,hello, and it treats data:;base64,... as an empty media type instead of defaulting to text/plain;charset=US-ASCII. That breaks valid data URLs and metadata extraction.

🐛 Proposed fix (more spec‑compliant parsing)
 function parseDataUrl(dataUrl: string): ParsedDataUrl {
   // Format: data:[<mediatype>][;base64],<data>
-  const match = dataUrl.match(/^data:([^;,]*)?(?:;(base64))?,(.*)$/);
-  if (!match) {
-    throw new Error("Invalid data URL format");
-  }
-  const [
-    ,
-    mediaType = "text/plain;charset=US-ASCII",
-    base64Flag,
-    data,
-  ] = match;
-  return {
-    mediaType,
-    isBase64: base64Flag === "base64",
-    data,
-  };
+  if (!dataUrl.startsWith("data:")) {
+    throw new Error("Invalid data URL format");
+  }
+  const commaIndex = dataUrl.indexOf(",");
+  if (commaIndex === -1) {
+    throw new Error("Invalid data URL format");
+  }
+  const meta = dataUrl.slice(5, commaIndex); // after "data:"
+  const data = dataUrl.slice(commaIndex + 1);
+  const isBase64 = /;base64$/i.test(meta);
+  const mediaTypeRaw = isBase64 ? meta.replace(/;base64$/i, "") : meta;
+  const mediaType = mediaTypeRaw ? mediaTypeRaw : "text/plain;charset=US-ASCII";
+  return {
+    mediaType,
+    isBase64,
+    data,
+  };
 }
🤖 Prompt for AI Agents
In `@platform/lib/file-stream.ts` around lines 74 - 116, The data URL parsing in
parseDataUrl is too strict: update parseDataUrl to accept full media-type with
optional parameters (e.g., "text/plain;charset=UTF-8") and to detect a "base64"
parameter anywhere in the parameter list instead of only allowing ";base64"
directly; if the mediatype portion is empty (e.g., "data:,...") return the
default "text/plain;charset=US-ASCII"; return the mediaType string including its
parameters (excluding the "base64" token) and an isBase64 boolean; ensure
getDataUrlStream and getDataUrlStreamAndMetadata use parseDataUrl's mediaType
and isBase64 (decode percent-encoding only for non-base64) so metadata
(contentType and size and filename extension via mime.extension) remains
correct.


return {
stream: Readable.from(buffer),
metadata,
};
}

async function safeStat(path: string): Promise<Stats> {
try {
return await fs.stat(path);
Expand Down
4 changes: 2 additions & 2 deletions platform/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@pipedream/platform",
"version": "3.1.1",
"version": "3.2.0",
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🏁 Script executed:

cat -n platform/package.json

Repository: PipedreamHQ/pipedream

Length of output: 1128


🏁 Script executed:

find platform -name "jest.config.*" -o -name "jest.setup.*" | head -20

Repository: PipedreamHQ/pipedream

Length of output: 47


🏁 Script executed:

rg "ts-jest" platform/ 2>/dev/null || echo "No ts-jest references found"

Repository: PipedreamHQ/pipedream

Length of output: 92


🏁 Script executed:

rg "preset" platform/ --type json 2>/dev/null || echo "No preset configuration found"

Repository: PipedreamHQ/pipedream

Length of output: 94


🏁 Script executed:

ls -la | grep -E "jest|test"

Repository: PipedreamHQ/pipedream

Length of output: 134


🏁 Script executed:

find platform -type f \( -name "*.test.ts" -o -name "*.spec.ts" -o -name "*.test.js" -o -name "*.spec.js" \) | head -20

Repository: PipedreamHQ/pipedream

Length of output: 47


🏁 Script executed:

cat jest.config.js 2>/dev/null || cat jest.config.json 2>/dev/null || echo "No root jest config found"

Repository: PipedreamHQ/pipedream

Length of output: 556


🏁 Script executed:

head -60 package.json

Repository: PipedreamHQ/pipedream

Length of output: 2235


🌐 Web query:

ts-jest 29.1.2 TypeScript 4.9 compatibility support

💡 Result:

ts-jest 29.1.2 (released Jan 22, 2024) should be compatible with TypeScript 4.9, because ts-jest 29.x declares a TypeScript peer dependency of >=4.3 <6, which includes 4.9.x. [1][2]

Sources
[1] https://app.unpkg.com/ts-jest%4029.2.2/files/package.json
[2] https://github.com/kulshekhar/ts-jest/tags?after=v29.1.5


🏁 Script executed:

cat package.json | grep -A 100 '"devDependencies"' | grep -E "ts-jest|typescript" | head -10

Repository: PipedreamHQ/pipedream

Length of output: 246


Ensure ts-jest is listed in platform/package.json devDependencies

TypeScript 4.9.5 is compatible with ts-jest 29.1.2 (ts-jest 29.x supports TypeScript >=4.3 <6). However, ts-jest is missing from platform's devDependencies—it's currently only in the root package.json. Add it to platform to ensure explicit dependency declaration and consistency in the monorepo structure.

Also note the TypeScript version mismatch: root requires >=5.5.0 <5.7.0 while platform specifies ^4.9.5. Align these versions or document the intentional difference.

🤖 Prompt for AI Agents
In `@platform/package.json` at line 3, Add ts-jest to platform/package.json's
devDependencies (e.g., "ts-jest": "29.1.2") so the platform package declares the
Jest TypeScript transformer explicitly; update the "devDependencies" object in
platform/package.json to include that entry and run npm/yarn install. Also
address the TypeScript version mismatch between platform's "typescript":
"^4.9.5" and the root's ">=5.5.0 <5.7.0" by either aligning platform's
"typescript" to the root range or adding a comment/README note in the platform
package explaining the intentional divergence.

"description": "Pipedream platform globals (typing and runtime type checking)",
"homepage": "https://pipedream.com",
"main": "dist/index.js",
Expand Down Expand Up @@ -29,6 +29,6 @@
"husky": "^3.0.0",
"jest": "^29.1.2",
"type-fest": "^4.15.0",
"typescript": "^3.5.3"
"typescript": "^4.9.5"
}
}
1 change: 1 addition & 0 deletions platform/tsconfig.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
"declaration": true,
"module": "commonjs",
"outDir": "./dist",
"skipLibCheck": true,
"strictNullChecks": true,
"target": "es2018"
},
Expand Down
Loading