
Commit 83d55ea

fix(ipa): fix import issue with parquet-wasm package

1 parent b677ff6

2 files changed (+81, -16 lines)

tools/spectral/ipa/metrics/scripts/parquet-wasm-wrapper.cjs

Lines changed: 35 additions & 0 deletions
@@ -0,0 +1,35 @@
+/**
+ * CommonJS wrapper for parquet-wasm
+ *
+ * This wrapper allows us to use parquet-wasm 0.7.0 from ES modules in Node.js.
+ *
+ * Background: parquet-wasm 0.7.0 has package.json with "type": "module" which makes
+ * Node.js treat .js files as ES modules. However, the node build uses CommonJS syntax.
+ *
+ * Solution: Since this file has a .cjs extension, Node.js always treats it as CommonJS,
+ * allowing us to use require(). We then use createRequire() in our ES module to load this wrapper.
+ *
+ * See: https://github.com/kylebarron/parquet-wasm/issues/798
+ */
+
+const path = require('path');
+const fs = require('fs');
+
+// Read and eval the parquet-wasm node build
+// We use eval because require() won't work due to the "type": "module" in package.json
+const parquetWasmPath = path.resolve(
+  __dirname,
+  '../../../../../node_modules/parquet-wasm/node/parquet_wasm.js',
+);
+
+const code = fs.readFileSync(parquetWasmPath, 'utf8');
+const moduleExports = {};
+const moduleObj = { exports: moduleExports };
+
+// Execute the code in a function scope with CommonJS globals
+const fn = new Function('exports', 'require', 'module', '__filename', '__dirname', code);
+fn(moduleExports, require, moduleObj, parquetWasmPath, path.dirname(parquetWasmPath));
+
+// Re-export everything
+module.exports = moduleObj.exports;
+
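For reference, this wrapper is meant to be loaded from an ES module via createRequire(), which is exactly what runMetricCollection.js below does. A minimal consumption sketch (the file name load-parquet-wasm.mjs is hypothetical, and it assumes the module sits in the same directory as the wrapper):

```js
// load-parquet-wasm.mjs — hypothetical example file, assumed to live next to the wrapper
import { createRequire } from 'module';

// createRequire() gives this ES module a CommonJS-style require() bound to its own URL
const require = createRequire(import.meta.url);

// The .cjs extension forces CommonJS semantics, so require() works here even though
// parquet-wasm's package.json declares "type": "module"
const { Compression, Table, writeParquet, WriterPropertiesBuilder } = require('./parquet-wasm-wrapper.cjs');

console.log(typeof writeParquet); // prints 'function' if the wrapper loaded correctly
```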

tools/spectral/ipa/metrics/scripts/runMetricCollection.js

Lines changed: 46 additions & 16 deletions
@@ -2,12 +2,19 @@ import fs from 'node:fs';
 import path from 'path';
 import { spawnSync } from 'child_process';
 import spectral from '@stoplight/spectral-core';
-import { Table, writeParquet, WriterPropertiesBuilder } from 'parquet-wasm/esm/parquet_wasm.js';
 import { tableFromJSON, tableToIPC } from 'apache-arrow';
 import config from '../config.js';
 import { runMetricCollectionJob } from '../metricCollection.js';
+import { createRequire } from 'module';
 
 const { Spectral } = spectral;
+
+// Use createRequire to load our CommonJS wrapper for parquet-wasm
+// This allows us to use parquet-wasm 0.7.0 from Node.js ESM
+// See parquet-wasm-wrapper.cjs for details
+const require = createRequire(import.meta.url);
+const { Compression, Table, writeParquet, WriterPropertiesBuilder } = require('./parquet-wasm-wrapper.cjs');
+
 const args = process.argv.slice(2);
 const oasFilePath = args[0];
 

@@ -53,19 +60,42 @@ runMetricCollectionJob(
   new Spectral()
 )
   .then((results) => {
-    console.log('Writing results');
-    const table = tableFromJSON(results.metrics);
-    const wasmTable = Table.fromIPCStream(tableToIPC(table, 'stream'));
-    const parquetUint8Array = writeParquet(
-      wasmTable,
-      new WriterPropertiesBuilder().setCompression(2).build() // 2 = GZIP compression
-    );
-    fs.writeFileSync(config.defaultMetricCollectionResultsFilePath, parquetUint8Array);
-    fs.writeFileSync(path.join(config.defaultOutputsDir, 'warning-count.txt'), results.warnings.count.toString());
-
-    fs.writeFileSync(
-      path.join(config.defaultOutputsDir, 'warning-violations.json'),
-      JSON.stringify(results.warnings.violations, null, 2)
-    );
+    console.log('Writing results to parquet file...');
+
+    try {
+      console.log('Converting metrics to Arrow table...');
+      const table = tableFromJSON(results.metrics);
+
+      console.log('Converting Arrow table to WASM table...');
+      const wasmTable = Table.fromIPCStream(tableToIPC(table, 'stream'));
+
+      console.log('Writing parquet file with GZIP compression...');
+      const parquetUint8Array = writeParquet(
+        wasmTable,
+        new WriterPropertiesBuilder().setCompression(Compression.GZIP).build()
+      );
+
+      console.log(`Saving parquet file to: ${config.defaultMetricCollectionResultsFilePath}`);
+      fs.writeFileSync(config.defaultMetricCollectionResultsFilePath, parquetUint8Array);
+
+      console.log('Writing warning count...');
+      fs.writeFileSync(path.join(config.defaultOutputsDir, 'warning-count.txt'), results.warnings.count.toString());
+
+      console.log('Writing warning violations...');
+      fs.writeFileSync(
+        path.join(config.defaultOutputsDir, 'warning-violations.json'),
+        JSON.stringify(results.warnings.violations, null, 2)
+      );
+
+      console.log('Metric collection completed successfully!');
+    } catch (error) {
+      console.error('Error writing results:', error.message);
+      console.error('Stack trace:', error.stack);
+      process.exit(1);
+    }
   })
-  .catch((error) => console.error(error.message));
+  .catch((error) => {
+    console.error('Error during metric collection:', error.message);
+    console.error('Stack trace:', error.stack);
+    process.exit(1);
+  });
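Taken together, the write path is a JSON → Arrow → parquet-wasm pipeline: tableFromJSON() builds an Arrow table, tableToIPC() serializes it to an IPC stream, Table.fromIPCStream() hands it to the WASM side, and writeParquet() produces the parquet bytes. A standalone sketch of that pipeline under the same wrapper setup (the sample rows and output file name are illustrative only, not the real metrics schema):

```js
// write-sample-parquet.mjs — illustrative sketch; sample rows and output path are made up
import fs from 'node:fs';
import { createRequire } from 'module';
import { tableFromJSON, tableToIPC } from 'apache-arrow';

const require = createRequire(import.meta.url);
const { Compression, Table, writeParquet, WriterPropertiesBuilder } = require('./parquet-wasm-wrapper.cjs');

// 1. Build an Arrow table from plain JS objects
const arrowTable = tableFromJSON([
  { ruleName: 'example-rule-a', warningCount: 3 },
  { ruleName: 'example-rule-b', warningCount: 1 },
]);

// 2. Serialize to an Arrow IPC stream and hand it to the parquet-wasm Table
const wasmTable = Table.fromIPCStream(tableToIPC(arrowTable, 'stream'));

// 3. Write parquet bytes with GZIP compression, mirroring the script above
const parquetBytes = writeParquet(
  wasmTable,
  new WriterPropertiesBuilder().setCompression(Compression.GZIP).build()
);

fs.writeFileSync('sample-metrics.parquet', parquetBytes);
console.log(`Wrote ${parquetBytes.length} bytes to sample-metrics.parquet`);
```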
