@@ -2,12 +2,19 @@ import fs from 'node:fs';
 import path from 'path';
 import { spawnSync } from 'child_process';
 import spectral from '@stoplight/spectral-core';
-import { Table, writeParquet, WriterPropertiesBuilder } from 'parquet-wasm/esm/parquet_wasm.js';
 import { tableFromJSON, tableToIPC } from 'apache-arrow';
 import config from '../config.js';
 import { runMetricCollectionJob } from '../metricCollection.js';
+import { createRequire } from 'module';
 
 const { Spectral } = spectral;
+
+// Use createRequire to load our CommonJS wrapper for parquet-wasm
+// This allows us to use parquet-wasm 0.7.0 from Node.js ESM
+// See parquet-wasm-wrapper.cjs for details
+const require = createRequire(import.meta.url);
+const { Compression, Table, writeParquet, WriterPropertiesBuilder } = require('./parquet-wasm-wrapper.cjs');
+
 const args = process.argv.slice(2);
 const oasFilePath = args[0];
 
@@ -53,19 +60,42 @@ runMetricCollectionJob(
   new Spectral()
 )
   .then((results) => {
-    console.log('Writing results');
-    const table = tableFromJSON(results.metrics);
-    const wasmTable = Table.fromIPCStream(tableToIPC(table, 'stream'));
-    const parquetUint8Array = writeParquet(
-      wasmTable,
-      new WriterPropertiesBuilder().setCompression(2).build() // 2 = GZIP compression
-    );
-    fs.writeFileSync(config.defaultMetricCollectionResultsFilePath, parquetUint8Array);
-    fs.writeFileSync(path.join(config.defaultOutputsDir, 'warning-count.txt'), results.warnings.count.toString());
-
-    fs.writeFileSync(
-      path.join(config.defaultOutputsDir, 'warning-violations.json'),
-      JSON.stringify(results.warnings.violations, null, 2)
-    );
+    console.log('Writing results to parquet file...');
+
+    try {
+      console.log('Converting metrics to Arrow table...');
+      const table = tableFromJSON(results.metrics);
+
+      console.log('Converting Arrow table to WASM table...');
+      const wasmTable = Table.fromIPCStream(tableToIPC(table, 'stream'));
+
+      console.log('Writing parquet file with GZIP compression...');
+      const parquetUint8Array = writeParquet(
+        wasmTable,
+        new WriterPropertiesBuilder().setCompression(Compression.GZIP).build()
+      );
+
+      console.log(`Saving parquet file to: ${config.defaultMetricCollectionResultsFilePath}`);
+      fs.writeFileSync(config.defaultMetricCollectionResultsFilePath, parquetUint8Array);
+
+      console.log('Writing warning count...');
+      fs.writeFileSync(path.join(config.defaultOutputsDir, 'warning-count.txt'), results.warnings.count.toString());
+
+      console.log('Writing warning violations...');
+      fs.writeFileSync(
+        path.join(config.defaultOutputsDir, 'warning-violations.json'),
+        JSON.stringify(results.warnings.violations, null, 2)
+      );
+
+      console.log('Metric collection completed successfully!');
+    } catch (error) {
+      console.error('Error writing results:', error.message);
+      console.error('Stack trace:', error.stack);
+      process.exit(1);
+    }
   })
-  .catch((error) => console.error(error.message));
+  .catch((error) => {
+    console.error('Error during metric collection:', error.message);
+    console.error('Stack trace:', error.stack);
+    process.exit(1);
+  });
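
The wrapper file ./parquet-wasm-wrapper.cjs is referenced above but not part of this diff. A minimal sketch of what it could contain, assuming parquet-wasm's exports map serves its CommonJS Node build to require() callers; the names it must re-export (Compression, Table, writeParquet, WriterPropertiesBuilder) are the ones the diff destructures, and the exact entry point may differ by release:

// parquet-wasm-wrapper.cjs
// Hypothetical sketch, not the repo's actual wrapper: re-export the
// CommonJS Node build of parquet-wasm, which the package's exports map
// should resolve for require() callers. A real wrapper may pin a
// specific subpath instead of the bare package name.
module.exports = require('parquet-wasm');

Loading the wrapper through createRequire(import.meta.url) keeps the rest of the script as native ESM while presumably avoiding whatever kept the previous parquet-wasm/esm/parquet_wasm.js import from initializing under plain Node.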