@@ -116,10 +116,9 @@ export class DataSchemaCompiler {
116116 const { compilerId } = this ;
117117
118118 if ( ! transpilationNative ) {
119- const wc = getEnv ( 'transpilationWorkerThreadsCount' ) ;
120119 this . workerPool = workerpool . pool (
121120 path . join ( __dirname , 'transpilers/transpiler_worker' ) ,
122- wc > 0 ? { maxWorkers : wc } : undefined ,
121+ { maxWorkers : transpilationNativeThreadsCount } ,
123122 ) ;
124123 }
125124
@@ -128,8 +127,6 @@ export class DataSchemaCompiler {
128127 * @returns {Promise<*> }
129128 */
130129 const transpile = async ( stage ) => {
131- let results ;
132-
133130 const cubeNames = Object . keys ( this . cubeDictionary . byId ) ;
134131 // We need only cubes and all their member names for transpiling.
135132 // Cubes don't change during transpiling, but they are changed during the compilation phase,
@@ -150,6 +147,27 @@ export class DataSchemaCompiler {
150147 // Transpilers are the same for all files within phase.
151148 const transpilerNames = this . transpilers . map ( t => t . constructor . name ) ;
152149
150+ const nonJsFilesTasks = toCompile . filter ( file => ! file . fileName . endsWith ( '.js' ) )
151+ . map ( f => this . transpileFile ( f , errorsReport , { transpilerNames, compilerId } ) ) ;
152+
153+ const jsFiles = toCompile . filter ( file => file . fileName . endsWith ( '.js' ) ) ;
154+ let JsFilesTasks = [ ] ;
155+ let jsChunks = [ ] ;
156+
157+ if ( jsFiles . length > 0 ) {
158+ if ( jsFiles . length < transpilationNativeThreadsCount * transpilationNativeThreadsCount ) {
159+ jsChunks = [ jsFiles ] ;
160+ } else {
161+ const baseSize = Math . floor ( jsFiles . length / transpilationNativeThreadsCount ) ;
162+ for ( let i = 0 ; i < transpilationNativeThreadsCount ; i ++ ) {
163+ // For the last part, we take the remaining files so we don't lose the extra ones.
164+ const start = i * baseSize ;
165+ const end = ( i === transpilationNativeThreadsCount - 1 ) ? jsFiles . length : start + baseSize ;
166+ jsChunks . push ( jsFiles . slice ( start , end ) ) ;
167+ }
168+ }
169+ }
170+
153171 if ( transpilationNative ) {
154172 // Warming up swc compiler cache
155173 const dummyFile = {
@@ -159,34 +177,13 @@ export class DataSchemaCompiler {
159177
160178 await this . transpileJsFile ( dummyFile , errorsReport , { cubeNames, cubeSymbols, transpilerNames, contextSymbols : CONTEXT_SYMBOLS , compilerId, stage } ) ;
161179
162- const nonJsFilesTasks = toCompile . filter ( file => ! file . fileName . endsWith ( '.js' ) )
163- . map ( f => this . transpileFile ( f , errorsReport , { transpilerNames, compilerId } ) ) ;
164-
165- const jsFiles = toCompile . filter ( file => file . fileName . endsWith ( '.js' ) ) ;
166- let JsFilesTasks = [ ] ;
167-
168- if ( jsFiles . length > 0 ) {
169- let jsChunks ;
170- if ( jsFiles . length < transpilationNativeThreadsCount * transpilationNativeThreadsCount ) {
171- jsChunks = [ jsFiles ] ;
172- } else {
173- const baseSize = Math . floor ( jsFiles . length / transpilationNativeThreadsCount ) ;
174- jsChunks = [ ] ;
175- for ( let i = 0 ; i < transpilationNativeThreadsCount ; i ++ ) {
176- // For the last part, we take the remaining files so we don't lose the extra ones.
177- const start = i * baseSize ;
178- const end = ( i === transpilationNativeThreadsCount - 1 ) ? jsFiles . length : start + baseSize ;
179- jsChunks . push ( jsFiles . slice ( start , end ) ) ;
180- }
181- }
182- JsFilesTasks = jsChunks . map ( chunk => this . transpileJsFilesBulk ( chunk , errorsReport , { transpilerNames, compilerId } ) ) ;
183- }
184-
185- results = ( await Promise . all ( [ ...nonJsFilesTasks , ...JsFilesTasks ] ) ) . flat ( ) ;
180+ JsFilesTasks = jsChunks . map ( chunk => this . transpileJsFilesBulk ( chunk , errorsReport , { transpilerNames, compilerId } ) ) ;
186181 } else {
187- results = await Promise . all ( toCompile . map ( f => this . transpileFile ( f , errorsReport , { cubeNames, cubeSymbols, transpilerNames } ) ) ) ;
182+ JsFilesTasks = jsChunks . map ( chunk => this . transpileJsFilesBulk ( chunk , errorsReport , { cubeNames, cubeSymbols, transpilerNames, compilerId , stage } ) ) ;
188183 }
189184
185+ const results = ( await Promise . all ( [ ...nonJsFilesTasks , ...JsFilesTasks ] ) ) . flat ( ) ;
186+
190187 return results . filter ( f => ! ! f ) ;
191188 } ;
192189
@@ -260,42 +257,62 @@ export class DataSchemaCompiler {
260257 }
261258 }
262259
263- /**
264- * Right now it is used only for transpilation in native,
265- * so no checks for transpilation type inside this method
266- */
267260 async transpileJsFilesBulk ( files , errorsReport , { cubeNames, cubeSymbols, contextSymbols, transpilerNames, compilerId, stage } ) {
268- // for bulk processing this data may be optimized even more by passing transpilerNames, compilerId only once for a bulk
269- // but this requires more complex logic to be implemented in the native side.
270- // And comparing to the file content sizes, a few bytes of JSON data is not a big deal here
271- const reqDataArr = files . map ( file => ( {
272- fileName : file . fileName ,
273- fileContent : file . content ,
274- transpilers : transpilerNames ,
275- compilerId,
276- ...( cubeNames && {
277- metaData : {
278- cubeNames,
279- cubeSymbols,
280- contextSymbols,
281- stage
282- } ,
283- } ) ,
284- } ) ) ;
285- const res = await transpileJs ( reqDataArr ) ;
286-
287- return files . map ( ( file , index ) => {
288- errorsReport . inFile ( file ) ;
289- if ( ! res [ index ] ) { // This should not happen in theory but just to be safe
290- errorsReport . error ( `No transpilation result received for the file ${ file . fileName } .` ) ;
291- return undefined ;
292- }
293- errorsReport . addErrors ( res [ index ] . errors ) ;
294- errorsReport . addWarnings ( res [ index ] . warnings ) ;
295- errorsReport . exitFile ( ) ;
261+ const transpilationNative = getEnv ( 'transpilationNative' ) ;
296262
297- return { ...file , content : res [ index ] . code } ;
298- } ) ;
263+ if ( transpilationNative ) {
264+ // for bulk processing this data may be optimized even more by passing transpilerNames, compilerId only once for a bulk
265+ // but this requires more complex logic to be implemented on the native side.
266+ // And compared to the file content sizes, a few bytes of JSON data are not a big deal here
267+ const reqDataArr = files . map ( file => ( {
268+ fileName : file . fileName ,
269+ fileContent : file . content ,
270+ transpilers : transpilerNames ,
271+ compilerId,
272+ ...( cubeNames && {
273+ metaData : {
274+ cubeNames,
275+ cubeSymbols,
276+ contextSymbols,
277+ stage
278+ } ,
279+ } ) ,
280+ } ) ) ;
281+ const res = await transpileJs ( reqDataArr ) ;
282+
283+ return files . map ( ( file , index ) => {
284+ errorsReport . inFile ( file ) ;
285+ if ( ! res [ index ] ) { // This should not happen in theory but just to be safe
286+ errorsReport . error ( `No transpilation result received for the file ${ file . fileName } .` ) ;
287+ return undefined ;
288+ }
289+ errorsReport . addErrors ( res [ index ] . errors ) ;
290+ errorsReport . addWarnings ( res [ index ] . warnings ) ;
291+ errorsReport . exitFile ( ) ;
292+
293+ return { ...file , content : res [ index ] . code } ;
294+ } ) ;
295+ } else {
296+ const request = {
297+ files,
298+ transpilers : transpilerNames ,
299+ cubeNames,
300+ cubeSymbols,
301+ } ;
302+
303+ const res = await this . workerPool . exec ( 'transpile' , [ request ] ) ;
304+ errorsReport . addErrors ( res . errors ) ;
305+ errorsReport . addWarnings ( res . warnings ) ;
306+
307+ return files . map ( ( file , index ) => {
308+ if ( ! res . content [ index ] && res . content [ index ] !== '' ) { // This should not happen in theory but just to be safe
309+ errorsReport . error ( `No transpilation result received for the file ${ file . fileName } .` ) ;
310+ return undefined ;
311+ }
312+
313+ return { ...file , content : res . content [ index ] } ;
314+ } ) ;
315+ }
299316 }
300317
301318 async transpileJsFile ( file , errorsReport , { cubeNames, cubeSymbols, contextSymbols, transpilerNames, compilerId, stage } ) {
@@ -325,8 +342,7 @@ export class DataSchemaCompiler {
325342 return { ...file , content : res [ 0 ] . code } ;
326343 } else {
327344 const data = {
328- fileName : file . fileName ,
329- content : file . content ,
345+ files : [ file ] ,
330346 transpilers : transpilerNames ,
331347 cubeNames,
332348 cubeSymbols,
@@ -336,7 +352,7 @@ export class DataSchemaCompiler {
336352 errorsReport . addErrors ( res . errors ) ;
337353 errorsReport . addWarnings ( res . warnings ) ;
338354
339- return { ...file , content : res . content } ;
355+ return { ...file , content : res . content [ 0 ] } ;
340356 }
341357 } catch ( e ) {
342358 if ( e . toString ( ) . indexOf ( 'SyntaxError' ) !== - 1 ) {
0 commit comments