Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 16 additions & 3 deletions packages/cubejs-backend-shared/src/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -228,9 +228,19 @@ const variables: Record<string, (...args: any) => any> = {
nativeOrchestrator: () => get('CUBEJS_TESSERACT_ORCHESTRATOR')
.default('true')
.asBoolStrict(),
transpilationWorkerThreads: () => get('CUBEJS_TRANSPILATION_WORKER_THREADS')
.default('true')
.asBoolStrict(),
/**
 * CUBEJS_TRANSPILATION_WORKER_THREADS is deprecated: worker-thread
 * transpilation is now always enabled. The variable is still read so that
 * users who explicitly set it to `false` get a visible warning instead of
 * a silent no-op.
 *
 * @returns always `true`, regardless of the env value (by design).
 */
transpilationWorkerThreads: () => {
  // Never reassigned — `const`, not `let`.
  const enabled = get('CUBEJS_TRANSPILATION_WORKER_THREADS')
    .default('true')
    .asBoolStrict();

  if (!enabled) {
    console.warn(
      'Worker thread transpilation is enabled by default and cannot be disabled with CUBEJS_TRANSPILATION_WORKER_THREADS.'
    );
  }

  // The flag is intentionally ignored; only the warning above depends on it.
  return true;
},
allowNonStrictDateRangeMatching: () => get('CUBEJS_PRE_AGGREGATIONS_ALLOW_NON_STRICT_DATE_RANGE_MATCH')
.default('true')
.asBoolStrict(),
Expand Down Expand Up @@ -2186,6 +2196,9 @@ export function getEnv<T extends keyof Vars>(key: T, opts?: Parameters<Vars[T]>)
);
}

// Read the flag once at module load so the deprecation warning for
// CUBEJS_TRANSPILATION_WORKER_THREADS=false is emitted immediately at startup.
getEnv('transpilationWorkerThreads');

/**
 * Detects whether the process is running inside the official Docker image,
 * based on the CUBEJS_DOCKER_IMAGE_TAG env variable baked into the image.
 *
 * @returns `true` when the tag variable is set to a non-empty string.
 */
export function isDockerImage(): boolean {
  const imageTag = process.env.CUBEJS_DOCKER_IMAGE_TAG;
  return imageTag !== undefined && imageTag !== '';
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,6 @@ import fs from 'fs';
import os from 'os';
import path from 'path';
import syntaxCheck from 'syntax-error';
import { parse } from '@babel/parser';
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good side effect: the Cube Cloud runtime will no longer load Babel.

import babelGenerator from '@babel/generator';
import babelTraverse from '@babel/traverse';
import R from 'ramda';
import workerpool from 'workerpool';
import { LRUCache } from 'lru-cache';
Expand Down Expand Up @@ -275,12 +272,11 @@ export class DataSchemaCompiler {
const errorsReport = new ErrorReporter(null, [], this.errorReportOptions);
this.errorsReporter = errorsReport;

const transpilationWorkerThreads = getEnv('transpilationWorkerThreads');
const transpilationNative = getEnv('transpilationNative');
const transpilationNativeThreadsCount = getThreadsCount();
const { compilerId } = this;

if (transpilationWorkerThreads) {
if (!transpilationNative) {
const wc = getEnv('transpilationWorkerThreadsCount');
this.workerPool = workerpool.pool(
path.join(__dirname, 'transpilers/transpiler_worker'),
Expand All @@ -292,11 +288,10 @@ export class DataSchemaCompiler {
let cubeNames: string[] = [];
let cubeSymbols: Record<string, Record<string, boolean>> = {};
let transpilerNames: string[] = [];
let results: (FileContent | undefined)[];

if (transpilationNative || transpilationWorkerThreads) {
({ cubeNames, cubeSymbols, transpilerNames } = this.prepareTranspileSymbols());
}
({ cubeNames, cubeSymbols, transpilerNames } = this.prepareTranspileSymbols());

let results: (FileContent | undefined)[];

if (transpilationNative) {
const jsFiles = originalJsFiles;
Expand Down Expand Up @@ -325,28 +320,25 @@ export class DataSchemaCompiler {
.map(f => this.transpileJinjaFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames }));

results = (await Promise.all([...jsFilesTasks, ...yamlFilesTasks, ...jinjaFilesTasks])).flat();
} else if (transpilationWorkerThreads) {
results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
} else {
results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, {})));
results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
}

return results.filter(f => !!f) as FileContent[];
};

const transpilePhase = async (stage: CompileStage): Promise<FileContent[]> => {
let cubeNames: string[] = [];
let cubeSymbols: Record<string, Record<string, boolean>> = {};
let transpilerNames: string[] = [];
let results: (FileContent | undefined)[];

if (toCompile.length === 0) {
return [];
}

if (transpilationNative || transpilationWorkerThreads) {
({ cubeNames, cubeSymbols, transpilerNames } = this.prepareTranspileSymbols());
}
let cubeNames: string[] = [];
let cubeSymbols: Record<string, Record<string, boolean>> = {};
let transpilerNames: string[] = [];

({ cubeNames, cubeSymbols, transpilerNames } = this.prepareTranspileSymbols());

// After the first phase all files are with JS source code: original or transpiled

Expand All @@ -363,10 +355,8 @@ export class DataSchemaCompiler {
const jsFilesTasks = jsChunks.map(chunk => this.transpileJsFilesNativeBulk(chunk, errorsReport, { transpilerNames, compilerId }));

results = (await Promise.all(jsFilesTasks)).flat();
} else if (transpilationWorkerThreads) {
results = await Promise.all(toCompile.map(f => this.transpileJsFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
} else {
results = await Promise.all(toCompile.map(f => this.transpileJsFile(f, errorsReport, {})));
results = await Promise.all(toCompile.map(f => this.transpileJsFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
}

return results.filter(f => !!f) as FileContent[];
Expand Down Expand Up @@ -520,7 +510,7 @@ export class DataSchemaCompiler {
errorsReport,
{ cubeNames: [], cubeSymbols: {}, transpilerNames: [], contextSymbols: {}, compilerId: this.compilerId, stage: 0 }
).then(() => undefined);
} else if (transpilationWorkerThreads && this.workerPool) {
} else if (this.workerPool) {
this.workerPool.terminate();
}
});
Expand Down Expand Up @@ -701,7 +691,7 @@ export class DataSchemaCompiler {
errorsReport.exitFile();

return { ...file, content: res[0].code };
} else if (getEnv('transpilationWorkerThreads')) {
} else {
const data = {
fileName: file.fileName,
content: file.content,
Expand All @@ -715,25 +705,6 @@ export class DataSchemaCompiler {
errorsReport.addWarnings(res.warnings);

return { ...file, content: res.content };
} else {
const ast = parse(
file.content,
{
sourceFilename: file.fileName,
sourceType: 'module',
plugins: ['objectRestSpread'],
},
);

errorsReport.inFile(file);
this.transpilers.forEach((t) => {
babelTraverse(ast, t.traverseObject(errorsReport));
});
errorsReport.exitFile();

const content = babelGenerator(ast, {}, file.content).code;

return { ...file, content };
}
} catch (e: any) {
if (e.toString().indexOf('SyntaxError') !== -1) {
Expand Down Expand Up @@ -778,7 +749,7 @@ export class DataSchemaCompiler {
this.compiledYamlCache.set(cacheKey, res[0].code);

return { ...file, content: res[0].code };
} else if (getEnv('transpilationWorkerThreads')) {
} else {
const data = {
fileName: file.fileName,
content: file.content,
Expand All @@ -794,12 +765,6 @@ export class DataSchemaCompiler {
this.compiledYamlCache.set(cacheKey, res.content);

return { ...file, content: res.content };
} else {
const transpiledFile = this.yamlCompiler.transpileYamlFile(file, errorsReport);

this.compiledYamlCache.set(cacheKey, transpiledFile?.content || '');

return transpiledFile;
}
}

Expand Down
Loading