diff --git a/packages/cubejs-backend-shared/src/PerfTracker.ts b/packages/cubejs-backend-shared/src/PerfTracker.ts
new file mode 100644
index 0000000000000..acbdb2488a478
--- /dev/null
+++ b/packages/cubejs-backend-shared/src/PerfTracker.ts
@@ -0,0 +1,88 @@
+import { performance, PerformanceObserver } from 'perf_hooks';
+
+interface PerfMetric {
+  count: number;
+  totalTime: number;
+  avgTime: number;
+}
+
+interface PerfStats {
+  [key: string]: PerfMetric;
+}
+
+class PerfTracker {
+  private metrics: PerfStats = {};
+
+  private globalMetric: string | null = null;
+
+  public constructor() {
+    const obs = new PerformanceObserver((items) => {
+      for (const entry of items.getEntries()) {
+        const { name } = entry;
+        if (!this.metrics[name]) {
+          this.metrics[name] = { count: 0, totalTime: 0, avgTime: 0 };
+        }
+        const m = this.metrics[name];
+        m.count++;
+        m.totalTime += entry.duration;
+        m.avgTime = m.totalTime / m.count;
+      }
+    });
+    obs.observe({ entryTypes: ['measure'] });
+  }
+
+  public start(name: string, global: boolean = false): { end: () => void } {
+    const uid = `${name}-${performance.now()}`;
+    const startMark = `${uid}-start`;
+    const endMark = `${uid}-end`;
+    performance.mark(startMark);
+
+    if (global && !this.globalMetric) {
+      this.globalMetric = name;
+    }
+
+    let ended = false;
+
+    return {
+      end: () => {
+        if (ended) return;
+        performance.mark(endMark);
+        performance.measure(name, startMark, endMark);
+        ended = true;
+      }
+    };
+  }
+
+  public printReport() {
+    console.log('\nšŸš€ PERFORMANCE REPORT šŸš€\n');
+    console.log('═'.repeat(90));
+
+    const sorted = Object.entries(this.metrics)
+      .sort(([, a], [, b]) => b.totalTime - a.totalTime);
+
+    if (!sorted.length) {
+      console.log('No performance data collected.');
+      return;
+    }
+
+    let totalTime: number = 0;
+
+    if (this.globalMetric) {
+      totalTime = this.metrics[this.globalMetric]?.totalTime ?? 0;
+    } else {
+      totalTime = sorted.reduce((sum, [, m]) => sum + m.totalTime, 0);
+    }
+
+    console.log(`ā±ļø TOTAL TIME: ${totalTime.toFixed(2)}ms\n`);
+
+    sorted.forEach(([name, m]) => {
+      const pct = totalTime > 0 ? (m.totalTime / totalTime * 100) : 0;
+      console.log(` ${name.padEnd(40)} │ ${m.totalTime.toFixed(2).padStart(8)}ms │ ${m.avgTime.toFixed(2).padStart(7)}ms avg │ ${pct.toFixed(1).padStart(5)}% │ ${m.count.toString().padStart(4)} calls`);
+    });
+
+    console.log('═'.repeat(90));
+    console.log('šŸŽÆ End of Performance Report\n');
+  }
+}
+
+export const perfTracker = new PerfTracker();
diff --git a/packages/cubejs-backend-shared/src/index.ts b/packages/cubejs-backend-shared/src/index.ts
index 247ed10632310..ac7ff477ac440 100644
--- a/packages/cubejs-backend-shared/src/index.ts
+++ b/packages/cubejs-backend-shared/src/index.ts
@@ -21,3 +21,4 @@ export * from './process';
 export * from './platform';
 export * from './FileRepository';
 export * from './decorators';
+export * from './PerfTracker';
diff --git a/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.ts b/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.ts
index ea230df9fb0b2..99bf514977200 100644
--- a/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.ts
+++ b/packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.ts
@@ -47,6 +47,24 @@ const getThreadsCount = () => {
   return 3; // Default (like the workerpool do)
 };
 
+const splitFilesToChunks = (files: FileContent[], chunksCount: number): FileContent[][] => {
+  let chunks: FileContent[][];
+  if (files.length < chunksCount * chunksCount) {
+    chunks = [files];
+  } else {
+    const baseSize = Math.floor(files.length / chunksCount);
+    chunks = [];
+    for (let i = 0; i < chunksCount; i++) {
+      // For the last part, we take the remaining files so we don't lose the extra ones.
+      const start = i * baseSize;
+      const end = (i === chunksCount - 1) ? files.length : start + baseSize;
+      chunks.push(files.slice(start, end));
+    }
+  }
+
+  return chunks;
+};
+
 export type DataSchemaCompilerOptions = {
   compilerCache: CompilerCache;
   omitErrors?: boolean;
@@ -88,6 +106,8 @@ type CompileCubeFilesCompilers = {
   contextCompilers?: CompilerInterface[];
 };
 
+export type CompileContext = any;
+
 export class DataSchemaCompiler {
   private readonly repository: SchemaFileRepository;
@@ -123,7 +143,7 @@ export class DataSchemaCompiler {
   private readonly compilerCache: CompilerCache;
 
-  private readonly compileContext: any;
+  private readonly compileContext: CompileContext;
 
   private errorReportOptions: ErrorReporterOptions | undefined;
@@ -172,14 +192,13 @@ export class DataSchemaCompiler {
     this.standalone = options.standalone || false;
     this.nativeInstance = options.nativeInstance;
     this.yamlCompiler = options.yamlCompiler;
-    this.yamlCompiler.dataSchemaCompiler = this;
     this.pythonContext = null;
     this.workerPool = null;
     this.compilerId = options.compilerId || 'default';
     this.compiledScriptCache = options.compiledScriptCache;
   }
 
-  public compileObjects(compileServices, objects, errorsReport: ErrorReporter) {
+  public compileObjects(compileServices: CompilerInterface[], objects, errorsReport: ErrorReporter) {
     try {
       return compileServices
         .map((compileService) => (() => compileService.compile(objects, errorsReport)))
@@ -193,7 +212,7 @@ export class DataSchemaCompiler {
     }
   }
 
-  protected async loadPythonContext(files, nsFileName) {
+  protected async loadPythonContext(files: FileContent[], nsFileName: string): Promise<PythonCtx> {
     const ns = files.find((f) => f.fileName === nsFileName);
     if (ns) {
       return this.nativeInstance.loadPythonContext(
@@ -216,7 +235,30 @@ export class DataSchemaCompiler {
     this.pythonContext = await this.loadPythonContext(files, 'globals.py');
     this.yamlCompiler.initFromPythonContext(this.pythonContext);
 
-    const toCompile = files.filter((f) => !this.filesToCompile || !this.filesToCompile.length || this.filesToCompile.indexOf(f.fileName) !== -1);
+    const originalJsFiles: FileContent[] = [];
+    const jinjaTemplatedFiles: FileContent[] = [];
+    const yamlFiles: FileContent[] = [];
+
+    (this.filesToCompile?.length
+      ? files.filter(f => this.filesToCompile.includes(f.fileName))
+      : files).forEach(file => {
+      if (file.fileName.endsWith('.js')) {
+        originalJsFiles.push(file);
+      } else if (file.fileName.endsWith('.jinja') ||
+        (file.fileName.endsWith('.yml') || file.fileName.endsWith('.yaml')) && file.content.match(JINJA_SYNTAX)) {
+        jinjaTemplatedFiles.push(file);
+      } else if (file.fileName.endsWith('.yml') || file.fileName.endsWith('.yaml')) {
+        yamlFiles.push(file);
+      }
+      // We don't transpile/compile other files (like .py and so on)
+    });
+
+    let toCompile = [...jinjaTemplatedFiles, ...yamlFiles, ...originalJsFiles];
+
+    if (jinjaTemplatedFiles.length > 0) {
+      // Preload Jinja templates to the engine
+      this.loadJinjaTemplates(jinjaTemplatedFiles);
+    }
 
     const errorsReport = new ErrorReporter(null, [], this.errorReportOptions);
     this.errorsReporter = errorsReport;
@@ -234,34 +276,62 @@
       );
     }
 
-    const transpile = async (stage: CompileStage): Promise<FileContent[]> => {
+    const transpilePhaseFirst = async (stage: CompileStage): Promise<FileContent[]> => {
       let cubeNames: string[] = [];
       let cubeSymbols: Record<string, Record<string, boolean>> = {};
       let transpilerNames: string[] = [];
       let results: (FileContent | undefined)[];
 
       if (transpilationNative || transpilationWorkerThreads) {
-        cubeNames = Object.keys(this.cubeDictionary.byId);
-        // We need only cubes and all its member names for transpiling.
-        // Cubes doesn't change during transpiling, but are changed during compilation phase,
-        // so we can prepare them once for every phase.
-        // Communication between main and worker threads uses
-        // The structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
-        // which doesn't allow passing any function objects, so we need to sanitize the symbols.
-        // Communication with native backend also involves deserialization.
-        cubeSymbols = Object.fromEntries(
-          Object.entries(this.cubeSymbols.symbols as Record<string, Record<string, boolean>>)
-            .map(
-              ([key, value]: [string, Record<string, boolean>]) => [key, Object.fromEntries(
-                Object.keys(value).map((k) => [k, true]),
-              )],
-            ),
-        );
+        ({ cubeNames, cubeSymbols, transpilerNames } = this.prepareTranspileSymbols());
+      }
 
-        // Transpilers are the same for all files within phase.
-        transpilerNames = this.transpilers.map(t => t.constructor.name);
+      if (transpilationNative) {
+        const nonJsFilesTasks = [...jinjaTemplatedFiles, ...yamlFiles]
+          .map(f => this.transpileFile(f, errorsReport, { transpilerNames, compilerId }));
+
+        const jsFiles = originalJsFiles;
+        let jsFilesTasks: Promise<(FileContent | undefined)[]>[] = [];
+
+        if (jsFiles.length > 0) {
+          // Warming up swc compiler cache
+          const dummyFile = {
+            fileName: 'dummy.js',
+            content: ';',
+          };
+
+          await this.transpileJsFile(dummyFile, errorsReport, { cubeNames, cubeSymbols, transpilerNames, contextSymbols: CONTEXT_SYMBOLS, compilerId, stage });
+
+          const jsChunks = splitFilesToChunks(jsFiles, transpilationNativeThreadsCount);
+          jsFilesTasks = jsChunks.map(chunk => this.transpileJsFilesBulk(chunk, errorsReport, { transpilerNames, compilerId }));
+        }
+
+        results = (await Promise.all([...nonJsFilesTasks, ...jsFilesTasks])).flat();
+      } else if (transpilationWorkerThreads) {
+        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
+      } else {
+        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, {})));
+      }
+
+      return results.filter(f => !!f) as FileContent[];
+    };
+
+    const transpilePhase = async (stage: CompileStage): Promise<FileContent[]> => {
+      let cubeNames: string[] = [];
+      let cubeSymbols: Record<string, Record<string, boolean>> = {};
+      let transpilerNames: string[] = [];
+      let results: (FileContent | undefined)[];
+
+      if (toCompile.length === 0) {
+        return [];
       }
 
+      if (transpilationNative || transpilationWorkerThreads) {
+        ({ cubeNames, cubeSymbols, transpilerNames } = this.prepareTranspileSymbols());
+      }
+
+      // After the first phase, all files contain JS source code: either original or transpiled
+
       if (transpilationNative) {
         // Warming up swc compiler cache
         const dummyFile = {
@@ -271,34 +341,14 @@
         await this.transpileJsFile(dummyFile, errorsReport, { cubeNames, cubeSymbols, transpilerNames, contextSymbols: CONTEXT_SYMBOLS, compilerId, stage });
 
-        const nonJsFilesTasks = toCompile.filter(file => !file.fileName.endsWith('.js'))
-          .map(f => this.transpileFile(f, errorsReport, { transpilerNames, compilerId }));
-
-        const jsFiles = toCompile.filter(file => file.fileName.endsWith('.js'));
-        let JsFilesTasks = [];
+        const jsChunks = splitFilesToChunks(toCompile, transpilationNativeThreadsCount);
+        const jsFilesTasks = jsChunks.map(chunk => this.transpileJsFilesBulk(chunk, errorsReport, { transpilerNames, compilerId }));
 
-        if (jsFiles.length > 0) {
-          let jsChunks;
-          if (jsFiles.length < transpilationNativeThreadsCount * transpilationNativeThreadsCount) {
-            jsChunks = [jsFiles];
-          } else {
-            const baseSize = Math.floor(jsFiles.length / transpilationNativeThreadsCount);
-            jsChunks = [];
-            for (let i = 0; i < transpilationNativeThreadsCount; i++) {
-              // For the last part, we take the remaining files so we don't lose the extra ones.
-              const start = i * baseSize;
-              const end = (i === transpilationNativeThreadsCount - 1) ? jsFiles.length : start + baseSize;
-              jsChunks.push(jsFiles.slice(start, end));
-            }
-          }
-          JsFilesTasks = jsChunks.map(chunk => this.transpileJsFilesBulk(chunk, errorsReport, { transpilerNames, compilerId }));
-        }
-
-        results = (await Promise.all([...nonJsFilesTasks, ...JsFilesTasks])).flat();
+        results = (await Promise.all(jsFilesTasks)).flat();
       } else if (transpilationWorkerThreads) {
-        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
+        results = await Promise.all(toCompile.map(f => this.transpileJsFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
       } else {
-        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, {})));
+        results = await Promise.all(toCompile.map(f => this.transpileJsFile(f, errorsReport, {})));
       }
 
       return results.filter(f => !!f) as FileContent[];
@@ -311,6 +361,14 @@
     let asyncModules: CallableFunction[] = [];
     let transpiledFiles: FileContent[] = [];
 
+    const cleanup = () => {
+      cubes = [];
+      exports = {};
+      contexts = [];
+      compiledFiles = {};
+      asyncModules = [];
+    };
+
     this.compileV8ContextCache = vm.createContext({
       view: (name, cube) => {
         const file = ctxFileStorage.getStore();
@@ -373,7 +431,7 @@
       } else {
         const foundFile = this.resolveModuleFile(file, extensionName, transpiledFiles, errorsReport);
         if (!foundFile && this.allowNodeRequire) {
-          if (extensionName.indexOf('.') === 0) {
+          if (extensionName.startsWith('.')) {
             extensionName = path.resolve(this.repository.localPath(), extensionName);
           }
           // eslint-disable-next-line global-require,import/no-dynamic-require
@@ -387,7 +445,6 @@
           foundFile,
           errorsReport,
           compiledFiles,
-          [],
           { doSyntaxCheck: true }
         );
         exports[foundFile.fileName] = exports[foundFile.fileName] || {};
@@ -397,19 +454,28 @@
       COMPILE_CONTEXT: this.standalone ? this.standaloneCompileContextProxy() : this.cloneCompileContextWithGetterAlias(this.compileContext || {}),
     });
 
+    const compilePhaseFirst = async (compilers: CompileCubeFilesCompilers, stage: 0 | 1 | 2 | 3) => {
+      // clear the objects for the next phase
+      cleanup();
+      transpiledFiles = await transpilePhaseFirst(stage);
+
+      // We render Jinja and transpile YAML only once, during the first phase, and then reuse the resulting JS for
+      // these files afterward, avoiding costly YAML/Python parsing again. Original JS files are preserved as-is for cache hits.
+      const convertedToJsFiles = transpiledFiles.filter(f => !f.fileName.endsWith('.js'));
+      toCompile = [...originalJsFiles, ...convertedToJsFiles];
+
+      return this.compileCubeFiles(cubes, contexts, compiledFiles, asyncModules, compilers, transpiledFiles, errorsReport);
+    };
+
     const compilePhase = async (compilers: CompileCubeFilesCompilers, stage: 0 | 1 | 2 | 3) => {
       // clear the objects for the next phase
-      cubes = [];
-      exports = {};
-      contexts = [];
-      compiledFiles = {};
-      asyncModules = [];
-      transpiledFiles = await transpile(stage);
+      cleanup();
+      transpiledFiles = await transpilePhase(stage);
 
       return this.compileCubeFiles(cubes, contexts, compiledFiles, asyncModules, compilers, transpiledFiles, errorsReport);
     };
 
-    return compilePhase({ cubeCompilers: this.cubeNameCompilers }, 0)
+    return compilePhaseFirst({ cubeCompilers: this.cubeNameCompilers }, 0)
       .then(() => compilePhase({ cubeCompilers: this.preTranspileCubeCompilers.concat([this.viewCompilationGate]) }, 1))
       .then(() => (this.viewCompilationGate.shouldCompileViews()
        ? compilePhase({ cubeCompilers: this.viewCompilers }, 2)
        : Promise.resolve()))
      .then(() => compilePhase({
        cubeCompilers: this.cubeCompilers,
        contextCompilers: this.contextCompilers,
      }, 3))
      .then(() => {
        // Free unneeded resources
-        cubes = [];
-        exports = {};
-        contexts = [];
-        compiledFiles = {};
-        asyncModules = [];
+        cleanup();
         transpiledFiles = [];
+        toCompile = [];
 
         if (transpilationNative) {
           // Clean up cache
@@ -462,30 +525,64 @@
     return this.compilePromise;
   }
 
+  private loadJinjaTemplates(files: FileContent[]): void {
+    if (NATIVE_IS_SUPPORTED !== true) {
+      throw new Error(
+        `Native extension is required to process jinja files. ${NATIVE_IS_SUPPORTED.reason}. Read more: ` +
+        'https://github.com/cube-js/cube/blob/master/packages/cubejs-backend-native/README.md#supported-architectures-and-platforms'
+      );
+    }
+
+    const jinjaEngine = this.yamlCompiler.getJinjaEngine();
+
+    files.forEach((file) => {
+      jinjaEngine.loadTemplate(file.fileName, file.content);
+    });
+  }
+
+  private prepareTranspileSymbols() {
+    const cubeNames: string[] = Object.keys(this.cubeDictionary.byId);
+    // We only need cubes and all their member names for transpiling.
+    // Cubes don't change during transpiling, but they do change during the compilation phase,
+    // so we can prepare them once for each phase.
+    // Communication between the main and worker threads uses
+    // the structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
+    // which doesn't allow passing any function objects, so we need to sanitize the symbols.
+    // Communication with the native backend also involves deserialization.
+    const cubeSymbols: Record<string, Record<string, boolean>> = Object.fromEntries(
+      Object.entries(this.cubeSymbols.symbols as Record<string, Record<string, boolean>>)
+        .map(
+          ([key, value]: [string, Record<string, boolean>]) => [key, Object.fromEntries(
+            Object.keys(value).map((k) => [k, true]),
+          )],
+        ),
+    );
+
+    // Transpilers are the same for all files within a phase.
+    const transpilerNames: string[] = this.transpilers.map(t => t.constructor.name);
+
+    return { cubeNames, cubeSymbols, transpilerNames };
+  }
+
   private async transpileFile(
     file: FileContent,
     errorsReport: ErrorReporter,
     options: TranspileOptions = {}
   ): Promise<(FileContent | undefined)> {
-    if (file.fileName.endsWith('.jinja') ||
+    if (file.fileName.endsWith('.js')) {
+      return this.transpileJsFile(file, errorsReport, options);
+    } else if (file.fileName.endsWith('.jinja') ||
       (file.fileName.endsWith('.yml') || file.fileName.endsWith('.yaml'))
-      // TODO do Jinja syntax check with jinja compiler
      && file.content.match(JINJA_SYNTAX)
     ) {
-      if (NATIVE_IS_SUPPORTED !== true) {
-        throw new Error(
-          `Native extension is required to process jinja files. ${NATIVE_IS_SUPPORTED.reason}. Read more: ` +
-          'https://github.com/cube-js/cube/blob/master/packages/cubejs-backend-native/README.md#supported-architectures-and-platforms'
-        );
-      }
-
-      this.yamlCompiler.getJinjaEngine().loadTemplate(file.fileName, file.content);
-
-      return file;
+      return this.yamlCompiler.compileYamlWithJinjaFile(
+        file,
+        errorsReport,
+        this.standalone ? {} : this.cloneCompileContextWithGetterAlias(this.compileContext),
+        this.pythonContext!
+      );
     } else if (file.fileName.endsWith('.yml') || file.fileName.endsWith('.yaml')) {
-      return file;
-    } else if (file.fileName.endsWith('.js')) {
-      return this.transpileJsFile(file, errorsReport, options);
+      return this.yamlCompiler.transpileYamlFile(file, errorsReport);
     } else {
       return file;
     }
@@ -628,16 +725,15 @@
     compiledFiles: Record<string, boolean>,
     asyncModules: CallableFunction[],
     compilers: CompileCubeFilesCompilers,
-    toCompile: FileContent[],
+    transpiledFiles: FileContent[],
     errorsReport: ErrorReporter
   ) {
-    toCompile
+    transpiledFiles
      .forEach((file) => {
        this.compileFile(
          file,
          errorsReport,
          compiledFiles,
-          asyncModules
        );
      });
    await asyncModules.reduce((a: Promise<void>, b: CallableFunction) => a.then(() => b()), Promise.resolve());
@@ -653,7 +749,6 @@
    file: FileContent,
    errorsReport: ErrorReporter,
    compiledFiles: Record<string, boolean>,
-    asyncModules: CallableFunction[],
    { doSyntaxCheck } = { doSyntaxCheck: false }
  ) {
    if (compiledFiles[file.fileName]) {
@@ -662,23 +757,7 @@
    compiledFiles[file.fileName] = true;
 
-    if (file.fileName.endsWith('.js')) {
-      this.compileJsFile(file, errorsReport, { doSyntaxCheck });
-    } else if (file.fileName.endsWith('.yml.jinja') || file.fileName.endsWith('.yaml.jinja') ||
-      (
-        file.fileName.endsWith('.yml') || file.fileName.endsWith('.yaml')
-        // TODO do Jinja syntax check with jinja compiler
-      ) && file.content.match(JINJA_SYNTAX)
-    ) {
-      asyncModules.push(() => this.yamlCompiler.compileYamlWithJinjaFile(
-        file,
-        errorsReport,
-        this.standalone ? {} : this.cloneCompileContextWithGetterAlias(this.compileContext),
-        this.pythonContext!
-      ));
-    } else if (file.fileName.endsWith('.yml') || file.fileName.endsWith('.yaml')) {
-      this.yamlCompiler.compileYamlFile(file, errorsReport);
-    }
+    this.compileJsFile(file, errorsReport, { doSyntaxCheck });
   }
 
   private getJsScript(file: FileContent): vm.Script {
diff --git a/packages/cubejs-schema-compiler/src/compiler/ViewCompilationGate.ts b/packages/cubejs-schema-compiler/src/compiler/ViewCompilationGate.ts
index af53f12f30de6..0479c21e087ab 100644
--- a/packages/cubejs-schema-compiler/src/compiler/ViewCompilationGate.ts
+++ b/packages/cubejs-schema-compiler/src/compiler/ViewCompilationGate.ts
@@ -6,10 +6,10 @@ export class ViewCompilationGate {
   }
 
   public compile(cubes: any[]) {
-    // When developing Data Access Policies feature, we've came across a
+    // When developing the Data Access Policies feature, we've come across a
     // limitation that Cube members can't be referenced in access policies defined on Views,
     // because views aren't (yet) compiled at the time of access policy evaluation.
-    // To workaround this limitation and additional compilation pass is necessary,
+    // To work around this limitation, an additional compilation pass is necessary,
     // however it comes with a significant performance penalty.
     // This gate check whether the data model contains views with access policies,
     // and only then allows the additional compilation pass.
diff --git a/packages/cubejs-schema-compiler/src/compiler/YamlCompiler.ts b/packages/cubejs-schema-compiler/src/compiler/YamlCompiler.ts
index abc207be9cb05..429debf007249 100644
--- a/packages/cubejs-schema-compiler/src/compiler/YamlCompiler.ts
+++ b/packages/cubejs-schema-compiler/src/compiler/YamlCompiler.ts
@@ -11,11 +11,11 @@ import { getEnv } from '@cubejs-backend/shared';
 import { CubePropContextTranspiler, transpiledFields, transpiledFieldsPatterns } from './transpilers';
 import { PythonParser } from '../parser/PythonParser';
 import { CubeSymbols } from './CubeSymbols';
-import { DataSchemaCompiler } from './DataSchemaCompiler';
 import { nonStringFields } from './CubeValidator';
 import { CubeDictionary } from './CubeDictionary';
 import { ErrorReporter } from './ErrorReporter';
 import { camelizeCube } from './utils';
+import { CompileContext } from './DataSchemaCompiler';
 
 type EscapeStateStack = {
   inFormattedStr?: boolean;
@@ -25,8 +25,6 @@
 };
 
 export class YamlCompiler {
-  public dataSchemaCompiler: DataSchemaCompiler | null = null;
-
   protected jinjaEngine: JinjaEngine | null = null;
 
   public constructor(
@@ -53,7 +51,7 @@
     });
   }
 
-  public async renderTemplate(file: FileContent, compileContext, pythonContext: PythonCtx): Promise<FileContent> {
+  public async renderTemplate(file: FileContent, compileContext: CompileContext, pythonContext: PythonCtx): Promise<FileContent> {
     return {
       fileName: file.fileName,
      content: await this.getJinjaEngine().renderTemplate(file.fileName, compileContext, {
@@ -66,18 +64,18 @@
   public async compileYamlWithJinjaFile(
     file: FileContent,
     errorsReport: ErrorReporter,
-    compileContext,
+    compileContext: CompileContext,
     pythonContext: PythonCtx
-  ) {
-    const compiledFile = await this.renderTemplate(file, compileContext, pythonContext);
+  ): Promise<FileContent | undefined> {
+    const renderedFile = await this.renderTemplate(file, compileContext, pythonContext);
 
-    return this.compileYamlFile(compiledFile, errorsReport);
+    return this.transpileYamlFile(renderedFile, errorsReport);
   }
 
-  public compileYamlFile(
+  public transpileYamlFile(
     file: FileContent,
     errorsReport: ErrorReporter,
-  ) {
+  ): FileContent | undefined {
     if (!file.content.trim()) {
       return;
     }
@@ -87,33 +85,37 @@
       return;
     }
 
+    const transpiledFilesContent: string[] = [];
+
     for (const key of Object.keys(yamlObj)) {
       if (key === 'cubes') {
         (yamlObj.cubes || []).forEach(({ name, ...cube }) => {
-          const transpiledFile = this.transpileAndPrepareJsFile(file, 'cube', { name, ...cube }, errorsReport);
-          this.dataSchemaCompiler?.compileJsFile(transpiledFile, errorsReport);
+          const transpiledCube = this.transpileAndPrepareJsFile('cube', { name, ...cube }, errorsReport);
+          transpiledFilesContent.push(transpiledCube);
         });
       } else if (key === 'views') {
         (yamlObj.views || []).forEach(({ name, ...cube }) => {
-          const transpiledFile = this.transpileAndPrepareJsFile(file, 'view', { name, ...cube }, errorsReport);
-          this.dataSchemaCompiler?.compileJsFile(transpiledFile, errorsReport);
+          const transpiledView = this.transpileAndPrepareJsFile('view', { name, ...cube }, errorsReport);
+          transpiledFilesContent.push(transpiledView);
         });
       } else {
         errorsReport.error(`Unexpected YAML key: ${key}. Only 'cubes' and 'views' are allowed here.`);
       }
     }
+
+    // eslint-disable-next-line consistent-return
+    return {
+      fileName: file.fileName,
+      content: transpiledFilesContent.join('\n\n'),
+    } as FileContent;
   }
 
-  private transpileAndPrepareJsFile(file: FileContent, methodFn: ('cube' | 'view'), cubeObj, errorsReport: ErrorReporter): FileContent {
+  private transpileAndPrepareJsFile(methodFn: ('cube' | 'view'), cubeObj, errorsReport: ErrorReporter): string {
     const yamlAst = this.transformYamlCubeObj(cubeObj, errorsReport);
     const cubeOrViewCall = t.callExpression(t.identifier(methodFn), [t.stringLiteral(cubeObj.name), yamlAst]);
 
-    const content = babelGenerator(cubeOrViewCall, {}, '').code;
-    return {
-      fileName: file.fileName,
-      content
-    };
+    return babelGenerator(cubeOrViewCall, {}, '').code;
   }
 
   private transformYamlCubeObj(cubeObj, errorsReport: ErrorReporter) {
diff --git a/packages/cubejs-schema-compiler/src/compiler/transpilers/CubePropContextTranspiler.ts b/packages/cubejs-schema-compiler/src/compiler/transpilers/CubePropContextTranspiler.ts
index c3d57a19510b4..e027e2a0dff3b 100644
--- a/packages/cubejs-schema-compiler/src/compiler/transpilers/CubePropContextTranspiler.ts
+++ b/packages/cubejs-schema-compiler/src/compiler/transpilers/CubePropContextTranspiler.ts
@@ -77,20 +77,37 @@ export class CubePropContextTranspiler implements TranspilerInterface {
     CubePropContextTranspiler.replaceValueWithArrowFunction(resolveSymbol, path.get('value'));
   }
 
-  public static replaceValueWithArrowFunction(resolveSymbol: SymbolResolver, value: NodePath) {
-    const knownIds = CubePropContextTranspiler.collectKnownIdentifiersAndTransform(
-      resolveSymbol,
-      value,
-    );
-
-    value.replaceWith(
-      t.arrowFunctionExpression(
-        knownIds.map(i => t.identifier(i)),
-        // @todo Replace any with assert expression
-        value.node,
-        false,
-      ),
-    );
+  public static replaceValueWithArrowFunction(resolveSymbol: (name: string) => any, value: NodePath) {
+    // If the current value is already an arrow function, update its parameters and keep the body
+    if (t.isArrowFunctionExpression(value.node)) {
+      const bodyPath = value.get('body') as NodePath;
+      const knownIds = CubePropContextTranspiler.collectKnownIdentifiersAndTransform(
+        resolveSymbol,
+        bodyPath,
+      );
+
+      value.replaceWith(
+        t.arrowFunctionExpression(
+          knownIds.map(i => t.identifier(i)),
+          value.node.body,
+          false,
+        ),
+      );
+    } else {
+      const knownIds = CubePropContextTranspiler.collectKnownIdentifiersAndTransform(
+        resolveSymbol,
+        value,
+      );
+
+      value.replaceWith(
+        t.arrowFunctionExpression(
+          knownIds.map(i => t.identifier(i)),
+          // @todo Replace any with assert expression
+          value.node,
+          false,
+        ),
+      );
+    }
   }
 
   protected sqlAndReferencesFieldVisitor(cubeName: string | null | undefined): TraverseObject {
@@ -100,14 +117,17 @@
     return {
       ObjectProperty: (path) => {
-        if (path.node.key.type === 'Identifier' && path.node.key.name === 'joins' && t.isObjectExpression(path.node.value)) {
+        if (((path.node.key.type === 'Identifier' && path.node.key.name === 'joins') ||
+          (path.node.key.type === 'StringLiteral' && path.node.key.value === 'joins'))
+          && t.isObjectExpression(path.node.value)) {
           const fullPath = CubePropContextTranspiler.fullPath(path);
           if (fullPath === 'joins') {
             this.convertJoinsObjectToArray(path);
           }
         }
 
-        if (path.node.key.type === 'Identifier' && transpiledFields.has(path.node.key.name)) {
+        if ((path.node.key.type === 'Identifier' && transpiledFields.has(path.node.key.name)) ||
+          (path.node.key.type === 'StringLiteral' && transpiledFields.has(path.node.key.value))) {
           const fullPath = CubePropContextTranspiler.fullPath(path);
           // eslint-disable-next-line no-restricted-syntax
           for (const p of transpiledFieldsPatterns) {
@@ -156,7 +176,7 @@
   protected static fullPath(path: NodePath): string {
     // @ts-ignore
-    let fp = path?.node?.key?.name || '';
+    let fp = path?.node?.key?.name || path?.node?.key?.value || '';
     let pp: NodePath | null | undefined = path?.parentPath;
     while (pp) {
       if (pp?.parentPath?.node?.type === 'ArrayExpression') {
@@ -167,6 +187,11 @@
         // @ts-ignore
         fp = `${pp?.parentPath?.node?.key?.name || '0'}.${fp}`;
         pp = pp?.parentPath?.parentPath;
+      // @ts-ignore
+      } else if (pp?.parentPath?.node?.key?.type === 'StringLiteral') {
+        // @ts-ignore
+        fp = `${pp?.parentPath?.node?.key?.value || '0'}.${fp}`;
+        pp = pp?.parentPath?.parentPath;
       } else break;
     }
 
@@ -176,7 +201,8 @@
   protected knownIdentifiersInjectVisitor(field: RegExp | string, resolveSymbol: SymbolResolver): TraverseObject {
     return {
       ObjectProperty: (path) => {
-        if (path.node.key.type === 'Identifier' && path.node.key.name.match(field)) {
+        if ((path.node.key.type === 'Identifier' && path.node.key.name.match(field)) ||
+          (path.node.key.type === 'StringLiteral' && path.node.key.value.match(field))) {
          this.transformObjectProperty(path, resolveSymbol);
        }
      }
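
Note: a minimal usage sketch of the perfTracker singleton exported from @cubejs-backend/shared. The wrapper function and the 'schema-compile' metric name are illustrative assumptions, not taken from this diff; only start(name, global), end(), and printReport() come from the new PerfTracker API.

    import { perfTracker } from '@cubejs-backend/shared';

    // Hypothetical wrapper: times a single compilation run and prints the aggregated report.
    async function compileWithTiming(compile: () => Promise<void>): Promise<void> {
      // `global: true` marks this metric as the denominator for the percentage column in printReport()
      const span = perfTracker.start('schema-compile', true);
      try {
        await compile();
      } finally {
        span.end();
      }

      // PerformanceObserver callbacks are delivered asynchronously, so defer the report
      // one tick to make sure the last measure has been aggregated before printing.
      setImmediate(() => perfTracker.printReport());
    }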
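
For reference, the chunking rule in the new splitFilesToChunks helper keeps everything in a single chunk until the file count reaches chunksCount squared, and otherwise splits the files into chunksCount near-equal chunks with the remainder absorbed by the last one. The helper is module-private in DataSchemaCompiler.ts, so the standalone sketch below re-implements the same arithmetic purely to show the resulting chunk sizes; the sample numbers are made up.

    // Standalone re-implementation of the chunking rule, for illustration only.
    function chunkSizes(fileCount: number, chunksCount: number): number[] {
      if (fileCount < chunksCount * chunksCount) {
        return [fileCount]; // too few files to be worth fanning out across native threads
      }
      const baseSize = Math.floor(fileCount / chunksCount);
      return Array.from({ length: chunksCount }, (_, i) => (
        i === chunksCount - 1 ? fileCount - baseSize * (chunksCount - 1) : baseSize
      ));
    }

    console.log(chunkSizes(10, 4)); // [10]          — 10 < 16, single bulk transpile call
    console.log(chunkSizes(35, 4)); // [8, 8, 8, 11] — last chunk takes the remaining files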