23 changes: 23 additions & 0 deletions packages/cubejs-schema-compiler/src/adapter/MssqlQuery.ts
@@ -4,6 +4,7 @@ import moment from 'moment-timezone';
import { QueryAlias, parseSqlInterval } from '@cubejs-backend/shared';
import { BaseQuery } from './BaseQuery';
import { BaseFilter } from './BaseFilter';
import { BaseSegment } from './BaseSegment';
import { ParamAllocator } from './ParamAllocator';

const abbrs = {
@@ -53,11 +54,33 @@ class MssqlFilter extends BaseFilter {
}
}

class MssqlSegment extends BaseSegment {
public filterToWhere(): string {
const where = super.filterToWhere();

const context = this.query.safeEvaluateSymbolContext();
if (context.rollupQuery) {
// The segment itself will be rendered as a column reference for rollupQuery
// In MSSQL, using just `WHERE (segment_column) AND (other_filter)` is incorrect, because
// `segment_column` is not a boolean expression but a `BIT` column
// The correct way to filter on it is `WHERE segment_column = 1`
// This relies on `wrapSegmentForDimensionSelect` mapping the segment to a `BIT` data type
return `${where} = 1`;
}

return where;
}
}

export class MssqlQuery extends BaseQuery {
public newFilter(filter) {
return new MssqlFilter(this, filter);
}

public newSegment(segment): BaseSegment {
return new MssqlSegment(this, segment);
}

public castToString(sql) {
return `CAST(${sql} as VARCHAR)`;
}
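As an aside, a minimal sketch of the predicate shape the `MssqlSegment` override produces. The function and column names below are hypothetical and not from this PR; they only illustrate why the rollup reference needs an explicit comparison.

```ts
// Hypothetical sketch: turning a segment reference into a T-SQL WHERE predicate.
// Outside a rollup query the segment already renders as a full boolean
// expression; inside a rollup query it is a BIT column from the rollup table.
function mssqlSegmentPredicate(segmentSql: string, isRollupQuery: boolean): string {
  // BIT columns cannot be used directly as boolean predicates in T-SQL,
  // so the rollup reference gets an explicit `= 1` comparison.
  return isRollupQuery ? `${segmentSql} = 1` : segmentSql;
}

// mssqlSegmentPredicate('"visitors__is_active"', true)
//   -> '"visitors__is_active" = 1'
// mssqlSegmentPredicate("(status = 'active')", false)
//   -> "(status = 'active')"
```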
@@ -1256,11 +1256,8 @@ export class PreAggregations {

const from = this.query.joinSql(toJoin);

const segmentFilters = this.query.segments.map(
s => this.query.newFilter({ dimension: s.segment, operator: 'equals', values: [true] })
);
const replacedFilters =
filters || segmentFilters
filters || this.query.segments
.concat(this.query.filters).concat(
this.query.timeDimensions.map(dimension => dimension.dateRange && ({
filterToWhere: () => this.query.timeRangeFilter(
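For context on this change: the removed code wrapped each segment into an `equals: [true]` filter keyed by `s.segment`, which works when the segment is a plain `cube.member` path but can break when it is a member expression object (the `path.split is not a function` error exercised by the test added below). A hedged illustration of that failure mode, with made-up names:

```ts
// Hypothetical sketch: treating a member-expression segment as if it were a
// 'cube.member' path string.
type MemberExpression = { cubeName: string; expressionName: string };

function cubeNameFromPath(path: string | MemberExpression): string {
  // The old wrapping assumed `path` is always a string like
  // 'visitor_checkins.source_is_some'; for a member expression it is an
  // object, and calling `split` on it throws "path.split is not a function".
  if (typeof path !== 'string') {
    return path.cubeName;
  }
  return path.split('.')[0];
}
```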
@@ -25,13 +25,19 @@ export class PostgresDBRunner extends BaseDbRunner {
return {
testQueries(queries, prepareDataSet) {
prepareDataSet = prepareDataSet || defaultFixture;
return db.tx(tx => tx.query('SET TIME ZONE \'UTC\'')
.then(() => prepareDataSet(tx)
.then(() => queries
.map(([query, params]) => () => tx.query(query, params).catch(e => {
throw new Error(`Execution failed for '${query}', params: ${params}: ${e.stack || e}`);
})).reduce((a, b) => a.then(b), Promise.resolve()))
.then(r => JSON.parse(JSON.stringify(r)))));
return db.tx(async tx => {
await tx.query('SET TIME ZONE \'UTC\'');
await prepareDataSet(tx);
let lastResult;
for (const [query, params] of queries) {
try {
lastResult = await tx.query(query, params);
} catch (e) {
throw new Error(`Execution failed for '${query}', params: ${params}: ${e.stack || e}`);
}
}
return JSON.parse(JSON.stringify(lastResult));
});
},
close() {
return pgp.end();
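A hedged usage sketch of the refactored runner (the fixture table and query are made up): `testQueries` takes an ordered list of `[sql, params]` tuples, runs them sequentially inside one transaction, and resolves with the rows of the last query.

```ts
// Hypothetical usage inside an async test, against the runner defined above.
const rows = await dbRunner.testQueries([
  // ON COMMIT DROP keeps the temp table scoped to this transaction.
  ['CREATE TEMP TABLE fixture ON COMMIT DROP AS SELECT 1 AS id', []],
  ['SELECT id FROM fixture WHERE id = $1', [1]],
]);
// Only the last query's rows are returned, JSON round-tripped: [{ id: 1 }]
```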
@@ -1,5 +1,6 @@
import { getEnv } from '@cubejs-backend/shared';
import { UserError } from '../../../src/compiler/UserError';
import type { BaseQuery } from '../../../src';
import { PostgresQuery } from '../../../src/adapter/PostgresQuery';
import { BigqueryQuery } from '../../../src/adapter/BigqueryQuery';
import { PrestodbQuery } from '../../../src/adapter/PrestodbQuery';
@@ -825,6 +826,27 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL
);
}

type QueryWithParams = [string, Array<unknown>];

async function testWithPreAgg(
preAggregationsDescription: { loadSql: QueryWithParams, invalidateKeyQueries: Array<QueryWithParams> },
query: BaseQuery,
) {
const preAggSql = preAggregationsDescription
.loadSql[0]
// Without `ON COMMIT DROP`, temp tables are session-bound and can live across multiple transactions
.replace(/CREATE TABLE (.+) AS SELECT/, 'CREATE TEMP TABLE $1 ON COMMIT DROP AS SELECT');
const preAggParams = preAggregationsDescription.loadSql[1];

const queries = [
...preAggregationsDescription.invalidateKeyQueries,
[preAggSql, preAggParams],
query.buildSqlAndParams(),
];

return dbRunner.testQueries(queries);
}
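A quick illustration (with a made-up table name) of what the regex rewrite in `testWithPreAgg` does to a pre-aggregation `loadSql` statement before it is executed against the test database:

```ts
// Hypothetical loadSql as produced by preAggregationsDescription().loadSql[0].
const loadSql = 'CREATE TABLE vc_main AS SELECT source, count(*) FROM checkins GROUP BY 1';

const tempSql = loadSql.replace(
  /CREATE TABLE (.+) AS SELECT/,
  // TEMP + ON COMMIT DROP scopes the rollup table to the test transaction.
  'CREATE TEMP TABLE $1 ON COMMIT DROP AS SELECT'
);
// tempSql -> 'CREATE TEMP TABLE vc_main ON COMMIT DROP AS SELECT source, count(*) FROM checkins GROUP BY 1'
```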

it('simple join total', async () => runQueryTest({
measures: [
'visitors.visitor_revenue',
@@ -1978,6 +2000,40 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL
});
});

/// Test that a query with a segment member expression referencing a dimension covered by a pre-agg
/// does _not_ trigger errors like `path.split is not a function` due to the unexpected member expression
it('pre-aggregation with segment member expression', async () => {
await compiler.compile();

const query = new PostgresQuery({ joinGraph, cubeEvaluator, compiler }, {
segments: [
{
// eslint-disable-next-line no-new-func
expression: new Function(
'visitor_checkins',
// eslint-disable-next-line no-template-curly-in-string
'return `${visitor_checkins.source} IS NOT NULL`'
),
expressionName: 'source_is_some',
// eslint-disable-next-line no-template-curly-in-string
definition: '${visitor_checkins.source} IS NOT NULL',
cubeName: 'visitor_checkins',
},
],
timezone: 'America/Los_Angeles',
order: [],
preAggregationsSchema: ''
});

const preAggregationsDescription: any = query.preAggregations?.preAggregationsDescription()[0];

const res = await testWithPreAgg(preAggregationsDescription, query);
expect(res).toEqual(
// Empty result set, only segments in query
[{}]
);
});

it('join rollup pre-aggregation', async () => {
await compiler.compile();

@@ -2012,21 +2068,17 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL
const preAggregationsDescription: any = query.preAggregations?.preAggregationsDescription()[0];
console.log(preAggregationsDescription);

return dbRunner.testQueries(preAggregationsDescription.invalidateKeyQueries.concat([
[preAggregationsDescription.loadSql[0].replace('CREATE TABLE', 'CREATE TEMP TABLE'), preAggregationsDescription.loadSql[1]],
query.buildSqlAndParams()
])).then(res => {
console.log(JSON.stringify(res));
expect(res).toEqual(
[
{
vc__source: 'google',
visitors__created_at_day: '2017-01-02T00:00:00.000Z',
visitors__per_visitor_revenue: '100'
}
]
);
});
const res = await testWithPreAgg(preAggregationsDescription, query);
console.log(JSON.stringify(res));
expect(res).toEqual(
[
{
vc__source: 'google',
visitors__created_at_day: '2017-01-02T00:00:00.000Z',
visitors__per_visitor_revenue: '100'
}
]
);
});

it('join rollup total pre-aggregation', async () => {
@@ -2057,22 +2109,15 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL
const preAggregationsDescription: any = query.preAggregations?.preAggregationsDescription()[0];
console.log(preAggregationsDescription);

return dbRunner.testQueries(preAggregationsDescription.invalidateKeyQueries.concat([
[
preAggregationsDescription.loadSql[0].replace('CREATE TABLE', 'CREATE TEMP TABLE'),
preAggregationsDescription.loadSql[1]
],
query.buildSqlAndParams()
])).then(res => {
console.log(JSON.stringify(res));
expect(res).toEqual(
[{
vc__source: 'google',
visitors__created_at_day: '2017-01-02T00:00:00.000Z',
visitors__visitor_revenue: '100'
}]
);
});
const res = await testWithPreAgg(preAggregationsDescription, query);
console.log(JSON.stringify(res));
expect(res).toEqual(
[{
vc__source: 'google',
visitors__created_at_day: '2017-01-02T00:00:00.000Z',
visitors__visitor_revenue: '100'
}]
);
});

it('security context', async () => {