diff --git a/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js b/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js index e9b3d53931d9d..a5937458d9c9a 100644 --- a/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js +++ b/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js @@ -21,7 +21,8 @@ import { getEnv, localTimestampToUtc, timeSeries as timeSeriesBase, - timeSeriesFromCustomInterval + timeSeriesFromCustomInterval, + parseSqlInterval } from '@cubejs-backend/shared'; import { CubeSymbols } from '../compiler/CubeSymbols'; @@ -1890,6 +1891,19 @@ export class BaseQuery { return `${value}::timestamp`; } + /** + * Converts the input interval (e.g. "2 years", "3 months", "5 days") + * into a format compatible with the target SQL dialect. + * Also returns the minimal time unit required (e.g. for use in DATEDIFF). + * + * Returns a tuple: (formatted interval, minimal time unit) + */ + intervalAndMinimalTimeUnit(interval) { + const intervalParsed = parseSqlInterval(interval); + const minGranularity = this.diffTimeUnitForInterval(interval); + return [interval, minGranularity]; + } + commonQuery() { return `SELECT${this.topLimit()} ${this.baseSelect()} diff --git a/packages/cubejs-schema-compiler/src/adapter/BigqueryQuery.ts b/packages/cubejs-schema-compiler/src/adapter/BigqueryQuery.ts index 27d6816263e2c..7dab6e0983afe 100644 --- a/packages/cubejs-schema-compiler/src/adapter/BigqueryQuery.ts +++ b/packages/cubejs-schema-compiler/src/adapter/BigqueryQuery.ts @@ -42,7 +42,7 @@ export class BigqueryQuery extends BaseQuery { } public convertTz(field) { - return `DATETIME(${field}, '${this.timezone}')`; + return `DATETIME(${this.timeStampCast(field)}, '${this.timezone}')`; } public timeStampCast(value) { @@ -134,6 +134,10 @@ export class BigqueryQuery extends BaseQuery { throw new Error(`Cannot transform interval expression "${interval}" to BigQuery dialect`); } + public override intervalAndMinimalTimeUnit(interval: string): [string, string] { + return this.formatInterval(interval); + } + public newFilter(filter) { return new BigqueryFilter(this, filter); } @@ -252,13 +256,13 @@ export class BigqueryQuery extends BaseQuery { templates.functions.STRPOS = 'STRPOS({{ args_concat }})'; templates.functions.DATEDIFF = 'DATETIME_DIFF(CAST({{ args[2] }} AS DATETIME), CAST({{ args[1] }} AS DATETIME), {{ date_part }})'; // DATEADD is being rewritten to DATE_ADD - templates.functions.DATE_ADD = '{% if date_part|upper in [\'YEAR\', \'MONTH\', \'QUARTER\'] %}TIMESTAMP(DATETIME_ADD(DATETIME({{ args[0] }}), INTERVAL {{ interval }} {{ date_part }})){% else %}TIMESTAMP_ADD({{ args[0] }}, INTERVAL {{ interval }} {{ date_part }}){% endif %}'; + templates.functions.DATE_ADD = 'DATETIME_ADD(DATETIME({{ args[0] }}), INTERVAL {{ interval }} {{ date_part }})'; templates.functions.CURRENTDATE = 'CURRENT_DATE'; delete templates.functions.TO_CHAR; templates.expressions.binary = '{% if op == \'%\' %}MOD({{ left }}, {{ right }}){% else %}({{ left }} {{ op }} {{ right }}){% endif %}'; templates.expressions.interval = 'INTERVAL {{ interval }}'; templates.expressions.extract = 'EXTRACT({% if date_part == \'DOW\' %}DAYOFWEEK{% elif date_part == \'DOY\' %}DAYOFYEAR{% else %}{{ date_part }}{% endif %} FROM {{ expr }})'; - templates.expressions.timestamp_literal = 'TIMESTAMP(\'{{ value }}\')'; + templates.expressions.timestamp_literal = 'DATETIME(TIMESTAMP(\'{{ value }}\'))'; delete templates.expressions.ilike; delete templates.expressions.like_escape; templates.filters.like_pattern = 'CONCAT({% if start_wild 
%}\'%\'{% else %}\'\'{% endif %}, LOWER({{ value }}), {% if end_wild %}\'%\'{% else %}\'\'{% endif %})'; @@ -278,7 +282,25 @@ export class BigqueryQuery extends BaseQuery { '{% if not loop.last %} UNION ALL\n{% endif %}' + '{% endfor %}' + ') AS dates'; - + templates.statements.generated_time_series_select = 'SELECT DATETIME(d) AS date_from,\n' + + 'DATETIME_SUB(DATETIME_ADD(DATETIME(d), INTERVAL {{ granularity }}), INTERVAL 1 MILLISECOND) AS date_to \n' + + 'FROM UNNEST(\n' + + '{% if minimal_time_unit|upper in ["DAY", "WEEK", "MONTH", "QUARTER", "YEAR"] %}' + + 'GENERATE_DATE_ARRAY(DATE({{ start }}), DATE({{ end }}), INTERVAL {{ granularity }})\n' + + '{% else %}' + + 'GENERATE_TIMESTAMP_ARRAY(TIMESTAMP({{ start }}), TIMESTAMP({{ end }}), INTERVAL {{ granularity }})\n' + + '{% endif %}' + + ') AS d'; + + templates.statements.generated_time_series_with_cte_range_source = 'SELECT DATETIME(d) AS date_from,\n' + + 'DATETIME_SUB(DATETIME_ADD(DATETIME(d), INTERVAL {{ granularity }}), INTERVAL 1 MILLISECOND) AS date_to \n' + + 'FROM {{ range_source }}, UNNEST(\n' + + '{% if minimal_time_unit|upper in ["DAY", "WEEK", "MONTH", "QUARTER", "YEAR"] %}' + + 'GENERATE_DATE_ARRAY(DATE({{ range_source }}.{{ min_name }}), DATE({{ range_source }}.{{ max_name }}), INTERVAL {{ granularity }})\n' + + '{% else %}' + + 'GENERATE_TIMESTAMP_ARRAY(TIMESTAMP({{ range_source }}.{{ min_name }}), TIMESTAMP({{ range_source }}.{{ max_name }}), INTERVAL {{ granularity }})\n' + + '{% endif %}' + + ') AS d'; return templates; } } diff --git a/packages/cubejs-schema-compiler/src/adapter/PostgresQuery.ts b/packages/cubejs-schema-compiler/src/adapter/PostgresQuery.ts index 747b23fd9d19c..fdcec50062f9d 100644 --- a/packages/cubejs-schema-compiler/src/adapter/PostgresQuery.ts +++ b/packages/cubejs-schema-compiler/src/adapter/PostgresQuery.ts @@ -82,9 +82,12 @@ export class PostgresQuery extends BaseQuery { templates.types.double = 'DOUBLE PRECISION'; templates.types.binary = 'BYTEA'; templates.operators.is_not_distinct_from = 'IS NOT DISTINCT FROM'; - templates.statements.generated_time_series_select = 'SELECT date_from AS "date_from",\n' + - 'date_from + interval \'{{ granularity }}\' - interval \'1 millisecond\' AS "date_to" \n' + - 'FROM generate_series({{ start }}::timestamp, {{ end }}:: timestamp, \'{{ granularity }}\'::interval) "date_from" '; + templates.statements.generated_time_series_select = 'SELECT d AS "date_from",\n' + + 'd + interval \'{{ granularity }}\' - interval \'1 millisecond\' AS "date_to" \n' + + 'FROM generate_series({{ start }}::timestamp, {{ end }}:: timestamp, \'{{ granularity }}\'::interval) d '; + templates.statements.generated_time_series_with_cte_range_source = 'SELECT d AS "date_from",\n' + + 'd + interval \'{{ granularity }}\' - interval \'1 millisecond\' AS "date_to" \n' + + 'FROM {{ range_source }}, LATERAL generate_series({{ range_source }}.{{ min_name }}, {{ range_source }}.{{ max_name }}, \'{{ granularity }}\'::interval) d '; return templates; } diff --git a/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts b/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts index 0625d05988c8b..6c7ff8e06b16a 100644 --- a/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts +++ b/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts @@ -1010,9 +1010,9 @@ export class CubeSymbols { return true; } if (cube[propertyName]) { - depsResolveFn(propertyName, parentIndex); + const index = depsResolveFn(propertyName, parentIndex); if (cube[propertyName].type === 'time') { - return 
this.timeDimDependenciesProxy(parentIndex); + return this.timeDimDependenciesProxy(index); } return ''; diff --git a/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts b/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts index 9ce2f24a7ae1c..326b294ca0a93 100644 --- a/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts +++ b/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts @@ -316,6 +316,10 @@ describe('SQL Generation', () => { } } }, + created_month: { + type: 'time', + sql: \`\${created_at.month}\` + }, updated_at: { type: 'time', sql: 'updated_at' @@ -1343,6 +1347,33 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL }); } + if (getEnv('nativeSqlPlanner')) { + it('rolling count proxy time dimension', async () => { + await runQueryTest({ + measures: [ + 'visitors.countRollingThreeMonth' + ], + dimensions: [ + 'visitors.created_month' + ], + order: [{ + id: 'visitors.created_month' + }], + timezone: 'America/Los_Angeles' + }, [ + { visitors__created_month: '2016-09-01T00:00:00.000Z', visitors__count_rolling_three_month: '1' }, + { visitors__created_month: '2016-10-01T00:00:00.000Z', visitors__count_rolling_three_month: '1' }, + { visitors__created_month: '2016-11-01T00:00:00.000Z', visitors__count_rolling_three_month: '1' }, + { visitors__created_month: '2016-12-01T00:00:00.000Z', visitors__count_rolling_three_month: null }, + { visitors__created_month: '2017-01-01T00:00:00.000Z', visitors__count_rolling_three_month: '5' }, + ]); + }); + } else { + it.skip('rolling count without date range', () => { + // Skipping because it works only in Tesseract + }); + } + it('rolling qtd', async () => runQueryTest({ measures: [ 'visitors.revenue_qtd' diff --git a/packages/cubejs-testing-drivers/fixtures/_schemas.json b/packages/cubejs-testing-drivers/fixtures/_schemas.json index 1ffd0d67abf7f..1cdc2c7e80b5c 100644 --- a/packages/cubejs-testing-drivers/fixtures/_schemas.json +++ b/packages/cubejs-testing-drivers/fixtures/_schemas.json @@ -421,6 +421,14 @@ "trailing": "2 month" } }, + { + "name": "rollingCountYTD", + "type": "count", + "rollingWindow": { + "type": "to_date", + "granularity": "year" + } + }, { "name": "rollingCountApproxBy2Day", "type": "count_distinct_approx", diff --git a/packages/cubejs-testing-drivers/fixtures/athena.json b/packages/cubejs-testing-drivers/fixtures/athena.json index ba5f8b2b5074c..a429d22fe442c 100644 --- a/packages/cubejs-testing-drivers/fixtures/athena.json +++ b/packages/cubejs-testing-drivers/fixtures/athena.json @@ -145,6 +145,15 @@ "querying BigECommerce: null boolean", "--------------------", + "---------------------------------------", + "Requires Tesseract. 
", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range", + "--------------------", + "week granularity is not supported for intervals", "--------------------", "querying BigECommerce: rolling window by 2 week", diff --git a/packages/cubejs-testing-drivers/fixtures/bigquery.json b/packages/cubejs-testing-drivers/fixtures/bigquery.json index bd8c634a20252..1f5edc1b40715 100644 --- a/packages/cubejs-testing-drivers/fixtures/bigquery.json +++ b/packages/cubejs-testing-drivers/fixtures/bigquery.json @@ -157,6 +157,14 @@ "querying BigECommerce: null sum", "querying BigECommerce: null boolean", + "---------------------------------------", + "Requires Tesseract. ", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD without date range", + "--------------------", + "SKIPPED SQL API (Need work)", "---------------------------------------", "SQL API: reuse params", diff --git a/packages/cubejs-testing-drivers/fixtures/clickhouse.json b/packages/cubejs-testing-drivers/fixtures/clickhouse.json index 44cb88cbb3703..e67ceea716652 100644 --- a/packages/cubejs-testing-drivers/fixtures/clickhouse.json +++ b/packages/cubejs-testing-drivers/fixtures/clickhouse.json @@ -174,6 +174,15 @@ "querying BigECommerce: rolling window by 2 week", "querying BigECommerce: rolling window by 2 month", + "---------------------------------------", + "Requires Tesseract. ", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range", + "--------------------", + "---------------------------------------", "Custom Granularities ", "---------------------------------------", diff --git a/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json b/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json index 4d80e74c0b50f..fb19c793fe6c2 100644 --- a/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json +++ b/packages/cubejs-testing-drivers/fixtures/databricks-jdbc.json @@ -206,6 +206,15 @@ "querying ECommerce: total sales, total profit by month + order (date) + total -- doesn't work with the BigQuery", "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test", + "---------------------------------------", + "Requires Tesseract. 
", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range", + "--------------------", + "---------------------------------------", "Custom Granularities ", "---------------------------------------", diff --git a/packages/cubejs-testing-drivers/fixtures/mssql.json b/packages/cubejs-testing-drivers/fixtures/mssql.json index 1a18eb928441f..3273ee4921e92 100644 --- a/packages/cubejs-testing-drivers/fixtures/mssql.json +++ b/packages/cubejs-testing-drivers/fixtures/mssql.json @@ -136,6 +136,14 @@ "querying BigECommerce: null sum", "querying BigECommerce: null boolean", + "---------------------------------------", + "Requires Tesseract. ", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range", + "--------------------", "---------------------------------------", "SKIPPED SQL API (Need work)", "---------------------------------------", diff --git a/packages/cubejs-testing-drivers/fixtures/mysql.json b/packages/cubejs-testing-drivers/fixtures/mysql.json index 3f378eb971eaa..29b9315144ff1 100644 --- a/packages/cubejs-testing-drivers/fixtures/mysql.json +++ b/packages/cubejs-testing-drivers/fixtures/mysql.json @@ -131,6 +131,13 @@ "querying BigECommerce: partitioned pre-agg", "querying BigECommerce: null sum", "querying BigECommerce: null boolean", + "---------------------------------------", + "Requires Tesseract. ", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range", "---------------------------------------", "Custom Granularities ", diff --git a/packages/cubejs-testing-drivers/fixtures/postgres.json b/packages/cubejs-testing-drivers/fixtures/postgres.json index 0b6d6fa0a6606..94ad1dd94428e 100644 --- a/packages/cubejs-testing-drivers/fixtures/postgres.json +++ b/packages/cubejs-testing-drivers/fixtures/postgres.json @@ -155,6 +155,12 @@ "---------------------------------------", "querying Products: dimensions -- doesn't work wo ordering", "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- rounding in athena", - "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test" + "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test", + "---------------------------------------", + "Requires Tesseract. 
", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD without date range" ] } diff --git a/packages/cubejs-testing-drivers/fixtures/redshift.json b/packages/cubejs-testing-drivers/fixtures/redshift.json index 82b6df3330c83..c74370b80918e 100644 --- a/packages/cubejs-testing-drivers/fixtures/redshift.json +++ b/packages/cubejs-testing-drivers/fixtures/redshift.json @@ -166,6 +166,13 @@ "---------------------------------------", "querying Products: dimensions -- doesn't work wo ordering", "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- rounding in athena", - "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test" + "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test", + "---------------------------------------", + "Requires Tesseract. ", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range" ] } diff --git a/packages/cubejs-testing-drivers/fixtures/snowflake.json b/packages/cubejs-testing-drivers/fixtures/snowflake.json index cbed7a3cf985f..a96bbbca89f75 100644 --- a/packages/cubejs-testing-drivers/fixtures/snowflake.json +++ b/packages/cubejs-testing-drivers/fixtures/snowflake.json @@ -200,6 +200,13 @@ "querying ECommerce: total sales, total profit by month + order (date) + total -- doesn't work with the BigQuery", "querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test", "querying BigECommerce: null sum", - "querying BigECommerce: null boolean" + "querying BigECommerce: null boolean", + "---------------------------------------", + "Requires Tesseract. 
", + "---------------------------------------", + "querying BigECommerce: rolling window by 2 day without date range", + "querying BigECommerce: rolling window by 2 month without date range", + "querying BigECommerce: rolling window YTD", + "querying BigECommerce: rolling window YTD without date range" ] } diff --git a/packages/cubejs-testing-drivers/src/tests/testQueries.ts b/packages/cubejs-testing-drivers/src/tests/testQueries.ts index 6edf086c184aa..301b5fff0a3b7 100644 --- a/packages/cubejs-testing-drivers/src/tests/testQueries.ts +++ b/packages/cubejs-testing-drivers/src/tests/testQueries.ts @@ -1525,6 +1525,19 @@ export function testQueries(type: string, { includeIncrementalSchemaSuite, exten expect(response.rawData()).toMatchSnapshot(); }); + execute('querying BigECommerce: rolling window by 2 day without date range', async () => { + const response = await client.load({ + measures: [ + 'BigECommerce.rollingCountBy2Day', + ], + timeDimensions: [{ + dimension: 'BigECommerce.orderDate', + granularity: 'month', + }], + }); + expect(response.rawData()).toMatchSnapshot(); + }); + execute('querying BigECommerce: rolling window by 2 week', async () => { const response = await client.load({ measures: [ @@ -1553,6 +1566,46 @@ export function testQueries(type: string, { includeIncrementalSchemaSuite, exten expect(response.rawData()).toMatchSnapshot(); }); + execute('querying BigECommerce: rolling window by 2 month without date range', async () => { + const response = await client.load({ + measures: [ + 'BigECommerce.rollingCountBy2Month', + ], + timeDimensions: [{ + dimension: 'BigECommerce.orderDate', + granularity: 'month', + }], + }); + expect(response.rawData()).toMatchSnapshot(); + }); + + execute('querying BigECommerce: rolling window YTD', async () => { + const response = await client.load({ + measures: [ + 'BigECommerce.rollingCountYTD', + ], + timeDimensions: [{ + dimension: 'BigECommerce.orderDate', + granularity: 'month', + dateRange: ['2020-01-01', '2020-12-31'], + }], + }); + expect(response.rawData()).toMatchSnapshot(); + }); + + execute('querying BigECommerce: rolling window YTD without date range', async () => { + const response = await client.load({ + measures: [ + 'BigECommerce.rollingCountYTD', + ], + timeDimensions: [{ + dimension: 'BigECommerce.orderDate', + granularity: 'month', + }], + }); + expect(response.rawData()).toMatchSnapshot(); + }); + if (includeHLLSuite) { execute('querying BigECommerce: rolling count_distinct_approx window by 2 day', async () => { const response = await client.load({ diff --git a/packages/cubejs-testing-drivers/test/__snapshots__/bigquery-export-bucket-gcs-full.test.ts.snap b/packages/cubejs-testing-drivers/test/__snapshots__/bigquery-export-bucket-gcs-full.test.ts.snap index add446097f8f9..8be48b7611cfc 100644 --- a/packages/cubejs-testing-drivers/test/__snapshots__/bigquery-export-bucket-gcs-full.test.ts.snap +++ b/packages/cubejs-testing-drivers/test/__snapshots__/bigquery-export-bucket-gcs-full.test.ts.snap @@ -6493,6 +6493,136 @@ Array [ ] `; +exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window YTD 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 2, + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 3, + }, + Object { + "BigECommerce.orderDate": 
"2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 5, + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 6, + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 11, + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 18, + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 18, + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 18, + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 24, + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 28, + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 37, + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 44, + }, +] +`; + +exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window YTD without date range 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 2, + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 3, + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 5, + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 6, + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 11, + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 18, + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 18, + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 18, + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 24, + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 28, + }, + Object { + "BigECommerce.orderDate": 
"2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 37, + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountYTD": 44, + }, +] +`; + exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window by 2 day 1`] = ` Array [ Object { @@ -6558,6 +6688,71 @@ Array [ ] `; +exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window by 2 day without date range 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": 1, + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, +] +`; + exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window by 2 month 1`] = ` Array [ Object { @@ -6623,6 +6818,71 @@ Array [ ] `; +exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window by 2 month without date range 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 2, + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 3, + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 3, + 
}, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 3, + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 6, + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 12, + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 7, + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": null, + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 6, + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 10, + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 13, + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": 16, + }, +] +`; + exports[`Queries with the @cubejs-backend/bigquery-driver querying BigECommerce: rolling window by 2 week 1`] = ` Array [ Object { diff --git a/packages/cubejs-testing-drivers/test/__snapshots__/postgres-full.test.ts.snap b/packages/cubejs-testing-drivers/test/__snapshots__/postgres-full.test.ts.snap index 37a07089b1e93..bac5be410dc7a 100644 --- a/packages/cubejs-testing-drivers/test/__snapshots__/postgres-full.test.ts.snap +++ b/packages/cubejs-testing-drivers/test/__snapshots__/postgres-full.test.ts.snap @@ -11365,6 +11365,136 @@ Array [ ] `; +exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window YTD 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "2", + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "3", + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "5", + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "6", + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "11", + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "18", + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "18", + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": 
"2020-08-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "18", + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "24", + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "28", + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "37", + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "44", + }, +] +`; + +exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window YTD without date range 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "2", + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "3", + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "5", + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "6", + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "11", + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "18", + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "18", + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "18", + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "24", + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "28", + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "37", + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountYTD": "44", + }, +] +`; + exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window by 2 day 1`] = ` Array [ Object { @@ -11430,6 +11560,71 @@ Array [ ] `; +exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window by 2 day without date range 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + 
"BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": "1", + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountBy2Day": null, + }, +] +`; + exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window by 2 month 1`] = ` Array [ Object { @@ -11495,6 +11690,71 @@ Array [ ] `; +exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window by 2 month without date range 1`] = ` +Array [ + Object { + "BigECommerce.orderDate": "2020-01-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-01-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "2", + }, + Object { + "BigECommerce.orderDate": "2020-02-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-02-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "3", + }, + Object { + "BigECommerce.orderDate": "2020-03-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-03-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "3", + }, + Object { + "BigECommerce.orderDate": "2020-04-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-04-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "3", + }, + Object { + "BigECommerce.orderDate": "2020-05-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-05-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "6", + }, + Object { + "BigECommerce.orderDate": "2020-06-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-06-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "12", + }, + Object { + "BigECommerce.orderDate": "2020-07-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-07-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "7", + }, + Object { + "BigECommerce.orderDate": "2020-08-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-08-01T00:00:00.000", + 
"BigECommerce.rollingCountBy2Month": null, + }, + Object { + "BigECommerce.orderDate": "2020-09-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-09-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "6", + }, + Object { + "BigECommerce.orderDate": "2020-10-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-10-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "10", + }, + Object { + "BigECommerce.orderDate": "2020-11-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-11-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "13", + }, + Object { + "BigECommerce.orderDate": "2020-12-01T00:00:00.000", + "BigECommerce.orderDate.month": "2020-12-01T00:00:00.000", + "BigECommerce.rollingCountBy2Month": "16", + }, +] +`; + exports[`Queries with the @cubejs-backend/postgres-driver querying BigECommerce: rolling window by 2 week 1`] = ` Array [ Object { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs index 99a5463bca518..6633f487858e4 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs @@ -51,7 +51,9 @@ pub trait BaseTools { fn get_allocated_params(&self) -> Result, CubeError>; fn subtract_interval(&self, date: String, interval: String) -> Result; fn add_interval(&self, date: String, interval: String) -> Result; + fn add_timestamp_interval(&self, date: String, interval: String) -> Result; fn all_cube_members(&self, path: String) -> Result, CubeError>; + fn interval_and_minimal_time_unit(&self, interval: String) -> Result, CubeError>; //===== TODO Move to templates fn hll_init(&self, sql: String) -> Result; fn hll_merge(&self, sql: String) -> Result; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/cube_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/cube_definition.rs index 075b0589675f3..03b7331893578 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/cube_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/cube_definition.rs @@ -14,6 +14,8 @@ pub struct CubeDefinitionStatic { pub name: String, #[serde(rename = "sqlAlias")] pub sql_alias: Option, + #[serde(rename = "isView")] + pub is_view: Option, } #[nativebridge::native_bridge(CubeDefinitionStatic)] diff --git a/rust/cubesqlplanner/cubesqlplanner/src/physical_plan_builder/builder.rs b/rust/cubesqlplanner/cubesqlplanner/src/physical_plan_builder/builder.rs index ecb7539fcc9e6..8e368689f19b3 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/physical_plan_builder/builder.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/physical_plan_builder/builder.rs @@ -53,7 +53,7 @@ impl PhysicalPlanBuilderContext { pub struct PhysicalPlanBuilder { query_tools: Rc, - plan_sql_templates: PlanSqlTemplates, + _plan_sql_templates: PlanSqlTemplates, } impl PhysicalPlanBuilder { @@ -61,7 +61,7 @@ impl PhysicalPlanBuilder { let plan_sql_templates = query_tools.plan_sql_templates(); Self { query_tools, - plan_sql_templates, + _plan_sql_templates: plan_sql_templates, } } @@ -463,14 +463,18 @@ impl PhysicalPlanBuilder { .collect_vec(); let on = JoinCondition::new_dimension_join(conditions, true); let next_alias = format!("q_{}", i); + + join_builder.inner_join_source(join.clone(), next_alias, on); + + /* TODO: Full join fails even in BigQuery, where it’s theoretically supported. Disabled for now — needs investigation. 
if full_key_aggregate.use_full_join_and_coalesce - && self.plan_sql_templates.supports_full_join() - { - join_builder.full_join_source(join.clone(), next_alias, on); - } else { - // TODO in case of full join is not supported there should be correct blending query that keeps NULL values - join_builder.inner_join_source(join.clone(), next_alias, on); - } + && self.plan_sql_templates.supports_full_join() + { + join_builder.full_join_source(join.clone(), next_alias, on); + } else { + // TODO in case of full join is not supported there should be correct blending query that keeps NULL values + join_builder.inner_join_source(join.clone(), next_alias, on); + } */ } let result = From::new_from_join(join_builder.build()); @@ -948,13 +952,13 @@ impl PhysicalPlanBuilder { select_builder.add_projection_function_expression( "MAX", args.clone(), - "date_to".to_string(), + "max_date".to_string(), ); select_builder.add_projection_function_expression( "MIN", args.clone(), - "date_from".to_string(), + "min_date".to_string(), ); context_factory.set_render_references(render_references); let select = Rc::new(select_builder.build(context_factory)); @@ -980,7 +984,9 @@ impl PhysicalPlanBuilder { let templates = self.query_tools.plan_sql_templates(); - let ts_date_range = if templates.supports_generated_time_series() { + let ts_date_range = if templates.supports_generated_time_series() + && granularity_obj.is_predefined_granularity() + { if let Some(date_range) = time_dimension_symbol .get_range_for_time_series(date_range, self.query_tools.timezone())? { @@ -998,8 +1004,8 @@ impl PhysicalPlanBuilder { if let Some(date_range) = &time_series.date_range { TimeSeriesDateRange::Filter(date_range[0].clone(), date_range[1].clone()) } else { - return Err(CubeError::internal( - "Date range is required for time series without date range".to_string(), + return Err(CubeError::user( + "Date range is required for time series".to_string(), )); } }; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs b/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs index f48dbfff2300b..5661affe94f59 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs @@ -1,9 +1,5 @@ use super::{Schema, SchemaColumn}; -use crate::planner::{ - query_tools::QueryTools, - sql_templates::{PlanSqlTemplates, TemplateProjectionColumn}, - Granularity, -}; +use crate::planner::{query_tools::QueryTools, sql_templates::PlanSqlTemplates, Granularity}; use cubenativeutils::CubeError; use std::rc::Rc; @@ -44,56 +40,44 @@ impl TimeSeries { } pub fn to_sql(&self, templates: &PlanSqlTemplates) -> Result { - if templates.supports_generated_time_series() { - let (from_date, to_date) = match &self.date_range { + if templates.supports_generated_time_series() + && self.granularity.is_predefined_granularity() + { + let interval_description = templates + .base_tools() + .interval_and_minimal_time_unit(self.granularity.granularity_interval().clone())?; + if interval_description.len() != 2 { + return Err(CubeError::internal( + "Interval description must have 2 elements".to_string(), + )); + } + let interval = interval_description[0].clone(); + let minimal_time_unit = interval_description[1].clone(); + match &self.date_range { TimeSeriesDateRange::Filter(from_date, to_date) => { - (format!("'{}'", from_date), format!("'{}'", to_date)) + let from_date = format!("'{}'", from_date); + let to_date = format!("'{}'", to_date); + + templates.generated_time_series_select( + 
&from_date, + &to_date, + &interval, + &self.granularity.granularity_offset(), + &minimal_time_unit, + ) } TimeSeriesDateRange::Generated(cte_name) => { - let date_from_name = format!("date_from"); - let date_to_name = format!("date_to"); - let from_column = TemplateProjectionColumn { - expr: date_from_name.clone(), - alias: date_from_name.clone(), - aliased: templates.column_aliased(&date_from_name, &date_from_name)?, - }; - let to_column = TemplateProjectionColumn { - expr: date_to_name.clone(), - alias: date_to_name.clone(), - aliased: templates.column_aliased(&date_to_name, &date_to_name)?, - }; - let from = templates.select( - vec![], - &cte_name, - vec![from_column], - None, - vec![], - None, - vec![], - None, - None, - false, - )?; - let to = templates.select( - vec![], + let min_date_name = format!("min_date"); + let max_date_name = format!("max_date"); + templates.generated_time_series_with_cte_range_source( &cte_name, - vec![to_column], - None, - vec![], - None, - vec![], - None, - None, - false, - )?; - (format!("({})", from), format!("({})", to)) + &min_date_name, + &max_date_name, + &interval, + &minimal_time_unit, + ) } - }; - templates.generated_time_series_select( - &from_date, - &to_date, - &self.granularity.granularity_interval(), - ) + } } else { let (from_date, to_date, raw_from_date, raw_to_date) = match &self.date_range { TimeSeriesDateRange::Filter(from_date, to_date) => ( diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs index bbc1118beabd0..300a8798e0ab3 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs @@ -123,12 +123,12 @@ impl BaseDimension { member_expression_definition: Option, query_tools: Rc, ) -> Result, CubeError> { - let member_expression_symbol = MemberExpressionSymbol::new( + let member_expression_symbol = MemberExpressionSymbol::try_new( cube_name.clone(), name.clone(), expression, member_expression_definition.clone(), - ); + )?; let full_name = member_expression_symbol.full_name(); let member_evaluator = Rc::new(MemberSymbol::MemberExpression(member_expression_symbol)); let default_alias = PlanSqlTemplates::alias_name(&name); diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_measure.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_measure.rs index 00591bce13405..4c14fb5560ba3 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_measure.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_measure.rs @@ -1,9 +1,7 @@ use super::query_tools::QueryTools; use super::sql_evaluator::{MeasureTimeShift, MemberExpressionSymbol, MemberSymbol, SqlCall}; use super::{evaluate_with_context, BaseMember, BaseMemberHelper, VisitorContext}; -use crate::cube_bridge::measure_definition::{ - MeasureDefinition, RollingWindow, TimeShiftReference, -}; +use crate::cube_bridge::measure_definition::{MeasureDefinition, RollingWindow}; use crate::planner::sql_templates::PlanSqlTemplates; use cubenativeutils::CubeError; use std::fmt::{Debug, Formatter}; @@ -135,12 +133,12 @@ impl BaseMeasure { member_expression_definition: Option, query_tools: Rc, ) -> Result, CubeError> { - let member_expression_symbol = MemberExpressionSymbol::new( + let member_expression_symbol = MemberExpressionSymbol::try_new( cube_name.clone(), name.clone(), expression, member_expression_definition.clone(), - ); + )?; let full_name = member_expression_symbol.full_name(); let 
member_evaluator = Rc::new(MemberSymbol::MemberExpression(member_expression_symbol)); let default_alias = PlanSqlTemplates::alias_name(&name); @@ -210,12 +208,6 @@ impl BaseMeasure { } } - pub fn time_shift_references(&self) -> Option> { - self.definition - .as_ref() - .map_or(None, |d| d.static_data().time_shift_references.clone()) - } - pub fn time_shifts(&self) -> Vec { match self.member_evaluator.as_ref() { MemberSymbol::Measure(measure_symbol) => measure_symbol.time_shifts().clone(), diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs index 30cae7018895d..7351e9693ce12 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs @@ -63,6 +63,36 @@ impl BaseMember for BaseTimeDimension { } impl BaseTimeDimension { + pub fn try_new_from_td_symbol( + query_tools: Rc, + td_symbol: &TimeDimensionSymbol, + ) -> Result, CubeError> { + let dimension = + BaseDimension::try_new_required(td_symbol.base_symbol().clone(), query_tools.clone())?; + let granularity = td_symbol.granularity().clone(); + let granularity_obj = td_symbol.granularity_obj().clone(); + let date_range = td_symbol.date_range_vec(); + let alias_suffix = td_symbol.alias_suffix(); + let default_alias = BaseMemberHelper::default_alias( + &dimension.cube_name(), + &dimension.name(), + &Some(alias_suffix.clone()), + query_tools.clone(), + )?; + let member_evaluator = Rc::new(MemberSymbol::TimeDimension(td_symbol.clone())); + + Ok(Rc::new(Self { + dimension, + query_tools, + granularity, + granularity_obj, + date_range, + alias_suffix, + default_alias, + member_evaluator, + })) + } + pub fn try_new_required( query_tools: Rc, member_evaluator: Rc, @@ -176,7 +206,7 @@ impl BaseTimeDimension { self.date_range.clone() } - pub fn get_range_for_time_series(&self) -> Result, CubeError> { + pub fn get_range_for_time_series(&self) -> Result>, CubeError> { let res = if let Some(date_range) = &self.date_range { if date_range.len() != 2 { return Err(CubeError::user(format!( @@ -191,12 +221,12 @@ impl BaseTimeDimension { let start = granularity_obj.align_date_to_origin(start)?; let end = QueryDateTime::from_date_str(tz, &date_range[1])?; - Some((start.to_string(), end.to_string())) + Some(vec![start.to_string(), end.to_string()]) } else { - Some((date_range[0].clone(), date_range[1].clone())) + Some(vec![date_range[0].clone(), date_range[1].clone()]) } } else { - Some((date_range[0].clone(), date_range[1].clone())) + Some(vec![date_range[0].clone(), date_range[1].clone()]) } } } else { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/filter/base_segment.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/filter/base_segment.rs index 865f7ddcf676d..9babde4932b8d 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/filter/base_segment.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/filter/base_segment.rs @@ -28,7 +28,7 @@ impl BaseSegment { query_tools: Rc, ) -> Result, CubeError> { let member_expression_symbol = - MemberExpressionSymbol::new(cube_name.clone(), name.clone(), expression, None); + MemberExpressionSymbol::try_new(cube_name.clone(), name.clone(), expression, None)?; let full_name = full_name.unwrap_or(member_expression_symbol.full_name()); let member_evaluator = Rc::new(MemberSymbol::MemberExpression(member_expression_symbol)); diff --git 
a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs index 91d29cc455d17..9c556e51604cd 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs @@ -117,6 +117,10 @@ impl MultiStageAppliedState { self.time_dimensions = time_dimensions; } + pub fn set_dimensions(&mut self, dimensions: Vec>) { + self.dimensions = dimensions; + } + pub fn remove_filter_for_member(&mut self, member_name: &String) { self.time_dimensions_filters = self.extract_filters_exclude_member(member_name, &self.time_dimensions_filters); diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/multi_stage_query_planner.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/multi_stage_query_planner.rs index 22c5e184b4861..1ff72801dd3c2 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/multi_stage_query_planner.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/multi_stage_query_planner.rs @@ -171,6 +171,7 @@ impl MultiStageQueryPlanner { state: Rc, descriptions: &mut Vec>, ) -> Result, CubeError> { + let member = member.resolve_reference_chain(); let member_name = member.full_name(); if let Some(exists) = descriptions .iter() @@ -187,7 +188,7 @@ impl MultiStageQueryPlanner { return Ok(rolling_window_query); } - let childs = member_childs(&member)?; + let childs = member_childs(&member, true)?; let has_multi_stage_members = has_multi_stage_members(&member, false)?; let description = if childs.is_empty() || !has_multi_stage_members { if has_multi_stage_members { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs index 094f5a64cf6dc..bc705cce3139f 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs @@ -51,7 +51,18 @@ impl RollingWindowPlanner { } _ => false, }; - let time_dimensions = self.query_properties.time_dimensions(); + let mut time_dimensions = self.query_properties.time_dimensions().clone(); + for dim in self.query_properties.dimension_symbols() { + let dim = dim.resolve_reference_chain(); + if let Ok(time_dimension_symbol) = dim.as_time_dimension() { + let time_dimension = BaseTimeDimension::try_new_from_td_symbol( + self.query_tools.clone(), + time_dimension_symbol, + )?; + time_dimensions.push(time_dimension); + } + } + if time_dimensions.len() == 0 { let rolling_base = self.add_rolling_window_base( member.clone(), @@ -358,6 +369,19 @@ impl RollingWindowPlanner { //We keep only one time_dimension in the leaf query because, even if time_dimension values have different granularity, in the leaf query we need to group by the lowest granularity. new_state.set_time_dimensions(vec![new_time_dimension.clone()]); + let dimensions = new_state + .dimensions() + .clone() + .into_iter() + .filter(|d| { + d.member_evaluator() + .resolve_reference_chain() + .as_time_dimension() + .is_err() + }) + .collect_vec(); + new_state.set_dimensions(dimensions); + if let Some(granularity) = self.get_to_date_rolling_granularity(rolling_window)? 
{ new_state.replace_to_date_date_range_filter(&time_dimension_base_name, &granularity); } else { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/member_childs_collector.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/member_childs_collector.rs index 2b8d955e0e1a1..002de18a005b2 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/member_childs_collector.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/member_childs_collector.rs @@ -1,5 +1,6 @@ use crate::planner::sql_evaluator::{MemberSymbol, TraversalVisitor}; use cubenativeutils::CubeError; +use itertools::Itertools; use std::rc::Rc; pub struct MemberChildsCollector { @@ -56,8 +57,19 @@ impl TraversalVisitor for MemberChildsCollector { } } -pub fn member_childs(node: &Rc) -> Result>, CubeError> { +pub fn member_childs( + node: &Rc, + resolve_references: bool, +) -> Result>, CubeError> { let mut visitor = MemberChildsCollector::new(); visitor.apply(node, &MemberChildsCollectorState::new(true))?; - Ok(visitor.extract_result()) + let res = visitor.extract_result(); + let res = if resolve_references { + res.iter() + .map(|child| child.clone().resolve_reference_chain()) + .collect_vec() + } else { + res + }; + Ok(res) } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/multiplied_measures_collector.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/multiplied_measures_collector.rs index b2e294cff518f..4678927fdc31e 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/multiplied_measures_collector.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/collectors/multiplied_measures_collector.rs @@ -122,6 +122,7 @@ impl TraversalVisitor for MultipliedMeasuresCollector { None } } + MemberSymbol::MemberExpression(_) => Some(()), MemberSymbol::Dimension(_) => None, _ => None, }; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs index 4afbacb1d699d..80132e0615f34 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs @@ -329,6 +329,8 @@ impl SqlCall { let arg = MemberSqlArg::String(visitor.apply(&v, node_processor.clone(), templates)?); res.properties.insert(k.clone(), arg); } + let string_fn = visitor.apply(&dep.base_symbol, node_processor.clone(), templates)?; + res.to_string_fn = Some(string_fn); Ok(MemberSqlArg::Struct(res)) } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/evaluate_sql.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/evaluate_sql.rs index ff3b92f56615c..cb5eb1e6a84aa 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/evaluate_sql.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/evaluate_sql.rs @@ -24,7 +24,7 @@ impl SqlNode for EvaluateSqlNode { node_processor: Rc, templates: &PlanSqlTemplates, ) -> Result { - match node.as_ref() { + let res = match node.as_ref() { MemberSymbol::Dimension(ev) => { let res = ev.evaluate_sql( visitor, @@ -81,13 +81,17 @@ impl SqlNode for EvaluateSqlNode { templates, ), MemberSymbol::CubeName(ev) => ev.evaluate_sql(), - MemberSymbol::MemberExpression(e) => e.evaluate_sql( - visitor, - node_processor.clone(), - 
query_tools.clone(), - templates, - ), - } + MemberSymbol::MemberExpression(e) => { + let res = e.evaluate_sql( + visitor, + node_processor.clone(), + query_tools.clone(), + templates, + )?; + Ok(res) + } + }?; + Ok(res) } fn as_any(self: Rc) -> Rc { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_shift.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_shift.rs index 5a29cff9b43b3..5b3f81fc3825f 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_shift.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_shift.rs @@ -47,7 +47,10 @@ impl SqlNode for TimeShiftSqlNode { MemberSymbol::Dimension(ev) => { if let Some(shift) = self.shifts.get(&ev.full_name()) { let shift = shift.interval.to_sql(); - format!("({input} + interval '{shift}')") + let res = templates + .base_tools() + .add_timestamp_interval(input, shift)?; + format!("({})", res) } else { input } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/dimension_symbol.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/dimension_symbol.rs index c624ec87630b5..f3af88c124914 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/dimension_symbol.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/dimension_symbol.rs @@ -116,6 +116,17 @@ impl DimensionSymbol { self.is_reference } + pub fn reference_member(&self) -> Option> { + if !self.is_reference() { + return None; + } + let deps = self.get_dependencies(); + if deps.is_empty() { + return None; + } + deps.first().cloned() + } + pub fn get_dependencies(&self) -> Vec> { let mut deps = vec![]; if let Some(member_sql) = &self.member_sql { @@ -192,6 +203,7 @@ pub struct DimensionSymbolFactory { name: String, sql: Option>, definition: Rc, + cube_evaluator: Rc, } impl DimensionSymbolFactory { @@ -210,6 +222,7 @@ impl DimensionSymbolFactory { name, sql: definition.sql()?, definition, + cube_evaluator, }) } } @@ -241,6 +254,7 @@ impl SymbolFactory for DimensionSymbolFactory { name, sql, definition, + cube_evaluator, } = self; let sql = if let Some(sql) = sql { Some(compiler.compile_sql_call(&cube_name, sql)?) 
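Note: the `reference_member()` accessor added in this file, together with `MemberSymbol::resolve_reference_chain()` in the member_symbol.rs hunk further below, lets the planner look through members that are plain references (for example, view members that just point at cube members) to the symbol that ultimately owns the SQL. The following is a minimal, self-contained sketch of that pattern using simplified stand-in types; it is illustrative only and does not reproduce the crate's actual `MemberSymbol` API:

```rust
use std::rc::Rc;

// Simplified stand-in for MemberSymbol: a "reference" symbol proxies exactly
// one underlying symbol, while a concrete cube member has no reference.
struct Symbol {
    name: String,
    reference: Option<Rc<Symbol>>,
}

impl Symbol {
    // Mirrors reference_member(): Some(dep) only when the symbol is a reference.
    fn reference_member(&self) -> Option<Rc<Symbol>> {
        self.reference.clone()
    }

    // Mirrors MemberSymbol::resolve_reference_chain(): follow references
    // until a non-reference symbol is reached.
    fn resolve_reference_chain(self: Rc<Self>) -> Rc<Self> {
        let mut current = self;
        while let Some(next) = current.reference_member() {
            current = next;
        }
        current
    }
}

fn main() {
    let cube_dim = Rc::new(Symbol {
        name: "orders.created_at".to_string(),
        reference: None,
    });
    let view_dim = Rc::new(Symbol {
        name: "orders_view.created_at".to_string(),
        reference: Some(cube_dim.clone()),
    });
    // The view-level proxy resolves to the cube member it points at.
    assert_eq!(view_dim.resolve_reference_chain().name, "orders.created_at");
}
```

This is the behaviour the changes above rely on: `member_childs(&member, true)` and the rolling-window planner resolve reference chains first, so they plan against the underlying cube members rather than the view-level proxies.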
@@ -299,16 +313,19 @@ impl SymbolFactory for DimensionSymbolFactory { } else { None }; + let cube = cube_evaluator.cube_from_path(cube_name.clone())?; + let is_view = cube.static_data().is_view.unwrap_or(false); let owned_by_cube = definition.static_data().owned_by_cube.unwrap_or(true); let is_sub_query = definition.static_data().sub_query.unwrap_or(false); let is_multi_stage = definition.static_data().multi_stage.unwrap_or(false); - let is_reference = !owned_by_cube - && !is_sub_query - && is_sql_direct_ref - && case.is_none() - && latitude.is_none() - && longitude.is_none() - && !is_multi_stage; + let is_reference = is_view + || (!owned_by_cube + && !is_sub_query + && is_sql_direct_ref + && case.is_none() + && latitude.is_none() + && longitude.is_none() + && !is_multi_stage); Ok(MemberSymbol::new_dimension(DimensionSymbol::new( cube_name, name, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/measure_symbol.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/measure_symbol.rs index 6b85e2e9646e6..e61d7baaf7259 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/measure_symbol.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/measure_symbol.rs @@ -235,6 +235,17 @@ impl MeasureSymbol { self.is_reference } + pub fn reference_member(&self) -> Option> { + if !self.is_reference() { + return None; + } + let deps = self.get_dependencies(); + if deps.is_empty() { + return None; + } + deps.first().cloned() + } + pub fn measure_type(&self) -> &String { &self.definition.static_data().measure_type } @@ -492,18 +503,22 @@ impl SymbolFactory for MeasureSymbolFactory { && !definition.static_data().multi_stage.unwrap_or(false); let owned_by_cube = definition.static_data().owned_by_cube.unwrap_or(true); let is_multi_stage = definition.static_data().multi_stage.unwrap_or(false); - - let is_reference = !owned_by_cube - && is_sql_is_direct_ref - && is_calculated - && !is_multi_stage - && measure_filters.is_empty() - && measure_drill_filters.is_empty() - && time_shifts.is_empty() - && measure_order_by.is_empty() - && reduce_by.is_none() - && add_group_by.is_none() - && group_by.is_none(); + let cube = cube_evaluator.cube_from_path(cube_name.clone())?; + + let is_view = cube.static_data().is_view.unwrap_or(false); + + let is_reference = is_view + || (!owned_by_cube + && is_sql_is_direct_ref + && is_calculated + && !is_multi_stage + && measure_filters.is_empty() + && measure_drill_filters.is_empty() + && time_shifts.is_empty() + && measure_order_by.is_empty() + && reduce_by.is_none() + && add_group_by.is_none() + && group_by.is_none()); Ok(MemberSymbol::new_measure(MeasureSymbol::new( cube_name, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_expression_symbol.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_expression_symbol.rs index fe68bac959982..90029b39f4d75 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_expression_symbol.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_expression_symbol.rs @@ -11,21 +11,24 @@ pub struct MemberExpressionSymbol { expression: Rc, #[allow(dead_code)] definition: Option, + is_reference: bool, } impl MemberExpressionSymbol { - pub fn new( + pub fn try_new( cube_name: String, name: String, expression: Rc, definition: Option, - ) -> Self { - Self { + ) -> Result { + let is_reference = expression.is_direct_reference()?; + 
Ok(Self { cube_name, name, expression, definition, - } + is_reference, + }) } pub fn evaluate_sql( @@ -50,7 +53,18 @@ impl MemberExpressionSymbol { } pub fn is_reference(&self) -> bool { - false + self.is_reference + } + + pub fn reference_member(&self) -> Option> { + if !self.is_reference() { + return None; + } + let deps = self.get_dependencies(); + if deps.is_empty() { + return None; + } + deps.first().cloned() } pub fn get_dependencies(&self) -> Vec> { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_symbol.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_symbol.rs index 4fe3d939836c5..3f94fd68e65f9 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_symbol.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/member_symbol.rs @@ -136,6 +136,25 @@ impl MemberSymbol { } } + pub fn reference_member(&self) -> Option> { + match self { + Self::Dimension(d) => d.reference_member(), + Self::TimeDimension(d) => d.reference_member(), + Self::Measure(m) => m.reference_member(), + Self::CubeName(_) => None, + Self::CubeTable(_) => None, + Self::MemberExpression(e) => e.reference_member(), + } + } + + pub fn resolve_reference_chain(self: Rc) -> Rc { + let mut current = self; + while let Some(reference) = current.reference_member() { + current = reference; + } + current + } + pub fn owned_by_cube(&self) -> bool { match self { Self::Dimension(d) => d.owned_by_cube(), diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs index 3630a7e313cf5..407877faf89dc 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs @@ -68,6 +68,10 @@ impl TimeDimensionSymbol { self.base_symbol.owned_by_cube() } + pub fn date_range_vec(&self) -> Option> { + self.date_range.clone().map(|(from, to)| vec![from, to]) + } + pub fn get_dependencies_as_time_dimensions(&self) -> Vec> { self.get_dependencies() .into_iter() @@ -106,6 +110,20 @@ impl TimeDimensionSymbol { self.base_symbol.is_reference() } + pub fn reference_member(&self) -> Option> { + if let Some(base_symbol) = self.base_symbol.clone().reference_member() { + let new_time_dim = Self::new( + base_symbol, + self.granularity.clone(), + self.granularity_obj.clone(), + self.date_range.clone(), + ); + Some(Rc::new(MemberSymbol::TimeDimension(new_time_dim))) + } else { + None + } + } + pub fn name(&self) -> String { self.base_symbol.name() } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs index daeeb6192dfef..7ca66569abd7a 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs @@ -13,7 +13,7 @@ pub struct PlanSqlTemplates { base_tools: Rc, } pub const UNDERSCORE_UPPER_BOUND: Boundary = Boundary { - name: "LowerUpper", + name: "UnderscoreUpper", condition: |s, _| { s.get(0) == Some(&"_") && s.get(1) @@ -25,6 +25,21 @@ pub const UNDERSCORE_UPPER_BOUND: Boundary = Boundary { len: 0, }; +fn grapheme_is_uppercase(c: &&str) -> bool { + c.to_uppercase() != c.to_lowercase() && *c == c.to_uppercase() +} + +pub const UPPER_UPPER_BOUND: Boundary = Boundary 
{ + name: "UpperUpper", + condition: |s, _| { + s.get(0).map(grapheme_is_uppercase) == Some(true) + && s.get(1).map(grapheme_is_uppercase) == Some(true) + }, + arg: None, + start: 1, + len: 0, +}; + impl PlanSqlTemplates { pub fn new(render: Rc, base_tools: Rc) -> Self { Self { render, base_tools } @@ -34,6 +49,7 @@ impl PlanSqlTemplates { let res = name .with_boundaries(&[ UNDERSCORE_UPPER_BOUND, + UPPER_UPPER_BOUND, Boundary::LOWER_UPPER, Boundary::DIGIT_UPPER, Boundary::ACRONYM, @@ -343,10 +359,31 @@ impl PlanSqlTemplates { start: &str, end: &str, granularity: &str, + granularity_offset: &Option, + minimal_time_unit: &str, ) -> Result { self.render.render_template( "statements/generated_time_series_select", - context! { start => start, end => end, granularity => granularity }, + context! { start => start, end => end, granularity => granularity, granularity_offset => granularity_offset, minimal_time_unit => minimal_time_unit }, + ) + } + pub fn generated_time_series_with_cte_range_source( + &self, + range_source: &str, + min_name: &str, + max_name: &str, + granularity: &str, + minimal_time_unit: &str, + ) -> Result { + self.render.render_template( + "statements/generated_time_series_with_cte_range_source", + context! { + range_source => range_source, + min_name => min_name, + max_name => max_name, + granularity => granularity, + minimal_time_unit => minimal_time_unit, + }, ) } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs index 963ea1e580b00..b3d120632acef 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs @@ -4,7 +4,7 @@ use cubenativeutils::CubeError; use itertools::Itertools; use std::str::FromStr; -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct Granularity { granularity: String, granularity_interval: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs index fbe0568b9e5e4..1ebb228dc44b4 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs @@ -61,25 +61,25 @@ impl SqlInterval { pub fn to_sql(&self) -> String { let mut res = vec![]; if self.year != 0 { - res.push(format!("{} YEAR", self.year)); + res.push(format!("{} year", self.year)); } if self.month != 0 { - res.push(format!("{} MONTH", self.month)); + res.push(format!("{} montH", self.month)); } if self.week != 0 { - res.push(format!("{} WEEK", self.week)); + res.push(format!("{} week", self.week)); } if self.day != 0 { - res.push(format!("{} DAY", self.day)); + res.push(format!("{} day", self.day)); } if self.hour != 0 { - res.push(format!("{} HOUR", self.hour)); + res.push(format!("{} hour", self.hour)); } if self.minute != 0 { - res.push(format!("{} MINUTE", self.minute)); + res.push(format!("{} minute", self.minute)); } if self.second != 0 { - res.push(format!("{} SECOND", self.second)); + res.push(format!("{} second", self.second)); } res.join(" ") }