Merged
Changes from 3 commits
2 changes: 1 addition & 1 deletion packages/cubejs-backend-shared/src/time.ts
@@ -107,7 +107,7 @@ export function parseSqlInterval(intervalStr: SqlInterval): ParsedInterval {
const unit = parts[i + 1];

// Remove ending 's' (e.g., 'days' -> 'day')
const singularUnit = (unit.endsWith('s') ? unit.slice(0, -1) : unit) as unitOfTime.DurationConstructor;
const singularUnit = ((unit.endsWith('s') ? unit.slice(0, -1) : unit).toLowerCase()) as unitOfTime.DurationConstructor;
interval[singularUnit] = value;
}

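Note: the fix above normalizes the unit token's case before it is used as a moment `DurationConstructor`, so intervals like `2 MONTHS` or `1 Hour` parse the same as their lowercase forms (the shipped strip-then-lowercase order is safe because moment accepts both singular and plural unit names). A simplified standalone sketch of the normalization, with hypothetical helper names:

```ts
// Hypothetical standalone sketch; the real implementation is
// parseSqlInterval in @cubejs-backend/shared.
function normalizeUnit(unit: string): string {
  const lower = unit.toLowerCase();
  // 'DAYS' -> 'days' -> 'day'; 'Hour' -> 'hour'
  return lower.endsWith('s') ? lower.slice(0, -1) : lower;
}

function parseSqlIntervalSketch(intervalStr: string): Record<string, number> {
  const parts = intervalStr.trim().split(/\s+/);
  const parsed: Record<string, number> = {};
  for (let i = 0; i < parts.length; i += 2) {
    parsed[normalizeUnit(parts[i + 1])] = parseInt(parts[i], 10);
  }
  return parsed;
}

// parseSqlIntervalSketch('2 MONTHS 3 Days') -> { month: 2, day: 3 }
```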
2 changes: 0 additions & 2 deletions packages/cubejs-schema-compiler/src/adapter/BaseQuery.js
@@ -2048,7 +2048,6 @@ export class BaseQuery {
* Returns a tuple: (formatted interval, minimal time unit)
*/
intervalAndMinimalTimeUnit(interval) {
const intervalParsed = parseSqlInterval(interval);
const minGranularity = this.diffTimeUnitForInterval(interval);
return [interval, minGranularity];
}
@@ -4138,7 +4137,6 @@
sort: '{{ expr }} {% if asc %}ASC{% else %}DESC{% endif %} NULLS {% if nulls_first %}FIRST{% else %}LAST{% endif %}',
order_by: '{% if index %} {{ index }} {% else %} {{ expr }} {% endif %} {% if asc %}ASC{% else %}DESC{% endif %}{% if nulls_first %} NULLS FIRST{% endif %}',
cast: 'CAST({{ expr }} AS {{ data_type }})',
cast_to_string: 'CAST({{ expr }} AS TEXT)',
window_function: '{{ fun_call }} OVER ({% if partition_by_concat %}PARTITION BY {{ partition_by_concat }}{% if order_by_concat or window_frame %} {% endif %}{% endif %}{% if order_by_concat %}ORDER BY {{ order_by_concat }}{% if window_frame %} {% endif %}{% endif %}{% if window_frame %}{{ window_frame }}{% endif %})',
window_frame_bounds: '{{ frame_type }} BETWEEN {{ frame_start }} AND {{ frame_end }}',
in_list: '{{ expr }} {% if negated %}NOT {% endif %}IN ({{ in_exprs_concat }})',
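Note: the dedicated `cast_to_string` template is dropped here (and from BigqueryQuery below) because the planner now composes the generic `cast` template with the dialect's string type instead; see the `PlanSqlTemplates::cast_to_string` change at the end of this diff. A sketch of that composition, assuming a `render(templateName, context)` helper shaped like the Rust API:

```ts
// Sketch only: compose the generic cast template with the dialect's string
// type ('TEXT' for the base dialect, 'STRING' for BigQuery). The render
// helper is a hypothetical stand-in for the template engine.
function castToString(
  expr: string,
  render: (template: string, ctx: Record<string, string>) => string,
): string {
  const stringType = render('types/string', {});                      // e.g. 'TEXT'
  return render('expressions/cast', { expr, data_type: stringType }); // CAST(expr AS TEXT)
}
```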
@@ -127,10 +127,10 @@ export class BigqueryQuery extends BaseQuery {
return [`'${intervalParsed.hour}:${intervalParsed.minute}:${intervalParsed.second}' HOUR TO SECOND`, 'SECOND'];
} else if (intervalParsed.minute && intervalParsed.second && intKeys === 2) {
return [`'${intervalParsed.minute}:${intervalParsed.second}' MINUTE TO SECOND`, 'SECOND'];
} else if (intervalParsed.millisecond && intKeys === 1) {
Review comment (Member): Did we miss other cases like 10 hour / 11 minute / 25 second?....

return [`'${intervalParsed.millisecond}' MILLISECOND`, 'MILLISECOND'];
}

// No need to support microseconds.

throw new Error(`Cannot transform interval expression "${interval}" to BigQuery dialect`);
}
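Note: for reference, the branches visible above map parsed intervals onto BigQuery's typed interval literals roughly as follows (illustrative inputs):

```ts
// Illustrative mappings (hypothetical inputs):
// parseSqlInterval('10 minute 15 second') => { minute: 10, second: 15 }
//   -> ["'10:15' MINUTE TO SECOND", 'SECOND']
// parseSqlInterval('500 millisecond')     => { millisecond: 500 }
//   -> ["'500' MILLISECOND", 'MILLISECOND']   // the newly added branch
```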

@@ -367,7 +367,6 @@
templates.types.double = 'FLOAT64';
templates.types.decimal = 'BIGDECIMAL({{ precision }},{{ scale }})';
templates.types.binary = 'BYTES';
templates.expressions.cast_to_string = 'CAST({{ expr }} AS STRING)';
templates.operators.is_not_distinct_from = 'IS NOT DISTINCT FROM';
templates.join_types.full = 'FULL';
templates.statements.time_series_select = 'SELECT DATETIME(TIMESTAMP(f)) date_from, DATETIME(TIMESTAMP(t)) date_to \n' +
@@ -81,9 +81,11 @@ export class PostgresQuery extends BaseQuery {
templates.types.float = 'REAL';
templates.types.double = 'DOUBLE PRECISION';
templates.types.binary = 'BYTEA';
templates.tesseract.support_generated_series_for_custom_td = 'YES';
Review comment (Member): Is it a configuration that is put into templates?

templates.operators.is_not_distinct_from = 'IS NOT DISTINCT FROM';
templates.statements.generated_time_series_select = 'SELECT d AS "date_from",\n' +
'd + interval {{ granularity }} - interval \'1 millisecond\' AS "date_to" \n' +
templates.statements.generated_time_series_select = 'SELECT {{ date_from }} AS "date_from",\n' +
'{{ date_to }} AS "date_to" \n' +
// 'd + interval {{ granularity }} - interval \'1 millisecond\' AS "date_to" \n' +
'FROM generate_series({{ start }}::timestamp, {{ end }}:: timestamp, {{ granularity }}::interval) d ';
templates.statements.generated_time_series_with_cte_range_source = 'SELECT d AS "date_from",\n' +
'd + interval {{ granularity }} - interval \'1 millisecond\' AS "date_to" \n' +
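Note: the Postgres `generated_time_series_select` template now receives prebuilt `date_from`/`date_to` expressions from the planner rather than deriving `date_to` inline from `d + interval - '1 millisecond'` (the old line is kept commented out above). With the planner-built expressions substituted, the rendered statement looks roughly like this (illustrative granularity and range; exact quoting and casts come from the template helpers):

```ts
// Roughly the rendered Postgres statement for a '1 day' series over 2020
// (illustrative; the actual expressions are built by the planner further down):
const renderedSeries = `
SELECT "d"::timestamp AS "date_from",
       ("d"::timestamp + interval '1 day') - interval '1 millisecond' AS "date_to"
FROM generate_series('2020-01-01'::timestamp, '2020-12-31'::timestamp, '1 day'::interval) d
`;
```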
@@ -155,6 +155,7 @@ export class PrestodbQuery extends BaseQuery {
// Presto intervals have a YearMonth or DayTime type variants, but no universal type
delete templates.types.interval;
templates.types.binary = 'VARBINARY';
templates.tesseract.support_generated_series_for_custom_td = 'YES';
templates.tesseract.ilike = 'LOWER({{ expr }}) {% if negated %}NOT {% endif %} LIKE {{ pattern }}';
templates.filters.like_pattern = 'CONCAT({% if start_wild %}\'%\'{% else %}\'\'{% endif %}, LOWER({{ value }}), {% if end_wild %}\'%\'{% else %}\'\'{% endif %}) ESCAPE \'\\\'';
templates.statements.time_series_select = 'SELECT from_iso8601_timestamp(dates.f) date_from, from_iso8601_timestamp(dates.t) date_to \n' +
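Note: Presto has no ILIKE operator, so the tesseract `ilike` template above emulates case-insensitive matching by lower-casing both sides. Illustrative renderings with hypothetical values:

```ts
// ilike, expr = customer_name, pattern = :pat, negated = false:
//   LOWER(customer_name) LIKE :pat
// like_pattern, start_wild = true, end_wild = true, value = 'Acme':
//   CONCAT('%', LOWER('Acme'), '%') ESCAPE '\\'
```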
3 changes: 0 additions & 3 deletions packages/cubejs-testing-drivers/fixtures/athena.json
@@ -185,9 +185,6 @@
"querying custom granularities ECommerce: count by three_months_by_march + dimension",
"querying custom granularities (with preaggregation) ECommerce: totalQuantity by half_year + no dimension",
"querying custom granularities (with preaggregation) ECommerce: totalQuantity by half_year + dimension",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByUnbounded",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByTrailing",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading",
"pre-aggregations Customers: running total without time dimension",
"querying BigECommerce: totalProfitYearAgo",
"SQL API: post-aggregate percentage of total",
2 changes: 2 additions & 0 deletions packages/cubejs-testing-drivers/fixtures/bigquery.json
@@ -163,6 +163,7 @@
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"SKIPPED SQL API (Need work)",
@@ -188,6 +189,7 @@
"querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- rounding in athena",
"querying ECommerce: total sales, total profit by month + order (date) + total -- doesn't work with the BigQuery",
"querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",
"querying BigECommerce: null sum",
"querying BigECommerce: null boolean",
"querying BigECommerce: rolling window by 2 day without date range",
1 change: 1 addition & 0 deletions packages/cubejs-testing-drivers/fixtures/clickhouse.json
@@ -200,6 +200,7 @@
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"Custom Granularities ",
@@ -216,6 +216,7 @@
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"Custom Granularities ",
1 change: 1 addition & 0 deletions packages/cubejs-testing-drivers/fixtures/mssql.json
@@ -142,6 +142,7 @@
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"SKIPPED SQL API (Need work)",
1 change: 1 addition & 0 deletions packages/cubejs-testing-drivers/fixtures/mysql.json
@@ -138,6 +138,7 @@
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"Custom Granularities ",
3 changes: 2 additions & 1 deletion packages/cubejs-testing-drivers/fixtures/postgres.json
@@ -162,7 +162,8 @@
"---------------------------------------",
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range"
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range"
],
"tesseractSkip": [
"querying Products: dimensions -- doesn't work wo ordering",
1 change: 1 addition & 0 deletions packages/cubejs-testing-drivers/fixtures/redshift.json
@@ -174,6 +174,7 @@
"querying BigECommerce: rolling window by 2 day without date range",
"querying BigECommerce: rolling window by 2 month without date range",
"querying BigECommerce: rolling window YTD without date range",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"SKIPPED SQL API (Need work) ",
1 change: 1 addition & 0 deletions packages/cubejs-testing-drivers/fixtures/snowflake.json
@@ -232,6 +232,7 @@
"querying ECommerce: total quantity, avg discount, total sales, total profit by product + order + total -- noisy test",
"querying BigECommerce: null sum",
"querying BigECommerce: null boolean",
"querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range",

"---------------------------------------",
"Requires Tesseract. ",
13 changes: 13 additions & 0 deletions packages/cubejs-testing-drivers/src/tests/testQueries.ts
@@ -1974,6 +1974,19 @@ export function testQueries(type: string, { includeIncrementalSchemaSuite, exten
expect(response.rawData()).toMatchSnapshot();
});

execute('querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range', async () => {
const response = await client.load({
measures: [
'ECommerce.rollingCountByLeading',
],
timeDimensions: [{
dimension: 'ECommerce.customOrderDateNoPreAgg',
granularity: 'two_mo_by_feb',
}],
});
expect(response.rawData()).toMatchSnapshot();
});
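Note: this new test pins the previously unsupported case of a leading rolling window over a custom two-month granularity with no explicit date range; in the driver snapshots below, the final buckets come back null because a leading window counts forward and runs out of data at the end of the series. The fixture's measure definition is not part of this diff; a hypothetical shape, for orientation only:

```ts
// Hypothetical shape of the measure under test (names and values are guesses):
const measures = {
  rollingCountByLeading: {
    type: 'count',
    rollingWindow: {
      leading: '2 month', // the window looks forward from each series bucket
      offset: 'start',
    },
  },
};
```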

execute('querying custom granularities (with preaggregation) ECommerce: totalQuantity by half_year + no dimension', async () => {
const response = await client.load({
measures: [
@@ -9761,6 +9761,41 @@ Array [
]
`;

exports[`Queries with the @cubejs-backend/athena-driver querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range 1`] = `
Array [
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-02-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-02-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "12",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-04-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-04-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "6",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-06-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-06-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "19",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-08-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-08-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "16",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-10-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-10-01T10:00:00.000",
"ECommerce.rollingCountByLeading": null,
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-12-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-12-01T10:00:00.000",
"ECommerce.rollingCountByLeading": null,
},
]
`;

exports[`Queries with the @cubejs-backend/athena-driver querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByTrailing 1`] = `
Array [
Object {
@@ -18465,6 +18465,46 @@ Array [
]
`;

exports[`Queries with the @cubejs-backend/postgres-driver querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByLeading without date range 1`] = `
Array [
Object {
"ECommerce.customOrderDateNoPreAgg": "2019-12-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2019-12-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "8",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-02-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-02-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "12",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-04-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-04-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "6",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-06-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-06-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "19",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-08-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-08-01T10:00:00.000",
"ECommerce.rollingCountByLeading": "16",
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-10-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-10-01T10:00:00.000",
"ECommerce.rollingCountByLeading": null,
},
Object {
"ECommerce.customOrderDateNoPreAgg": "2020-12-01T10:00:00.000",
"ECommerce.customOrderDateNoPreAgg.two_mo_by_feb": "2020-12-01T10:00:00.000",
"ECommerce.rollingCountByLeading": null,
},
]
`;

exports[`Queries with the @cubejs-backend/postgres-driver querying custom granularities ECommerce: count by two_mo_by_feb + no dimension + rollingCountByTrailing 1`] = `
Array [
Object {
@@ -965,8 +965,9 @@ impl PhysicalPlanBuilder {
));
};

let ts_date_range = if self.plan_sql_templates.supports_generated_time_series()
&& granularity_obj.is_predefined_granularity()
let ts_date_range = if self
.plan_sql_templates
.supports_generated_time_series(granularity_obj.is_predefined_granularity())
{
if let Some(date_range) = time_dimension_symbol
.get_range_for_time_series(date_range, self.query_tools.timezone())?
31 changes: 19 additions & 12 deletions rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs
@@ -40,28 +40,35 @@ impl TimeSeries {
}

pub fn to_sql(&self, templates: &PlanSqlTemplates) -> Result<String, CubeError> {
if templates.supports_generated_time_series()
&& self.granularity.is_predefined_granularity()
{
if templates.supports_generated_time_series(self.granularity.is_predefined_granularity()) {
let interval_description = templates
.interval_and_minimal_time_unit(self.granularity.granularity_interval().clone())?;
.interval_and_minimal_time_unit(self.granularity.granularity_interval().to_sql())?;
if interval_description.len() != 2 {
return Err(CubeError::internal(
"Interval description must have 2 elements".to_string(),
));
}
let interval = interval_description[0].clone();
let interval = templates.interval_string(interval)?;
let minimal_time_unit = interval_description[1].clone();
match &self.date_range {
TimeSeriesDateRange::Filter(from_date, to_date) => {
let from_date = format!("'{}'", from_date);
let to_date = format!("'{}'", to_date);
let start = templates.quote_string(from_date)?;
let date_field = templates.quote_identifier("d")?;
let date_from = templates.time_stamp_cast(date_field.clone())?;
let end = templates.quote_string(to_date)?;
let date_to = format!(
"({})",
templates.add_interval(date_from.clone(), interval.clone())?
);
let date_to =
templates.subtract_interval(date_to, "1 millisecond".to_string())?;

templates.generated_time_series_select(
&from_date,
&to_date,
&interval,
&date_from,
&date_to,
&start,
&end,
&templates.interval_string(interval)?,
&self.granularity.granularity_offset(),
&minimal_time_unit,
)
@@ -73,7 +80,7 @@
&cte_name,
&min_date_name,
&max_date_name,
&interval,
&templates.interval_string(interval)?,
&minimal_time_unit,
)
}
@@ -99,7 +106,7 @@
)?
} else {
self.query_tools.base_tools().generate_custom_time_series(
self.granularity.granularity_interval().clone(),
self.granularity.granularity_interval().to_sql(),
vec![raw_from_date.clone(), raw_to_date.clone()],
self.granularity.origin_local_formatted(),
)?
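Note: building `date_to` moved out of the SQL templates and into the planner. It is now assembled from the dialect's template helpers as `(CAST(d AS TIMESTAMP) + <granularity interval>) - interval '1 millisecond'`. A self-contained sketch with Postgres-flavored stand-ins for those helpers:

```ts
// Self-contained sketch; these helpers are Postgres-flavored stand-ins for
// the template-driven ones in PlanSqlTemplates.
const quoteIdentifier = (id: string) => `"${id}"`;
const timeStampCast = (expr: string) => `${expr}::timestamp`;
const addInterval = (expr: string, i: string) => `${expr} + interval '${i}'`;
const subtractInterval = (expr: string, i: string) => `${expr} - interval '${i}'`;

const dateFrom = timeStampCast(quoteIdentifier('d'));
const dateTo = subtractInterval(`(${addInterval(dateFrom, '2 month')})`, '1 millisecond');
// dateFrom: "d"::timestamp
// dateTo:   ("d"::timestamp + interval '2 month') - interval '1 millisecond'
```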
@@ -244,12 +244,8 @@ impl PlanSqlTemplates {
}

pub fn cast_to_string(&self, expr: &str) -> Result<String, CubeError> {
self.render.render_template(
"expressions/cast_to_string",
context! {
expr => expr,
},
)
let string_type = self.render.render_template("types/string", context! {})?;
self.cast(expr, &string_type)
}

pub fn count_distinct(&self, expr: &str) -> Result<String, CubeError> {
@@ -430,13 +426,19 @@
.contains_template("operators/is_not_distinct_from")
}

pub fn supports_generated_time_series(&self) -> bool {
pub fn supports_generated_time_series(&self, predifined_granularity: bool) -> bool {
self.render
.contains_template("statements/generated_time_series_select")
&& (predifined_granularity
|| self
.render
.contains_template("tesseract/support_generated_series_for_custom_td"))
}
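Note: the capability check is now granularity-aware: generated time series SQL is used for custom granularities only when the dialect opts in via the `tesseract/support_generated_series_for_custom_td` template (set to `'YES'` for Postgres and Presto above). An equivalent sketch in TypeScript, with `hasTemplate` as a hypothetical stand-in for `contains_template`:

```ts
function supportsGeneratedTimeSeries(
  hasTemplate: (name: string) => boolean,
  predefinedGranularity: boolean,
): boolean {
  // Predefined granularities only need the base template; custom time
  // dimensions additionally require the explicit opt-in flag.
  return hasTemplate('statements/generated_time_series_select')
    && (predefinedGranularity
      || hasTemplate('tesseract/support_generated_series_for_custom_td'));
}
```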

pub fn generated_time_series_select(
&self,
date_from: &str,
date_to: &str,
start: &str,
end: &str,
granularity: &str,
@@ -445,7 +447,7 @@
) -> Result<String, CubeError> {
self.render.render_template(
"statements/generated_time_series_select",
context! { start => start, end => end, granularity => granularity, granularity_offset => granularity_offset, minimal_time_unit => minimal_time_unit },
context! {date_from => date_from, date_to => date_to, start => start, end => end, granularity => granularity, granularity_offset => granularity_offset, minimal_time_unit => minimal_time_unit },
)
}
pub fn generated_time_series_with_cte_range_source(