@@ -1413,18 +1413,59 @@ export class BaseQuery {
 
   overTimeSeriesQuery(baseQueryFn, cumulativeMeasure, fromRollup) {
     const dateJoinCondition = cumulativeMeasure.dateJoinCondition();
+    const uniqDateJoinCondition = R.uniqBy(djc => djc[0].dimension, dateJoinCondition);
     const cumulativeMeasures = [cumulativeMeasure];
     if (!this.timeDimensions.find(d => d.granularity)) {
-      const filters = this.segments.concat(this.filters).concat(this.dateFromStartToEndConditionSql(dateJoinCondition, fromRollup, false));
+      const filters = this.segments
+        .concat(this.filters)
+        .concat(this.dateFromStartToEndConditionSql(
+          // If the same time dimension is passed more than once, there is no need to build the
+          // same filter condition again and again. Different granularities don't play a role here,
+          // as rollingWindow.granularity is used for filtering.
+          uniqDateJoinCondition,
+          fromRollup,
+          false
+        ));
       return baseQueryFn(cumulativeMeasures, filters, false);
     }
-    const dateSeriesSql = this.timeDimensions.map(d => this.dateSeriesSql(d)).join(', ');
-    const filters = this.segments.concat(this.filters).concat(this.dateFromStartToEndConditionSql(dateJoinCondition, fromRollup, true));
+
+    // We can't build a meaningful query if several time dimensions with different date ranges
+    // are passed: it wouldn't be possible to join them together without losing some rows.
+    const rangedTimeDimensions = this.timeDimensions.filter(d => d.dateRange && d.granularity);
+    const uniqTimeDimensionWithRanges = R.uniqBy(d => d.dateRange, rangedTimeDimensions);
+    if (uniqTimeDimensionWithRanges.length > 1) {
+      throw new Error('Can\'t build query for time dimensions with different date ranges');
+    }
+
+    // We need to generate a time series table for the lowest granularity among all time dimensions.
+    let dateSeriesDimension;
+    const dateSeriesGranularity = this.timeDimensions.filter(d => d.granularity)
+      .reduce((acc, d) => {
+        const mg = this.minGranularity(acc, d.resolvedGranularity());
+        if (mg === d.resolvedGranularity()) {
+          dateSeriesDimension = d;
+        }
+        return mg;
+      }, undefined);
+
+    const dateSeriesSql = this.dateSeriesSql(dateSeriesDimension);
+
+    // If the same time dimension is passed more than once, there is no need to build the
+    // same filter condition again and again. Different granularities don't play a role here,
+    // as rollingWindow.granularity is used for filtering.
+    const filters = this.segments
+      .concat(this.filters)
+      .concat(this.dateFromStartToEndConditionSql(
+        uniqDateJoinCondition,
+        fromRollup,
+        true
+      ));
     const baseQuery = this.groupedUngroupedSelect(
       () => baseQueryFn(cumulativeMeasures, filters),
       cumulativeMeasure.shouldUngroupForCumulative(),
       !cumulativeMeasure.shouldUngroupForCumulative() && this.minGranularity(
-        cumulativeMeasure.windowGranularity(), this.timeDimensions.find(d => d.granularity).resolvedGranularity()
+        cumulativeMeasure.windowGranularity(),
+        dateSeriesGranularity
       ) || undefined
     );
     const baseQueryAlias = this.cubeAlias('base');
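The two new steps above, deduplicating the date join conditions with R.uniqBy and reducing over the time dimensions to find the lowest (finest) granularity, can be illustrated with a small standalone sketch. It is not part of the patch: the dimension objects and the minGranularity helper below are simplified, hypothetical stand-ins for the real BaseQuery internals.

const R = require('ramda');

// Hypothetical date join conditions: the same time dimension requested twice
// (e.g. with different granularities) plus a second time dimension.
const dateJoinCondition = [
  [{ dimension: 'orders.createdAt' }, () => 'day condition'],
  [{ dimension: 'orders.createdAt' }, () => 'month condition'],
  [{ dimension: 'orders.completedAt' }, () => 'day condition'],
];

// R.uniqBy keeps the first entry per key, so repeated dimensions collapse
// into a single filter condition.
const uniqDateJoinCondition = R.uniqBy(djc => djc[0].dimension, dateJoinCondition);
console.log(uniqDateJoinCondition.length); // 2

// Picking the finest granularity mirrors the reduce in the patch; this
// minGranularity is a simplified stand-in, not the BaseQuery implementation.
const order = ['hour', 'day', 'week', 'month', 'year'];
const minGranularity = (a, b) => {
  if (!a) return b;
  if (!b) return a;
  return order.indexOf(a) <= order.indexOf(b) ? a : b;
};

// Hypothetical time dimensions standing in for BaseTimeDimension instances.
const timeDimensions = [
  { granularity: 'month', resolvedGranularity: () => 'month' },
  { granularity: 'day', resolvedGranularity: () => 'day' },
];
let dateSeriesDimension;
const dateSeriesGranularity = timeDimensions.reduce((acc, d) => {
  const mg = minGranularity(acc, d.resolvedGranularity());
  if (mg === d.resolvedGranularity()) {
    dateSeriesDimension = d; // remember the dimension that owns the finest granularity
  }
  return mg;
}, undefined);
console.log(dateSeriesGranularity); // 'day'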
@@ -1444,7 +1485,7 @@ export class BaseQuery {
       dateSeriesSql,
       baseQuery,
       dateJoinConditionSql,
-      baseQueryAlias
+      baseQueryAlias,
     );
   }
 
@@ -1456,16 +1497,14 @@ export class BaseQuery {
   }
 
   overTimeSeriesForSelect(cumulativeMeasures) {
-    return this.dimensions.map(s => s.cumulativeSelectColumns()).concat(this.dateSeriesSelect()).concat(
-      cumulativeMeasures.map(s => s.cumulativeSelectColumns()),
-    ).filter(c => !!c)
+    return this.dimensions
+      .map(s => s.cumulativeSelectColumns())
+      .concat(this.timeDimensions.map(d => d.dateSeriesSelectColumn()))
+      .concat(cumulativeMeasures.map(s => s.cumulativeSelectColumns()))
+      .filter(c => !!c)
       .join(', ');
   }
 
-  dateSeriesSelect() {
-    return this.timeDimensions.map(d => d.dateSeriesSelectColumn());
-  }
-
   dateFromStartToEndConditionSql(dateJoinCondition, fromRollup, isFromStartToEnd) {
     return dateJoinCondition.map(
       // TODO these weird conversions to be strict typed for big query.
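As a rough illustration of the reworked overTimeSeriesForSelect, here is how the select list gets assembled. This sketch is not part of the patch: the column strings are hypothetical, and the null entry only stands in for a value that resolves to nothing and is dropped by the final filter.

// Hypothetical column strings assembled the same way as in overTimeSeriesForSelect.
const dimensionColumns = ['"base"."orders__status" "orders__status"'];
const dateSeriesColumns = [
  '"orders.created_at_series"."date_from" "orders__created_at_day"',
  null, // placeholder for an entry that resolves to nothing
];
const measureColumns = ['sum("base"."orders__count") "orders__count"'];

const select = dimensionColumns
  .concat(dateSeriesColumns)
  .concat(measureColumns)
  .filter(c => !!c) // drop falsy entries so they don't produce empty columns
  .join(', ');
console.log(select);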
@@ -1646,7 +1685,8 @@ export class BaseQuery {
 
   /**
    *
-   * @param {{sql: string, on: {cubeName: string, expression: Function}, joinType: 'LEFT' | 'INNER', alias: string}} customJoin
+   * @param {{sql: string, on: {cubeName: string, expression: Function}, joinType: 'LEFT' | 'INNER', alias: string}}
+   * customJoin
    * @returns {JoinItem}
    */
   customSubQueryJoin(customJoin) {
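For reference, a value matching the customJoin shape documented above might look as follows. The SQL, cube name, alias, and join expression are hypothetical and only illustrate the documented fields.

// Hypothetical object matching the documented customJoin shape; not part of the patch.
const customJoin = {
  sql: 'SELECT user_id, MAX(created_at) AS last_seen FROM events GROUP BY user_id',
  on: {
    cubeName: 'users',
    // Placeholder: the real callback contract is defined by the query builder.
    expression: () => '...',
  },
  joinType: 'LEFT',
  alias: 'last_seen_events',
};
// A query instance would pass this to customSubQueryJoin(customJoin) to obtain a JoinItem.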