diff --git a/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js b/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js index ce8cfd60371fe..e8adfe07ace2a 100644 --- a/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js +++ b/packages/cubejs-schema-compiler/src/adapter/BaseQuery.js @@ -20,7 +20,8 @@ import { QueryAlias, getEnv, localTimestampToUtc, - timeSeries as timeSeriesBase + timeSeries as timeSeriesBase, + timeSeriesFromCustomInterval } from '@cubejs-backend/shared'; import { UserError } from '../compiler/UserError'; @@ -720,11 +721,16 @@ export class BaseQuery { return this.paramAllocator.getParams(); } - // FIXME helper for native generator, maybe should be moved entire to rust + // FIXME helper for native generator, maybe should be moved entirely to rust generateTimeSeries(granularity, dateRange) { return timeSeriesBase(granularity, dateRange); } + // FIXME helper for native generator, maybe should be moved entirely to rust + generateCustomTimeSeries(granularityInterval, dateRange, origin) { + return timeSeriesFromCustomInterval(granularityInterval, dateRange, moment(origin), { timestampPrecision: 3 }); + } + get shouldReuseParams() { return false; } diff --git a/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts b/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts index fec0159c8cb99..5c00edacdf94d 100644 --- a/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts +++ b/packages/cubejs-schema-compiler/src/compiler/CubeSymbols.ts @@ -640,37 +640,32 @@ export class CubeSymbols { protected resolveSymbolsCallDeps(cubeName, sql) { try { - return this.resolveSymbolsCallDeps2(cubeName, sql); + const deps: any[] = []; + this.resolveSymbolsCall(sql, (name) => { + deps.push({ name }); + const resolvedSymbol = this.resolveSymbol( + cubeName, + name + ); + if (resolvedSymbol._objectWithResolvedProperties) { + return resolvedSymbol; + } + return ''; + }, { + depsResolveFn: (name, parent) => { + deps.push({ name, parent }); + return deps.length - 1; + }, + currResolveIndexFn: () => deps.length - 1, + contextSymbols: this.depsContextSymbols(), + + }); + return deps; } catch (e) { - console.log(e); return []; } } - protected resolveSymbolsCallDeps2(cubeName, sql) { - const deps: any[] = []; - this.resolveSymbolsCall(sql, (name) => { - deps.push({ name, undefined }); - const resolvedSymbol = this.resolveSymbol( - cubeName, - name - ); - if (resolvedSymbol._objectWithResolvedProperties) { - return resolvedSymbol; - } - return ''; - }, { - depsResolveFn: (name, parent) => { - deps.push({ name, parent }); - return deps.length - 1; - }, - currResolveIndexFn: () => deps.length - 1, - contextSymbols: this.depsContextSymbols(), - - }); - return deps; - } - protected depsContextSymbols() { return Object.assign({ filterParams: this.filtersProxyDep(), @@ -719,7 +714,6 @@ export class CubeSymbols { public resolveSymbol(cubeName, name) { const { sqlResolveFn, contextSymbols, collectJoinHints, depsResolveFn, currResolveIndexFn } = this.resolveSymbolsCallContext || {}; - if (name === 'USER_CONTEXT') { throw new Error('Support for USER_CONTEXT was removed, please migrate to SECURITY_CONTEXT.'); } @@ -758,6 +752,9 @@ export class CubeSymbols { const parentIndex = currResolveIndexFn(); cube = this.cubeDependenciesProxy(parentIndex, newCubeName); return cube; + } else if (this.symbols[cubeName] && this.symbols[cubeName][name] && this.symbols[cubeName][name].type === 'time') { + const parentIndex = currResolveIndexFn(); + return this.timeDimDependenciesProxy(parentIndex); } } 
return cube || (this.symbols[cubeName] && this.symbols[cubeName][name]); @@ -877,6 +874,10 @@ export class CubeSymbols { } if (cube[propertyName]) { depsResolveFn(propertyName, parentIndex); + if (cube[propertyName].type === 'time') { + return this.timeDimDependenciesProxy(parentIndex); + } + return ''; } if (self.symbols[propertyName]) { @@ -891,6 +892,25 @@ export class CubeSymbols { }); } + protected timeDimDependenciesProxy(parentIndex) { + const self = this; + const { depsResolveFn } = self.resolveSymbolsCallContext || {}; + return new Proxy({}, { + get: (v, propertyName) => { + if (propertyName === '_objectWithResolvedProperties') { + return true; + } + if (propertyName === 'toString') { + return () => ''; + } + if (typeof propertyName === 'string') { + depsResolveFn(propertyName, parentIndex); + } + return undefined; + } + }); + } + public isCurrentCube(name) { return CURRENT_CUBE_CONSTANTS.indexOf(name) >= 0; } diff --git a/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts b/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts index ebbcef962ba4d..0f90a3a0161c3 100644 --- a/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts +++ b/packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts @@ -968,6 +968,80 @@ SELECT 1 AS revenue, cast('2024-01-01' AS timestamp) as time UNION ALL } ])); + it('rolling window with two time dimension granularities one custom one regular', async () => runQueryTest({ + + measures: [ + 'visitors.countRollingWeekToDate' + ], + timeDimensions: [ + { + dimension: 'visitors.created_at', + granularity: 'three_days', + dateRange: ['2017-01-01', '2017-01-10'] + }, + { + dimension: 'visitors.created_at', + granularity: 'day', + dateRange: ['2017-01-01', '2017-01-10'] + } + ], + order: [{ + id: 'visitors.created_at' + }], + timezone: 'America/Los_Angeles' + }, [ + { + visitors__count_rolling_week_to_date: null, + visitors__created_at_day: '2017-01-01T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-01T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '1', + visitors__created_at_day: '2017-01-02T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-01T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '1', + visitors__created_at_day: '2017-01-03T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-01T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '2', + visitors__created_at_day: '2017-01-04T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-04T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '3', + visitors__created_at_day: '2017-01-05T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-04T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '5', + visitors__created_at_day: '2017-01-06T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-04T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '5', + visitors__created_at_day: '2017-01-07T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-07T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: '5', + visitors__created_at_day: '2017-01-08T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-07T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: null, + visitors__created_at_day: '2017-01-09T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-07T00:00:00.000Z', + }, + { + visitors__count_rolling_week_to_date: null, + 
visitors__created_at_day: '2017-01-10T00:00:00.000Z', + visitors__created_at_three_days: '2017-01-10T00:00:00.000Z', + } + ])); + it('two rolling windows with two time dimension granularities', async () => runQueryTest({ measures: [ 'visitors.countRollingUnbounded', diff --git a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs index 94ec487c28637..8823f8f507bfb 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/base_tools.rs @@ -1,7 +1,6 @@ use super::base_query_options::FilterItem; use super::filter_group::{FilterGroup, NativeFilterGroup}; use super::filter_params::{FilterParams, NativeFilterParams}; -use super::member_sql::{MemberSql, NativeMemberSql}; use super::security_context::{NativeSecurityContext, SecurityContext}; use super::sql_templates_render::{NativeSqlTemplatesRender, SqlTemplatesRender}; use super::sql_utils::{NativeSqlUtils, SqlUtils}; @@ -11,16 +10,9 @@ use cubenativeutils::wrappers::serializer::{ use cubenativeutils::wrappers::NativeContextHolder; use cubenativeutils::wrappers::NativeObjectHandle; use cubenativeutils::CubeError; -use serde::Deserialize; use std::any::Any; use std::rc::Rc; -#[derive(Deserialize, Debug)] -pub struct CallDep { - pub name: String, - pub parent: Option, -} - #[nativebridge::native_bridge] pub trait BaseTools { fn convert_tz(&self, field: String) -> Result; @@ -30,11 +22,6 @@ pub trait BaseTools { dimension: String, ) -> Result; fn sql_templates(&self) -> Result, CubeError>; - fn resolve_symbols_call_deps( - &self, - cube_name: String, - sql: Rc, - ) -> Result, CubeError>; fn security_context_for_rust(&self) -> Result, CubeError>; fn sql_utils_for_rust(&self) -> Result, CubeError>; fn filters_proxy_for_rust( @@ -52,6 +39,12 @@ pub trait BaseTools { granularity: String, date_range: Vec, ) -> Result>, CubeError>; + fn generate_custom_time_series( + &self, + granularity: String, + date_range: Vec, + origin: String, + ) -> Result>, CubeError>; fn get_allocated_params(&self) -> Result, CubeError>; fn all_cube_members(&self, path: String) -> Result, CubeError>; //===== TODO Move to templates @@ -59,4 +52,10 @@ pub trait BaseTools { fn hll_merge(&self, sql: String) -> Result; fn hll_cardinality_merge(&self, sql: String) -> Result; fn count_distinct_approx(&self, sql: String) -> Result; + fn date_bin( + &self, + interval: String, + source: String, + origin: String, + ) -> Result; } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/dimension_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/dimension_definition.rs index 7604de1551dc1..1d9c9bd2f5af6 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/dimension_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/dimension_definition.rs @@ -11,6 +11,12 @@ use serde::{Deserialize, Serialize}; use std::any::Any; use std::rc::Rc; +#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq, Hash)] +pub struct GranularityDefinition { + pub interval: String, + pub origin: Option, + pub offset: Option, +} #[derive(Serialize, Deserialize, Debug)] pub struct DimenstionDefinitionStatic { #[serde(rename = "type")] diff --git a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/evaluator.rs b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/evaluator.rs index 11e8cc14d09f4..64518acd4346d 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/evaluator.rs +++ 
b/rust/cubesqlplanner/cubesqlplanner/src/cube_bridge/evaluator.rs @@ -1,5 +1,7 @@ use super::cube_definition::{CubeDefinition, NativeCubeDefinition}; -use super::dimension_definition::{DimensionDefinition, NativeDimensionDefinition}; +use super::dimension_definition::{ + DimensionDefinition, GranularityDefinition, NativeDimensionDefinition, +}; use super::measure_definition::{MeasureDefinition, NativeMeasureDefinition}; use super::member_sql::{MemberSql, NativeMemberSql}; use super::segment_definition::{NativeSegmentDefinition, SegmentDefinition}; @@ -48,4 +50,5 @@ pub trait CubeEvaluator { cube_name: String, sql: Rc, ) -> Result, CubeError>; + fn resolve_granularity(&self, path: Vec) -> Result; } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs b/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs index 32672739de1ec..3b98f2d4f495d 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs @@ -2,6 +2,7 @@ use super::{Schema, SchemaColumn}; use crate::planner::{ query_tools::QueryTools, sql_templates::{PlanSqlTemplates, TemplateProjectionColumn}, + Granularity, }; use cubenativeutils::CubeError; use std::rc::Rc; @@ -11,7 +12,7 @@ pub struct TimeSeries { #[allow(dead_code)] time_dimension_name: String, date_range: TimeSeriesDateRange, - granularity: String, + granularity: Granularity, schema: Rc, } @@ -25,7 +26,7 @@ impl TimeSeries { query_tools: Rc, time_dimension_name: String, date_range: TimeSeriesDateRange, - granularity: String, + granularity: Granularity, ) -> Self { let column = SchemaColumn::new(format!("date_from"), Some(time_dimension_name.clone())); let schema = Rc::new(Schema::new(vec![column])); @@ -88,7 +89,11 @@ impl TimeSeries { (format!("({})", from), format!("({})", to)) } }; - templates.generated_time_series_select(&from_date, &to_date, &self.granularity) + templates.generated_time_series_select( + &from_date, + &to_date, + &self.granularity.granularity_interval(), + ) } else { let (from_date, to_date) = match &self.date_range { TimeSeriesDateRange::Filter(from_date, to_date) => { @@ -100,10 +105,18 @@ impl TimeSeries { )); } }; - let series = self.query_tools.base_tools().generate_time_series( - self.granularity.clone(), - vec![from_date.clone(), to_date.clone()], - )?; + let series = if self.granularity.is_predefined_granularity() { + self.query_tools.base_tools().generate_time_series( + self.granularity.granularity().clone(), + vec![from_date.clone(), to_date.clone()], + )? + } else { + self.query_tools.base_tools().generate_custom_time_series( + self.granularity.granularity_interval().clone(), + vec![from_date.clone(), to_date.clone()], + self.granularity.origin_local_formatted(), + )? 
+ }; templates.time_series_select(from_date.clone(), to_date.clone(), series) } } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs index 87bb6740aa00d..f02185f732e1d 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_dimension.rs @@ -132,6 +132,10 @@ impl BaseDimension { self.member_evaluator.clone() } + pub fn definition(&self) -> Option> { + self.definition.clone() + } + pub fn sql_call(&self) -> Result, CubeError> { match self.member_evaluator.as_ref() { MemberSymbol::Dimension(d) => { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs index 9eda676d80f25..d6cc7f8795723 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs @@ -1,6 +1,9 @@ use super::query_tools::QueryTools; use super::sql_evaluator::{MemberSymbol, TimeDimensionSymbol}; use super::BaseDimension; +use super::Granularity; +use super::GranularityHelper; +use super::QueryDateTime; use super::{evaluate_with_context, BaseMember, BaseMemberHelper, VisitorContext}; use crate::planner::sql_templates::PlanSqlTemplates; use cubenativeutils::CubeError; @@ -11,6 +14,7 @@ pub struct BaseTimeDimension { member_evaluator: Rc, query_tools: Rc, granularity: Option, + granularity_obj: Option, date_range: Option>, default_alias: String, alias_suffix: String, @@ -37,7 +41,6 @@ impl BaseMember for BaseTimeDimension { fn member_evaluator(&self) -> Rc { self.member_evaluator.clone() } - fn full_name(&self) -> String { self.member_evaluator.full_name() } @@ -71,6 +74,7 @@ impl BaseTimeDimension { } else { "day".to_string() }; + let dimension = BaseDimension::try_new_required(member_evaluator.clone(), query_tools.clone())?; let default_alias = BaseMemberHelper::default_alias( @@ -79,14 +83,25 @@ impl BaseTimeDimension { &Some(alias_suffix.clone()), query_tools.clone(), )?; + + let granularity_obj = GranularityHelper::make_granularity_obj( + query_tools.cube_evaluator().clone(), + query_tools.timezone().clone(), + &dimension.cube_name(), + &dimension.name(), + granularity.clone(), + )?; + let member_evaluator = Rc::new(MemberSymbol::TimeDimension(TimeDimensionSymbol::new( member_evaluator.clone(), granularity.clone(), + granularity_obj.clone(), ))); Ok(Rc::new(Self { dimension, query_tools, granularity, + granularity_obj, date_range, alias_suffix, default_alias, @@ -94,26 +109,51 @@ impl BaseTimeDimension { })) } - pub fn change_granularity(&self, new_granularity: Option) -> Rc { + pub fn change_granularity( + &self, + new_granularity: Option, + ) -> Result, CubeError> { + let new_granularity_obj = GranularityHelper::make_granularity_obj( + self.query_tools.cube_evaluator().clone(), + self.query_tools.timezone(), + &self.dimension.name(), + &self.dimension.cube_name(), + new_granularity.clone(), + )?; let member_evaluator = Rc::new(MemberSymbol::TimeDimension(TimeDimensionSymbol::new( self.dimension.member_evaluator(), new_granularity.clone(), + new_granularity_obj.clone(), ))); - Rc::new(Self { + Ok(Rc::new(Self { dimension: self.dimension.clone(), + granularity_obj: new_granularity_obj, query_tools: self.query_tools.clone(), granularity: new_granularity, date_range: self.date_range.clone(), alias_suffix: self.alias_suffix.clone(), default_alias: 
self.default_alias.clone(), member_evaluator, - }) + })) } pub fn get_granularity(&self) -> Option { self.granularity.clone() } + pub fn get_granularity_obj(&self) -> &Option { + &self.granularity_obj + } + + pub fn resolve_granularity(&self) -> Result, CubeError> { + let res = if let Some(granularity_obj) = &self.granularity_obj { + Some(granularity_obj.resolve_granularity()?) + } else { + None + }; + Ok(res) + } + pub fn has_granularity(&self) -> bool { self.granularity.is_some() } @@ -122,6 +162,35 @@ impl BaseTimeDimension { self.date_range.clone() } + pub fn get_range_for_time_series(&self) -> Result, CubeError> { + let res = if let Some(date_range) = &self.date_range { + if date_range.len() != 2 { + return Err(CubeError::user(format!( + "Invalid date range: {:?}", + date_range + ))); + } else { + if let Some(granularity_obj) = &self.granularity_obj { + if !granularity_obj.is_predefined_granularity() { + let tz = self.query_tools.timezone(); + let start = QueryDateTime::from_date_str(tz, &date_range[0])?; + let start = granularity_obj.align_date_to_origin(start)?; + let end = QueryDateTime::from_date_str(tz, &date_range[1])?; + + Some((start.to_string(), end.to_string())) + } else { + Some((date_range[0].clone(), date_range[1].clone())) + } + } else { + Some((date_range[0].clone(), date_range[1].clone())) + } + } + } else { + None + }; + Ok(res) + } + pub fn base_dimension(&self) -> Rc { self.dimension.clone() } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/mod.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/mod.rs index a7001ce2def47..222cf86d1806b 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/mod.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/mod.rs @@ -6,7 +6,8 @@ pub mod base_member; pub mod base_query; pub mod base_time_dimension; pub mod filter; -pub mod granularity_helper; +pub mod time_dimension; + pub mod params_allocator; pub mod planners; pub mod query_properties; @@ -23,7 +24,7 @@ pub use base_measure::BaseMeasure; pub use base_member::{BaseMember, BaseMemberHelper}; pub use base_query::BaseQuery; pub use base_time_dimension::BaseTimeDimension; -pub use granularity_helper::GranularityHelper; pub use params_allocator::ParamsAllocator; pub use query_properties::{FullKeyAggregateMeasures, OrderByItem, QueryProperties}; +pub use time_dimension::*; pub use visitor_context::{evaluate_sql_call_with_context, evaluate_with_context, VisitorContext}; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs index 0378b090b7106..d4d8e832d4eab 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/applied_state.rs @@ -2,6 +2,7 @@ use crate::plan::{FilterGroup, FilterItem}; use crate::planner::filter::FilterOperator; use crate::planner::planners::multi_stage::MultiStageTimeShift; use crate::planner::{BaseDimension, BaseMember, BaseTimeDimension}; +use cubenativeutils::CubeError; use itertools::Itertools; use std::cmp::PartialEq; use std::collections::HashMap; @@ -96,14 +97,15 @@ impl MultiStageAppliedState { &mut self, time_dimension: &Rc, new_granularity: Option, - ) { + ) -> Result<(), CubeError> { if let Some(time_dimension) = self .time_dimensions .iter_mut() .find(|dim| dim.full_name() == time_dimension.full_name()) { - *time_dimension = time_dimension.change_granularity(new_granularity); + 
*time_dimension = time_dimension.change_granularity(new_granularity)?; } + Ok(()) } pub fn remove_filter_for_member(&mut self, member_name: &String) { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/member_query_planner.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/member_query_planner.rs index 7bc51ca8c348f..ed99f5a517da0 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/member_query_planner.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/member_query_planner.rs @@ -12,6 +12,7 @@ use crate::planner::planners::{ use crate::planner::query_tools::QueryTools; use crate::planner::sql_evaluator::sql_nodes::SqlNodesFactory; use crate::planner::sql_evaluator::ReferencesBuilder; +use crate::planner::sql_templates::PlanSqlTemplates; use crate::planner::QueryProperties; use crate::planner::{BaseDimension, BaseMeasure, BaseMember, BaseMemberHelper, BaseTimeDimension}; use cubenativeutils::CubeError; @@ -113,30 +114,43 @@ impl MultiStageMemberQueryPlanner { time_series_description: Rc, ) -> Result, CubeError> { let time_dimension = time_series_description.time_dimension.clone(); - let granularity = time_dimension.get_granularity().map_or_else( - || { - Err(CubeError::user( - "Time dimension granularity is required for rolling window".to_string(), - )) - }, - |g| Ok(g.clone()), - )?; - let ts_date_range = if let Some(date_range) = time_dimension.get_date_range() { - TimeSeriesDateRange::Filter(date_range[0].clone(), date_range[1].clone()) + let granularity_obj = if let Some(granularity_obj) = time_dimension.get_granularity_obj() { + granularity_obj.clone() + } else { + return Err(CubeError::user( + "Time dimension granularity is required for rolling window".to_string(), + )); + }; + + let templates = PlanSqlTemplates::new(self.query_tools.templates_render()); + + let ts_date_range = if templates.supports_generated_time_series() { + if let Some(date_range) = time_dimension.get_range_for_time_series()? 
{ + TimeSeriesDateRange::Filter(date_range.0.clone(), date_range.1.clone()) + } else { + if let Some(date_range_cte) = &time_series_description.date_range_cte { + TimeSeriesDateRange::Generated(date_range_cte.clone()) + } else { + return Err(CubeError::internal( + "Date range cte is required for time series without date range".to_string(), + )); + } + } } else { - if let Some(date_range_cte) = &time_series_description.date_range_cte { - TimeSeriesDateRange::Generated(date_range_cte.clone()) + if let Some(date_range) = time_dimension.get_date_range() { + TimeSeriesDateRange::Filter(date_range[0].clone(), date_range[1].clone()) } else { return Err(CubeError::internal( - "Date range cte is required for time series without date range".to_string(), + "Date range is required for time series without date range".to_string(), )); } }; + let time_seira = TimeSeries::new( self.query_tools.clone(), time_dimension.full_name(), ts_date_range, - granularity, + granularity_obj, ); let query_plan = Rc::new(QueryPlan::TimeSeries(Rc::new(time_seira))); Ok(Rc::new(Cte::new(query_plan, format!("time_series")))) diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs index 946a7ec8ea4b9..360b3ae3e64ba 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/planners/multi_stage/rolling_window_planner.rs @@ -315,7 +315,7 @@ impl RollingWindowPlanner { GranularityHelper::min_granularity(&trailing_granularity, &leading_granularity)?; let result_granularity = GranularityHelper::min_granularity( &window_granularity, - &time_dimension.get_granularity(), + &time_dimension.resolve_granularity()?, )?; let templates = PlanSqlTemplates::new(self.query_tools.templates_render()); @@ -325,12 +325,19 @@ impl RollingWindowPlanner { self.make_time_seires_from_to_dates_suqueries_conditions("time_series")?; new_state.replace_range_to_subquery_in_date_filter(&time_dimension_base_name, from, to); } else if time_dimension.get_date_range().is_some() && result_granularity.is_some() { - let granularity = time_dimension.get_granularity().unwrap(); + let granularity = time_dimension.get_granularity_obj().clone().unwrap(); let date_range = time_dimension.get_date_range().unwrap(); - let series = self - .query_tools - .base_tools() - .generate_time_series(granularity, date_range.clone())?; + let series = if granularity.is_predefined_granularity() { + self.query_tools + .base_tools() + .generate_time_series(granularity.granularity().clone(), date_range.clone())? + } else { + self.query_tools.base_tools().generate_custom_time_series( + granularity.granularity_interval().clone(), + date_range.clone(), + granularity.origin_local_formatted(), + )? + }; if !series.is_empty() { let new_from_date = series.first().unwrap()[0].clone(); let new_to_date = series.last().unwrap()[1].clone(); @@ -341,7 +348,7 @@ impl RollingWindowPlanner { ); } } - let new_time_dimension = time_dimension.change_granularity(result_granularity.clone()); + let new_time_dimension = time_dimension.change_granularity(result_granularity.clone())?; //We keep only one time_dimension in the leaf query because, even if time_dimension values have different granularity, in the leaf query we need to group by the lowest granularity. 
new_state.set_time_dimensions(vec![new_time_dimension]); diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/query_tools.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/query_tools.rs index eec68a482bc51..2bd617344d02e 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/query_tools.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/query_tools.rs @@ -123,7 +123,7 @@ pub struct QueryTools { params_allocator: Rc>, evaluator_compiler: Rc>, cached_data: RefCell, - timezone: Option, + timezone: Tz, } impl QueryTools { @@ -134,16 +134,17 @@ impl QueryTools { timezone_name: Option, ) -> Result, CubeError> { let templates_render = base_tools.sql_templates()?; - let evaluator_compiler = Rc::new(RefCell::new(Compiler::new(cube_evaluator.clone()))); let timezone = if let Some(timezone) = timezone_name { - Some( - timezone - .parse::() - .map_err(|_| CubeError::user(format!("Incorrect timezone {}", timezone)))?, - ) + timezone + .parse::() + .map_err(|_| CubeError::user(format!("Incorrect timezone {}", timezone)))? } else { - None + Tz::UTC }; + let evaluator_compiler = Rc::new(RefCell::new(Compiler::new( + cube_evaluator.clone(), + timezone.clone(), + ))); let sql_templates = PlanSqlTemplates::new(templates_render.clone()); Ok(Rc::new(Self { cube_evaluator, @@ -169,8 +170,8 @@ impl QueryTools { &self.join_graph } - pub fn timezone(&self) -> &Option { - &self.timezone + pub fn timezone(&self) -> Tz { + self.timezone } pub fn cached_data(&self) -> Ref<'_, QueryToolsCachedData> { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/compiler.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/compiler.rs index fe20a91af738a..d0ef37173c45e 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/compiler.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/compiler.rs @@ -8,19 +8,22 @@ use super::{ use crate::cube_bridge::evaluator::CubeEvaluator; use crate::cube_bridge::join_hints::JoinHintItem; use crate::cube_bridge::member_sql::MemberSql; +use chrono_tz::Tz; use cubenativeutils::CubeError; use std::collections::HashMap; use std::rc::Rc; pub struct Compiler { cube_evaluator: Rc, + timezone: Tz, /* (type, name) */ members: HashMap<(String, String), Rc>, } impl Compiler { - pub fn new(cube_evaluator: Rc) -> Self { + pub fn new(cube_evaluator: Rc, timezone: Tz) -> Self { Self { cube_evaluator, + timezone, members: HashMap::new(), } } @@ -94,7 +97,8 @@ impl Compiler { cube_name: &String, member_sql: Rc, ) -> Result, CubeError> { - let dep_builder = DependenciesBuilder::new(self, self.cube_evaluator.clone()); + let dep_builder = + DependenciesBuilder::new(self, self.cube_evaluator.clone(), self.timezone.clone()); let deps = dep_builder.build(cube_name.clone(), member_sql.clone())?; let sql_call = SqlCall::new(member_sql, deps); Ok(Rc::new(sql_call)) diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/dependecy.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/dependecy.rs index 59512ba64fda7..bd68ffdb7e58f 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/dependecy.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/dependecy.rs @@ -2,6 +2,9 @@ use super::symbols::MemberSymbol; use super::Compiler; use crate::cube_bridge::evaluator::{CallDep, CubeEvaluator}; use crate::cube_bridge::member_sql::MemberSql; +use crate::planner::sql_evaluator::TimeDimensionSymbol; +use crate::planner::GranularityHelper; +use chrono_tz::Tz; use 
cubenativeutils::CubeError; use std::collections::HashMap; use std::rc::Rc; @@ -10,6 +13,13 @@ use std::rc::Rc; pub enum CubeDepProperty { CubeDependency(CubeDependency), SymbolDependency(Rc), + TimeDimensionDependency(TimeDimensionDependency), +} + +#[derive(Clone, Debug)] +pub struct TimeDimensionDependency { + pub base_symbol: Rc, + pub granularities: HashMap>, } #[derive(Clone, Debug)] @@ -48,19 +58,26 @@ pub enum ContextSymbolDep { pub enum Dependency { SymbolDependency(Rc), CubeDependency(CubeDependency), + TimeDimensionDependency(TimeDimensionDependency), ContextDependency(ContextSymbolDep), } pub struct DependenciesBuilder<'a> { compiler: &'a mut Compiler, cube_evaluator: Rc, + timezone: Tz, } impl<'a> DependenciesBuilder<'a> { - pub fn new(compiler: &'a mut Compiler, cube_evaluator: Rc) -> Self { + pub fn new( + compiler: &'a mut Compiler, + cube_evaluator: Rc, + timezone: Tz, + ) -> Self { DependenciesBuilder { compiler, cube_evaluator, + timezone, } } @@ -91,9 +108,14 @@ impl<'a> DependenciesBuilder<'a> { result.push(Dependency::SymbolDependency( self.build_evaluator(&cube_name, &dep.name)?, )); - } else { + } else if self.check_cube_exists(&dep.name)? { let dep = self.build_cube_dependency(&cube_name, i, &call_deps, &childs)?; result.push(Dependency::CubeDependency(dep)); + } else { + //Assuming this is a time dimension with an explicit granularity + let dep = + self.build_time_dimension_dependency(&cube_name, i, &call_deps, &childs)?; + result.push(Dependency::TimeDimensionDependency(dep)); } } @@ -128,6 +150,54 @@ impl<'a> DependenciesBuilder<'a> { Ok(childs_tree) } + fn check_cube_exists(&self, cube_name: &String) -> Result { + if self.is_current_cube(cube_name) { + Ok(true) + } else { + self.cube_evaluator.cube_exists(cube_name.clone()) + } + } + + fn build_time_dimension_dependency( + &mut self, + cube_name: &String, + dep_index: usize, + call_deps: &Vec, + call_childs: &Vec>, + ) -> Result { + let dep = &call_deps[dep_index]; + let base_evaluator = self.build_evaluator(cube_name, &dep.name)?; + let mut granularities = HashMap::new(); + for child_ind in call_childs[dep_index].iter() { + let granularity = &call_deps[*child_ind].name; + if let Some(granularity_obj) = GranularityHelper::make_granularity_obj( + self.cube_evaluator.clone(), + self.timezone.clone(), + cube_name, + &dep.name, + Some(granularity.clone()), + )? { + let member_evaluator = + Rc::new(MemberSymbol::TimeDimension(TimeDimensionSymbol::new( + base_evaluator.clone(), + Some(granularity.clone()), + Some(granularity_obj), + ))); + granularities.insert(granularity.clone(), member_evaluator); + } else { + return Err(CubeError::user(format!( + "Undefined granularity {} for time dimension {}", + granularity, dep.name + ))); + } + } + let result = TimeDimensionDependency { + base_symbol: base_evaluator, + granularities, + }; + Ok(result) + } + fn build_cube_dependency( &mut self, cube_name: &String, @@ -162,13 +232,21 @@ impl<'a> DependenciesBuilder<'a> { } else { let child_dep = if call_childs[*child_ind].is_empty() { CubeDepProperty::SymbolDependency(self.build_evaluator(&new_cube_name, &name)?) - } else { + } else if self.check_cube_exists(name)? { CubeDepProperty::CubeDependency(self.build_cube_dependency( &new_cube_name, *child_ind, call_deps, call_childs, )?) 
+ } else { + let dep = self.build_time_dimension_dependency( + &new_cube_name, + *child_ind, + call_deps, + call_childs, + )?; + CubeDepProperty::TimeDimensionDependency(dep) }; properties.insert(name.clone(), child_dep); } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs index 46ae9fb850c9d..7326866000db7 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_call.rs @@ -1,4 +1,6 @@ -use super::dependecy::{ContextSymbolDep, CubeDepProperty, CubeDependency, Dependency}; +use super::dependecy::{ + ContextSymbolDep, CubeDepProperty, CubeDependency, Dependency, TimeDimensionDependency, +}; use super::sql_nodes::SqlNode; use super::{symbols::MemberSymbol, SqlEvaluatorVisitor}; use crate::cube_bridge::base_query_options::FilterItem as NativeFilterItem; @@ -61,6 +63,7 @@ impl SqlCall { Dependency::CubeDependency(cube_dep) => { self.extract_symbol_deps_from_cube_dep(cube_dep, result) } + Dependency::TimeDimensionDependency(dep) => result.push(dep.base_symbol.clone()), Dependency::ContextDependency(_) => {} } } @@ -70,6 +73,9 @@ impl SqlCall { for dep in self.deps.iter() { match dep { Dependency::SymbolDependency(dep) => result.push((dep.clone(), vec![])), + Dependency::TimeDimensionDependency(dep) => { + result.push((dep.base_symbol.clone(), vec![])) + } Dependency::CubeDependency(cube_dep) => { self.extract_symbol_deps_with_path_from_cube_dep(cube_dep, vec![], result) } @@ -88,6 +94,7 @@ impl SqlCall { for dep in self.deps.iter() { match dep { Dependency::SymbolDependency(_) => {} + Dependency::TimeDimensionDependency(_) => {} Dependency::CubeDependency(cube_dep) => { self.extract_cube_deps_from_cube_dep(cube_dep, result) } @@ -104,6 +111,9 @@ impl SqlCall { for (_, v) in cube_dep.properties.iter() { match v { CubeDepProperty::SymbolDependency(dep) => result.push(dep.clone()), + CubeDepProperty::TimeDimensionDependency(dep) => { + result.push(dep.base_symbol.clone()) + } CubeDepProperty::CubeDependency(cube_dep) => { self.extract_symbol_deps_from_cube_dep(cube_dep, result) } @@ -127,6 +137,9 @@ impl SqlCall { for (_, v) in cube_dep.properties.iter() { match v { CubeDepProperty::SymbolDependency(dep) => result.push((dep.clone(), path.clone())), + CubeDepProperty::TimeDimensionDependency(dep) => { + result.push((dep.base_symbol.clone(), path.clone())) + } CubeDepProperty::CubeDependency(cube_dep) => { self.extract_symbol_deps_with_path_from_cube_dep(cube_dep, path.clone(), result) } @@ -161,6 +174,9 @@ impl SqlCall { node_processor.clone(), templates, )?)), + Dependency::TimeDimensionDependency(dep) => { + self.evaluate_time_dimesion_dep(dep, visitor, node_processor.clone(), templates) + } Dependency::CubeDependency(dep) => self.evaluate_cube_dep( dep, visitor, @@ -195,6 +211,14 @@ impl SqlCall { CubeDepProperty::SymbolDependency(dep) => { MemberSqlArg::String(visitor.apply(&dep, node_processor.clone(), templates)?) 
} + + CubeDepProperty::TimeDimensionDependency(dep) => self.evaluate_time_dimesion_dep( + dep, + visitor, + node_processor.clone(), + templates, + )?, + CubeDepProperty::CubeDependency(dep) => self.evaluate_cube_dep( &dep, visitor, @@ -208,6 +232,21 @@ impl SqlCall { Ok(MemberSqlArg::Struct(res)) } + fn evaluate_time_dimesion_dep( + &self, + dep: &TimeDimensionDependency, + visitor: &SqlEvaluatorVisitor, + node_processor: Rc, + templates: &PlanSqlTemplates, + ) -> Result { + let mut res = MemberSqlStruct::default(); + for (k, v) in dep.granularities.iter() { + let arg = MemberSqlArg::String(visitor.apply(&v, node_processor.clone(), templates)?); + res.properties.insert(k.clone(), arg); + } + Ok(MemberSqlArg::Struct(res)) + } + pub fn apply_context_symbol( &self, visitor: &SqlEvaluatorVisitor, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_dimension.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_dimension.rs index 3d878c8400e66..284127a82c353 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_dimension.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/sql_nodes/time_dimension.rs @@ -43,7 +43,7 @@ impl SqlNode for TimeDimensionNode { )?; match node.as_ref() { MemberSymbol::TimeDimension(ev) => { - let res = if let Some(granularity) = ev.granularity() { + let res = if let Some(granularity_obj) = ev.granularity_obj() { let converted_tz = if self .dimensions_with_ignored_timezone .contains(&ev.full_name()) @@ -52,9 +52,31 @@ impl SqlNode for TimeDimensionNode { } else { query_tools.base_tools().convert_tz(input_sql)? }; - query_tools - .base_tools() - .time_grouped_column(granularity.clone(), converted_tz)? + + let res = if granularity_obj.is_natural_aligned() { + if let Some(granularity_offset) = granularity_obj.granularity_offset() { + let dt = + templates.sub_interval(converted_tz, granularity_offset.clone())?; + let dt = query_tools.base_tools().time_grouped_column( + granularity_obj.granularity_from_interval()?, + dt, + )?; + templates.add_interval(dt, granularity_offset.clone())? + } else { + query_tools.base_tools().time_grouped_column( + granularity_obj.granularity().clone(), + converted_tz, + )? + } + } else { + query_tools.base_tools().date_bin( + granularity_obj.granularity_interval().clone(), + converted_tz, + granularity_obj.origin_local_formatted(), + )? 
+ }; + + res } else { input_sql }; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs index 82a14a8c84d2a..989ffe4795e53 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_evaluator/symbols/time_dimension_symbol.rs @@ -1,14 +1,20 @@ use super::MemberSymbol; +use crate::planner::time_dimension::Granularity; use std::rc::Rc; pub struct TimeDimensionSymbol { base_symbol: Rc, full_name: String, granularity: Option, + granularity_obj: Option, } impl TimeDimensionSymbol { - pub fn new(base_symbol: Rc, granularity: Option) -> Self { + pub fn new( + base_symbol: Rc, + granularity: Option, + granularity_obj: Option, + ) -> Self { let name_suffix = if let Some(granularity) = &granularity { granularity.clone() } else { @@ -18,6 +24,7 @@ impl TimeDimensionSymbol { Self { base_symbol, granularity, + granularity_obj, full_name, } } @@ -30,6 +37,10 @@ impl TimeDimensionSymbol { &self.granularity } + pub fn granularity_obj(&self) -> &Option { + &self.granularity_obj + } + pub fn full_name(&self) -> String { self.full_name.clone() } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs index 3ee5f623478a3..df0c6cdf2cb9d 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/sql_templates/plan.rs @@ -55,6 +55,27 @@ impl PlanSqlTemplates { ) } + //FIXME duplicated with filter templates + pub fn add_interval(&self, date: String, interval: String) -> Result { + self.render.render_template( + &"expressions/add_interval", + context! { + date => date, + interval => interval + }, + ) + } + + pub fn sub_interval(&self, date: String, interval: String) -> Result { + self.render.render_template( + &"expressions/sub_interval", + context! { + date => date, + interval => interval + }, + ) + } + pub fn quote_string(&self, string: &str) -> Result { Ok(format!("'{}'", string)) } @@ -338,7 +359,6 @@ impl PlanSqlTemplates { end: &str, granularity: &str, ) -> Result { - let granularity = format!("1 {}", granularity); self.render.render_template( "statements/generated_time_series_select", context! 
{ start => start, end => end, granularity => granularity }, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/date_time.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/date_time.rs new file mode 100644 index 0000000000000..ac5fca61fb285 --- /dev/null +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/date_time.rs @@ -0,0 +1,389 @@ +use super::{QueryDateTimeHelper, SqlInterval}; +use chrono::prelude::*; +use chrono::Duration; +use chrono_tz::Tz; +use cubenativeutils::CubeError; +use std::cmp::Ord; + +#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Debug)] +pub struct QueryDateTime { + date_time: DateTime, +} + +impl ToString for QueryDateTime { + fn to_string(&self) -> String { + self.default_format() + } +} + +impl QueryDateTime { + pub fn new(date_time: DateTime) -> QueryDateTime { + QueryDateTime { date_time } + } + pub fn now(tz: Tz) -> Result { + let local = Local::now().naive_local(); + Self::from_local_date_time(tz, local) + } + pub fn from_date_str(tz: Tz, date: &str) -> Result { + let local_dt = QueryDateTimeHelper::parse_native_date_time(date)?; + Self::from_local_date_time(tz, local_dt) + } + + pub fn from_local_date_time(tz: Tz, date: NaiveDateTime) -> Result { + let date_time = + QueryDateTimeHelper::resolve_local_result(&tz, &date, tz.from_local_datetime(&date))?; + Ok(Self { date_time }) + } + + pub fn start_of_year(&self) -> Self { + let tz = self.date_time.timezone(); + Self::new( + tz.with_ymd_and_hms(self.date_time.year(), 1, 1, 0, 0, 0) + .unwrap(), + ) + } + + pub fn date_time(&self) -> DateTime { + self.date_time + } + + pub fn naive_local(&self) -> NaiveDateTime { + self.date_time.naive_local() + } + + pub fn naive_utc(&self) -> NaiveDateTime { + self.date_time.naive_utc() + } + + pub fn format(&self, format: &str) -> String { + self.date_time.format(format).to_string() + } + + pub fn default_format(&self) -> String { + self.date_time.format("%Y-%m-%dT%H:%M:%S%.3f").to_string() + } + + pub fn add_interval(&self, interval: &SqlInterval) -> Result { + let date = self.naive_local().date(); + + // Step 1: add years and months with fallback logic + let mut year = date.year() + interval.year; + let mut month = date.month() as i32 + interval.month; + + while month > 12 { + year += 1; + month -= 12; + } + while month < 1 { + year -= 1; + month += 12; + } + + let day = date.day(); + // Adjust for overflowed day in shorter months (e.g. 
Feb 30 → Feb 28) + let adjusted_date = NaiveDate::from_ymd_opt(year, month as u32, day) + .or_else(|| { + (1..=31) + .rev() + .find_map(|d| NaiveDate::from_ymd_opt(year, month as u32, d)) + }) + .ok_or_else(|| { + CubeError::internal(format!( + "Failed to compute valid date while adding interval {:?} to date {}", + interval, self.date_time + )) + })?; + + // Step 2: Add weeks and days + let adjusted_date = + adjusted_date + Duration::days(interval.week as i64 * 7 + interval.day as i64); + + // Step 3: Recombine with original time + let time = self.naive_local().time(); + let mut naive = NaiveDateTime::new(adjusted_date, time); + + // Step 4: Add time-based parts + naive = naive + + Duration::hours(interval.hour as i64) + + Duration::minutes(interval.minute as i64) + + Duration::seconds(interval.second as i64); + + Self::from_local_date_time(self.date_time.timezone(), naive) + } + + pub fn sub_interval(&self, interval: &SqlInterval) -> Result { + self.add_interval(&interval.inverse()) + } + + pub fn granularity(&self) -> String { + let time = self.date_time.time(); + + let weekday = self.date_time.weekday(); + let is_zero_time = |t: chrono::NaiveTime| { + t.hour() == 0 && t.minute() == 0 && t.second() == 0 && t.nanosecond() == 0 + }; + + if self.date_time.month() == 1 && self.date_time.day() == 1 && is_zero_time(time) { + "year".to_string() + } else if self.date_time.day() == 1 && is_zero_time(time) { + "month".to_string() + } else if weekday == Weekday::Mon && is_zero_time(time) { + "week".to_string() + } else if is_zero_time(time) { + "day".to_string() + } else if time.minute() == 0 && time.second() == 0 && time.nanosecond() == 0 { + "hour".to_string() + } else if time.second() == 0 && time.nanosecond() == 0 { + "minute".to_string() + } else if time.nanosecond() == 0 { + "second".to_string() + } else { + "second".to_string() + } + } + + pub fn align_to_origin( + &self, + origin: &Self, + interval: &SqlInterval, + ) -> Result { + let mut aligned = self.clone(); + let mut offset = origin.clone(); + + if self < origin { + while &offset > self { + offset = offset.sub_interval(interval)?; + } + aligned = offset; + } else { + while &offset < self { + aligned = offset.clone(); + offset = offset.add_interval(interval)?; + } + + if &offset == self { + aligned = offset; + } + } + + Ok(aligned) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_date_time() { + let tz = "Etc/GMT-3".parse::().unwrap(); + let parsed = QueryDateTime::from_date_str(tz, "2021-01-01").unwrap(); + assert_eq!( + parsed.naive_utc(), + NaiveDate::from_ymd_opt(2020, 12, 31) + .unwrap() + .and_hms_opt(21, 0, 0) + .unwrap() + ); + + let tz = "Etc/GMT-3".parse::().unwrap(); + let parsed = QueryDateTime::from_date_str(tz, "2021-01-01 03:15:20").unwrap(); + + assert_eq!( + parsed.naive_utc(), + NaiveDate::from_ymd_opt(2021, 1, 1) + .unwrap() + .and_hms_opt(0, 15, 20) + .unwrap() + ); + + //Ambiguous time + let tz = "America/New_York".parse::().unwrap(); + let parsed = QueryDateTime::from_date_str(tz, "2024-11-03 01:30:00").unwrap(); + assert_eq!( + parsed.date_time().naive_utc(), + NaiveDate::from_ymd_opt(2024, 11, 3) + .unwrap() + .and_hms_opt(5, 30, 0) + .unwrap() + ); + //Not exist time + let tz = "America/New_York".parse::().unwrap(); + let parsed = QueryDateTime::from_date_str(tz, "2024-03-10 02:30:00").unwrap(); + assert_eq!( + parsed.date_time().naive_utc(), + NaiveDate::from_ymd_opt(2024, 3, 10) + .unwrap() + .and_hms_opt(7, 0, 0) + .unwrap() + ); + } + #[test] + fn test_start_of_year() { + let 
tz = "Etc/GMT-3".parse::().unwrap(); + let date = QueryDateTime::from_date_str(tz, "2024-11-03 01:30:00").unwrap(); + let start = date.start_of_year(); + assert_eq!( + start.date_time().naive_utc(), + NaiveDate::from_ymd_opt(2023, 12, 31) + .unwrap() + .and_hms_opt(21, 0, 0) + .unwrap() + ); + } + #[test] + fn test_add_interval() { + let tz = "Etc/GMT-3".parse::().unwrap(); + + let date = QueryDateTime::from_date_str(tz, "2024-11-03 01:30:00").unwrap(); + let interval = "4 hours 2 minutes 10 second" + .parse::() + .unwrap(); + let result = date.add_interval(&interval).unwrap().naive_utc(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2024, 11, 3) + .unwrap() + .and_hms_opt(2, 32, 10) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-11-03 01:30:00").unwrap(); + let interval = "2 hours -2 minutes 10 second" + .parse::() + .unwrap(); + let result = date.add_interval(&interval).unwrap().naive_utc(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2024, 11, 3) + .unwrap() + .and_hms_opt(0, 28, 10) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-11-03 4:30:00").unwrap(); + let interval = "-4 hours -31 minutes 10 second" + .parse::() + .unwrap(); + let result = date.add_interval(&interval).unwrap().naive_utc(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2024, 11, 2) + .unwrap() + .and_hms_opt(20, 59, 10) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-02-03 01:30:00").unwrap(); + let interval = "1 week".parse::().unwrap(); + let result = date.add_interval(&interval).unwrap().naive_local(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2024, 2, 10) + .unwrap() + .and_hms_opt(1, 30, 0) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-02-03 01:30:00").unwrap(); + let interval = "1 month 1 week".parse::().unwrap(); + let result = date.add_interval(&interval).unwrap().naive_local(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2024, 3, 10) + .unwrap() + .and_hms_opt(1, 30, 0) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-02-03 01:30:00").unwrap(); + let interval = "1 year 1 month 1 week 3 minute" + .parse::() + .unwrap(); + let result = date.add_interval(&interval).unwrap().naive_local(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2025, 3, 10) + .unwrap() + .and_hms_opt(1, 33, 0) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-02-03 01:30:00").unwrap(); + let interval = "11 month 1 week 3 minute".parse::().unwrap(); + let result = date.add_interval(&interval).unwrap().naive_local(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2025, 1, 10) + .unwrap() + .and_hms_opt(1, 33, 0) + .unwrap() + ); + } + #[test] + fn test_add_interval_age_cases() { + let tz = "Etc/GMT-3".parse::().unwrap(); + + let date = QueryDateTime::from_date_str(tz, "2024-01-31 01:30:00").unwrap(); + let interval = "1 month 3 minute".parse::().unwrap(); + let result = date.add_interval(&interval).unwrap().naive_local(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2024, 2, 29) + .unwrap() + .and_hms_opt(1, 33, 0) + .unwrap() + ); + + let date = QueryDateTime::from_date_str(tz, "2024-02-29 01:30:00").unwrap(); + let interval = "1 year 3 minute".parse::().unwrap(); + let result = date.add_interval(&interval).unwrap().naive_local(); + assert_eq!( + result, + NaiveDate::from_ymd_opt(2025, 2, 28) + .unwrap() + .and_hms_opt(1, 33, 0) + .unwrap() + ); + } + #[test] + fn test_align_to_origin() { + let tz = "Etc/GMT-3".parse::().unwrap(); + let date = 
QueryDateTime::from_date_str(tz, "2024-01-31").unwrap(); + let interval = "1 day".parse::().unwrap(); + let origin = date.start_of_year(); + let result = date.align_to_origin(&origin, &interval).unwrap(); + assert_eq!( + result.naive_local(), + NaiveDate::from_ymd_opt(2024, 1, 31) + .unwrap() + .and_hms_opt(0, 0, 0) + .unwrap() + ); + + let tz = "Etc/GMT-3".parse::().unwrap(); + let date = QueryDateTime::from_date_str(tz, "2024-01-31").unwrap(); + let interval = "2 day".parse::().unwrap(); + let origin = QueryDateTime::from_date_str(tz, "2024-01-30").unwrap(); + let result = date.align_to_origin(&origin, &interval).unwrap(); + assert_eq!( + result.naive_local(), + NaiveDate::from_ymd_opt(2024, 1, 30) + .unwrap() + .and_hms_opt(0, 0, 0) + .unwrap() + ); + let tz = "Etc/GMT-3".parse::().unwrap(); + let date = QueryDateTime::from_date_str(tz, "2024-01-31").unwrap(); + let interval = "2 month".parse::().unwrap(); + let origin = QueryDateTime::from_date_str(tz, "2024-05-15").unwrap(); + let result = date.align_to_origin(&origin, &interval).unwrap(); + assert_eq!( + result.naive_local(), + NaiveDate::from_ymd_opt(2024, 1, 15) + .unwrap() + .and_hms_opt(0, 0, 0) + .unwrap() + ); + } +} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/date_time_helper.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/date_time_helper.rs new file mode 100644 index 0000000000000..972297c6a392c --- /dev/null +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/date_time_helper.rs @@ -0,0 +1,125 @@ +use chrono::{DateTime, Duration, LocalResult, NaiveDate, NaiveDateTime, TimeZone}; +use chrono_tz::Tz; +use cubenativeutils::CubeError; +pub struct QueryDateTimeHelper {} + +impl QueryDateTimeHelper { + pub fn parse_native_date_time(date: &str) -> Result { + let formats = &[ + "%Y-%m-%d", + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S%.f", + "%Y-%m-%dT%H:%M:%S%.f", + ]; + + for format in formats { + if let Ok(dt) = NaiveDateTime::parse_from_str(date, format) { + return Ok(dt); + } + } + + if let Ok(d) = NaiveDate::parse_from_str(date, "%Y-%m-%d") { + return Ok(d.and_hms_opt(0, 0, 0).unwrap()); + } + + Err(CubeError::user(format!("Can't parse date: '{}'", date))) + } + + pub fn resolve_local_result( + tz: &Tz, + origin_date: &NaiveDateTime, + local_result: LocalResult>, + ) -> Result, CubeError> { + match local_result { + LocalResult::Single(dt) => Ok(dt), + LocalResult::Ambiguous(dt, _) => Ok(dt), + LocalResult::None => Self::resolve_nonexistent(tz, origin_date), + } + } + + /// Resolves a nonexistent local time (DST gap) using binary search, + /// mimicking `moment.tz` behavior — finds the nearest valid local time **after or equal to** the input. + /// + /// Assumes that any DST gap is less than 2 hours (true for almost all real-world time zones). 
+ fn resolve_nonexistent(tz: &Tz, date: &NaiveDateTime) -> Result, CubeError> { + // Max time delta to look ahead — generous enough for most DST gaps + let max_offset = Duration::hours(2); + + let mut low = date.clone(); + let mut high = low + max_offset; + + // Ensure `high` is a valid local time (expand if needed) + while let LocalResult::None = tz.from_local_datetime(&high) { + high = high + max_offset; + } + + // Binary search for the first valid local time >= `naive` + while high - low > Duration::seconds(1) { + let mid = low + (high - low) / 2; + match tz.from_local_datetime(&mid) { + LocalResult::None => { + // Still in the invalid range — move lower bound up + low = mid; + } + _ => { + // Found a valid or ambiguous time — narrow upper bound + high = mid; + } + } + } + + // Return the first valid interpretation + match tz.from_local_datetime(&high) { + LocalResult::Single(dt) => Ok(dt), + LocalResult::Ambiguous(dt, _) => Ok(dt), + LocalResult::None => Err(CubeError::user(format!( + "Could not resolve nonexistent time {date} in timezone {}", + tz + ))), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn test_parse_native_date_time() { + assert_eq!( + QueryDateTimeHelper::parse_native_date_time("2021-01-01").unwrap(), + NaiveDate::from_ymd_opt(2021, 1, 1) + .unwrap() + .and_hms_opt(0, 0, 0) + .unwrap() + ); + assert_eq!( + QueryDateTimeHelper::parse_native_date_time("2021-01-01T12:10:15").unwrap(), + NaiveDate::from_ymd_opt(2021, 1, 1) + .unwrap() + .and_hms_opt(12, 10, 15) + .unwrap() + ); + assert_eq!( + QueryDateTimeHelper::parse_native_date_time("2021-01-01 12:10:15").unwrap(), + NaiveDate::from_ymd_opt(2021, 1, 1) + .unwrap() + .and_hms_opt(12, 10, 15) + .unwrap() + ); + assert_eq!( + QueryDateTimeHelper::parse_native_date_time("2021-01-01 12:10:15.345").unwrap(), + NaiveDate::from_ymd_opt(2021, 1, 1) + .unwrap() + .and_hms_milli_opt(12, 10, 15, 345) + .unwrap() + ); + assert_eq!( + QueryDateTimeHelper::parse_native_date_time("2021-01-01T12:10:15.345").unwrap(), + NaiveDate::from_ymd_opt(2021, 1, 1) + .unwrap() + .and_hms_milli_opt(12, 10, 15, 345) + .unwrap() + ); + } +} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs new file mode 100644 index 0000000000000..963ea1e580b00 --- /dev/null +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity.rs @@ -0,0 +1,146 @@ +use super::{GranularityHelper, QueryDateTime, SqlInterval}; +use chrono_tz::Tz; +use cubenativeutils::CubeError; +use itertools::Itertools; +use std::str::FromStr; + +#[derive(Clone)] +pub struct Granularity { + granularity: String, + granularity_interval: String, + granularity_offset: Option, + origin: QueryDateTime, + is_predefined_granularity: bool, + is_natural_aligned: bool, +} + +impl Granularity { + pub fn try_new_predefined(timezone: Tz, granularity: String) -> Result { + let granularity_interval = format!("1 {}", granularity); + let origin = Self::default_origin(timezone)?; + + Ok(Self { + granularity, + granularity_interval, + granularity_offset: None, + origin, + is_predefined_granularity: true, + is_natural_aligned: true, + }) + } + pub fn try_new_custom( + timezone: Tz, + granularity: String, + origin: Option, + granularity_interval: String, + granularity_offset: Option, + ) -> Result { + let origin = if let Some(origin) = origin { + QueryDateTime::from_date_str(timezone, &origin)? 
+ } else if let Some(offset) = &granularity_offset { + let origin = Self::default_origin(timezone)?; + let interval = SqlInterval::from_str(offset)?; + origin.add_interval(&interval)? + } else { + Self::default_origin(timezone)? + }; + + let mut interval_parts = granularity_interval.split_whitespace().tuples::<(_, _)>(); + let first_part = interval_parts.next(); + let second_part = interval_parts.next(); + let is_natural_aligned = if second_part.is_none() { + if let Some((value, _)) = first_part { + let value = value + .parse::() + .map_err(|_| CubeError::user(format!("Invalid interval value: {}", value)))?; + value == 1 + } else { + false + } + } else { + false + }; + + Ok(Self { + granularity, + granularity_interval, + granularity_offset, + origin, + is_predefined_granularity: false, + is_natural_aligned, + }) + } + + pub fn is_natural_aligned(&self) -> bool { + self.is_natural_aligned + } + + pub fn granularity_offset(&self) -> &Option { + &self.granularity_offset + } + + pub fn granularity(&self) -> &String { + &self.granularity + } + + pub fn granularity_interval(&self) -> &String { + &self.granularity_interval + } + + pub fn origin_local_formatted(&self) -> String { + self.origin.format("%Y-%m-%dT%H:%M:%S%.3f") + } + + pub fn granularity_from_interval(&self) -> Result { + self.granularity_interval + .parse::()? + .min_granularity() + } + + pub fn granularity_from_offset(&self) -> Result { + if let Some(offset) = &self.granularity_offset { + offset.parse::()?.min_granularity() + } else { + Ok("".to_string()) + } + } + + pub fn is_predefined_granularity(&self) -> bool { + self.is_predefined_granularity + } + + pub fn min_granularity(&self) -> Result, CubeError> { + if self.is_predefined_granularity { + return Ok(Some(self.granularity.clone())); + } + + if self.granularity_offset.is_some() { + return GranularityHelper::min_granularity( + &Some(self.granularity_from_interval()?), + &Some(self.granularity_from_offset()?), + ); + } + + GranularityHelper::min_granularity( + &Some(self.granularity_from_interval()?), + &Some(self.origin.granularity()), + ) + } + + pub fn resolve_granularity(&self) -> Result { + if self.is_predefined_granularity { + Ok(self.granularity.clone()) + } else { + self.granularity_from_interval() + } + } + + pub fn align_date_to_origin(&self, date: QueryDateTime) -> Result { + let interval = self.granularity_interval.parse::()?; + date.align_to_origin(&self.origin, &interval) + } + + fn default_origin(timezone: Tz) -> Result { + Ok(QueryDateTime::now(timezone)?.start_of_year()) + } +} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/granularity_helper.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity_helper.rs similarity index 70% rename from rust/cubesqlplanner/cubesqlplanner/src/planner/granularity_helper.rs rename to rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity_helper.rs index 9bede1a918470..8fc8be43e6355 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/planner/granularity_helper.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/granularity_helper.rs @@ -1,11 +1,14 @@ +use crate::cube_bridge::evaluator::CubeEvaluator; +use crate::planner::BaseTimeDimension; +use crate::planner::Granularity; +use chrono::prelude::*; +use chrono_tz::Tz; use cubenativeutils::CubeError; use itertools::Itertools; use lazy_static::lazy_static; use std::collections::HashMap; use std::rc::Rc; -use super::BaseTimeDimension; - pub struct GranularityHelper {} impl GranularityHelper { @@ -98,6 +101,10 
         }
     }
 
+    pub fn is_predefined_granularity(granularity: &str) -> bool {
+        Self::standard_granularity_parents().contains_key(granularity)
+    }
+
     pub fn standard_granularity_parents() -> &'static HashMap<String, Vec<String>> {
         lazy_static! {
             static ref STANDARD_GRANULARITIES_PARENTS: HashMap<String, Vec<String>> = {
@@ -172,4 +179,72 @@ impl GranularityHelper {
         }
         &STANDARD_GRANULARITIES_PARENTS
     }
+
+    pub fn parse_date_time_in_tz(date: &str, timezone: &Tz) -> Result<DateTime<Tz>, CubeError> {
+        let local_dt = Self::parse_date_time(date)?;
+        if let Some(result) = timezone.from_local_datetime(&local_dt).single() {
+            Ok(result)
+        } else {
+            Err(CubeError::user(format!(
+                "Error while parsing date `{date}` in timezone `{timezone}`"
+            )))
+        }
+    }
+
+    pub fn parse_date_time(date: &str) -> Result<NaiveDateTime, CubeError> {
+        let formats = &[
+            "%Y-%m-%d",
+            "%Y-%m-%d %H:%M:%S%.f",
+            "%Y-%m-%d %H:%M:%S",
+            "%Y-%m-%dT%H:%M:%S%.f",
+            "%Y-%m-%dT%H:%M:%S",
+        ];
+
+        for format in formats {
+            if let Ok(dt) = NaiveDateTime::parse_from_str(date, format) {
+                return Ok(dt);
+            }
+        }
+
+        if let Ok(d) = NaiveDate::parse_from_str(date, "%Y-%m-%d") {
+            return Ok(d.and_hms_opt(0, 0, 0).unwrap());
+        }
+
+        Err(CubeError::user(format!("Can't parse date: '{}'", date)))
+    }
+
+    pub fn make_granularity_obj(
+        cube_evaluator: Rc<dyn CubeEvaluator>,
+        timezone: Tz,
+        cube_name: &String,
+        name: &String,
+        granularity: Option<String>,
+    ) -> Result<Option<Granularity>, CubeError> {
+        let granularity_obj = if let Some(granularity) = &granularity {
+            if !Self::is_predefined_granularity(&granularity) {
+                let path = vec![
+                    cube_name.clone(),
+                    name.clone(),
+                    "granularities".to_string(),
+                    granularity.clone(),
+                ];
+                let granularity_definition = cube_evaluator.resolve_granularity(path)?;
+                Some(Granularity::try_new_custom(
+                    timezone.clone(),
+                    granularity.clone(),
+                    granularity_definition.origin,
+                    granularity_definition.interval,
+                    granularity_definition.offset,
+                )?)
+            } else {
+                Some(Granularity::try_new_predefined(
+                    timezone.clone(),
+                    granularity.clone(),
+                )?)
+            }
+        } else {
+            None
+        };
+        Ok(granularity_obj)
+    }
 }
diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/mod.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/mod.rs
new file mode 100644
index 0000000000000..78bf440b206e4
--- /dev/null
+++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/mod.rs
@@ -0,0 +1,11 @@
+mod date_time;
+mod date_time_helper;
+mod granularity;
+mod granularity_helper;
+mod sql_interval;
+
+pub use date_time::*;
+pub use date_time_helper::*;
+pub use granularity::*;
+pub use granularity_helper::*;
+pub use sql_interval::*;
diff --git a/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs
new file mode 100644
index 0000000000000..cda53985fdbc3
--- /dev/null
+++ b/rust/cubesqlplanner/cubesqlplanner/src/planner/time_dimension/sql_interval.rs
@@ -0,0 +1,127 @@
+use cubenativeutils::CubeError;
+use itertools::Itertools;
+use std::str::FromStr;
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct SqlInterval {
+    pub year: i32,
+    pub month: i32,
+    pub week: i32,
+    pub day: i32,
+    pub hour: i32,
+    pub minute: i32,
+    pub second: i32,
+}
+
+impl SqlInterval {
+    pub fn new(
+        year: i32,
+        month: i32,
+        week: i32,
+        day: i32,
+        hour: i32,
+        minute: i32,
+        second: i32,
+    ) -> Self {
+        Self {
+            year,
+            month,
+            week,
+            day,
+            hour,
+            minute,
+            second,
+        }
+    }
+
+    pub fn min_granularity(&self) -> Result<String, CubeError> {
+        let res = if self.second != 0 {
+            "second"
+        } else if self.minute != 0 {
+            "minute"
+        } else if self.hour != 0 {
+            "hour"
+        } else if self.day != 0 {
+            "day"
+        } else if self.week != 0 {
+            "week"
+        } else if self.month != 0 {
+            "month"
+        } else if self.year != 0 {
+            "year"
+        } else {
+            return Err(CubeError::internal(format!(
+                "Attempt to get granularity from empty SqlInterval"
+            )));
+        };
+        Ok(res.to_string())
+    }
+
+    pub fn inverse(&self) -> Self {
+        Self::new(
+            -self.year,
+            -self.month,
+            -self.week,
+            -self.day,
+            -self.hour,
+            -self.minute,
+            -self.second,
+        )
+    }
+}
+
+impl Default for SqlInterval {
+    fn default() -> Self {
+        Self {
+            second: 0,
+            minute: 0,
+            hour: 0,
+            day: 0,
+            week: 0,
+            month: 0,
+            year: 0,
+        }
+    }
+}
+
+impl FromStr for SqlInterval {
+    type Err = CubeError;
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let mut result = SqlInterval::default();
+        for (value, unit) in s.split_whitespace().tuples() {
+            let value = value
+                .parse::<i32>()
+                .map_err(|_| CubeError::user(format!("Invalid interval value: {}", value)))?;
+            match unit {
+                "second" | "seconds" => result.second = value,
+                "minute" | "minutes" => result.minute = value,
+                "hour" | "hours" => result.hour = value,
+                "day" | "days" => result.day = value,
+                "week" | "weeks" => result.week = value,
+                "month" | "months" => result.month = value,
+                "year" | "years" => result.year = value,
+                other => return Err(CubeError::user(format!("Invalid interval unit: {}", other))),
+            }
+        }
+        Ok(result)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_from_str() {
+        assert_eq!(
+            SqlInterval::from_str("1 second").unwrap(),
+            SqlInterval::new(0, 0, 0, 0, 0, 0, 1)
+        );
+
+        assert_eq!(
+            SqlInterval::from_str("1 year 3 months 4 weeks 2 day 4 hours 2 minutes 1 second")
+                .unwrap(),
+            SqlInterval::new(1, 3, 4, 2, 4, 2, 1)
+        );
+    }
+}
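Reviewer note (not part of the diff): the custom-granularity support hinges on parsing interval strings such as "3 days" into per-unit fields, as SqlInterval::FromStr above does with itertools' tuples(). Below is a minimal, self-contained sketch of that same token-pairing approach using only the standard library; Interval and parse_interval are illustrative stand-ins, not the shipped API.

// Sketch: parse "N unit [N unit ...]" into named fields, mirroring SqlInterval::from_str.
#[derive(Debug, Default, PartialEq)]
struct Interval {
    year: i32,
    month: i32,
    week: i32,
    day: i32,
    hour: i32,
    minute: i32,
    second: i32,
}

fn parse_interval(s: &str) -> Result<Interval, String> {
    let mut result = Interval::default();
    let mut parts = s.split_whitespace();
    // Consume tokens two at a time: a numeric value followed by a unit keyword.
    while let Some(value) = parts.next() {
        let unit = parts
            .next()
            .ok_or_else(|| format!("missing unit after value: {}", value))?;
        let value: i32 = value
            .parse()
            .map_err(|_| format!("invalid interval value: {}", value))?;
        match unit {
            "second" | "seconds" => result.second = value,
            "minute" | "minutes" => result.minute = value,
            "hour" | "hours" => result.hour = value,
            "day" | "days" => result.day = value,
            "week" | "weeks" => result.week = value,
            "month" | "months" => result.month = value,
            "year" | "years" => result.year = value,
            other => return Err(format!("invalid interval unit: {}", other)),
        }
    }
    Ok(result)
}

fn main() {
    // "3 days" corresponds to the interval behind the three_days custom granularity
    // exercised by the new rolling-window test in this PR.
    let parsed = parse_interval("3 days").expect("should parse");
    assert_eq!(parsed.day, 3);
    assert_eq!(parsed.hour, 0);
    println!("{:?}", parsed);
}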