Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/drivers-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ on:
- 'packages/cubejs-snowflake-driver/**'
- 'packages/cubejs-vertica-driver/**'

# To test SQL API Push down
- 'packages/cubejs-backend-native/**'
- 'rust/cubesql/**'
- 'rust/cubesqlplanner/**'
pull_request:
paths:
- '.github/workflows/drivers-tests.yml'
Expand All @@ -54,9 +54,9 @@ on:
- 'packages/cubejs-snowflake-driver/**'
- 'packages/cubejs-vertica-driver/**'

# To test SQL API Push down
- 'packages/cubejs-backend-native/**'
- 'rust/cubesql/**'
- 'rust/cubesqlplanner/**'
workflow_dispatch:
inputs:
use_tesseract_sql_planner:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ pub struct CompiledPreAggregation {
pub external: Option<bool>,
pub measures: Vec<Rc<MemberSymbol>>,
pub dimensions: Vec<Rc<MemberSymbol>>,
pub time_dimensions: Vec<(Rc<MemberSymbol>, Option<String>)>,
pub time_dimensions: Vec<Rc<MemberSymbol>>,
pub allow_non_strict_date_range_match: bool,
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ pub struct DimensionMatcher<'a> {
query_tools: Rc<QueryTools>,
pre_aggregation: &'a CompiledPreAggregation,
pre_aggregation_dimensions: HashMap<String, bool>,
pre_aggregation_time_dimensions: HashMap<String, (Option<String>, bool)>,
pre_aggregation_time_dimensions: HashMap<String, (Option<Rc<TimeDimensionSymbol>>, bool)>,
result: MatchState,
}

Expand All @@ -48,7 +48,13 @@ impl<'a> DimensionMatcher<'a> {
let pre_aggregation_time_dimensions = pre_aggregation
.time_dimensions
.iter()
.map(|(dim, granularity)| (dim.full_name(), (granularity.clone(), false)))
.map(|dim| {
if let Ok(td) = dim.as_time_dimension() {
(td.base_symbol().full_name(), (Some(td), false))
} else {
(dim.full_name(), (None, false))
}
})
.collect::<HashMap<_, _>>();
Self {
query_tools,
Expand Down Expand Up @@ -194,30 +200,39 @@ impl<'a> DimensionMatcher<'a> {
time_dimension.rollup_granularity(self.query_tools.clone())?
};
let base_symbol_name = time_dimension.base_symbol().full_name();

if let Some(found) = self
.pre_aggregation_time_dimensions
.get_mut(&base_symbol_name)
{
if add_to_matched_dimension {
found.1 = true;
}
let pre_aggr_granularity = &found.0;
if granularity.is_none() || pre_aggr_granularity == &granularity {

let pre_agg_td = &found.0;
let pre_aggr_granularity = if let Some(pre_agg_td) = pre_agg_td {
pre_agg_td.granularity().clone()
} else {
None
};

if granularity.is_none() || pre_aggr_granularity == granularity {
Ok(MatchState::Full)
} else if pre_aggr_granularity.is_none()
|| GranularityHelper::is_predefined_granularity(
pre_aggr_granularity.as_ref().unwrap(),
)
{
let min_granularity =
GranularityHelper::min_granularity(&granularity, &pre_aggr_granularity)?;
if &min_granularity == pre_aggr_granularity {
} else if pre_aggr_granularity.is_none() {
Ok(MatchState::NotMatched)
} else if let Some(pre_agg_td) = pre_agg_td {
let min_granularity = GranularityHelper::min_granularity_for_time_dimensions(
(&granularity, time_dimension),
(&pre_aggr_granularity, &pre_agg_td),
)?;

if min_granularity == pre_aggr_granularity {
Ok(MatchState::Partial)
} else {
Ok(MatchState::NotMatched)
}
} else {
Ok(MatchState::NotMatched) //TODO Custom granularities!!!
Ok(MatchState::NotMatched)
}
} else {
if time_dimension.owned_by_cube() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -387,12 +387,7 @@ impl PreAggregationOptimizer {
.dimensions
.iter()
.cloned()
.chain(
pre_aggregation
.time_dimensions
.iter()
.map(|(d, _)| d.clone()),
)
.chain(pre_aggregation.time_dimensions.iter().map(|d| d.clone()))
.collect(),
measures: pre_aggregation.measures.to_vec(),
multiplied_measures: HashSet::new(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ use crate::planner::planners::ResolvedJoinItem;
use crate::planner::query_tools::QueryTools;
use crate::planner::sql_evaluator::collectors::collect_cube_names_from_symbols;
use crate::planner::sql_evaluator::MemberSymbol;
use crate::planner::sql_evaluator::TimeDimensionSymbol;
use crate::planner::GranularityHelper;
use cubenativeutils::CubeError;
use itertools::Itertools;
use std::collections::HashMap;
Expand Down Expand Up @@ -136,7 +138,30 @@ impl PreAggregationsCompiler {
refs,
Self::check_is_time_dimension,
)?;
vec![(dims[0].clone(), static_data.granularity.clone())]

if static_data.granularity.is_some() {
let evaluator_compiler_cell = self.query_tools.evaluator_compiler().clone();
let mut evaluator_compiler = evaluator_compiler_cell.borrow_mut();
let base_symbol = dims[0].clone();

let granularity_obj = GranularityHelper::make_granularity_obj(
self.query_tools.cube_evaluator().clone(),
&mut evaluator_compiler,
&base_symbol.cube_name(),
&base_symbol.name(),
static_data.granularity.clone(),
)?;
let symbol = MemberSymbol::new_time_dimension(TimeDimensionSymbol::new(
base_symbol,
static_data.granularity.clone(),
granularity_obj,
None,
));

vec![symbol]
} else {
vec![dims[0].clone()]
}
} else {
Vec::new()
};
Expand Down Expand Up @@ -276,14 +301,16 @@ impl PreAggregationsCompiler {
}

/// Checks that two lambda pre-aggregation time-dimension lists match pairwise:
/// each positional pair must be a time dimension with the same member name and
/// the same granularity.
///
/// Returns a user-facing `CubeError` when any pair differs or when a symbol is
/// not a time dimension.
fn match_time_dimensions(
    a: &Vec<Rc<MemberSymbol>>,
    b: &Vec<Rc<MemberSymbol>>,
) -> Result<(), CubeError> {
    let all_match = a.iter().zip(b.iter()).all(|(a, b)| {
        if let (Ok(td_a), Ok(td_b)) = (a.as_time_dimension(), b.as_time_dimension()) {
            // BUG FIX: the original compared `td_a.name()` against itself,
            // which is always true and silently disabled the name check.
            td_a.name() == td_b.name() && td_a.granularity() == td_b.granularity()
        } else {
            // Non-time-dimension symbols can never match here.
            false
        }
    });
    if !all_match {
        return Err(CubeError::user(format!(
            "Names for pre-aggregation symbols in lambda pre-aggregation don't match"
        )));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ pub struct PreAggregation {
#[builder(default)]
dimensions: Vec<Rc<MemberSymbol>>,
#[builder(default)]
time_dimensions: Vec<(Rc<MemberSymbol>, Option<String>)>,
time_dimensions: Vec<Rc<MemberSymbol>>,
external: bool,
#[builder(default)]
granularity: Option<String>,
Expand All @@ -39,7 +39,7 @@ impl PreAggregation {
&self.dimensions
}

pub fn time_dimensions(&self) -> &Vec<(Rc<MemberSymbol>, Option<String>)> {
pub fn time_dimensions(&self) -> &Vec<Rc<MemberSymbol>> {
&self.time_dimensions
}

Expand Down Expand Up @@ -97,11 +97,11 @@ impl PreAggregation {
QualifiedColumnName::new(None, alias.clone()),
);
}
for (dim, granularity) in self.time_dimensions().iter() {
let base_symbol = if let Ok(td) = dim.as_time_dimension() {
td.base_symbol().clone()
for dim in self.time_dimensions().iter() {
let (base_symbol, granularity) = if let Ok(td) = dim.as_time_dimension() {
(td.base_symbol().clone(), td.granularity().clone())
} else {
dim.clone()
(dim.clone(), None)
};
let suffix = if let Some(granularity) = &granularity {
format!("_{}", granularity.clone())
Expand All @@ -110,7 +110,7 @@ impl PreAggregation {
};
let alias = format!("{}{}", base_symbol.alias(), suffix);
res.insert(
dim.full_name(),
base_symbol.full_name(),
QualifiedColumnName::new(None, alias.clone()),
);
}
Expand Down Expand Up @@ -184,11 +184,7 @@ impl PrettyPrint for PreAggregation {
&self
.time_dimensions()
.iter()
.map(|(d, granularity)| format!(
"({} {})",
d.full_name(),
granularity.clone().unwrap_or("None".to_string())
))
.map(|d| d.full_name())
.join(", ")
),
&state,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,18 +95,25 @@ impl PreAggregationProcessor<'_> {
Some(alias),
);
}
for (dim, granularity) in pre_aggregation.time_dimensions().iter() {
for dim in pre_aggregation.time_dimensions().iter() {
let (alias, granularity) = if let Ok(td) = dim.as_time_dimension() {
(td.base_symbol().alias(), td.granularity().clone())
} else {
(dim.alias(), None)
};

let name_in_table = PlanSqlTemplates::memeber_alias_name(
&item.cube_alias,
&dim.name(),
granularity,
&granularity,
);

let suffix = if let Some(granularity) = granularity {
format!("_{}", granularity.clone())
} else {
"_day".to_string()
};
let alias = format!("{}{}", dim.alias(), suffix);
let alias = format!("{}{}", alias, suffix);
select_builder.add_projection_reference_member(
&dim,
QualifiedColumnName::new(None, name_in_table.clone()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -340,13 +340,32 @@ impl DimensionSymbolFactory {
full_name: &String,
cube_evaluator: Rc<dyn CubeEvaluator>,
) -> Result<Self, CubeError> {
let mut iter = cube_evaluator
.parse_path("dimensions".to_string(), full_name.clone())?
.into_iter();
let parts: Vec<&str> = full_name.split('.').collect();
let mut iter;
let member_short_path;

// try_new may be invoked with the following full_name variants:
// 1. "cube.member"
// 2. "cube.member.granularity" — may come from multi-stage processing
// 3. "cube.cube.cube...cube.member" — may come from pre-aggregation references (as they include full join paths)
// We cannot distinguish "cube.member.granularity" from "cube.cube.member" here,
// so we have to try both variants of evaluation, falling back on failure.
if let Ok(iter_by_start) =
cube_evaluator.parse_path("dimensions".to_string(), full_name.clone())
{
member_short_path = full_name.clone();
iter = iter_by_start.into_iter();
} else {
member_short_path = format!("{}.{}", parts[parts.len() - 2], parts[parts.len() - 1]);
iter = cube_evaluator
.parse_path("dimensions".to_string(), member_short_path.clone())?
.into_iter();
}

let cube_name = iter.next().unwrap();
let name = iter.next().unwrap();
let granularity = iter.next();
let definition = cube_evaluator.dimension_by_path(full_name.clone())?;
let definition = cube_evaluator.dimension_by_path(member_short_path)?;
Ok(Self {
cube_name,
name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -511,12 +511,19 @@ impl MeasureSymbolFactory {
full_name: &String,
cube_evaluator: Rc<dyn CubeEvaluator>,
) -> Result<Self, CubeError> {
let parts: Vec<&str> = full_name.split('.').collect();
let member_short_path = if parts.len() > 2 {
format!("{}.{}", parts[parts.len() - 2], parts[parts.len() - 1])
} else {
full_name.clone()
};

let mut iter = cube_evaluator
.parse_path("measures".to_string(), full_name.clone())?
.parse_path("measures".to_string(), member_short_path.clone())?
.into_iter();
let cube_name = iter.next().unwrap();
let name = iter.next().unwrap();
let definition = cube_evaluator.measure_by_path(full_name.clone())?;
let definition = cube_evaluator.measure_by_path(member_short_path)?;
let sql = definition.sql()?;
Ok(Self {
cube_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -238,13 +238,38 @@ impl TimeDimensionSymbol {
query_tools: Rc<QueryTools>,
) -> Result<Option<String>, CubeError> {
if let Some(granularity_obj) = &self.granularity_obj {
let date_range_granularity = self.date_range_granularity(query_tools.clone())?;
let self_granularity = granularity_obj.min_granularity()?;

GranularityHelper::min_granularity(&date_range_granularity, &self_granularity)
if let Some(date_range) = &self.date_range {
let date_range_granularity = self.date_range_granularity(query_tools.clone())?;

// For predefined granularities or custom granularities not aligned with date range,
// we need to return the minimum granularity
if granularity_obj.is_predefined_granularity() {
let self_granularity = granularity_obj.min_granularity()?;
GranularityHelper::min_granularity(&date_range_granularity, &self_granularity)
} else {
let from_date_str = QueryDateTimeHelper::format_from_date(&date_range.0, 3)?;
let to_date_str = QueryDateTimeHelper::format_to_date(&date_range.1, 3)?;
let is_aligned = granularity_obj.is_aligned_with_date_range(
&from_date_str,
&to_date_str,
query_tools.timezone(),
)?;

if is_aligned {
Ok(self.granularity.clone())
} else {
let self_granularity = granularity_obj.min_granularity()?;
GranularityHelper::min_granularity(
&date_range_granularity,
&self_granularity,
)
}
}
} else {
Ok(self.granularity.clone())
}
} else {
let date_range_granularity = self.date_range_granularity(query_tools.clone())?;

Ok(date_range_granularity)
}
}
Expand Down
Loading
Loading