Skip to content

Commit 222cab8

Browse files
authored
fix(cubestore): Allow creating an index from expressions (#9006)
1 parent 849790f commit 222cab8

File tree

2 files changed

+49
-2
lines changed

2 files changed

+49
-2
lines changed

rust/cubestore/cubestore/src/streaming/kafka_post_processing.rs

Lines changed: 31 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -470,16 +470,45 @@ impl KafkaPostProcessPlanner {
470470
}
471471

472472
fn get_source_unique_column(&self, expr: &Expr) -> Result<Column, CubeError> {
473+
fn find_column_name(expr: &Expr) -> Result<Option<String>, CubeError> {
474+
match expr {
475+
Expr::Column(c) => Ok(Some(c.name.clone())),
476+
Expr::Alias(e, _) => find_column_name(&**e),
477+
Expr::ScalarUDF { args, .. } => {
478+
let mut column_name: Option<String> = None;
479+
for arg in args {
480+
if let Some(name) = find_column_name(arg)? {
481+
if let Some(existing_name) = &column_name {
482+
if existing_name != &name {
483+
return Err(CubeError::user(
484+
format!("Scalar function can only use a single column, expression: {:?}", expr),
485+
));
486+
}
487+
} else {
488+
column_name = Some(name);
489+
}
490+
}
491+
}
492+
Ok(column_name)
493+
}
494+
_ => Ok(None),
495+
}
496+
}
497+
473498
let source_name = match expr {
474499
Expr::Column(c) => Ok(c.name.clone()),
475500
Expr::Alias(e, _) => match &**e {
476501
Expr::Column(c) => Ok(c.name.clone()),
502+
Expr::ScalarUDF { .. } => find_column_name(expr)?.ok_or_else(|| {
503+
CubeError::user(format!("Scalar function must contain at least one column, expression: {:?}", expr))
504+
}),
477505
_ => Err(CubeError::user(format!(
478-
"Unique key can't be an expression in kafka streaming queries"
506+
"Unique key can't be an expression in kafka streaming queries, expression: {:?}",
507+
expr
479508
))),
480509
},
481510
_ => Err(CubeError::user(
482-
"All expressions must have aliases in kafka streaming queries".to_string(),
511+
format!("All expressions must have aliases in kafka streaming queries, expression: {:?}", expr),
483512
)),
484513
}?;
485514

rust/cubestore/cubestore/src/streaming/mod.rs

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1710,6 +1710,24 @@ mod tests {
17101710
unique key (`message_id`, `an_id`) INDEX by_anonymous(`message_id`) location 'stream://kafka/EVENTS_BY_TYPE/0', 'stream://kafka/EVENTS_BY_TYPE/1'")
17111711
.await
17121712
.expect_err("Validation should fail");
1713+
1714+
service
1715+
.exec_query("CREATE TABLE test.events_by_type_6 (`ANONYMOUSID` text, `MESSAGEID` text, `FILTER_ID` int, `TIMESTAMP` timestamp, `TIMESTAMP_SECOND` timestamp) \
1716+
WITH (\
1717+
stream_offset = 'earliest',
1718+
select_statement = 'SELECT \
1719+
ANONYMOUSID, MESSAGEID, FILTER_ID, TIMESTAMP, \
1720+
PARSE_TIMESTAMP(FORMAT_TIMESTAMP(CONVERT_TZ(TIMESTAMP, \\'UTC\\', \\'UTC\\'), \\'yyyy-MM-dd\\'\\'T\\'\\'HH:mm:ss.000\\'), \\'yyyy-MM-dd\\'\\'T\\'\\'HH:mm:ss.SSS\\', \\'UTC\\') `TIMESTAMP_SECOND` \
1721+
FROM EVENTS_BY_TYPE \
1722+
WHERE PARSE_TIMESTAMP(TIMESTAMP, \\'yyyy-MM-dd\\'\\'T\\'\\'HH:mm:ss.SSSX\\', \\'UTC\\') >= PARSE_TIMESTAMP(\\'1970-01-01T01:00:00.000Z\\', \\'yyyy-MM-dd\\'\\'T\\'\\'HH:mm:ss.SSSX\\', \\'UTC\\') \
1723+
AND
1724+
PARSE_TIMESTAMP(TIMESTAMP, \\'yyyy-MM-dd\\'\\'T\\'\\'HH:mm:ss.SSSX\\', \\'UTC\\') < PARSE_TIMESTAMP(\\'1970-01-01T01:10:00.000Z\\', \\'yyyy-MM-dd\\'\\'T\\'\\'HH:mm:ss.SSSX\\', \\'UTC\\') \
1725+
\
1726+
'\
1727+
) \
1728+
unique key (`ANONYMOUSID`, `MESSAGEID`, `FILTER_ID`, `TIMESTAMP`, `TIMESTAMP_SECOND`) INDEX by_anonymous(`ANONYMOUSID`, `TIMESTAMP_SECOND`,`TIMESTAMP`) location 'stream://kafka/EVENTS_BY_TYPE/0', 'stream://kafka/EVENTS_BY_TYPE/1'")
1729+
.await
1730+
.unwrap();
17131731
})
17141732
.await;
17151733
}

0 commit comments

Comments (0)