diff --git a/rust/cubesql/cubesql/Cargo.toml b/rust/cubesql/cubesql/Cargo.toml
index 1023d6a659ed8..2c075e6bc3c58 100644
--- a/rust/cubesql/cubesql/Cargo.toml
+++ b/rust/cubesql/cubesql/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.28.0"
 authors = ["Cube Dev, Inc."]
 edition = "2018"
 license = "Apache-2.0"
-description = "SQL API for Cube as proxy over MySQL protocol"
+description = "SQL API for Cube as proxy over PostgreSQL protocol"
 documentation = "https://cube.dev/docs"
 homepage = "https://cube.dev"
 
diff --git a/rust/cubesql/cubesql/README.md b/rust/cubesql/cubesql/README.md
index 6967dc5542376..158c17ab91491 100644
--- a/rust/cubesql/cubesql/README.md
+++ b/rust/cubesql/cubesql/README.md
@@ -10,7 +10,7 @@
 
 # Cube SQL API
 
-Cube SQL API allows querying Cube via MySQL-compatible SQL.
+Cube SQL API allows querying Cube via PostgreSQL-compatible SQL.
 
 ## License
 
diff --git a/rust/cubesql/cubesql/src/compile/engine/context_mysql.rs b/rust/cubesql/cubesql/src/compile/engine/context_mysql.rs
deleted file mode 100644
index d1681eca7e059..0000000000000
--- a/rust/cubesql/cubesql/src/compile/engine/context_mysql.rs
+++ /dev/null
@@ -1,65 +0,0 @@
-use std::sync::Arc;
-
-use datafusion::datasource::{self, TableProvider};
-
-use crate::{
-    compile::{
-        engine::{CubeContext, CubeTableProvider, TableName},
-        DatabaseProtocol,
-    },
-    CubeError,
-};
-
-impl DatabaseProtocol {
-    pub fn get_mysql_table_name(
-        &self,
-        table_provider: Arc<dyn TableProvider>,
-    ) -> Result<String, CubeError> {
-        let any = table_provider.as_any();
-        Ok(if let Some(t) = any.downcast_ref::<CubeTableProvider>() {
-            t.table_name().to_string()
-        } else {
-            return Err(CubeError::internal(format!(
-                "Unknown table provider with schema: {:?}",
-                table_provider.schema()
-            )));
-        })
-    }
-
-    pub(crate) fn get_mysql_provider(
-        &self,
-        context: &CubeContext,
-        tr: datafusion::catalog::TableReference,
-    ) -> Option<Arc<dyn TableProvider>> {
-        let (db, table) = match tr {
-            datafusion::catalog::TableReference::Partial { schema, table, .. 
} => { - (schema.to_ascii_lowercase(), table.to_ascii_lowercase()) - } - datafusion::catalog::TableReference::Full { - catalog: _, - schema, - table, - } => (schema.to_ascii_lowercase(), table.to_ascii_lowercase()), - datafusion::catalog::TableReference::Bare { table } => { - ("db".to_string(), table.to_ascii_lowercase()) - } - }; - - match db.as_str() { - "db" => { - if let Some(cube) = context - .meta - .cubes - .iter() - .find(|c| c.name.eq_ignore_ascii_case(&table)) - { - // TODO .clone() - return Some(Arc::new(CubeTableProvider::new(cube.clone()))); - } else { - return None; - } - } - _ => return None, - } - } -} diff --git a/rust/cubesql/cubesql/src/compile/engine/mod.rs b/rust/cubesql/cubesql/src/compile/engine/mod.rs index 2fe1fb67d1faa..2c04a74fca7b8 100644 --- a/rust/cubesql/cubesql/src/compile/engine/mod.rs +++ b/rust/cubesql/cubesql/src/compile/engine/mod.rs @@ -3,10 +3,7 @@ pub mod information_schema; pub mod udf; mod context; -mod context_mysql; mod context_postgresql; -mod variable_provider; // Public API pub use context::*; -pub use variable_provider::*; diff --git a/rust/cubesql/cubesql/src/compile/engine/udf/common.rs b/rust/cubesql/cubesql/src/compile/engine/udf/common.rs index 8d664c5e8793b..8a40f628085f8 100644 --- a/rust/cubesql/cubesql/src/compile/engine/udf/common.rs +++ b/rust/cubesql/cubesql/src/compile/engine/udf/common.rs @@ -12,11 +12,11 @@ use datafusion::{ array::{ new_null_array, Array, ArrayBuilder, ArrayRef, BooleanArray, BooleanBuilder, Date32Array, Float64Array, Float64Builder, GenericStringArray, Int32Array, - Int32Builder, Int64Array, Int64Builder, IntervalDayTimeBuilder, - IntervalMonthDayNanoArray, ListArray, ListBuilder, PrimitiveArray, PrimitiveBuilder, - StringArray, StringBuilder, StructBuilder, TimestampMicrosecondArray, - TimestampMillisecondArray, TimestampNanosecondArray, TimestampNanosecondBuilder, - TimestampSecondArray, UInt32Builder, UInt64Builder, + Int32Builder, Int64Array, Int64Builder, IntervalMonthDayNanoArray, ListArray, + ListBuilder, PrimitiveArray, PrimitiveBuilder, StringArray, StringBuilder, + StructBuilder, TimestampMicrosecondArray, TimestampMillisecondArray, + TimestampNanosecondArray, TimestampNanosecondBuilder, TimestampSecondArray, + UInt32Builder, UInt64Builder, }, compute::{cast, concat}, datatypes::{ @@ -101,28 +101,6 @@ pub fn create_db_udf(name: String, state: Arc) -> ScalarUDF { ) } -// It's the same as current_user UDF, but with another host -pub fn create_user_udf(state: Arc) -> ScalarUDF { - let fun = make_scalar_function(move |_args: &[ArrayRef]| { - let mut builder = StringBuilder::new(1); - if let Some(user) = &state.user() { - builder.append_value(user.clone() + "@127.0.0.1").unwrap(); - } else { - builder.append_null()?; - } - - Ok(Arc::new(builder.finish()) as ArrayRef) - }); - - create_udf( - "user", - vec![], - Arc::new(DataType::Utf8), - Volatility::Immutable, - fun, - ) -} - pub fn create_current_user_udf(state: Arc, name: &str, with_host: bool) -> ScalarUDF { let fun = make_scalar_function(move |_args: &[ArrayRef]| { let mut builder = StringBuilder::new(1); @@ -776,134 +754,6 @@ pub fn create_greatest_udf() -> ScalarUDF { ) } -// CONVERT_TZ() converts a datetime value dt from the time zone given by from_tz to the time zone given by to_tz and returns the resulting value. 
-pub fn create_convert_tz_udf() -> ScalarUDF { - let fun = make_scalar_function(move |args: &[ArrayRef]| { - assert!(args.len() == 3); - - let input_dt = &args[0]; - let from_tz = &args[1]; - let to_tz = &args[2]; - - let (_, input_tz) = match input_dt.data_type() { - DataType::Timestamp(unit, tz) => (unit, tz), - _ => { - return Err(DataFusionError::Execution(format!( - "dt argument must be a Timestamp, actual: {}", - from_tz.data_type() - ))); - } - }; - - if from_tz.data_type() == &DataType::UInt8 { - return Err(DataFusionError::Execution(format!( - "from_tz argument must be a Utf8, actual: {}", - from_tz.data_type() - ))); - }; - - if to_tz.data_type() == &DataType::UInt8 { - return Err(DataFusionError::Execution(format!( - "to_tz argument must be a Utf8, actual: {}", - to_tz.data_type() - ))); - }; - - let from_tz = downcast_string_arg!(&from_tz, "from_tz", i32); - let to_tz = downcast_string_arg!(&to_tz, "to_tz", i32); - - if from_tz.value(0) != "SYSTEM" || to_tz.value(0) != "+00:00" { - return Err(DataFusionError::NotImplemented(format!( - "convert_tz is not implemented, it's stub" - ))); - } - - if let Some(tz) = input_tz { - if tz != "UTC" { - return Err(DataFusionError::NotImplemented(format!( - "convert_tz does not non UTC timezone as input, actual {}", - tz - ))); - }; - }; - - Ok(input_dt.clone()) - }); - - let return_type: ReturnTypeFunction = Arc::new(move |types| { - assert!(types.len() == 3); - - Ok(Arc::new(types[0].clone())) - }); - - ScalarUDF::new( - "convert_tz", - &Signature::any(3, Volatility::Immutable), - &return_type, - &fun, - ) -} - -pub fn create_timediff_udf() -> ScalarUDF { - let fun = make_scalar_function(move |args: &[ArrayRef]| { - assert!(args.len() == 2); - - let left_dt = &args[0]; - let right_dt = &args[1]; - - let left_date = match left_dt.data_type() { - DataType::Timestamp(TimeUnit::Nanosecond, _) => { - let arr = downcast_primitive_arg!(left_dt, "left_dt", TimestampNanosecondType); - let ts = arr.value(0); - - // NaiveDateTime::from_timestamp(ts, 0) - ts - } - _ => { - return Err(DataFusionError::Execution(format!( - "left_dt argument must be a Timestamp, actual: {}", - left_dt.data_type() - ))); - } - }; - - let right_date = match right_dt.data_type() { - DataType::Timestamp(TimeUnit::Nanosecond, _) => { - let arr = downcast_primitive_arg!(right_dt, "right_dt", TimestampNanosecondType); - arr.value(0) - } - _ => { - return Err(DataFusionError::Execution(format!( - "right_dt argument must be a Timestamp, actual: {}", - right_dt.data_type() - ))); - } - }; - - let diff = right_date - left_date; - if diff != 0 { - return Err(DataFusionError::NotImplemented(format!( - "timediff is not implemented, it's stub" - ))); - } - - let mut interal_arr = IntervalDayTimeBuilder::new(1); - interal_arr.append_value(diff)?; - - Ok(Arc::new(interal_arr.finish()) as ArrayRef) - }); - - let return_type: ReturnTypeFunction = - Arc::new(move |_| Ok(Arc::new(DataType::Interval(IntervalUnit::DayTime)))); - - ScalarUDF::new( - "timediff", - &Signature::any(2, Volatility::Immutable), - &return_type, - &fun, - ) -} - pub fn create_ends_with_udf() -> ScalarUDF { let fun = make_scalar_function(move |args: &[ArrayRef]| { assert!(args.len() == 2); diff --git a/rust/cubesql/cubesql/src/compile/engine/variable_provider.rs b/rust/cubesql/cubesql/src/compile/engine/variable_provider.rs deleted file mode 100644 index 24afdb8444268..0000000000000 --- a/rust/cubesql/cubesql/src/compile/engine/variable_provider.rs +++ /dev/null @@ -1,102 +0,0 @@ -use std::sync::Arc; - -use 
datafusion::{
-    arrow::datatypes::DataType,
-    error::Result,
-    scalar::ScalarValue,
-    variable::{VarProvider, VarType},
-};
-use log::warn;
-
-use crate::{
-    compile::DatabaseProtocol,
-    sql::{ServerManager, SessionState},
-};
-
-pub struct VariablesProvider {
-    session: Arc<SessionState>,
-    server: Arc<ServerManager>,
-}
-
-impl VariablesProvider {
-    pub fn new(session: Arc<SessionState>, server: Arc<ServerManager>) -> Self {
-        Self { session, server }
-    }
-
-    fn get_session_value(&self, identifier: Vec<String>, var_type: VarType) -> Result<ScalarValue> {
-        let key = if identifier.len() > 1 {
-            let ignore_first = identifier[0].eq_ignore_ascii_case("@@session");
-            if ignore_first {
-                identifier[1..].concat()
-            } else {
-                identifier.concat()[1..].to_string()
-            }
-        } else {
-            identifier.concat()[1..].to_string()
-        };
-
-        if let Some(var) = self.session.get_variable(&key) {
-            if var.var_type == var_type {
-                return Ok(var.value.clone());
-            }
-        }
-
-        warn!("Unknown session variable: {}", key);
-
-        Ok(ScalarValue::Utf8(None))
-    }
-
-    fn get_global_value(&self, identifier: Vec<String>) -> Result<ScalarValue> {
-        let key = if identifier.len() > 1 {
-            let ignore_first = identifier[0].eq_ignore_ascii_case("@@global");
-
-            if ignore_first {
-                identifier[1..].concat()
-            } else {
-                identifier.concat()[2..].to_string()
-            }
-        } else {
-            identifier.concat()[2..].to_string()
-        };
-
-        if let Some(var) = self
-            .server
-            .read_variables(DatabaseProtocol::MySQL)
-            .get(&key)
-        {
-            if var.var_type == VarType::System {
-                return Ok(var.value.clone());
-            }
-        }
-
-        warn!("Unknown system variable: {}", key);
-
-        Ok(ScalarValue::Utf8(None))
-    }
-}
-
-impl VarProvider for VariablesProvider {
-    /// get variable value
-    fn get_value(&self, identifier: Vec<String>) -> Result<ScalarValue> {
-        let first_word_vec: Vec<char> = identifier[0].chars().collect();
-        if first_word_vec.len() < 2 {
-            return Ok(ScalarValue::Utf8(None));
-        }
-
-        match (&first_word_vec[0], &first_word_vec[1]) {
-            ('@', '@') => {
-                if identifier.len() > 1 && identifier[0].eq_ignore_ascii_case("@@session") {
-                    return self.get_session_value(identifier, VarType::System);
-                }
-
-                return self.get_global_value(identifier);
-            }
-            ('@', _) => return self.get_session_value(identifier, VarType::UserDefined),
-            (_, _) => return Ok(ScalarValue::Utf8(None)),
-        };
-    }
-
-    fn get_type(&self, _var_names: &[String]) -> Option<DataType> {
-        Some(DataType::Utf8)
-    }
-}
diff --git a/rust/cubesql/cubesql/src/compile/mod.rs b/rust/cubesql/cubesql/src/compile/mod.rs
index b901a89a1dbe5..545539c881d5f 100644
--- a/rust/cubesql/cubesql/src/compile/mod.rs
+++ b/rust/cubesql/cubesql/src/compile/mod.rs
@@ -197,30 +197,6 @@ mod tests {
         );
     }
 
-    #[tokio::test]
-    async fn test_select_compound_identifiers() {
-        init_testing_logger();
-
-        let query_plan = convert_select_to_query_plan(
-            "SELECT MEASURE(`KibanaSampleDataEcommerce`.`maxPrice`) AS maxPrice, MEASURE(`KibanaSampleDataEcommerce`.`minPrice`) AS minPrice FROM KibanaSampleDataEcommerce".to_string(), DatabaseProtocol::MySQL
-        ).await;
-
-        let logical_plan = query_plan.as_logical_plan();
-        assert_eq!(
-            logical_plan.find_cube_scan().request,
-            V1LoadRequestQuery {
-                measures: Some(vec![
-                    "KibanaSampleDataEcommerce.maxPrice".to_string(),
-                    "KibanaSampleDataEcommerce.minPrice".to_string(),
-                ]),
-                segments: Some(vec![]),
-                dimensions: Some(vec![]),
-                order: Some(vec![]),
-                ..Default::default()
-            }
-        );
-    }
-
     #[tokio::test]
     async fn test_select_measure_aggregate_functions() {
         init_testing_logger();
@@ -228,7 +204,7 @@ mod tests {
 
         let query_plan = convert_select_to_query_plan(
             "SELECT MAX(maxPrice), MIN(minPrice), AVG(avgPrice) FROM KibanaSampleDataEcommerce"
.to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await; @@ -376,7 +352,7 @@ mod tests { async fn test_order_alias_for_measure_default() { let query_plan = convert_select_to_query_plan( "SELECT COUNT(*) as cnt FROM KibanaSampleDataEcommerce ORDER BY cnt".to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await; @@ -455,7 +431,7 @@ mod tests { ), // test_order_compound_identifier_default ( - "SELECT taxful_total_price FROM `db`.`KibanaSampleDataEcommerce` ORDER BY `KibanaSampleDataEcommerce`.`taxful_total_price`".to_string(), + "SELECT taxful_total_price FROM \"public\".\"KibanaSampleDataEcommerce\" ORDER BY \"taxful_total_price\"".to_string(), V1LoadRequestQuery { measures: Some(vec![]), segments: Some(vec![]), @@ -523,7 +499,7 @@ mod tests { ), // test_order_identifer_alias_ident_escape ( - "SELECT taxful_total_price as `alias1` FROM KibanaSampleDataEcommerce ORDER BY `alias1` DESC".to_string(), + "SELECT taxful_total_price as \"alias1\" FROM KibanaSampleDataEcommerce ORDER BY \"alias1\" DESC".to_string(), V1LoadRequestQuery { measures: Some(vec![]), segments: Some(vec![]), @@ -542,7 +518,7 @@ mod tests { for (sql, expected_request) in supported_orders.iter() { let query_plan = - convert_select_to_query_plan(sql.to_string(), DatabaseProtocol::MySQL).await; + convert_select_to_query_plan(sql.to_string(), DatabaseProtocol::PostgreSQL).await; assert_eq!( &query_plan.as_logical_plan().find_cube_scan().request, @@ -561,7 +537,7 @@ mod tests { let query_plan = convert_select_to_query_plan( "SELECT DATE(order_date) FROM KibanaSampleDataEcommerce ORDER BY DATE(order_date) DESC" .to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await; @@ -588,7 +564,7 @@ mod tests { let query_plan = convert_select_to_query_plan( "SELECT DATE(order_date) FROM KibanaSampleDataEcommerce GROUP BY DATE(order_date) ORDER BY DATE(order_date) DESC" .to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ).await; assert_eq!( @@ -615,7 +591,7 @@ mod tests { async fn test_select_all_fields_by_asterisk_limit_100() { let query_plan = convert_select_to_query_plan( "SELECT * FROM KibanaSampleDataEcommerce LIMIT 100".to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await; @@ -641,7 +617,7 @@ mod tests { async fn test_select_all_fields_by_asterisk_limit_100_offset_50() { let query_plan = convert_select_to_query_plan( "SELECT * FROM KibanaSampleDataEcommerce LIMIT 100 OFFSET 50".to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await; @@ -670,7 +646,7 @@ mod tests { } let query_plan = convert_select_to_query_plan( "SELECT order_date, customer_gender FROM KibanaSampleDataEcommerce".to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await; @@ -697,7 +673,7 @@ mod tests { } let query_plan = convert_select_to_query_plan( "SELECT order_date as order_date, customer_gender as customer_gender FROM KibanaSampleDataEcommerce" - .to_string(), DatabaseProtocol::MySQL + .to_string(), DatabaseProtocol::PostgreSQL ).await; let logical_plan = query_plan.as_logical_plan(); @@ -2639,7 +2615,7 @@ limit }, ), ( - "SELECT COUNT(*) FROM db.KibanaSampleDataEcommerce".to_string(), + "SELECT COUNT(*) FROM \"public\".\"KibanaSampleDataEcommerce\"".to_string(), V1LoadRequestQuery { measures: Some(vec!["KibanaSampleDataEcommerce.count".to_string()]), dimensions: Some(vec![]), @@ -2689,7 +2665,7 @@ limit }, ), ( - "SELECT MAX(`maxPrice`) FROM KibanaSampleDataEcommerce".to_string(), + "SELECT 
MAX(maxPrice) FROM KibanaSampleDataEcommerce".to_string(), V1LoadRequestQuery { measures: Some(vec!["KibanaSampleDataEcommerce.maxPrice".to_string()]), dimensions: Some(vec![]), @@ -2702,7 +2678,7 @@ limit for (input_query, expected_request) in variants.iter() { let logical_plan = - convert_select_to_query_plan(input_query.clone(), DatabaseProtocol::MySQL) + convert_select_to_query_plan(input_query.clone(), DatabaseProtocol::PostgreSQL) .await .as_logical_plan(); @@ -2843,16 +2819,11 @@ limit "DATE_TRUNC('year', order_date)".to_string(), "year".to_string(), ], - // with escaping - [ - "DATE_TRUNC('second', `order_date`)".to_string(), - "second".to_string(), - ], ]; for [subquery, expected_granularity] in supported_granularities.iter() { let logical_plan = convert_select_to_query_plan( - format!("SELECT COUNT(*), {} AS __timestamp FROM KibanaSampleDataEcommerce GROUP BY __timestamp", subquery), DatabaseProtocol::MySQL + format!("SELECT COUNT(*), {} AS __timestamp FROM KibanaSampleDataEcommerce GROUP BY __timestamp", subquery), DatabaseProtocol::PostgreSQL ).await.as_logical_plan(); assert_eq!( @@ -2945,7 +2916,7 @@ limit ( "COUNT(*), DATE(order_date) AS __timestamp".to_string(), // Now replaced with exact date - "`KibanaSampleDataEcommerce`.`order_date` >= date(date_add(date('2021-09-30 00:00:00.000000'), INTERVAL -30 day)) AND `KibanaSampleDataEcommerce`.`order_date` < date('2021-09-07 00:00:00.000000')".to_string(), + "order_date >= date(date_add(date('2021-09-30 00:00:00.000000'), INTERVAL -30 day)) AND order_date < date('2021-09-07 00:00:00.000000')".to_string(), Some(vec![V1LoadRequestQueryTimeDimension { dimension: "KibanaSampleDataEcommerce.order_date".to_string(), granularity: Some("day".to_string()), @@ -2959,7 +2930,7 @@ limit ( "COUNT(*), DATE(order_date) AS order_date".to_string(), // Now replaced with exact date - "`KibanaSampleDataEcommerce`.`order_date` >= date(date_add(date('2021-09-30 00:00:00.000000'), INTERVAL -30 day)) AND `KibanaSampleDataEcommerce`.`order_date` < date('2021-09-07 00:00:00.000000')".to_string(), + "order_date >= date(date_add(date('2021-09-30 00:00:00.000000'), INTERVAL -30 day)) AND order_date < date('2021-09-07 00:00:00.000000')".to_string(), Some(vec![V1LoadRequestQueryTimeDimension { dimension: "KibanaSampleDataEcommerce.order_date".to_string(), granularity: Some("day".to_string()), @@ -3044,7 +3015,7 @@ limit "" } ); - let logical_plan = convert_select_to_query_plan(query, DatabaseProtocol::MySQL) + let logical_plan = convert_select_to_query_plan(query, DatabaseProtocol::PostgreSQL) .await .as_logical_plan(); @@ -3063,7 +3034,7 @@ limit FROM KibanaSampleDataEcommerce WHERE order_date >= STR_TO_DATE('2021-08-31 00:00:00.000000', '%Y-%m-%d %H:%i:%s.%f') OR order_date < STR_TO_DATE('2021-09-07 00:00:00.000000', '%Y-%m-%d %H:%i:%s.%f') GROUP BY __timestamp" - .to_string(), DatabaseProtocol::MySQL + .to_string(), DatabaseProtocol::PostgreSQL ).await; assert_eq!( @@ -3442,7 +3413,7 @@ limit WHERE {}", sql ), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await .as_logical_plan(); @@ -3514,7 +3485,7 @@ limit sql ), meta.clone(), - get_test_session(DatabaseProtocol::MySQL, meta).await, + get_test_session(DatabaseProtocol::PostgreSQL, meta).await, ) .await; @@ -3729,7 +3700,7 @@ limit GROUP BY __timestamp", sql ), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await .as_logical_plan(); @@ -5112,13 +5083,17 @@ ORDER BY async fn test_explain() -> Result<(), CubeError> { // SELECT with no tables (inline eval) 
insta::assert_snapshot!( - execute_query("EXPLAIN SELECT 1+1;".to_string(), DatabaseProtocol::MySQL).await? + execute_query( + "EXPLAIN SELECT 1+1;".to_string(), + DatabaseProtocol::PostgreSQL + ) + .await? ); insta::assert_snapshot!( execute_query( "EXPLAIN VERBOSE SELECT 1+1;".to_string(), - DatabaseProtocol::MySQL + DatabaseProtocol::PostgreSQL ) .await? ); @@ -5126,14 +5101,14 @@ ORDER BY // Execute without asserting with fixture, because metrics can change execute_query( "EXPLAIN ANALYZE SELECT 1+1;".to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await?; // SELECT with table and specific columns execute_query( "EXPLAIN SELECT count, avgPrice FROM KibanaSampleDataEcommerce;".to_string(), - DatabaseProtocol::MySQL, + DatabaseProtocol::PostgreSQL, ) .await?; diff --git a/rust/cubesql/cubesql/src/compile/parser.rs b/rust/cubesql/cubesql/src/compile/parser.rs index f4a3d543bda14..76893b6055db4 100644 --- a/rust/cubesql/cubesql/src/compile/parser.rs +++ b/rust/cubesql/cubesql/src/compile/parser.rs @@ -1,41 +1,12 @@ use std::{collections::HashMap, sync::LazyLock}; use regex::Regex; -use sqlparser::{ - ast::Statement, - dialect::{Dialect, PostgreSqlDialect}, - parser::Parser, -}; +use sqlparser::{ast::Statement, dialect::PostgreSqlDialect, parser::Parser}; use super::{qtrace::Qtrace, CompilationError, DatabaseProtocol}; use super::CompilationResult; -#[derive(Debug)] -pub struct MySqlDialectWithBackTicks {} - -impl Dialect for MySqlDialectWithBackTicks { - fn is_delimited_identifier_start(&self, ch: char) -> bool { - ch == '"' || ch == '`' - } - - fn is_identifier_start(&self, ch: char) -> bool { - // See https://dev.mysql.com/doc/refman/8.0/en/identifiers.html. - // We don't yet support identifiers beginning with numbers, as that - // makes it hard to distinguish numeric literals. 
- ch.is_ascii_lowercase() - || ch.is_ascii_uppercase() - || ch == '_' - || ch == '$' - || ch == '@' - || ('\u{0080}'..='\u{ffff}').contains(&ch) - } - - fn is_identifier_part(&self, ch: char) -> bool { - self.is_identifier_start(ch) || ch.is_ascii_digit() - } -} - static SIGMA_WORKAROUND: LazyLock = LazyLock::new(|| { Regex::new(r#"(?s)^\s*with\s+nsp\sas\s\(.*nspname\s=\s.*\),\s+tbl\sas\s\(.*relname\s=\s.*\).*select\s+attname.*from\spg_attribute.*$"#).unwrap() }); @@ -48,17 +19,6 @@ pub fn parse_sql_to_statements( let original_query = query; log::debug!("Parsing SQL: {}", query); - // @todo Support without workarounds - // metabase - let query = query.replace("IF(TABLE_TYPE='BASE TABLE' or TABLE_TYPE='SYSTEM VERSIONED', 'TABLE', TABLE_TYPE) as TABLE_TYPE", "TABLE_TYPE"); - let query = query.replace("ORDER BY TABLE_TYPE, TABLE_SCHEMA, TABLE_NAME", ""); - // @todo Implement CONVERT function - let query = query.replace("CONVERT (CASE DATA_TYPE WHEN 'year' THEN NUMERIC_SCALE WHEN 'tinyint' THEN 0 ELSE NUMERIC_SCALE END, UNSIGNED INTEGER)", "0"); - // @todo problem with parser, space in types - let query = query.replace("signed integer", "bigint"); - let query = query.replace("SIGNED INTEGER", "bigint"); - let query = query.replace("unsigned integer", "bigint"); - let query = query.replace("UNSIGNED INTEGER", "bigint"); // DBeaver let query = query.replace( @@ -224,7 +184,6 @@ pub fn parse_sql_to_statements( } let parse_result = match protocol { - DatabaseProtocol::MySQL => Parser::parse_sql(&MySqlDialectWithBackTicks {}, query.as_str()), DatabaseProtocol::PostgreSQL => Parser::parse_sql(&PostgreSqlDialect {}, query.as_str()), DatabaseProtocol::Extension(_) => unimplemented!(), }; @@ -271,58 +230,6 @@ pub fn parse_sql_to_statement( mod tests { use super::*; - #[test] - fn test_no_statements_mysql() { - let result = parse_sql_to_statement( - &"-- 6dcd92a04feb50f14bbcf07c661680ba SELECT NOW".to_string(), - DatabaseProtocol::MySQL, - &mut None, - ); - match result { - Ok(_) => panic!("This test should throw an error"), - Err(err) => assert!(err - .to_string() - .contains("Invalid query, no statements was specified")), - } - } - - #[test] - fn test_multiple_statements_mysql() { - let result = parse_sql_to_statement( - &"SELECT NOW(); SELECT NOW();".to_string(), - DatabaseProtocol::MySQL, - &mut None, - ); - match result { - Ok(_) => panic!("This test should throw an error"), - Err(err) => assert!(err - .to_string() - .contains("Multiple statements was specified in one query")), - } - } - - #[test] - fn test_single_line_comments_mysql() { - let result = parse_sql_to_statement( - &"-- 6dcd92a04feb50f14bbcf07c661680ba - SELECT DATE(`createdAt`) AS __timestamp, - COUNT(*) AS count - FROM db.`Orders` - GROUP BY DATE(`createdAt`) - ORDER BY count DESC - LIMIT 10000 - -- 6dcd92a04feb50f14bbcf07c661680ba - " - .to_string(), - DatabaseProtocol::MySQL, - &mut None, - ); - match result { - Ok(_) => {} - Err(err) => panic!("{}", err), - } - } - #[test] fn test_no_statements_postgres() { let result = parse_sql_to_statement( diff --git a/rust/cubesql/cubesql/src/compile/protocol.rs b/rust/cubesql/cubesql/src/compile/protocol.rs index 936a2f07e832b..6005453aa3464 100644 --- a/rust/cubesql/cubesql/src/compile/protocol.rs +++ b/rust/cubesql/cubesql/src/compile/protocol.rs @@ -51,7 +51,6 @@ impl Hash for dyn DatabaseProtocolDetails { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum DatabaseProtocol { - MySQL, PostgreSQL, Extension(Arc), } @@ -60,7 +59,6 @@ impl DatabaseProtocolDetails for DatabaseProtocol { fn 
get_name(&self) -> &'static str { match &self { DatabaseProtocol::PostgreSQL => "postgres", - DatabaseProtocol::MySQL => "mysql", DatabaseProtocol::Extension(ext) => ext.get_name(), } } @@ -74,7 +72,6 @@ impl DatabaseProtocolDetails for DatabaseProtocol { fn support_transactions(&self) -> bool { match &self { - DatabaseProtocol::MySQL => false, DatabaseProtocol::PostgreSQL => true, DatabaseProtocol::Extension(ext) => ext.support_transactions(), } @@ -82,12 +79,6 @@ impl DatabaseProtocolDetails for DatabaseProtocol { fn get_session_default_variables(&self) -> DatabaseVariables { match &self { - DatabaseProtocol::MySQL => { - // TODO(ovr): Should we move it from session? - error!("get_session_default_variables was called on MySQL protocol"); - - DatabaseVariables::default() - } DatabaseProtocol::PostgreSQL => { // TODO(ovr): Should we move it from session? error!("get_session_default_variables was called on PostgreSQL protocol"); @@ -108,7 +99,6 @@ impl DatabaseProtocolDetails for DatabaseProtocol { tr: datafusion::catalog::TableReference, ) -> Option> { match self { - DatabaseProtocol::MySQL => self.get_mysql_provider(context, tr), DatabaseProtocol::PostgreSQL => self.get_postgres_provider(context, tr), DatabaseProtocol::Extension(ext) => ext.get_provider(&context, tr), } @@ -119,7 +109,6 @@ impl DatabaseProtocolDetails for DatabaseProtocol { table_provider: Arc, ) -> Result { match self { - DatabaseProtocol::MySQL => self.get_mysql_table_name(table_provider), DatabaseProtocol::PostgreSQL => self.get_postgres_table_name(table_provider), DatabaseProtocol::Extension(ext) => ext.table_name_by_table_provider(table_provider), } diff --git a/rust/cubesql/cubesql/src/compile/query_engine.rs b/rust/cubesql/cubesql/src/compile/query_engine.rs index d6b4325871b3c..8c5217b9b140f 100644 --- a/rust/cubesql/cubesql/src/compile/query_engine.rs +++ b/rust/cubesql/cubesql/src/compile/query_engine.rs @@ -15,7 +15,7 @@ use crate::{ wrapper::{CubeScanWrappedSqlNode, CubeScanWrapperNode}, }, udf::*, - CubeContext, VariablesProvider, + CubeContext, }, qtrace::Qtrace, rewrite::{ @@ -47,7 +47,6 @@ use datafusion::{ }, physical_plan::planner::DefaultPhysicalPlanner, sql::{parser::Statement as DFStatement, planner::SqlToRel}, - variable::VarType, }; use uuid::Uuid; @@ -451,27 +450,8 @@ impl QueryEngine for SqlQueryEngine { .retain(|r| r.name() != "projection_push_down"); let mut ctx = DFSessionContext::with_state(df_state); - if state.protocol == DatabaseProtocol::MySQL { - let system_variable_provider = - VariablesProvider::new(state.clone(), self.session_manager.server.clone()); - let user_defined_variable_provider = - VariablesProvider::new(state.clone(), self.session_manager.server.clone()); - - ctx.register_variable(VarType::System, Arc::new(system_variable_provider)); - ctx.register_variable( - VarType::UserDefined, - Arc::new(user_defined_variable_provider), - ); - } - // udf - if state.protocol == DatabaseProtocol::MySQL { - ctx.register_udf(create_version_udf("8.0.25".to_string())); - ctx.register_udf(create_db_udf("database".to_string(), state.clone())); - ctx.register_udf(create_db_udf("schema".to_string(), state.clone())); - ctx.register_udf(create_current_user_udf(state.clone(), "current_user", true)); - ctx.register_udf(create_user_udf(state.clone())); - } else if state.protocol == DatabaseProtocol::PostgreSQL { + if state.protocol == DatabaseProtocol::PostgreSQL { ctx.register_udf(create_version_udf( "PostgreSQL 14.2 on x86_64-cubesql".to_string(), )); @@ -495,8 +475,6 @@ impl QueryEngine for 
SqlQueryEngine { ctx.register_udf(create_if_udf()); ctx.register_udf(create_least_udf()); ctx.register_udf(create_greatest_udf()); - ctx.register_udf(create_convert_tz_udf()); - ctx.register_udf(create_timediff_udf()); ctx.register_udf(create_time_format_udf()); ctx.register_udf(create_locate_udf()); ctx.register_udf(create_date_udf()); diff --git a/rust/cubesql/cubesql/src/compile/router.rs b/rust/cubesql/cubesql/src/compile/router.rs index 064eea803c452..325a50731b0a9 100644 --- a/rust/cubesql/cubesql/src/compile/router.rs +++ b/rust/cubesql/cubesql/src/compile/router.rs @@ -324,12 +324,8 @@ impl QueryRouter { &self, key_values: &Vec, ) -> Result { - let mut flags = StatusFlags::SERVER_STATE_CHANGED; - let mut session_columns_to_update = DatabaseVariablesToUpdate::with_capacity(key_values.len()); - let mut global_columns_to_update = - DatabaseVariablesToUpdate::with_capacity(key_values.len()); match self.state.protocol { DatabaseProtocol::PostgreSQL => { @@ -366,69 +362,6 @@ impl QueryRouter { )); } } - DatabaseProtocol::MySQL => { - for key_value in key_values.iter() { - if key_value.key.value.to_lowercase() == "autocommit" { - flags |= StatusFlags::AUTOCOMMIT; - - break; - } - - let symbols: Vec = key_value.key.value.chars().collect(); - if symbols.len() < 2 { - continue; - } - - let is_user_defined_var = symbols[0] == '@' && symbols[1] != '@'; - let is_global_var = - (symbols[0] == '@' && symbols[1] == '@') || symbols[0] != '@'; - - let value: String = match &key_value.value[0] { - ast::Expr::Identifier(ident) => ident.value.to_string(), - ast::Expr::Value(val) => match val { - ast::Value::SingleQuotedString(single_quoted_str) => { - single_quoted_str.to_string() - } - ast::Value::DoubleQuotedString(double_quoted_str) => { - double_quoted_str.to_string() - } - ast::Value::Number(number, _) => number.to_string(), - _ => { - return Err(CompilationError::user(format!( - "invalid {} variable format", - key_value.key.value - ))) - } - }, - _ => { - return Err(CompilationError::user(format!( - "invalid {} variable format", - key_value.key.value - ))) - } - }; - - if is_global_var { - let key = if symbols[0] == '@' { - key_value.key.value[2..].to_lowercase() - } else { - key_value.key.value.to_lowercase() - }; - global_columns_to_update.push(DatabaseVariable::system( - key.to_lowercase(), - ScalarValue::Utf8(Some(value.clone())), - None, - )); - } else if is_user_defined_var { - let key = key_value.key.value[1..].to_lowercase(); - session_columns_to_update.push(DatabaseVariable::user_defined( - key.to_lowercase(), - ScalarValue::Utf8(Some(value.clone())), - None, - )); - } - } - } DatabaseProtocol::Extension(_) => { log::warn!("set_variable_to_plan is not supported for custom protocol"); } @@ -456,13 +389,10 @@ impl QueryRouter { self.state.set_variables(session_columns_to_update); } - if !global_columns_to_update.is_empty() { - self.session_manager - .server - .set_variables(global_columns_to_update, self.state.protocol.clone()); - } - - Ok(QueryPlan::MetaOk(flags, CommandCompletion::Set)) + Ok(QueryPlan::MetaOk( + StatusFlags::empty(), + CommandCompletion::Set, + )) } async fn change_user(&self, username: String) -> Result<(), CompilationError> { diff --git a/rust/cubesql/cubesql/src/compile/test/mod.rs b/rust/cubesql/cubesql/src/compile/test/mod.rs index ce0d8fd6576a6..df7a13227d6cb 100644 --- a/rust/cubesql/cubesql/src/compile/test/mod.rs +++ b/rust/cubesql/cubesql/src/compile/test/mod.rs @@ -785,10 +785,6 @@ async fn get_test_session_with_config_and_transport( config_obj, )); - let 
db_name = match &protocol { - DatabaseProtocol::MySQL => "db", - _ => "cubedb", - }; let session_manager = Arc::new(SessionManager::new(server.clone())); let session = session_manager .create_session(protocol, "127.0.0.1".to_string(), 1234, None) @@ -796,7 +792,7 @@ async fn get_test_session_with_config_and_transport( .unwrap(); // Populate like shims - session.state.set_database(Some(db_name.to_string())); + session.state.set_database(Some("cubedb".to_string())); session.state.set_user(Some("ovr".to_string())); session.state.set_original_user(Some("ovr".to_string())); diff --git a/rust/cubesql/cubesql/src/compile/test/test_udfs.rs b/rust/cubesql/cubesql/src/compile/test/test_udfs.rs index a2e41efa3c856..bed130f4b62f5 100644 --- a/rust/cubesql/cubesql/src/compile/test/test_udfs.rs +++ b/rust/cubesql/cubesql/src/compile/test/test_udfs.rs @@ -18,7 +18,7 @@ async fn test_instr() -> Result<(), CubeError> { instr('Rust is killing me', 'unknown') as r3; " .to_string(), - DatabaseProtocol::MySQL + DatabaseProtocol::PostgreSQL ) .await?, "+----+----+----+\n\ @@ -31,25 +31,6 @@ async fn test_instr() -> Result<(), CubeError> { Ok(()) } -#[tokio::test] -async fn test_timediff() -> Result<(), CubeError> { - assert_eq!( - execute_query( - "select \ - timediff('1994-11-26T13:25:00.000Z'::timestamp, '1994-11-26T13:25:00.000Z'::timestamp) as r1 - ".to_string(), DatabaseProtocol::MySQL - ) - .await?, - "+------------------------------------------------+\n\ - | r1 |\n\ - +------------------------------------------------+\n\ - | 0 years 0 mons 0 days 0 hours 0 mins 0.00 secs |\n\ - +------------------------------------------------+" - ); - - Ok(()) -} - #[tokio::test] async fn test_ends_with() -> Result<(), CubeError> { insta::assert_snapshot!( @@ -60,7 +41,7 @@ async fn test_ends_with() -> Result<(), CubeError> { ends_with('rust is killing me', 'no') as r2 " .to_string(), - DatabaseProtocol::MySQL + DatabaseProtocol::PostgreSQL ) .await? 
); @@ -78,7 +59,7 @@ async fn test_locate() -> Result<(), CubeError> { locate('unknown', 'Rust is killing me') as r3 " .to_string(), - DatabaseProtocol::MySQL + DatabaseProtocol::PostgreSQL ) .await?, "+----+----+----+\n\ @@ -104,7 +85,7 @@ async fn test_if() -> Result<(), CubeError> { if(true, CAST(1 as bigint), CAST(2 as int)) as c3 "# .to_string(), - DatabaseProtocol::MySQL + DatabaseProtocol::PostgreSQL ) .await?, "+-------+-------+------+----+----+----+\n\ @@ -258,7 +239,7 @@ async fn test_ucase() -> Result<(), CubeError> { ucase('super stroka') as r1 " .to_string(), - DatabaseProtocol::MySQL + DatabaseProtocol::PostgreSQL ) .await?, "+--------------+\n\ @@ -271,23 +252,6 @@ async fn test_ucase() -> Result<(), CubeError> { Ok(()) } -#[tokio::test] -async fn test_convert_tz() -> Result<(), CubeError> { - assert_eq!( - execute_query( - "select convert_tz('2021-12-08T15:50:14.337Z'::timestamp, @@GLOBAL.time_zone, '+00:00') as r1;".to_string(), DatabaseProtocol::MySQL - ) - .await?, - "+-------------------------+\n\ - | r1 |\n\ - +-------------------------+\n\ - | 2021-12-08T15:50:14.337 |\n\ - +-------------------------+" - ); - - Ok(()) -} - #[tokio::test] async fn test_pg_backend_pid() -> Result<(), CubeError> { insta::assert_snapshot!( diff --git a/rust/cubesql/cubesql/src/sql/database_variables/mod.rs b/rust/cubesql/cubesql/src/sql/database_variables/mod.rs index 58bc7464f8bc1..741e9afe667b7 100644 --- a/rust/cubesql/cubesql/src/sql/database_variables/mod.rs +++ b/rust/cubesql/cubesql/src/sql/database_variables/mod.rs @@ -1,16 +1,7 @@ use crate::compile::DatabaseVariables; -pub mod mysql; pub mod postgres; -pub fn mysql_default_session_variables() -> DatabaseVariables { - mysql::session_vars::defaults() -} - -pub fn mysql_default_global_variables() -> DatabaseVariables { - mysql::global_vars::defaults() -} - pub fn postgres_default_session_variables() -> DatabaseVariables { postgres::session_vars::defaults() } diff --git a/rust/cubesql/cubesql/src/sql/database_variables/mysql/global_vars.rs b/rust/cubesql/cubesql/src/sql/database_variables/mysql/global_vars.rs deleted file mode 100644 index 67cc213cc2ffd..0000000000000 --- a/rust/cubesql/cubesql/src/sql/database_variables/mysql/global_vars.rs +++ /dev/null @@ -1,138 +0,0 @@ -use crate::compile::{DatabaseVariable, DatabaseVariables}; -use datafusion::scalar::ScalarValue; - -pub fn defaults() -> DatabaseVariables { - let variables = [ - DatabaseVariable::system( - "max_allowed_packet".to_string(), - ScalarValue::UInt32(Some(67108864)), - None, - ), - DatabaseVariable::system( - "auto_increment_increment".to_string(), - ScalarValue::UInt32(Some(1)), - None, - ), - DatabaseVariable::system( - "version_comment".to_string(), - ScalarValue::Utf8(Some("mysql".to_string())), - None, - ), - DatabaseVariable::system( - "system_time_zone".to_string(), - ScalarValue::Utf8(Some("UTC".to_string())), - None, - ), - DatabaseVariable::system( - "time_zone".to_string(), - ScalarValue::Utf8(Some("SYSTEM".to_string())), - None, - ), - DatabaseVariable::system( - "tx_isolation".to_string(), - ScalarValue::Utf8(Some("REPEATABLE-READ".to_string())), - None, - ), - DatabaseVariable::system( - "tx_read_only".to_string(), - ScalarValue::Boolean(Some(false)), - None, - ), - DatabaseVariable::system( - "transaction_isolation".to_string(), - ScalarValue::Utf8(Some("REPEATABLE-READ".to_string())), - None, - ), - DatabaseVariable::system( - "transaction_read_only".to_string(), - ScalarValue::Boolean(Some(false)), - None, - ), - DatabaseVariable::system( - 
"sessiontransaction_isolation".to_string(), - ScalarValue::Utf8(Some("REPEATABLE-READ".to_string())), - None, - ), - DatabaseVariable::system( - "sessionauto_increment_increment".to_string(), - ScalarValue::Int64(Some(1)), - None, - ), - DatabaseVariable::system( - "character_set_client".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "character_set_connection".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "character_set_results".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "character_set_server".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "collation_connection".to_string(), - ScalarValue::Utf8(Some("utf8mb4_general_ci".to_string())), - None, - ), - DatabaseVariable::system( - "collation_server".to_string(), - ScalarValue::Utf8(Some("utf8mb4_0900_ai_ci".to_string())), - None, - ), - DatabaseVariable::system( - "init_connect".to_string(), - ScalarValue::Utf8(Some("".to_string())), - None, - ), - DatabaseVariable::system( - "interactive_timeout".to_string(), - ScalarValue::Int32(Some(28800)), - None, - ), - DatabaseVariable::system( - "license".to_string(), - ScalarValue::Utf8(Some("Apache 2".to_string())), - None, - ), - DatabaseVariable::system( - "lower_case_table_names".to_string(), - ScalarValue::Int32(Some(0)), - None, - ), - DatabaseVariable::system( - "net_buffer_length".to_string(), - ScalarValue::Int32(Some(16384)), - None, - ), - DatabaseVariable::system( - "net_write_timeout".to_string(), - ScalarValue::Int32(Some(600)), - None, - ), - DatabaseVariable::system( - "wait_timeout".to_string(), - ScalarValue::Int32(Some(28800)), - None, - ), - DatabaseVariable::system( - "sql_mode".to_string(), - ScalarValue::Utf8(Some("ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION".to_string())), - None, - ), - ]; - - let variables = IntoIterator::into_iter(variables) - .map(|v| (v.name.clone(), v)) - .collect::(); - - variables -} diff --git a/rust/cubesql/cubesql/src/sql/database_variables/mysql/mod.rs b/rust/cubesql/cubesql/src/sql/database_variables/mysql/mod.rs deleted file mode 100644 index a3a708237c712..0000000000000 --- a/rust/cubesql/cubesql/src/sql/database_variables/mysql/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod global_vars; -pub mod session_vars; diff --git a/rust/cubesql/cubesql/src/sql/database_variables/mysql/session_vars.rs b/rust/cubesql/cubesql/src/sql/database_variables/mysql/session_vars.rs deleted file mode 100644 index 67cc213cc2ffd..0000000000000 --- a/rust/cubesql/cubesql/src/sql/database_variables/mysql/session_vars.rs +++ /dev/null @@ -1,138 +0,0 @@ -use crate::compile::{DatabaseVariable, DatabaseVariables}; -use datafusion::scalar::ScalarValue; - -pub fn defaults() -> DatabaseVariables { - let variables = [ - DatabaseVariable::system( - "max_allowed_packet".to_string(), - ScalarValue::UInt32(Some(67108864)), - None, - ), - DatabaseVariable::system( - "auto_increment_increment".to_string(), - ScalarValue::UInt32(Some(1)), - None, - ), - DatabaseVariable::system( - "version_comment".to_string(), - ScalarValue::Utf8(Some("mysql".to_string())), - None, - ), - DatabaseVariable::system( - "system_time_zone".to_string(), - ScalarValue::Utf8(Some("UTC".to_string())), - None, - ), - DatabaseVariable::system( - "time_zone".to_string(), - 
ScalarValue::Utf8(Some("SYSTEM".to_string())), - None, - ), - DatabaseVariable::system( - "tx_isolation".to_string(), - ScalarValue::Utf8(Some("REPEATABLE-READ".to_string())), - None, - ), - DatabaseVariable::system( - "tx_read_only".to_string(), - ScalarValue::Boolean(Some(false)), - None, - ), - DatabaseVariable::system( - "transaction_isolation".to_string(), - ScalarValue::Utf8(Some("REPEATABLE-READ".to_string())), - None, - ), - DatabaseVariable::system( - "transaction_read_only".to_string(), - ScalarValue::Boolean(Some(false)), - None, - ), - DatabaseVariable::system( - "sessiontransaction_isolation".to_string(), - ScalarValue::Utf8(Some("REPEATABLE-READ".to_string())), - None, - ), - DatabaseVariable::system( - "sessionauto_increment_increment".to_string(), - ScalarValue::Int64(Some(1)), - None, - ), - DatabaseVariable::system( - "character_set_client".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "character_set_connection".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "character_set_results".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "character_set_server".to_string(), - ScalarValue::Utf8(Some("utf8mb4".to_string())), - None, - ), - DatabaseVariable::system( - "collation_connection".to_string(), - ScalarValue::Utf8(Some("utf8mb4_general_ci".to_string())), - None, - ), - DatabaseVariable::system( - "collation_server".to_string(), - ScalarValue::Utf8(Some("utf8mb4_0900_ai_ci".to_string())), - None, - ), - DatabaseVariable::system( - "init_connect".to_string(), - ScalarValue::Utf8(Some("".to_string())), - None, - ), - DatabaseVariable::system( - "interactive_timeout".to_string(), - ScalarValue::Int32(Some(28800)), - None, - ), - DatabaseVariable::system( - "license".to_string(), - ScalarValue::Utf8(Some("Apache 2".to_string())), - None, - ), - DatabaseVariable::system( - "lower_case_table_names".to_string(), - ScalarValue::Int32(Some(0)), - None, - ), - DatabaseVariable::system( - "net_buffer_length".to_string(), - ScalarValue::Int32(Some(16384)), - None, - ), - DatabaseVariable::system( - "net_write_timeout".to_string(), - ScalarValue::Int32(Some(600)), - None, - ), - DatabaseVariable::system( - "wait_timeout".to_string(), - ScalarValue::Int32(Some(28800)), - None, - ), - DatabaseVariable::system( - "sql_mode".to_string(), - ScalarValue::Utf8(Some("ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION".to_string())), - None, - ), - ]; - - let variables = IntoIterator::into_iter(variables) - .map(|v| (v.name.clone(), v)) - .collect::(); - - variables -} diff --git a/rust/cubesql/cubesql/src/sql/mod.rs b/rust/cubesql/cubesql/src/sql/mod.rs index 6cd813d0f90f4..f07e408b1de9a 100644 --- a/rust/cubesql/cubesql/src/sql/mod.rs +++ b/rust/cubesql/cubesql/src/sql/mod.rs @@ -18,6 +18,6 @@ pub use auth_service::{ pub use database_variables::postgres::session_vars::CUBESQL_PENALIZE_POST_PROCESSING_VAR; pub use postgres::*; pub use server_manager::ServerManager; -pub use session::{Session, SessionProcessList, SessionProperties, SessionState}; +pub use session::{Session, SessionProperties, SessionState}; pub use session_manager::SessionManager; pub use types::{ColumnFlags, ColumnType}; diff --git a/rust/cubesql/cubesql/src/sql/server_manager.rs b/rust/cubesql/cubesql/src/sql/server_manager.rs index 17375e86ddd4f..9d1e6edb3d22d 100644 --- 
a/rust/cubesql/cubesql/src/sql/server_manager.rs
+++ b/rust/cubesql/cubesql/src/sql/server_manager.rs
@@ -2,10 +2,8 @@ use crate::{
     compile::{DatabaseProtocol, DatabaseVariables, DatabaseVariablesToUpdate},
     config::ConfigObj,
     sql::{
-        compiler_cache::CompilerCache,
-        database_variables::{mysql_default_global_variables, postgres_default_global_variables},
-        pg_auth_service::PostgresAuthService,
-        SqlAuthService,
+        compiler_cache::CompilerCache, database_variables::postgres_default_global_variables,
+        pg_auth_service::PostgresAuthService, SqlAuthService,
     },
     transport::TransportService,
     CubeError,
@@ -45,7 +43,6 @@ pub struct ServerManager {
     pub config_obj: Arc<dyn ConfigObj>,
     pub compiler_cache: Arc<dyn CompilerCache>,
     postgres_variables: RwLockSync<DatabaseVariables>,
-    mysql_variables: RwLockSync<DatabaseVariables>,
 }
 
 crate::di_service!(ServerManager, []);
@@ -68,7 +65,6 @@ impl ServerManager {
             config_obj,
             configuration: ServerConfiguration::default(),
             postgres_variables: RwLockSync::new(postgres_default_global_variables()),
-            mysql_variables: RwLockSync::new(mysql_default_global_variables()),
         }
     }
 
@@ -77,10 +73,6 @@ impl ServerManager {
         protocol: DatabaseProtocol,
     ) -> RwLockReadGuard<'_, DatabaseVariables> {
         match protocol {
-            DatabaseProtocol::MySQL => self
-                .mysql_variables
-                .read()
-                .expect("failed to unlock variables for reading"),
             DatabaseProtocol::PostgreSQL => self
                 .postgres_variables
                 .read()
@@ -97,10 +89,6 @@ impl ServerManager {
         protocol: DatabaseProtocol,
     ) -> RwLockWriteGuard<'_, DatabaseVariables> {
         match protocol {
-            DatabaseProtocol::MySQL => self
-                .mysql_variables
-                .write()
-                .expect("failed to unlock variables for reading"),
             DatabaseProtocol::PostgreSQL => self
                 .postgres_variables
                 .write()
diff --git a/rust/cubesql/cubesql/src/sql/session.rs b/rust/cubesql/cubesql/src/sql/session.rs
index dd7a7d39cfa85..4c08e4b2f3aaa 100644
--- a/rust/cubesql/cubesql/src/sql/session.rs
+++ b/rust/cubesql/cubesql/src/sql/session.rs
@@ -15,8 +15,7 @@ use crate::{
         DatabaseVariablesToUpdate,
     },
     sql::{
-        database_variables::{mysql_default_session_variables, postgres_default_session_variables},
-        extended::PreparedStatement,
+        database_variables::postgres_default_session_variables, extended::PreparedStatement,
         temp_tables::TempTableManager,
     },
     transport::LoadRequestMeta,
@@ -37,8 +36,6 @@ impl SessionProperties {
 
 static POSTGRES_DEFAULT_VARIABLES: LazyLock<DatabaseVariables> =
     LazyLock::new(postgres_default_session_variables);
-static MYSQL_DEFAULT_VARIABLES: LazyLock<DatabaseVariables> =
-    LazyLock::new(mysql_default_session_variables);
 
 #[derive(Debug)]
 pub enum TransactionState {
@@ -347,7 +344,6 @@ impl SessionState {
         match guard {
             Some(vars) => vars,
             _ => match &self.protocol {
-                DatabaseProtocol::MySQL => return MYSQL_DEFAULT_VARIABLES.clone(),
                 DatabaseProtocol::PostgreSQL => return POSTGRES_DEFAULT_VARIABLES.clone(),
                 DatabaseProtocol::Extension(ext) => ext.get_session_default_variables(),
             },
@@ -363,7 +359,6 @@ impl SessionState {
         match &*guard {
             Some(vars) => vars.get(name).cloned(),
             _ => match &self.protocol {
-                DatabaseProtocol::MySQL => MYSQL_DEFAULT_VARIABLES.get(name).cloned(),
                 DatabaseProtocol::PostgreSQL => POSTGRES_DEFAULT_VARIABLES.get(name).cloned(),
                 DatabaseProtocol::Extension(ext) => ext.get_session_variable_default(name),
             },
@@ -423,26 +418,6 @@ pub struct Session {
     pub state: Arc<SessionState>,
 }
 
-/// Specific representation of session for MySQL
-#[derive(Debug)]
-pub struct SessionProcessList {
-    pub id: u32,
-    pub user: Option<String>,
-    pub host: String,
-    pub database: Option<String>,
-}
-
-impl From<&Session> for SessionProcessList {
-    fn from(session: &Session) -> Self {
-        Self {
-            id: 
session.state.connection_id, - host: session.state.client_ip.clone(), - user: session.state.user(), - database: session.state.database(), - } - } -} - /// Specific representation of session for PostgreSQL #[derive(Debug)] pub struct SessionStatActivity {