diff --git a/rust/cubesqlplanner/Cargo.lock b/rust/cubesqlplanner/Cargo.lock index 368ae95946be8..51f2ff66c728b 100644 --- a/rust/cubesqlplanner/Cargo.lock +++ b/rust/cubesqlplanner/Cargo.lock @@ -278,6 +278,7 @@ dependencies = [ "minijinja", "nativebridge", "neon", + "petgraph", "regex", "serde", "serde_json", @@ -302,6 +303,12 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + [[package]] name = "event-listener" version = "5.3.1" @@ -323,6 +330,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "fnv" version = "1.0.7" @@ -390,6 +403,12 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" + [[package]] name = "hermit-abi" version = "0.3.9" @@ -655,6 +674,16 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "indexmap" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" +dependencies = [ + "equivalent", + "hashbrown", +] + [[package]] name = "ipnet" version = "2.9.0" @@ -906,6 +935,16 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap", +] + [[package]] name = "phf" version = "0.11.2" diff --git a/rust/cubesqlplanner/cubesqlplanner/Cargo.toml b/rust/cubesqlplanner/cubesqlplanner/Cargo.toml index bf06699756180..fe623e8bfb2fc 100644 --- a/rust/cubesqlplanner/cubesqlplanner/Cargo.toml +++ b/rust/cubesqlplanner/cubesqlplanner/Cargo.toml @@ -22,6 +22,9 @@ lazy_static = "1.4.0" regex = "1.3.9" typed-builder = "0.21.2" +[dev-dependencies] +petgraph = "0.6" + [dependencies.neon] version = "=1" default-features = false diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/macros.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/macros.rs index 2b99022d69fed..07beaa6d05c6b 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/macros.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/macros.rs @@ -3,28 +3,6 @@ /// This macro generates a helper method that returns an owned StaticData struct. /// The helper is used by the trait's static_data() method which applies Box::leak. 
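Editor's note: the doc examples removed in the next hunk showed the macro invocation and its expansion; condensed here for orientation (names exactly as in the removed `ignore`d doc examples, not new API):

```rust
impl_static_data!(
    MockDimensionDefinition,   // the mock type
    DimensionDefinitionStatic, // the static data type
    dimension_type, owned_by_cube, multi_stage
);

// The companion impl_static_data_method! macro expands, per the removed example, to roughly:
// fn static_data(&self) -> &DimensionDefinitionStatic {
//     // Intentional leak, acceptable for test mocks: converts the owned value
//     // produced by the helper above into a &'static reference.
//     Box::leak(Box::new(Self::static_data(self)))
// }
```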
/// -/// # Usage -/// ```ignore -/// impl_static_data!( -/// MockDimensionDefinition, // The mock type -/// DimensionDefinitionStatic, // The static data type -/// dimension_type, // Fields to include -/// owned_by_cube, -/// multi_stage -/// ); -/// ``` -/// -/// # Generated Code -/// ```ignore -/// impl MockDimensionDefinition { -/// pub fn static_data(&self) -> DimensionDefinitionStatic { -/// DimensionDefinitionStatic { -/// dimension_type: self.dimension_type.clone(), -/// owned_by_cube: self.owned_by_cube.clone(), -/// multi_stage: self.multi_stage.clone(), -/// } -/// } -/// } /// ``` #[macro_export] macro_rules! impl_static_data { @@ -54,24 +32,6 @@ macro_rules! impl_static_data { /// - The leaked memory is minimal and reclaimed when the test process exits /// - This approach significantly simplifies test code by avoiding complex lifetime management /// -/// # Usage -/// ```ignore -/// impl DimensionDefinition for MockDimensionDefinition { -/// impl_static_data_method!(DimensionDefinitionStatic); -/// -/// fn sql(&self) -> Result>, CubeError> { -/// // ... other trait methods -/// } -/// } -/// ``` -/// -/// # Generated Code -/// ```ignore -/// fn static_data(&self) -> &DimensionDefinitionStatic { -/// // Intentional memory leak - acceptable for test mocks -/// // The Box::leak pattern converts the owned value to a static reference -/// Box::leak(Box::new(Self::static_data(self))) -/// } /// ``` #[macro_export] macro_rules! impl_static_data_method { diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_base_tools.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_base_tools.rs index 6397dd12a2a75..d453a905286ac 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_base_tools.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_base_tools.rs @@ -20,21 +20,6 @@ use typed_builder::TypedBuilder; /// security_context_for_rust, and sql_utils_for_rust. /// Other methods throw todo!() errors. 
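Editor's note: since the usage example and tests for MockBaseTools are dropped below, here is a condensed sketch of the builder usage they demonstrated (paths and the `Rc`-wrapped setter follow the removed doc example and removed tests; not verified beyond this diff):

```rust
use std::rc::Rc;
use cubesqlplanner::test_fixtures::cube_bridge::{MockBaseTools, MockDriverTools};

// Builder with defaults: driver tools, SQL templates, security context and SQL utils all work.
let tools = MockBaseTools::builder().build();
let driver_tools = tools.driver_tools(false).unwrap();
let _templates = tools.sql_templates().unwrap();

// Builder with a custom driver component (timezone-aware mock).
let custom_driver = MockDriverTools::with_timezone("Europe/London".to_string());
let tools = MockBaseTools::builder()
    .driver_tools(Rc::new(custom_driver))
    .build();
```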
/// -/// # Example -/// -/// ``` -/// use cubesqlplanner::test_fixtures::cube_bridge::MockBaseTools; -/// -/// // Use builder pattern -/// let tools = MockBaseTools::builder().build(); -/// let driver_tools = tools.driver_tools(false).unwrap(); -/// let sql_templates = tools.sql_templates().unwrap(); -/// -/// // Or with custom components -/// let custom_driver = MockDriverTools::with_timezone("Europe/London".to_string()); -/// let tools = MockBaseTools::builder() -/// .driver_tools(custom_driver) -/// .build(); /// ``` #[derive(Clone, TypedBuilder)] pub struct MockBaseTools { @@ -62,27 +47,22 @@ impl BaseTools for MockBaseTools { self } - /// Returns driver tools - uses MockDriverTools fn driver_tools(&self, _external: bool) -> Result, CubeError> { Ok(self.driver_tools.clone()) } - /// Returns SQL templates renderer - uses MockSqlTemplatesRender fn sql_templates(&self) -> Result, CubeError> { Ok(self.sql_templates.clone()) } - /// Returns security context - uses MockSecurityContext fn security_context_for_rust(&self) -> Result, CubeError> { Ok(self.security_context.clone()) } - /// Returns SQL utils - uses MockSqlUtils fn sql_utils_for_rust(&self) -> Result, CubeError> { Ok(self.sql_utils.clone()) } - /// Generate time series - not implemented in mock fn generate_time_series( &self, _granularity: String, @@ -91,7 +71,6 @@ impl BaseTools for MockBaseTools { todo!("generate_time_series not implemented in mock") } - /// Generate custom time series - not implemented in mock fn generate_custom_time_series( &self, _granularity: String, @@ -101,22 +80,18 @@ impl BaseTools for MockBaseTools { todo!("generate_custom_time_series not implemented in mock") } - /// Get allocated parameters - not implemented in mock fn get_allocated_params(&self) -> Result, CubeError> { todo!("get_allocated_params not implemented in mock") } - /// Get all cube members - not implemented in mock fn all_cube_members(&self, _path: String) -> Result, CubeError> { todo!("all_cube_members not implemented in mock") } - /// Get interval and minimal time unit - not implemented in mock fn interval_and_minimal_time_unit(&self, _interval: String) -> Result, CubeError> { todo!("interval_and_minimal_time_unit not implemented in mock") } - /// Get pre-aggregation by name - not implemented in mock fn get_pre_aggregation_by_name( &self, _cube_name: String, @@ -125,7 +100,6 @@ impl BaseTools for MockBaseTools { todo!("get_pre_aggregation_by_name not implemented in mock") } - /// Get pre-aggregation table name - not implemented in mock fn pre_aggregation_table_name( &self, _cube_name: String, @@ -134,7 +108,6 @@ impl BaseTools for MockBaseTools { todo!("pre_aggregation_table_name not implemented in mock") } - /// Get join tree for hints - not implemented in mock fn join_tree_for_hints( &self, _hints: Vec, @@ -142,120 +115,3 @@ impl BaseTools for MockBaseTools { todo!("join_tree_for_hints not implemented in mock") } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_builder_default() { - let tools = MockBaseTools::builder().build(); - assert!(tools.driver_tools(false).is_ok()); - assert!(tools.sql_templates().is_ok()); - assert!(tools.security_context_for_rust().is_ok()); - assert!(tools.sql_utils_for_rust().is_ok()); - } - - #[test] - fn test_default_trait() { - let tools = MockBaseTools::default(); - assert!(tools.driver_tools(false).is_ok()); - assert!(tools.sql_templates().is_ok()); - assert!(tools.security_context_for_rust().is_ok()); - assert!(tools.sql_utils_for_rust().is_ok()); - } - - #[test] - fn 
test_driver_tools() { - let tools = MockBaseTools::builder().build(); - let driver_tools = tools.driver_tools(false).unwrap(); - - // Test that it returns a valid DriverTools implementation - let result = driver_tools - .time_grouped_column("day".to_string(), "created_at".to_string()) - .unwrap(); - assert_eq!(result, "date_trunc('day', created_at)"); - } - - #[test] - fn test_driver_tools_external_flag() { - let tools = MockBaseTools::builder().build(); - - // Both external true and false should work (mock ignores the flag) - assert!(tools.driver_tools(false).is_ok()); - assert!(tools.driver_tools(true).is_ok()); - } - - #[test] - fn test_sql_templates() { - let tools = MockBaseTools::builder().build(); - let templates = tools.sql_templates().unwrap(); - - // Test that it returns a valid SqlTemplatesRender implementation - assert!(templates.contains_template("filters/equals")); - assert!(templates.contains_template("functions/SUM")); - } - - #[test] - fn test_security_context() { - let tools = MockBaseTools::builder().build(); - // Just verify it returns without error - assert!(tools.security_context_for_rust().is_ok()); - } - - #[test] - fn test_sql_utils() { - let tools = MockBaseTools::builder().build(); - // Just verify it returns without error - assert!(tools.sql_utils_for_rust().is_ok()); - } - - #[test] - fn test_builder_with_custom_driver_tools() { - let custom_driver = MockDriverTools::with_timezone("Europe/London".to_string()); - let tools = MockBaseTools::builder() - .driver_tools(Rc::new(custom_driver)) - .build(); - - let driver_tools = tools.driver_tools(false).unwrap(); - let result = driver_tools.convert_tz("timestamp".to_string()).unwrap(); - assert_eq!( - result, - "(timestamp::timestamptz AT TIME ZONE 'Europe/London')" - ); - } - - #[test] - fn test_builder_with_custom_sql_templates() { - let mut custom_templates = std::collections::HashMap::new(); - custom_templates.insert("test/template".to_string(), "TEST {{value}}".to_string()); - let sql_templates = MockSqlTemplatesRender::try_new(custom_templates).unwrap(); - - let tools = MockBaseTools::builder() - .sql_templates(Rc::new(sql_templates)) - .build(); - - let templates = tools.sql_templates().unwrap(); - assert!(templates.contains_template("test/template")); - } - - #[test] - fn test_builder_with_all_custom_components() { - let driver_tools = MockDriverTools::with_timezone("Asia/Tokyo".to_string()); - let sql_templates = MockSqlTemplatesRender::default_templates(); - let security_context = MockSecurityContext; - let sql_utils = MockSqlUtils; - - let tools = MockBaseTools::builder() - .driver_tools(Rc::new(driver_tools)) - .sql_templates(Rc::new(sql_templates)) - .security_context(Rc::new(security_context)) - .sql_utils(Rc::new(sql_utils)) - .build(); - - assert!(tools.driver_tools(false).is_ok()); - assert!(tools.sql_templates().is_ok()); - assert!(tools.security_context_for_rust().is_ok()); - assert!(tools.sql_utils_for_rust().is_ok()); - } -} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_definition.rs index 2351975d53a8e..cc81ae109d1fc 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_definition.rs @@ -7,7 +7,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CaseDefinition for testing 
#[derive(TypedBuilder)] pub struct MockCaseDefinition { when: Vec>, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_else_item.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_else_item.rs index 820de6f753871..eb789c3050a73 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_else_item.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_else_item.rs @@ -5,7 +5,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CaseElseItem for testing #[derive(Debug, Clone, TypedBuilder)] pub struct MockCaseElseItem { label: StringOrSql, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_item.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_item.rs index f857ea1ef796d..1f348991b1c49 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_item.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_item.rs @@ -7,7 +7,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CaseItem for testing #[derive(Debug, Clone, TypedBuilder)] pub struct MockCaseItem { sql: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_definition.rs index 05d3792e46e31..3c04b91dddb76 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_definition.rs @@ -10,7 +10,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CaseSwitchDefinition for testing #[derive(TypedBuilder)] pub struct MockCaseSwitchDefinition { switch: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_else_item.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_else_item.rs index cf67520fc15ea..c6cfb093d07a4 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_else_item.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_else_item.rs @@ -6,7 +6,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CaseSwitchElseItem for testing #[derive(Debug, Clone, TypedBuilder)] pub struct MockCaseSwitchElseItem { sql: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_item.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_item.rs index 7f7ca43b6e63c..709d29d233334 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_item.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_case_switch_item.rs @@ -7,7 +7,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CaseSwitchItem for testing #[derive(Debug, Clone, TypedBuilder)] pub struct MockCaseSwitchItem { value: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_cube_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_cube_definition.rs index 
9a00a52e43ae4..e2ca93151992f 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_cube_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_cube_definition.rs @@ -1,16 +1,15 @@ use crate::cube_bridge::cube_definition::{CubeDefinition, CubeDefinitionStatic}; use crate::cube_bridge::member_sql::MemberSql; use crate::impl_static_data; -use crate::test_fixtures::cube_bridge::MockMemberSql; +use crate::test_fixtures::cube_bridge::{MockJoinItemDefinition, MockMemberSql}; use cubenativeutils::CubeError; use std::any::Any; +use std::collections::HashMap; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of CubeDefinition for testing #[derive(Clone, TypedBuilder)] pub struct MockCubeDefinition { - // Fields from CubeDefinitionStatic name: String, #[builder(default)] sql_alias: Option, @@ -21,11 +20,13 @@ pub struct MockCubeDefinition { #[builder(default)] join_map: Option>>, - // Optional trait fields #[builder(default, setter(strip_option))] sql_table: Option, #[builder(default, setter(strip_option))] sql: Option, + + #[builder(default)] + joins: HashMap, } impl_static_data!( @@ -68,9 +69,21 @@ impl CubeDefinition for MockCubeDefinition { } } +impl MockCubeDefinition { + pub fn joins(&self) -> &HashMap { + &self.joins + } + + pub fn get_join(&self, name: &str) -> Option<&MockJoinItemDefinition> { + self.joins.get(name) + } +} + #[cfg(test)] mod tests { use super::*; + use crate::cube_bridge::join_item_definition::JoinItemDefinition; + use std::collections::HashMap; #[test] fn test_basic_cube() { @@ -170,4 +183,97 @@ mod tests { let sql_table = cube.sql_table().unwrap().unwrap(); assert_eq!(sql_table.args_names(), &vec!["database"]); } + + #[test] + fn test_cube_with_single_join() { + let mut joins = HashMap::new(); + joins.insert( + "users".to_string(), + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.user_id = {users.id}".to_string()) + .build(), + ); + + let cube = MockCubeDefinition::builder() + .name("orders".to_string()) + .sql_table("public.orders".to_string()) + .joins(joins) + .build(); + + assert_eq!(cube.joins().len(), 1); + assert!(cube.get_join("users").is_some()); + + let users_join = cube.get_join("users").unwrap(); + assert_eq!(users_join.static_data().relationship, "many_to_one"); + } + + #[test] + fn test_cube_with_multiple_joins() { + let mut joins = HashMap::new(); + joins.insert( + "users".to_string(), + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.user_id = {users.id}".to_string()) + .build(), + ); + joins.insert( + "products".to_string(), + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.product_id = {products.id}".to_string()) + .build(), + ); + + let cube = MockCubeDefinition::builder() + .name("orders".to_string()) + .sql_table("public.orders".to_string()) + .joins(joins) + .build(); + + assert_eq!(cube.joins().len(), 2); + assert!(cube.get_join("users").is_some()); + assert!(cube.get_join("products").is_some()); + assert!(cube.get_join("nonexistent").is_none()); + } + + #[test] + fn test_join_accessor_methods() { + let mut joins = HashMap::new(); + joins.insert( + "countries".to_string(), + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.country_id = {countries.id}".to_string()) + .build(), + ); + + let cube = MockCubeDefinition::builder() + .name("users".to_string()) + 
.sql_table("public.users".to_string()) + .joins(joins) + .build(); + + let all_joins = cube.joins(); + assert_eq!(all_joins.len(), 1); + assert!(all_joins.contains_key("countries")); + + let country_join = cube.get_join("countries").unwrap(); + let sql = country_join.sql().unwrap(); + assert_eq!(sql.args_names(), &vec!["CUBE", "countries"]); + + assert!(cube.get_join("nonexistent").is_none()); + } + + #[test] + fn test_cube_without_joins() { + let cube = MockCubeDefinition::builder() + .name("users".to_string()) + .sql_table("public.users".to_string()) + .build(); + + assert_eq!(cube.joins().len(), 0); + assert!(cube.get_join("any").is_none()); + } } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_dimension_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_dimension_definition.rs index 8245c520e8e72..2a623451f7dda 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_dimension_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_dimension_definition.rs @@ -10,10 +10,8 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of DimensionDefinition for testing #[derive(TypedBuilder)] pub struct MockDimensionDefinition { - // Fields from DimensionDefinitionStatic #[builder(default = "string".to_string())] dimension_type: String, #[builder(default = Some(false))] @@ -31,7 +29,6 @@ pub struct MockDimensionDefinition { #[builder(default)] primary_key: Option, - // Optional trait fields #[builder(default, setter(strip_option))] sql: Option, #[builder(default)] @@ -129,324 +126,3 @@ impl DimensionDefinition for MockDimensionDefinition { self } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_string_dimension() { - let dim = MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .sql("{CUBE.name}".to_string()) - .build(); - - assert_eq!(dim.static_data().dimension_type, "string"); - assert!(dim.has_sql().unwrap()); - assert!(dim.sql().unwrap().is_some()); - } - - #[test] - fn test_number_dimension() { - let dim = MockDimensionDefinition::builder() - .dimension_type("number".to_string()) - .sql("{CUBE.count}".to_string()) - .build(); - - assert_eq!(dim.static_data().dimension_type, "number"); - assert!(dim.has_sql().unwrap()); - } - - #[test] - fn test_time_dimension() { - let dim = MockDimensionDefinition::builder() - .dimension_type("time".to_string()) - .sql("{CUBE.created_at}".to_string()) - .build(); - - assert_eq!(dim.static_data().dimension_type, "time"); - assert!(dim.has_sql().unwrap()); - } - - #[test] - fn test_geo_dimension() { - let dim = MockDimensionDefinition::builder() - .dimension_type("geo".to_string()) - .latitude("{CUBE.lat}".to_string()) - .longitude("{CUBE.lon}".to_string()) - .build(); - - assert_eq!(dim.static_data().dimension_type, "geo"); - assert!(dim.has_latitude().unwrap()); - assert!(dim.has_longitude().unwrap()); - assert!(!dim.has_sql().unwrap()); - } - - #[test] - fn test_switch_dimension() { - let dim = MockDimensionDefinition::builder() - .dimension_type("switch".to_string()) - .values(Some(vec!["active".to_string(), "inactive".to_string()])) - .build(); - - assert_eq!(dim.static_data().dimension_type, "switch"); - assert_eq!( - dim.static_data().values, - Some(vec!["active".to_string(), "inactive".to_string()]) - ); - assert!(!dim.has_sql().unwrap()); - } - - #[test] - fn test_dimension_with_time_shift() { - let time_shift = Rc::new( - 
MockTimeShiftDefinition::builder() - .interval(Some("1 day".to_string())) - .name(Some("yesterday".to_string())) - .build(), - ); - - let dim = MockDimensionDefinition::builder() - .dimension_type("time".to_string()) - .sql("{CUBE.date}".to_string()) - .time_shift(Some(vec![time_shift])) - .build(); - - assert!(dim.has_time_shift().unwrap()); - let shifts = dim.time_shift().unwrap().unwrap(); - assert_eq!(shifts.len(), 1); - } - - #[test] - fn test_dimension_with_flags() { - let dim = MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .sql("{CUBE.field}".to_string()) - .multi_stage(Some(true)) - .sub_query(Some(true)) - .owned_by_cube(Some(false)) - .build(); - - assert_eq!(dim.static_data().multi_stage, Some(true)); - assert_eq!(dim.static_data().sub_query, Some(true)); - assert_eq!(dim.static_data().owned_by_cube, Some(false)); - } - - #[test] - fn test_sql_parsing_simple() { - let dim = MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .sql("{CUBE.field}".to_string()) - .build(); - - let sql = dim.sql().unwrap().unwrap(); - assert_eq!(sql.args_names(), &vec!["CUBE"]); - - // Check compiled template - use crate::test_fixtures::cube_bridge::{MockSecurityContext, MockSqlUtils}; - let (template, args) = sql - .compile_template_sql(Rc::new(MockSqlUtils), Rc::new(MockSecurityContext)) - .unwrap(); - - match template { - crate::cube_bridge::member_sql::SqlTemplate::String(s) => { - assert_eq!(s, "{arg:0}"); - } - _ => panic!("Expected String template"), - } - - assert_eq!(args.symbol_paths.len(), 1); - assert_eq!(args.symbol_paths[0], vec!["CUBE", "field"]); - } - - #[test] - fn test_sql_parsing_multiple_refs() { - let dim = MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .sql("{CUBE.first_name} || ' ' || {CUBE.last_name}".to_string()) - .build(); - - let sql = dim.sql().unwrap().unwrap(); - assert_eq!(sql.args_names(), &vec!["CUBE"]); - - // Check compiled template - use crate::test_fixtures::cube_bridge::{MockSecurityContext, MockSqlUtils}; - let (template, args) = sql - .compile_template_sql(Rc::new(MockSqlUtils), Rc::new(MockSecurityContext)) - .unwrap(); - - match template { - crate::cube_bridge::member_sql::SqlTemplate::String(s) => { - assert_eq!(s, "{arg:0} || ' ' || {arg:1}"); - } - _ => panic!("Expected String template"), - } - - assert_eq!(args.symbol_paths.len(), 2); - assert_eq!(args.symbol_paths[0], vec!["CUBE", "first_name"]); - assert_eq!(args.symbol_paths[1], vec!["CUBE", "last_name"]); - } - - #[test] - fn test_sql_parsing_cross_cube_refs() { - let dim = MockDimensionDefinition::builder() - .dimension_type("number".to_string()) - .sql("{CUBE.amount} / {other_cube.total}".to_string()) - .build(); - - let sql = dim.sql().unwrap().unwrap(); - assert_eq!(sql.args_names(), &vec!["CUBE", "other_cube"]); - - // Check compiled template - use crate::test_fixtures::cube_bridge::{MockSecurityContext, MockSqlUtils}; - let (template, args) = sql - .compile_template_sql(Rc::new(MockSqlUtils), Rc::new(MockSecurityContext)) - .unwrap(); - - match template { - crate::cube_bridge::member_sql::SqlTemplate::String(s) => { - assert_eq!(s, "{arg:0} / {arg:1}"); - } - _ => panic!("Expected String template"), - } - - assert_eq!(args.symbol_paths.len(), 2); - assert_eq!(args.symbol_paths[0], vec!["CUBE", "amount"]); - assert_eq!(args.symbol_paths[1], vec!["other_cube", "total"]); - } - - #[test] - fn test_geo_sql_parsing() { - let dim = MockDimensionDefinition::builder() - .dimension_type("geo".to_string()) - 
.latitude("{CUBE.latitude}".to_string()) - .longitude("{CUBE.longitude}".to_string()) - .build(); - - assert!(!dim.has_sql().unwrap()); - - let lat = dim.latitude().unwrap().unwrap(); - let lat_sql = lat.sql().unwrap(); - - use crate::test_fixtures::cube_bridge::{MockSecurityContext, MockSqlUtils}; - let (template, args) = lat_sql - .compile_template_sql(Rc::new(MockSqlUtils), Rc::new(MockSecurityContext)) - .unwrap(); - - match template { - crate::cube_bridge::member_sql::SqlTemplate::String(s) => { - assert_eq!(s, "{arg:0}"); - } - _ => panic!("Expected String template"), - } - - assert_eq!(args.symbol_paths[0], vec!["CUBE", "latitude"]); - } - - #[test] - fn test_case_dimension() { - use crate::cube_bridge::case_variant::CaseVariant; - use crate::cube_bridge::string_or_sql::StringOrSql; - use crate::test_fixtures::cube_bridge::{ - MockCaseDefinition, MockCaseElseItem, MockCaseItem, - }; - - let when_items = vec![ - Rc::new( - MockCaseItem::builder() - .sql("{CUBE.status} = 'active'".to_string()) - .label(StringOrSql::String("Active".to_string())) - .build(), - ), - Rc::new( - MockCaseItem::builder() - .sql("{CUBE.status} = 'inactive'".to_string()) - .label(StringOrSql::String("Inactive".to_string())) - .build(), - ), - ]; - - let else_item = Rc::new( - MockCaseElseItem::builder() - .label(StringOrSql::String("Unknown".to_string())) - .build(), - ); - - let case_def = Rc::new( - MockCaseDefinition::builder() - .when(when_items) - .else_label(else_item) - .build(), - ); - - let dim = MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .case(Some(Rc::new(CaseVariant::Case(case_def)))) - .build(); - - assert!(dim.has_case().unwrap()); - let case_result = dim.case().unwrap(); - assert!(case_result.is_some()); - - if let Some(CaseVariant::Case(case)) = case_result { - let when = case.when().unwrap(); - assert_eq!(when.len(), 2); - } else { - panic!("Expected Case variant"); - } - } - - #[test] - fn test_case_switch_dimension() { - use crate::cube_bridge::case_variant::CaseVariant; - use crate::test_fixtures::cube_bridge::{ - MockCaseSwitchDefinition, MockCaseSwitchElseItem, MockCaseSwitchItem, - }; - - let when_items = vec![ - Rc::new( - MockCaseSwitchItem::builder() - .value("1".to_string()) - .sql("{CUBE.active_value}".to_string()) - .build(), - ), - Rc::new( - MockCaseSwitchItem::builder() - .value("0".to_string()) - .sql("{CUBE.inactive_value}".to_string()) - .build(), - ), - ]; - - let else_item = Rc::new( - MockCaseSwitchElseItem::builder() - .sql("{CUBE.default_value}".to_string()) - .build(), - ); - - let case_switch = Rc::new( - MockCaseSwitchDefinition::builder() - .switch("{CUBE.status_code}".to_string()) - .when(when_items) - .else_sql(else_item) - .build(), - ); - - let dim = MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .case(Some(Rc::new(CaseVariant::CaseSwitch(case_switch)))) - .build(); - - assert!(dim.has_case().unwrap()); - let case_result = dim.case().unwrap(); - assert!(case_result.is_some()); - - if let Some(CaseVariant::CaseSwitch(case_switch)) = case_result { - assert!(case_switch.switch().is_ok()); - let when = case_switch.when().unwrap(); - assert_eq!(when.len(), 2); - } else { - panic!("Expected CaseSwitch variant"); - } - } -} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_driver_tools.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_driver_tools.rs index 79274a8cb9b37..23a7693e1b332 100644 --- 
a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_driver_tools.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_driver_tools.rs @@ -10,15 +10,6 @@ use std::rc::Rc; /// This mock provides implementations based on PostgresQuery.ts and BaseQuery.js /// from packages/cubejs-schema-compiler/src/adapter/ /// -/// # Example -/// -/// ``` -/// use cubesqlplanner::test_fixtures::cube_bridge::MockDriverTools; -/// -/// let tools = MockDriverTools::new(); -/// let result = tools.time_grouped_column("day".to_string(), "created_at".to_string()).unwrap(); -/// assert_eq!(result, "date_trunc('day', created_at)"); -/// ``` #[derive(Clone)] pub struct MockDriverTools { timezone: String, @@ -27,7 +18,6 @@ pub struct MockDriverTools { } impl MockDriverTools { - /// Creates a new MockDriverTools with default settings (UTC timezone) pub fn new() -> Self { Self { timezone: "UTC".to_string(), @@ -36,7 +26,7 @@ impl MockDriverTools { } } - /// Creates a new MockDriverTools with a specific timezone + #[allow(dead_code)] pub fn with_timezone(timezone: String) -> Self { Self { timezone, @@ -45,7 +35,6 @@ impl MockDriverTools { } } - /// Creates a new MockDriverTools with custom SQL templates #[allow(dead_code)] pub fn with_sql_templates(sql_templates: MockSqlTemplatesRender) -> Self { Self { @@ -67,8 +56,6 @@ impl DriverTools for MockDriverTools { self } - /// Convert timezone - based on PostgresQuery.ts:26-28 - /// Returns: `(field::timestamptz AT TIME ZONE 'timezone')` fn convert_tz(&self, field: String) -> Result { Ok(format!( "({}::timestamptz AT TIME ZONE '{}')", @@ -76,8 +63,6 @@ impl DriverTools for MockDriverTools { )) } - /// Time grouped column - based on PostgresQuery.ts:30-32 - /// Uses date_trunc function with granularity mapping fn time_grouped_column( &self, granularity: String, @@ -104,74 +89,49 @@ impl DriverTools for MockDriverTools { Ok(format!("date_trunc('{}', {})", interval, dimension)) } - /// Returns SQL templates renderer fn sql_templates(&self) -> Result, CubeError> { Ok(self.sql_templates.clone()) } - /// Timestamp precision - based on BaseQuery.js:3834-3836 fn timestamp_precision(&self) -> Result { Ok(self.timestamp_precision) } - /// Timestamp cast - based on BaseQuery.js:2101-2103 - /// Returns: `value::timestamptz` fn time_stamp_cast(&self, field: String) -> Result { Ok(format!("{}::timestamptz", field)) } - /// DateTime cast - based on BaseQuery.js:2105-2107 - /// Returns: `value::timestamp` fn date_time_cast(&self, field: String) -> Result { Ok(format!("{}::timestamp", field)) } - /// Convert date to DB timezone - based on BaseQuery.js:3820-3822 - /// This is a simplified version that returns the date as-is - /// The full implementation would use localTimestampToUtc utility fn in_db_time_zone(&self, date: String) -> Result { - // In real implementation this calls localTimestampToUtc(timezone, timestampFormat(), date) - // For mock we just return the date as-is Ok(date) } - /// Get allocated parameters - returns empty vec for mock fn get_allocated_params(&self) -> Result, CubeError> { Ok(Vec::new()) } - /// Subtract interval - based on BaseQuery.js:1166-1169 - /// Returns: `date - interval 'interval'` fn subtract_interval(&self, date: String, interval: String) -> Result { let interval_str = self.interval_string(interval)?; Ok(format!("{} - interval {}", date, interval_str)) } - /// Add interval - based on BaseQuery.js:1176-1179 - /// Returns: `date + interval 'interval'` fn add_interval(&self, date: String, interval: String) -> 
Result { let interval_str = self.interval_string(interval)?; Ok(format!("{} + interval {}", date, interval_str)) } - /// Format interval string - based on BaseQuery.js:1190-1192 - /// Returns: `'interval'` fn interval_string(&self, interval: String) -> Result { Ok(format!("'{}'", interval)) } - /// Add timestamp interval - based on BaseQuery.js:1199-1201 - /// Delegates to add_interval fn add_timestamp_interval(&self, date: String, interval: String) -> Result { self.add_interval(date, interval) } - /// Get interval and minimal time unit - based on BaseQuery.js:2116-2119 - /// Returns: [interval, minimal_time_unit] - /// The minimal time unit is the lowest unit in the interval (e.g., "day" for "5 days") fn interval_and_minimal_time_unit(&self, interval: String) -> Result, CubeError> { - // Parse minimal granularity from interval - // This is a simplified version - full implementation would call diffTimeUnitForInterval let min_unit = if interval.contains("second") { "second" } else if interval.contains("minute") { @@ -195,26 +155,18 @@ impl DriverTools for MockDriverTools { Ok(vec![interval, min_unit.to_string()]) } - /// HLL init - based on PostgresQuery.ts:48-50 - /// Returns: `hll_add_agg(hll_hash_any(sql))` fn hll_init(&self, sql: String) -> Result { Ok(format!("hll_add_agg(hll_hash_any({}))", sql)) } - /// HLL merge - based on PostgresQuery.ts:52-54 - /// Returns: `round(hll_cardinality(hll_union_agg(sql)))` fn hll_merge(&self, sql: String) -> Result { Ok(format!("round(hll_cardinality(hll_union_agg({})))", sql)) } - /// HLL cardinality merge - based on BaseQuery.js:3734-3736 - /// Delegates to hll_merge fn hll_cardinality_merge(&self, sql: String) -> Result { self.hll_merge(sql) } - /// Count distinct approx - based on PostgresQuery.ts:56-58 - /// Returns: `round(hll_cardinality(hll_add_agg(hll_hash_any(sql))))` fn count_distinct_approx(&self, sql: String) -> Result { Ok(format!( "round(hll_cardinality(hll_add_agg(hll_hash_any({}))))", @@ -222,15 +174,10 @@ impl DriverTools for MockDriverTools { )) } - /// Support generated series for custom time dimensions - based on PostgresQuery.ts:60-62 - /// Postgres supports this, so returns true fn support_generated_series_for_custom_td(&self) -> Result { Ok(true) } - /// Date bin function - based on PostgresQuery.ts:40-46 - /// Returns sql for source expression floored to timestamps aligned with - /// intervals relative to origin timestamp point fn date_bin( &self, interval: String, @@ -243,232 +190,3 @@ impl DriverTools for MockDriverTools { )) } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_convert_tz() { - let tools = MockDriverTools::new(); - let result = tools.convert_tz("created_at".to_string()).unwrap(); - assert_eq!(result, "(created_at::timestamptz AT TIME ZONE 'UTC')"); - } - - #[test] - fn test_convert_tz_with_custom_timezone() { - let tools = MockDriverTools::with_timezone("America/Los_Angeles".to_string()); - let result = tools.convert_tz("created_at".to_string()).unwrap(); - assert_eq!( - result, - "(created_at::timestamptz AT TIME ZONE 'America/Los_Angeles')" - ); - } - - #[test] - fn test_time_grouped_column() { - let tools = MockDriverTools::new(); - - // Test various granularities - assert_eq!( - tools - .time_grouped_column("day".to_string(), "created_at".to_string()) - .unwrap(), - "date_trunc('day', created_at)" - ); - - assert_eq!( - tools - .time_grouped_column("month".to_string(), "updated_at".to_string()) - .unwrap(), - "date_trunc('month', updated_at)" - ); - - assert_eq!( - tools - 
.time_grouped_column("year".to_string(), "timestamp".to_string()) - .unwrap(), - "date_trunc('year', timestamp)" - ); - } - - #[test] - fn test_time_grouped_column_invalid_granularity() { - let tools = MockDriverTools::new(); - let result = tools.time_grouped_column("invalid".to_string(), "created_at".to_string()); - assert!(result.is_err()); - } - - #[test] - fn test_timestamp_precision() { - let tools = MockDriverTools::new(); - assert_eq!(tools.timestamp_precision().unwrap(), 3); - } - - #[test] - fn test_time_stamp_cast() { - let tools = MockDriverTools::new(); - assert_eq!( - tools.time_stamp_cast("?".to_string()).unwrap(), - "?::timestamptz" - ); - } - - #[test] - fn test_date_time_cast() { - let tools = MockDriverTools::new(); - assert_eq!( - tools.date_time_cast("date_from".to_string()).unwrap(), - "date_from::timestamp" - ); - } - - #[test] - fn test_subtract_interval() { - let tools = MockDriverTools::new(); - assert_eq!( - tools - .subtract_interval("NOW()".to_string(), "1 day".to_string()) - .unwrap(), - "NOW() - interval '1 day'" - ); - } - - #[test] - fn test_add_interval() { - let tools = MockDriverTools::new(); - assert_eq!( - tools - .add_interval("created_at".to_string(), "7 days".to_string()) - .unwrap(), - "created_at + interval '7 days'" - ); - } - - #[test] - fn test_interval_string() { - let tools = MockDriverTools::new(); - assert_eq!( - tools.interval_string("1 hour".to_string()).unwrap(), - "'1 hour'" - ); - } - - #[test] - fn test_add_timestamp_interval() { - let tools = MockDriverTools::new(); - assert_eq!( - tools - .add_timestamp_interval("timestamp".to_string(), "5 minutes".to_string()) - .unwrap(), - "timestamp + interval '5 minutes'" - ); - } - - #[test] - fn test_interval_and_minimal_time_unit() { - let tools = MockDriverTools::new(); - - let result = tools - .interval_and_minimal_time_unit("5 days".to_string()) - .unwrap(); - assert_eq!(result, vec!["5 days", "day"]); - - let result = tools - .interval_and_minimal_time_unit("2 hours".to_string()) - .unwrap(); - assert_eq!(result, vec!["2 hours", "hour"]); - - let result = tools - .interval_and_minimal_time_unit("30 seconds".to_string()) - .unwrap(); - assert_eq!(result, vec!["30 seconds", "second"]); - } - - #[test] - fn test_hll_init() { - let tools = MockDriverTools::new(); - assert_eq!( - tools.hll_init("user_id".to_string()).unwrap(), - "hll_add_agg(hll_hash_any(user_id))" - ); - } - - #[test] - fn test_hll_merge() { - let tools = MockDriverTools::new(); - assert_eq!( - tools.hll_merge("hll_column".to_string()).unwrap(), - "round(hll_cardinality(hll_union_agg(hll_column)))" - ); - } - - #[test] - fn test_hll_cardinality_merge() { - let tools = MockDriverTools::new(); - assert_eq!( - tools.hll_cardinality_merge("hll_data".to_string()).unwrap(), - "round(hll_cardinality(hll_union_agg(hll_data)))" - ); - } - - #[test] - fn test_count_distinct_approx() { - let tools = MockDriverTools::new(); - assert_eq!( - tools - .count_distinct_approx("visitor_id".to_string()) - .unwrap(), - "round(hll_cardinality(hll_add_agg(hll_hash_any(visitor_id))))" - ); - } - - #[test] - fn test_support_generated_series_for_custom_td() { - let tools = MockDriverTools::new(); - assert!(tools.support_generated_series_for_custom_td().unwrap()); - } - - #[test] - fn test_date_bin() { - let tools = MockDriverTools::new(); - let result = tools - .date_bin( - "1 day".to_string(), - "created_at".to_string(), - "2024-01-01".to_string(), - ) - .unwrap(); - - assert_eq!( - result, - "('2024-01-01' ::timestamp + INTERVAL '1 day' * 
FLOOR(EXTRACT(EPOCH FROM (created_at - '2024-01-01'::timestamp)) / EXTRACT(EPOCH FROM INTERVAL '1 day')))" - ); - } - - #[test] - fn test_in_db_time_zone() { - let tools = MockDriverTools::new(); - let result = tools - .in_db_time_zone("2024-01-01T00:00:00".to_string()) - .unwrap(); - assert_eq!(result, "2024-01-01T00:00:00"); - } - - #[test] - fn test_get_allocated_params() { - let tools = MockDriverTools::new(); - let result = tools.get_allocated_params().unwrap(); - assert_eq!(result, Vec::::new()); - } - - #[test] - fn test_sql_templates() { - let tools = MockDriverTools::new(); - let templates = tools.sql_templates().unwrap(); - - // Verify it returns a valid SqlTemplatesRender - assert!(templates.contains_template("filters/equals")); - assert!(templates.contains_template("functions/SUM")); - } -} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_evaluator.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_evaluator.rs index a731d54998bec..5ff1644f6a15d 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_evaluator.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_evaluator.rs @@ -8,27 +8,28 @@ use crate::cube_bridge::pre_aggregation_description::PreAggregationDescription; use crate::cube_bridge::segment_definition::SegmentDefinition; use crate::impl_static_data; use crate::test_fixtures::cube_bridge::mock_schema::MockSchema; +use crate::test_fixtures::cube_bridge::MockJoinGraph; use cubenativeutils::CubeError; use std::any::Any; use std::collections::HashMap; use std::rc::Rc; -/// Mock implementation of CubeEvaluator for testing pub struct MockCubeEvaluator { schema: MockSchema, primary_keys: HashMap>, + join_graph: Option>, } impl MockCubeEvaluator { - /// Create a new MockCubeEvaluator with the given schema + #[allow(dead_code)] pub fn new(schema: MockSchema) -> Self { Self { schema, primary_keys: HashMap::new(), + join_graph: None, } } - /// Create a new MockCubeEvaluator with schema and primary keys pub fn with_primary_keys( schema: MockSchema, primary_keys: HashMap>, @@ -36,11 +37,36 @@ impl MockCubeEvaluator { Self { schema, primary_keys, + join_graph: None, } } - /// Parse a path string like "cube.member" into ["cube", "member"] - /// Returns error if the path doesn't exist in schema for the given type + pub fn with_join_graph( + schema: MockSchema, + primary_keys: HashMap>, + join_graph: MockJoinGraph, + ) -> Self { + Self { + schema, + primary_keys, + join_graph: Some(Rc::new(join_graph)), + } + } + + pub fn join_graph(&self) -> Option> { + self.join_graph.clone() + } + + pub fn measures_for_cube( + &self, + cube_name: &str, + ) -> HashMap> { + self.schema + .get_cube(cube_name) + .map(|cube| cube.measures.clone()) + .unwrap_or_default() + } + fn parse_and_validate_path( &self, path_type: &str, @@ -48,7 +74,6 @@ impl MockCubeEvaluator { ) -> Result, CubeError> { let parts: Vec = path.split('.').map(|s| s.to_string()).collect(); - // Allow 2 parts (cube.member) or 3 parts (cube.dimension.granularity for time dimensions) if parts.len() != 2 && parts.len() != 3 { return Err(CubeError::user(format!( "Invalid path format: '{}'. 
Expected format: 'cube.member' or 'cube.time_dimension.granularity'", @@ -59,14 +84,11 @@ impl MockCubeEvaluator { let cube_name = &parts[0]; let member_name = &parts[1]; - // Check if cube exists if self.schema.get_cube(cube_name).is_none() { return Err(CubeError::user(format!("Cube '{}' not found", cube_name))); } - // If we have 3 parts, check if the dimension is a time dimension if parts.len() == 3 { - // Only dimensions can have granularity if path_type != "dimension" && path_type != "dimensions" { return Err(CubeError::user(format!( "Granularity can only be specified for dimensions, not for {}", @@ -74,7 +96,6 @@ impl MockCubeEvaluator { ))); } - // Check if the dimension exists and is of type 'time' if let Some(dimension) = self.schema.get_dimension(cube_name, member_name) { if dimension.static_data().dimension_type != "time" { return Err(CubeError::user(format!( @@ -83,7 +104,6 @@ impl MockCubeEvaluator { dimension.static_data().dimension_type ))); } - // Granularity is valid - return all 3 parts return Ok(parts); } else { return Err(CubeError::user(format!( @@ -93,7 +113,6 @@ impl MockCubeEvaluator { } } - // For 2-part paths, validate member exists for the given type let exists = match path_type { "measure" | "measures" => self.schema.get_measure(cube_name, member_name).is_some(), "dimension" | "dimensions" => { @@ -221,7 +240,6 @@ impl CubeEvaluator for MockCubeEvaluator { &self, path: Vec, ) -> Result, CubeError> { - // path should be [cube_name, dimension_name, "granularities", granularity] if path.len() != 4 { return Err(CubeError::user(format!( "Invalid granularity path: expected 4 parts (cube.dimension.granularities.granularity), got {}", @@ -238,7 +256,6 @@ impl CubeEvaluator for MockCubeEvaluator { let granularity = &path[3]; - // Validate granularity is one of the supported ones let valid_granularities = [ "second", "minute", "hour", "day", "week", "month", "quarter", "year", ]; @@ -250,7 +267,6 @@ impl CubeEvaluator for MockCubeEvaluator { ))); } - // Create mock granularity definition with interval equal to granularity use crate::test_fixtures::cube_bridge::MockGranularityDefinition; Ok(Rc::new( MockGranularityDefinition::builder() @@ -290,387 +306,3 @@ impl CubeEvaluator for MockCubeEvaluator { self } } - -#[cfg(test)] -mod tests { - use super::*; - use crate::test_fixtures::cube_bridge::{ - MockDimensionDefinition, MockMeasureDefinition, MockSchemaBuilder, MockSegmentDefinition, - }; - - fn create_test_schema() -> MockSchema { - MockSchemaBuilder::new() - .add_cube("users") - .add_dimension( - "id", - MockDimensionDefinition::builder() - .dimension_type("number".to_string()) - .sql("id".to_string()) - .build(), - ) - .add_dimension( - "name", - MockDimensionDefinition::builder() - .dimension_type("string".to_string()) - .sql("name".to_string()) - .build(), - ) - .add_measure( - "count", - MockMeasureDefinition::builder() - .measure_type("count".to_string()) - .sql("COUNT(*)".to_string()) - .build(), - ) - .add_segment( - "active", - MockSegmentDefinition::builder() - .sql("{CUBE.status} = 'active'".to_string()) - .build(), - ) - .finish_cube() - .add_cube("orders") - .add_dimension( - "id", - MockDimensionDefinition::builder() - .dimension_type("number".to_string()) - .sql("id".to_string()) - .build(), - ) - .add_measure( - "total", - MockMeasureDefinition::builder() - .measure_type("sum".to_string()) - .sql("amount".to_string()) - .build(), - ) - .finish_cube() - .build() - } - - #[test] - fn test_parse_path_measure() { - let schema = create_test_schema(); - let 
evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.parse_path("measure".to_string(), "users.count".to_string()); - assert!(result.is_ok()); - assert_eq!(result.unwrap(), vec!["users", "count"]); - } - - #[test] - fn test_parse_path_dimension() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.parse_path("dimension".to_string(), "users.name".to_string()); - assert!(result.is_ok()); - assert_eq!(result.unwrap(), vec!["users", "name"]); - } - - #[test] - fn test_parse_path_segment() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.parse_path("segment".to_string(), "users.active".to_string()); - assert!(result.is_ok()); - assert_eq!(result.unwrap(), vec!["users", "active"]); - } - - #[test] - fn test_parse_path_invalid_format() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.parse_path("measure".to_string(), "invalid".to_string()); - assert!(result.is_err()); - assert!(result.unwrap_err().message.contains("Invalid path format")); - } - - #[test] - fn test_parse_path_cube_not_found() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.parse_path("measure".to_string(), "nonexistent.count".to_string()); - assert!(result.is_err()); - assert!(result - .unwrap_err() - .message - .contains("Cube 'nonexistent' not found")); - } - - #[test] - fn test_parse_path_member_not_found() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.parse_path("measure".to_string(), "users.nonexistent".to_string()); - assert!(result.is_err()); - assert!(result - .unwrap_err() - .message - .contains("measure 'nonexistent' not found")); - } - - #[test] - fn test_measure_by_path() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let measure = evaluator - .measure_by_path("users.count".to_string()) - .unwrap(); - assert_eq!(measure.static_data().measure_type, "count"); - } - - #[test] - fn test_dimension_by_path() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let dimension = evaluator - .dimension_by_path("users.name".to_string()) - .unwrap(); - assert_eq!(dimension.static_data().dimension_type, "string"); - } - - #[test] - fn test_segment_by_path() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let segment = evaluator - .segment_by_path("users.active".to_string()) - .unwrap(); - // Verify it's a valid segment - assert!(segment.sql().is_ok()); - } - - #[test] - fn test_cube_from_path() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let cube = evaluator.cube_from_path("users".to_string()).unwrap(); - assert_eq!(cube.static_data().name, "users"); - } - - #[test] - fn test_cube_from_path_not_found() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.cube_from_path("nonexistent".to_string()); - assert!(result.is_err()); - if let Err(err) = result { - assert!(err.message.contains("Cube 'nonexistent' not found")); - } - } - - #[test] - fn test_is_measure() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - assert!(evaluator - .is_measure(vec!["users".to_string(), "count".to_string()]) - .unwrap()); - 
assert!(!evaluator - .is_measure(vec!["users".to_string(), "name".to_string()]) - .unwrap()); - assert!(!evaluator - .is_measure(vec!["users".to_string(), "nonexistent".to_string()]) - .unwrap()); - } - - #[test] - fn test_is_dimension() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - assert!(evaluator - .is_dimension(vec!["users".to_string(), "name".to_string()]) - .unwrap()); - assert!(!evaluator - .is_dimension(vec!["users".to_string(), "count".to_string()]) - .unwrap()); - assert!(!evaluator - .is_dimension(vec!["users".to_string(), "nonexistent".to_string()]) - .unwrap()); - } - - #[test] - fn test_is_segment() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - assert!(evaluator - .is_segment(vec!["users".to_string(), "active".to_string()]) - .unwrap()); - assert!(!evaluator - .is_segment(vec!["users".to_string(), "count".to_string()]) - .unwrap()); - assert!(!evaluator - .is_segment(vec!["users".to_string(), "nonexistent".to_string()]) - .unwrap()); - } - - #[test] - fn test_cube_exists() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - assert!(evaluator.cube_exists("users".to_string()).unwrap()); - assert!(evaluator.cube_exists("orders".to_string()).unwrap()); - assert!(!evaluator.cube_exists("nonexistent".to_string()).unwrap()); - } - - #[test] - fn test_with_primary_keys() { - let schema = create_test_schema(); - let mut primary_keys = HashMap::new(); - primary_keys.insert("users".to_string(), vec!["id".to_string()]); - primary_keys.insert( - "orders".to_string(), - vec!["id".to_string(), "user_id".to_string()], - ); - - let evaluator = MockCubeEvaluator::with_primary_keys(schema, primary_keys.clone()); - - let static_data = evaluator.static_data(); - assert_eq!(static_data.primary_keys, primary_keys); - } - - #[test] - fn test_multiple_cubes() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - // Test users cube - assert!(evaluator.cube_exists("users".to_string()).unwrap()); - assert!(evaluator - .is_measure(vec!["users".to_string(), "count".to_string()]) - .unwrap()); - assert!(evaluator - .is_dimension(vec!["users".to_string(), "name".to_string()]) - .unwrap()); - - // Test orders cube - assert!(evaluator.cube_exists("orders".to_string()).unwrap()); - assert!(evaluator - .is_measure(vec!["orders".to_string(), "total".to_string()]) - .unwrap()); - assert!(evaluator - .is_dimension(vec!["orders".to_string(), "id".to_string()]) - .unwrap()); - } - - #[test] - fn test_resolve_granularity() { - let schema = MockSchemaBuilder::new() - .add_cube("users") - .add_dimension( - "created_at", - MockDimensionDefinition::builder() - .dimension_type("time".to_string()) - .sql("created_at".to_string()) - .build(), - ) - .finish_cube() - .build(); - let evaluator = MockCubeEvaluator::new(schema); - - // Test valid granularities with 4-part path: [cube, dimension, "granularities", granularity] - let granularities = vec![ - "second", "minute", "hour", "day", "week", "month", "quarter", "year", - ]; - for gran in granularities { - let result = evaluator.resolve_granularity(vec![ - "users".to_string(), - "created_at".to_string(), - "granularities".to_string(), - gran.to_string(), - ]); - assert!(result.is_ok()); - let granularity_def = result.unwrap(); - assert_eq!(granularity_def.static_data().interval, gran); - assert_eq!(granularity_def.static_data().origin, None); - assert_eq!(granularity_def.static_data().offset, None); - } 
- } - - #[test] - fn test_resolve_granularity_invalid_path_length() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.resolve_granularity(vec![ - "users".to_string(), - "created_at".to_string(), - "granularities".to_string(), - ]); - assert!(result.is_err()); - if let Err(err) = result { - assert!(err.message.contains("expected 4 parts")); - } - } - - #[test] - fn test_resolve_granularity_unsupported() { - let schema = MockSchemaBuilder::new() - .add_cube("users") - .add_dimension( - "created_at", - MockDimensionDefinition::builder() - .dimension_type("time".to_string()) - .sql("created_at".to_string()) - .build(), - ) - .finish_cube() - .build(); - let evaluator = MockCubeEvaluator::new(schema); - - let result = evaluator.resolve_granularity(vec![ - "users".to_string(), - "created_at".to_string(), - "granularities".to_string(), - "invalid".to_string(), - ]); - assert!(result.is_err()); - if let Err(err) = result { - assert!(err.message.contains("Unsupported granularity")); - } - } - - #[test] - #[should_panic(expected = "pre_aggregations_for_cube_as_array is not implemented")] - fn test_pre_aggregations_for_cube_panics() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let _ = evaluator.pre_aggregations_for_cube_as_array("users".to_string()); - } - - #[test] - #[should_panic(expected = "pre_aggregation_description_by_name is not implemented")] - fn test_pre_aggregation_by_name_panics() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - let _ = - evaluator.pre_aggregation_description_by_name("users".to_string(), "main".to_string()); - } - - #[test] - #[should_panic(expected = "evaluate_rollup_references is not implemented")] - fn test_evaluate_rollup_references_panics() { - let schema = create_test_schema(); - let evaluator = MockCubeEvaluator::new(schema); - - use crate::test_fixtures::cube_bridge::MockMemberSql; - let sql = Rc::new(MockMemberSql::new("{CUBE.id}").unwrap()); - let _ = evaluator.evaluate_rollup_references("users".to_string(), sql); - } -} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_expression_struct.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_expression_struct.rs index 9f10d02066baa..e31e136ed25fe 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_expression_struct.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_expression_struct.rs @@ -7,17 +7,14 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of ExpressionStruct for testing #[derive(TypedBuilder)] pub struct MockExpressionStruct { - // Fields from ExpressionStructStatic expression_type: String, #[builder(default)] source_measure: Option, #[builder(default)] replace_aggregation_type: Option, - // Optional trait fields #[builder(default)] add_filters: Option>>, } @@ -54,68 +51,3 @@ impl ExpressionStruct for MockExpressionStruct { self } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_basic_expression_struct() { - let expr = MockExpressionStruct::builder() - .expression_type("aggregate".to_string()) - .build(); - - assert_eq!(expr.static_data().expression_type, "aggregate"); - assert!(!expr.has_add_filters().unwrap()); - } - - #[test] - fn test_expression_struct_with_source_measure() { - let expr = MockExpressionStruct::builder() - 
.expression_type("measure_reference".to_string()) - .source_measure(Some("users.count".to_string())) - .build(); - - let static_data = expr.static_data(); - assert_eq!(static_data.source_measure, Some("users.count".to_string())); - } - - #[test] - fn test_expression_struct_with_replace_aggregation() { - let expr = MockExpressionStruct::builder() - .expression_type("aggregate".to_string()) - .replace_aggregation_type(Some("avg".to_string())) - .build(); - - let static_data = expr.static_data(); - assert_eq!( - static_data.replace_aggregation_type, - Some("avg".to_string()) - ); - } - - #[test] - fn test_expression_struct_with_add_filters() { - let filters = vec![ - Rc::new( - MockStructWithSqlMember::builder() - .sql("{CUBE.status} = 'active'".to_string()) - .build(), - ), - Rc::new( - MockStructWithSqlMember::builder() - .sql("{CUBE.deleted} = false".to_string()) - .build(), - ), - ]; - - let expr = MockExpressionStruct::builder() - .expression_type("aggregate".to_string()) - .add_filters(Some(filters)) - .build(); - - assert!(expr.has_add_filters().unwrap()); - let result = expr.add_filters().unwrap().unwrap(); - assert_eq!(result.len(), 2); - } -} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_geo_item.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_geo_item.rs index 80be311aa8ed1..dfa9e9560dfec 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_geo_item.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_geo_item.rs @@ -6,7 +6,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of GeoItem for testing #[derive(Debug, TypedBuilder)] pub struct MockGeoItem { sql: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_granularity_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_granularity_definition.rs index 805854ea535c1..db762c779661a 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_granularity_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_granularity_definition.rs @@ -8,7 +8,6 @@ use std::any::Any; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of GranularityDefinition for testing #[derive(Clone, TypedBuilder)] pub struct MockGranularityDefinition { #[builder(setter(into))] diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_definition.rs index 973ced3782e1f..118d2aa49c922 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_definition.rs @@ -8,15 +8,12 @@ use std::collections::HashMap; use std::rc::Rc; use typed_builder::TypedBuilder; -/// Mock implementation of JoinDefinition for testing -#[derive(TypedBuilder)] +#[derive(Debug, TypedBuilder)] pub struct MockJoinDefinition { - // Fields from JoinDefinitionStatic root: String, #[builder(default)] multiplication_factor: HashMap, - // Trait field joins: Vec>, } @@ -42,168 +39,3 @@ impl JoinDefinition for MockJoinDefinition { self } } - -#[cfg(test)] -mod tests { - use super::*; - use crate::test_fixtures::cube_bridge::MockJoinItemDefinition; - - #[test] - fn test_basic_join_definition() { - let join_item = Rc::new( - MockJoinItem::builder() - 
.from("orders".to_string()) - .to("users".to_string()) - .original_from("Orders".to_string()) - .original_to("Users".to_string()) - .join(Rc::new( - MockJoinItemDefinition::builder() - .relationship("many_to_one".to_string()) - .sql("{orders.user_id} = {users.id}".to_string()) - .build(), - )) - .build(), - ); - - let join_def = MockJoinDefinition::builder() - .root("orders".to_string()) - .joins(vec![join_item]) - .build(); - - assert_eq!(join_def.static_data().root, "orders"); - let joins = join_def.joins().unwrap(); - assert_eq!(joins.len(), 1); - } - - #[test] - fn test_join_definition_with_multiplication_factor() { - let mut mult_factor = HashMap::new(); - mult_factor.insert("orders".to_string(), true); - mult_factor.insert("users".to_string(), false); - - let join_def = MockJoinDefinition::builder() - .root("orders".to_string()) - .multiplication_factor(mult_factor.clone()) - .joins(vec![]) - .build(); - - assert_eq!(join_def.static_data().multiplication_factor, mult_factor); - } - - #[test] - fn test_join_definition_with_multiple_joins() { - let join_to_users = Rc::new( - MockJoinItem::builder() - .from("orders".to_string()) - .to("users".to_string()) - .original_from("Orders".to_string()) - .original_to("Users".to_string()) - .join(Rc::new( - MockJoinItemDefinition::builder() - .relationship("many_to_one".to_string()) - .sql("{orders.user_id} = {users.id}".to_string()) - .build(), - )) - .build(), - ); - - let join_to_products = Rc::new( - MockJoinItem::builder() - .from("orders".to_string()) - .to("products".to_string()) - .original_from("Orders".to_string()) - .original_to("Products".to_string()) - .join(Rc::new( - MockJoinItemDefinition::builder() - .relationship("many_to_one".to_string()) - .sql("{orders.product_id} = {products.id}".to_string()) - .build(), - )) - .build(), - ); - - let join_def = MockJoinDefinition::builder() - .root("orders".to_string()) - .joins(vec![join_to_users, join_to_products]) - .build(); - - let joins = join_def.joins().unwrap(); - assert_eq!(joins.len(), 2); - assert_eq!(joins[0].static_data().to, "users"); - assert_eq!(joins[1].static_data().to, "products"); - } - - #[test] - fn test_complex_join_graph() { - // Orders -> Users - let join_orders_users = Rc::new( - MockJoinItem::builder() - .from("orders".to_string()) - .to("users".to_string()) - .original_from("Orders".to_string()) - .original_to("Users".to_string()) - .join(Rc::new( - MockJoinItemDefinition::builder() - .relationship("many_to_one".to_string()) - .sql("{orders.user_id} = {users.id}".to_string()) - .build(), - )) - .build(), - ); - - // Users -> Countries - let join_users_countries = Rc::new( - MockJoinItem::builder() - .from("users".to_string()) - .to("countries".to_string()) - .original_from("Users".to_string()) - .original_to("Countries".to_string()) - .join(Rc::new( - MockJoinItemDefinition::builder() - .relationship("many_to_one".to_string()) - .sql("{users.country_id} = {countries.id}".to_string()) - .build(), - )) - .build(), - ); - - // Orders -> Products - let join_orders_products = Rc::new( - MockJoinItem::builder() - .from("orders".to_string()) - .to("products".to_string()) - .original_from("Orders".to_string()) - .original_to("Products".to_string()) - .join(Rc::new( - MockJoinItemDefinition::builder() - .relationship("many_to_many".to_string()) - .sql("{orders.id} = {order_items.order_id} AND {order_items.product_id} = {products.id}".to_string()) - .build(), - )) - .build(), - ); - - let mut mult_factor = HashMap::new(); - mult_factor.insert("products".to_string(), true); 
-
-        let join_def = MockJoinDefinition::builder()
-            .root("orders".to_string())
-            .joins(vec![
-                join_orders_users,
-                join_users_countries,
-                join_orders_products,
-            ])
-            .multiplication_factor(mult_factor)
-            .build();
-
-        let static_data = join_def.static_data();
-        assert_eq!(static_data.root, "orders");
-        assert_eq!(
-            static_data.multiplication_factor.get("products"),
-            Some(&true)
-        );
-
-        let joins = join_def.joins().unwrap();
-        assert_eq!(joins.len(), 3);
-    }
-}
diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_graph.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_graph.rs
index 808ec59c3a82b..46101d6ca314c 100644
--- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_graph.rs
+++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_graph.rs
@@ -1,24 +1,501 @@
+use crate::cube_bridge::evaluator::CubeEvaluator;
 use crate::cube_bridge::join_definition::JoinDefinition;
 use crate::cube_bridge::join_graph::JoinGraph;
 use crate::cube_bridge::join_hints::JoinHintItem;
+use crate::test_fixtures::cube_bridge::{MockJoinDefinition, MockJoinItemDefinition};
 use cubenativeutils::CubeError;
 use std::any::Any;
+use std::cell::RefCell;
+use std::collections::HashMap;
 use std::rc::Rc;

-/// Mock implementation of JoinGraph for testing
-///
-/// This mock provides a placeholder implementation.
-/// The build_join method is not implemented and will panic with todo!().
+/// Represents an edge in the join graph
 ///
-/// # Example
+/// Each edge represents a join relationship between two cubes, including both
+/// the current routing (from/to) and the original cube names (original_from/original_to).
+/// This distinction is important when dealing with cube aliases.
-///
-/// ```
-/// use cubesqlplanner::test_fixtures::cube_bridge::MockJoinGraph;
+#[derive(Debug, Clone)]
+pub struct JoinEdge {
+    pub join: Rc<MockJoinItemDefinition>,
+    pub from: String,
+    pub to: String,
+    pub original_from: String,
+    pub original_to: String,
+}
+
+/// Mock implementation of JoinGraph for testing
+///
+/// This implementation provides a graph-based representation of join relationships
+/// between cubes, matching the TypeScript JoinGraph structure from
+/// `/packages/cubejs-schema-compiler/src/compiler/JoinGraph.ts`.
+///
+/// The graph maintains both directed and undirected representations to support
+/// pathfinding and connectivity queries. It also caches built join trees to avoid
+/// redundant computation.
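+///
+/// As an illustrative sketch (the values and the `schema` variable below are
+/// hypothetical, assuming a `MockSchema` with a single `orders -> users` join;
+/// they are not part of the original patch), a compiled graph roughly holds:
+///
+/// ```ignore
+/// // nodes (directed):   {"orders": {"users": 1}, "users": {}}
+/// // undirected_nodes:   {"users": {"orders": 1}}
+/// // edges:              {"orders-users": JoinEdge { from: "orders", to: "users", .. }}
+/// let graph = schema.create_join_graph().unwrap();
+/// let join = graph
+///     .build_join(vec![
+///         JoinHintItem::Single("orders".to_string()),
+///         JoinHintItem::Single("users".to_string()),
+///     ])
+///     .unwrap();
+/// assert_eq!(join.static_data().root, "orders");
+/// ```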
 ///
-/// let join_graph = MockJoinGraph;
-/// // Note: calling build_join will panic with todo!()
-/// ```
-pub struct MockJoinGraph;
+#[derive(Clone)]
+pub struct MockJoinGraph {
+    /// Directed graph: source -> destination -> weight
+    /// Represents the directed join relationships between cubes
+    pub(crate) nodes: HashMap<String, HashMap<String, u32>>,
+
+    /// Undirected graph: destination -> source -> weight
+    /// Used for connectivity checks and pathfinding
+    pub(crate) undirected_nodes: HashMap<String, HashMap<String, u32>>,
+
+    /// Edge lookup: "from-to" -> JoinEdge
+    /// Maps edge keys to their corresponding join definitions
+    pub(crate) edges: HashMap<String, JoinEdge>,
+
+    /// Cache of built join trees: serialized cubes -> JoinDefinition
+    /// Stores previously computed join paths for reuse
+    /// Uses RefCell for interior mutability (allows caching through &self)
+    pub(crate) built_joins: RefCell<HashMap<String, Rc<MockJoinDefinition>>>,
+
+    /// Cache for connected components
+    /// Stores the connected component ID for each cube
+    /// None until first calculation
+    pub(crate) cached_connected_components: Option<HashMap<String, u32>>,
+}
+
+impl MockJoinGraph {
+    pub fn new() -> Self {
+        Self {
+            nodes: HashMap::new(),
+            undirected_nodes: HashMap::new(),
+            edges: HashMap::new(),
+            built_joins: RefCell::new(HashMap::new()),
+            cached_connected_components: None,
+        }
+    }
+
+    pub(crate) fn edge_key(from: &str, to: &str) -> String {
+        format!("{}-{}", from, to)
+    }
+
+    fn build_join_edges(
+        &self,
+        cube: &crate::test_fixtures::cube_bridge::MockCubeDefinition,
+        evaluator: &crate::test_fixtures::cube_bridge::MockCubeEvaluator,
+    ) -> Result<Vec<(String, JoinEdge)>, CubeError> {
+        let joins = cube.joins();
+        if joins.is_empty() {
+            return Ok(Vec::new());
+        }
+
+        let mut result = Vec::new();
+        let cube_name = &cube.static_data().name;
+
+        for (join_name, join_def) in joins {
+            if !evaluator.cube_exists(join_name.clone())?
{ + return Err(CubeError::user(format!("Cube {} doesn't exist", join_name))); + } + + let from_multiplied = self.get_multiplied_measures(cube_name, evaluator)?; + if !from_multiplied.is_empty() { + let static_data = evaluator.static_data(); + let primary_keys = static_data.primary_keys.get(cube_name); + if primary_keys.is_none_or(|pk| pk.is_empty()) { + return Err(CubeError::user(format!( + "primary key for '{}' is required when join is defined in order to make aggregates work properly", + cube_name + ))); + } + } + + let to_multiplied = self.get_multiplied_measures(join_name, evaluator)?; + if !to_multiplied.is_empty() { + let static_data = evaluator.static_data(); + let primary_keys = static_data.primary_keys.get(join_name); + if primary_keys.is_none_or(|pk| pk.is_empty()) { + return Err(CubeError::user(format!( + "primary key for '{}' is required when join is defined in order to make aggregates work properly", + join_name + ))); + } + } + + let edge = JoinEdge { + join: Rc::new(join_def.clone()), + from: cube_name.clone(), + to: join_name.clone(), + original_from: cube_name.clone(), + original_to: join_name.clone(), + }; + + let edge_key = Self::edge_key(cube_name, join_name); + result.push((edge_key, edge)); + } + + Ok(result) + } + + fn get_multiplied_measures( + &self, + cube_name: &str, + evaluator: &crate::test_fixtures::cube_bridge::MockCubeEvaluator, + ) -> Result, CubeError> { + let measures = evaluator.measures_for_cube(cube_name); + let multiplied_types = ["sum", "avg", "count", "number"]; + + let mut result = Vec::new(); + for (measure_name, measure) in measures { + let measure_type = &measure.static_data().measure_type; + if multiplied_types.contains(&measure_type.as_str()) { + result.push(measure_name); + } + } + + Ok(result) + } + + fn cube_from_path(&self, cube_path: &JoinHintItem) -> String { + match cube_path { + JoinHintItem::Single(name) => name.clone(), + JoinHintItem::Vector(path) => path + .last() + .expect("Vector path should not be empty") + .clone(), + } + } + + fn joins_by_path(&self, path: &[String]) -> Vec { + let mut result = Vec::new(); + for i in 0..path.len().saturating_sub(1) { + let key = Self::edge_key(&path[i], &path[i + 1]); + if let Some(edge) = self.edges.get(&key) { + result.push(edge.clone()); + } + } + result + } + + fn build_join_tree_for_root( + &self, + root: &JoinHintItem, + cubes_to_join: &[JoinHintItem], + ) -> Option<(String, Vec)> { + use crate::test_fixtures::graph_utils::find_shortest_path; + use std::collections::HashSet; + + let (root_name, additional_cubes) = match root { + JoinHintItem::Single(name) => (name.clone(), Vec::new()), + JoinHintItem::Vector(path) => { + if path.is_empty() { + return None; + } + let root_name = path[0].clone(); + let additional = if path.len() > 1 { + vec![JoinHintItem::Vector(path[1..].to_vec())] + } else { + Vec::new() + }; + (root_name, additional) + } + }; + + let mut all_cubes_to_join = additional_cubes; + all_cubes_to_join.extend_from_slice(cubes_to_join); + + let mut nodes_joined: HashSet = HashSet::new(); + + let mut all_joins: Vec<(usize, JoinEdge)> = Vec::new(); + let mut next_index = 0; + + for join_hint in &all_cubes_to_join { + let path_elements = match join_hint { + JoinHintItem::Single(name) => vec![name.clone()], + JoinHintItem::Vector(path) => path.clone(), + }; + + let mut prev_node = root_name.clone(); + + for to_join in &path_elements { + if to_join == &prev_node { + continue; + } + + if nodes_joined.contains(to_join) { + prev_node = to_join.clone(); + continue; + } + + let path = 
find_shortest_path(&self.nodes, &prev_node, to_join); + path.as_ref()?; + + let path = path.unwrap(); + + let found_joins = self.joins_by_path(&path); + + for join in found_joins { + all_joins.push((next_index, join)); + next_index += 1; + } + + nodes_joined.insert(to_join.clone()); + prev_node = to_join.clone(); + } + } + + all_joins.sort_by_key(|(idx, _)| *idx); + + let mut seen_keys: HashSet = HashSet::new(); + let mut unique_joins: Vec = Vec::new(); + + for (_, join) in all_joins { + let key = Self::edge_key(&join.from, &join.to); + if !seen_keys.contains(&key) { + seen_keys.insert(key); + unique_joins.push(join); + } + } + + Some((root_name, unique_joins)) + } + + pub fn build_join( + &self, + cubes_to_join: Vec, + ) -> Result, CubeError> { + if cubes_to_join.is_empty() { + return Err(CubeError::user( + "Cannot build join with empty cube list".to_string(), + )); + } + + let cache_key = serde_json::to_string(&cubes_to_join).map_err(|e| { + CubeError::internal(format!("Failed to serialize cubes_to_join: {}", e)) + })?; + + { + let cache = self.built_joins.borrow(); + if let Some(cached) = cache.get(&cache_key) { + return Ok(cached.clone()); + } + } + + let mut join_trees: Vec<(String, Vec)> = Vec::new(); + + for i in 0..cubes_to_join.len() { + let root = &cubes_to_join[i]; + let mut other_cubes = Vec::new(); + other_cubes.extend_from_slice(&cubes_to_join[0..i]); + other_cubes.extend_from_slice(&cubes_to_join[i + 1..]); + + if let Some(tree) = self.build_join_tree_for_root(root, &other_cubes) { + join_trees.push(tree); + } + } + + join_trees.sort_by_key(|(_, joins)| joins.len()); + + let (root_name, joins) = join_trees.first().ok_or_else(|| { + let cube_names: Vec = cubes_to_join + .iter() + .map(|hint| match hint { + JoinHintItem::Single(name) => format!("'{}'", name), + JoinHintItem::Vector(path) => format!("'{}'", path.join(".")), + }) + .collect(); + CubeError::user(format!( + "Can't find join path to join {}", + cube_names.join(", ") + )) + })?; + + let mut multiplication_factor: HashMap = HashMap::new(); + for cube_hint in &cubes_to_join { + let cube_name = self.cube_from_path(cube_hint); + let factor = self.find_multiplication_factor_for(&cube_name, joins); + multiplication_factor.insert(cube_name, factor); + } + + let join_items: Vec> = joins + .iter() + .map(|edge| self.join_edge_to_mock_join_item(edge)) + .collect(); + + let join_def = Rc::new( + MockJoinDefinition::builder() + .root(root_name.clone()) + .joins(join_items) + .multiplication_factor(multiplication_factor) + .build(), + ); + + self.built_joins + .borrow_mut() + .insert(cache_key, join_def.clone()); + + Ok(join_def) + } + + fn join_edge_to_mock_join_item( + &self, + edge: &JoinEdge, + ) -> Rc { + use crate::test_fixtures::cube_bridge::MockJoinItem; + + Rc::new( + MockJoinItem::builder() + .from(edge.from.clone()) + .to(edge.to.clone()) + .original_from(edge.original_from.clone()) + .original_to(edge.original_to.clone()) + .join(edge.join.clone()) + .build(), + ) + } + + pub(crate) fn check_if_cube_multiplied(&self, cube: &str, join: &JoinEdge) -> bool { + let relationship = &join.join.static_data().relationship; + + (join.from == cube && relationship == "hasMany") + || (join.to == cube && relationship == "belongsTo") + } + + pub(crate) fn find_multiplication_factor_for(&self, cube: &str, joins: &[JoinEdge]) -> bool { + use std::collections::HashSet; + + let mut visited: HashSet = HashSet::new(); + + fn find_if_multiplied_recursive( + graph: &MockJoinGraph, + current_cube: &str, + joins: &[JoinEdge], + visited: &mut 
HashSet, + ) -> bool { + if visited.contains(current_cube) { + return false; + } + visited.insert(current_cube.to_string()); + + let next_node = |join: &JoinEdge| -> String { + if join.from == current_cube { + join.to.clone() + } else { + join.from.clone() + } + }; + + let next_joins: Vec<&JoinEdge> = joins + .iter() + .filter(|j| j.from == current_cube || j.to == current_cube) + .collect(); + + if next_joins.iter().any(|next_join| { + let next = next_node(next_join); + graph.check_if_cube_multiplied(current_cube, next_join) && !visited.contains(&next) + }) { + return true; + } + + next_joins.iter().any(|next_join| { + let next = next_node(next_join); + find_if_multiplied_recursive(graph, &next, joins, visited) + }) + } + + find_if_multiplied_recursive(self, cube, joins, &mut visited) + } + + pub fn compile( + &mut self, + cubes: &[Rc], + evaluator: &crate::test_fixtures::cube_bridge::MockCubeEvaluator, + ) -> Result<(), CubeError> { + self.edges.clear(); + self.nodes.clear(); + self.undirected_nodes.clear(); + self.cached_connected_components = None; + + for cube in cubes { + let cube_name = cube.static_data().name.clone(); + self.nodes.entry(cube_name).or_default(); + } + + for cube in cubes { + let cube_edges = self.build_join_edges(cube, evaluator)?; + for (key, edge) in cube_edges { + self.edges.insert(key, edge); + } + } + + for edge in self.edges.values() { + self.nodes + .entry(edge.from.clone()) + .or_default() + .insert(edge.to.clone(), 1); + } + + for edge in self.edges.values() { + self.undirected_nodes + .entry(edge.to.clone()) + .or_default() + .insert(edge.from.clone(), 1); + } + + Ok(()) + } + + #[allow(dead_code)] + fn find_connected_component( + &self, + component_id: u32, + node: &str, + components: &mut HashMap, + ) { + if components.contains_key(node) { + return; + } + + components.insert(node.to_string(), component_id); + + if let Some(connected_nodes) = self.undirected_nodes.get(node) { + for connected_node in connected_nodes.keys() { + self.find_connected_component(component_id, connected_node, components); + } + } + + if let Some(connected_nodes) = self.nodes.get(node) { + for connected_node in connected_nodes.keys() { + self.find_connected_component(component_id, connected_node, components); + } + } + } + + #[allow(dead_code)] + pub fn connected_components(&mut self) -> HashMap { + if let Some(cached) = &self.cached_connected_components { + return cached.clone(); + } + + let mut component_id: u32 = 1; + let mut components: HashMap = HashMap::new(); + + let node_names: Vec = self.nodes.keys().cloned().collect(); + + for node in node_names { + if !components.contains_key(&node) { + self.find_connected_component(component_id, &node, &mut components); + component_id += 1; + } + } + + self.cached_connected_components = Some(components.clone()); + + components + } +} + +impl Default for MockJoinGraph { + fn default() -> Self { + Self::new() + } +} impl JoinGraph for MockJoinGraph { fn as_any(self: Rc) -> Rc { @@ -27,19 +504,9 @@ impl JoinGraph for MockJoinGraph { fn build_join( &self, - _cubes_to_join: Vec, + cubes_to_join: Vec, ) -> Result, CubeError> { - todo!("build_join not implemented in MockJoinGraph") - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_can_create() { - let _join_graph = MockJoinGraph; - // Just verify we can create the mock + let result = self.build_join(cubes_to_join)?; + Ok(result as Rc) } } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item.rs 
b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item.rs index 29c412c602d70..5ee6e85c78a02 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item.rs @@ -8,7 +8,7 @@ use std::rc::Rc; use typed_builder::TypedBuilder; /// Mock implementation of JoinItem for testing -#[derive(TypedBuilder)] +#[derive(Debug, TypedBuilder)] pub struct MockJoinItem { // Fields from JoinItemStatic from: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item_definition.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item_definition.rs index fe138a6fafe6f..b207ebefa7c7a 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item_definition.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_join_item_definition.rs @@ -8,7 +8,7 @@ use std::rc::Rc; use typed_builder::TypedBuilder; /// Mock implementation of JoinItemDefinition for testing -#[derive(TypedBuilder)] +#[derive(Debug, Clone, TypedBuilder)] pub struct MockJoinItemDefinition { // Fields from JoinItemDefinitionStatic relationship: String, diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_schema.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_schema.rs index 177971315c2ce..0e71c14db0cce 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_schema.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mock_schema.rs @@ -1,16 +1,19 @@ use crate::test_fixtures::cube_bridge::{ - MockCubeDefinition, MockCubeEvaluator, MockDimensionDefinition, MockMeasureDefinition, - MockSegmentDefinition, + MockCubeDefinition, MockCubeEvaluator, MockDimensionDefinition, MockJoinGraph, + MockJoinItemDefinition, MockMeasureDefinition, MockSegmentDefinition, }; +use cubenativeutils::CubeError; use std::collections::HashMap; use std::rc::Rc; /// Mock schema containing cubes with their measures and dimensions +#[derive(Clone)] pub struct MockSchema { cubes: HashMap, } /// Single cube with its definition and members +#[derive(Clone)] pub struct MockCube { pub definition: MockCubeDefinition, pub measures: HashMap>, @@ -95,6 +98,84 @@ impl MockSchema { ) -> Rc { Rc::new(MockCubeEvaluator::with_primary_keys(self, primary_keys)) } + + /// Create a MockJoinGraph from this schema + /// + /// This method: + /// 1. Extracts all cubes as Vec> + /// 2. Creates a temporary MockCubeEvaluator for validation + /// 3. Creates and compiles a MockJoinGraph + /// + /// # Returns + /// * `Ok(MockJoinGraph)` - Compiled join graph + /// * `Err(CubeError)` - If join graph compilation fails (invalid joins, missing PKs, etc.) 
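+    ///
+    /// # Example
+    ///
+    /// A minimal sketch (the cube name and setup are illustrative, not part of
+    /// the original patch); a schema without joins compiles to an empty graph:
+    ///
+    /// ```ignore
+    /// let schema = MockSchemaBuilder::new()
+    ///     .add_cube("users")
+    ///     .finish_cube()
+    ///     .build();
+    /// let join_graph = schema.create_join_graph().unwrap();
+    /// ```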
+ pub fn create_join_graph(&self) -> Result { + // Collect cubes as Vec> + let cubes: Vec> = self + .cubes + .values() + .map(|mock_cube| Rc::new(mock_cube.definition.clone())) + .collect(); + + // Extract primary keys for evaluator + let mut primary_keys = HashMap::new(); + for (cube_name, cube) in &self.cubes { + let mut pk_dimensions = Vec::new(); + for (dim_name, dimension) in &cube.dimensions { + if dimension.static_data().primary_key == Some(true) { + pk_dimensions.push(dim_name.clone()); + } + } + pk_dimensions.sort(); + if !pk_dimensions.is_empty() { + primary_keys.insert(cube_name.clone(), pk_dimensions); + } + } + + // Clone self for evaluator + let evaluator = MockCubeEvaluator::with_primary_keys(self.clone(), primary_keys); + + // Create and compile join graph + let mut join_graph = MockJoinGraph::new(); + join_graph.compile(&cubes, &evaluator)?; + + Ok(join_graph) + } + + /// Create a MockCubeEvaluator with join graph from this schema + /// + /// This method creates an evaluator with a fully compiled join graph, + /// enabling join path resolution in tests. + /// + /// # Returns + /// * `Ok(Rc)` - Evaluator with join graph + /// * `Err(CubeError)` - If join graph compilation fails + pub fn create_evaluator_with_join_graph(self) -> Result, CubeError> { + // Extract primary keys + let mut primary_keys = HashMap::new(); + for (cube_name, cube) in &self.cubes { + let mut pk_dimensions = Vec::new(); + for (dim_name, dimension) in &cube.dimensions { + if dimension.static_data().primary_key == Some(true) { + pk_dimensions.push(dim_name.clone()); + } + } + pk_dimensions.sort(); + if !pk_dimensions.is_empty() { + primary_keys.insert(cube_name.clone(), pk_dimensions); + } + } + + // Compile join graph + let join_graph = self.create_join_graph()?; + + // Create evaluator with join graph + Ok(Rc::new(MockCubeEvaluator::with_join_graph( + self, + primary_keys, + join_graph, + ))) + } } /// Builder for MockSchema with fluent API @@ -119,6 +200,7 @@ impl MockSchemaBuilder { measures: HashMap::new(), dimensions: HashMap::new(), segments: HashMap::new(), + joins: HashMap::new(), } } @@ -154,6 +236,7 @@ pub struct MockCubeBuilder { measures: HashMap>, dimensions: HashMap>, segments: HashMap>, + joins: HashMap, } impl MockCubeBuilder { @@ -193,9 +276,15 @@ impl MockCubeBuilder { self } + /// Add a join to the cube + pub fn add_join(mut self, name: impl Into, definition: MockJoinItemDefinition) -> Self { + self.joins.insert(name.into(), definition); + self + } + /// Finish building this cube and return to schema builder pub fn finish_cube(mut self) -> MockSchemaBuilder { - let cube_def = self.cube_definition.unwrap_or_else(|| { + let mut cube_def = self.cube_definition.unwrap_or_else(|| { // Create default cube definition with the cube name MockCubeDefinition::builder() .name(self.cube_name.clone()) @@ -203,6 +292,21 @@ impl MockCubeBuilder { .build() }); + // Merge joins from builder with joins from cube definition + let mut all_joins = cube_def.joins().clone(); + all_joins.extend(self.joins); + + // Rebuild cube definition with merged joins + let static_data = cube_def.static_data(); + cube_def = MockCubeDefinition::builder() + .name(static_data.name.clone()) + .sql_alias(static_data.sql_alias.clone()) + .is_view(static_data.is_view) + .is_calendar(static_data.is_calendar) + .join_map(static_data.join_map.clone()) + .joins(all_joins) + .build(); + let cube = MockCube { definition: cube_def, measures: self.measures, @@ -400,6 +504,7 @@ impl MockViewBuilder { mod tests { use super::*; use 
crate::cube_bridge::dimension_definition::DimensionDefinition; + use crate::cube_bridge::join_item_definition::JoinItemDefinition; use crate::cube_bridge::measure_definition::MeasureDefinition; use crate::cube_bridge::segment_definition::SegmentDefinition; @@ -1102,4 +1207,273 @@ mod tests { .finish_view() .build(); } + + #[test] + fn test_schema_builder_with_joins() { + let schema = MockSchemaBuilder::new() + .add_cube("users") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .primary_key(Some(true)) + .build(), + ) + .finish_cube() + .add_cube("orders") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .build(), + ) + .add_dimension( + "user_id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("user_id".to_string()) + .build(), + ) + .add_join( + "users", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.user_id = {users.id}".to_string()) + .build(), + ) + .finish_cube() + .build(); + + // Verify cubes exist + assert!(schema.get_cube("users").is_some()); + assert!(schema.get_cube("orders").is_some()); + + // Verify join in orders cube + let orders_cube = schema.get_cube("orders").unwrap(); + assert_eq!(orders_cube.definition.joins().len(), 1); + assert!(orders_cube.definition.get_join("users").is_some()); + + let users_join = orders_cube.definition.get_join("users").unwrap(); + assert_eq!(users_join.static_data().relationship, "many_to_one"); + } + + #[test] + fn test_complex_schema_with_join_relationships() { + let schema = MockSchemaBuilder::new() + .add_cube("countries") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .primary_key(Some(true)) + .build(), + ) + .add_dimension( + "name", + MockDimensionDefinition::builder() + .dimension_type("string".to_string()) + .sql("name".to_string()) + .build(), + ) + .finish_cube() + .add_cube("users") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .primary_key(Some(true)) + .build(), + ) + .add_dimension( + "country_id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("country_id".to_string()) + .build(), + ) + .add_join( + "countries", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.country_id = {countries.id}".to_string()) + .build(), + ) + .finish_cube() + .add_cube("orders") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .primary_key(Some(true)) + .build(), + ) + .add_dimension( + "user_id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("user_id".to_string()) + .build(), + ) + .add_measure( + "count", + MockMeasureDefinition::builder() + .measure_type("count".to_string()) + .sql("COUNT(*)".to_string()) + .build(), + ) + .add_join( + "users", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.user_id = {users.id}".to_string()) + .build(), + ) + .finish_cube() + .build(); + + // Verify all cubes exist + assert_eq!(schema.cube_names().len(), 3); + + // Verify countries has no joins + let countries_cube = schema.get_cube("countries").unwrap(); + assert_eq!(countries_cube.definition.joins().len(), 0); + + 
// Verify users has join to countries + let users_cube = schema.get_cube("users").unwrap(); + assert_eq!(users_cube.definition.joins().len(), 1); + assert!(users_cube.definition.get_join("countries").is_some()); + + // Verify orders has join to users + let orders_cube = schema.get_cube("orders").unwrap(); + assert_eq!(orders_cube.definition.joins().len(), 1); + assert!(orders_cube.definition.get_join("users").is_some()); + + // Verify join SQL + let orders_users_join = orders_cube.definition.get_join("users").unwrap(); + let sql = orders_users_join.sql().unwrap(); + assert_eq!(sql.args_names(), &vec!["CUBE", "users"]); + } + + #[test] + fn test_cube_with_multiple_joins_via_builder() { + let schema = MockSchemaBuilder::new() + .add_cube("orders") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .build(), + ) + .add_join( + "users", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.user_id = {users.id}".to_string()) + .build(), + ) + .add_join( + "products", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.product_id = {products.id}".to_string()) + .build(), + ) + .add_join( + "warehouses", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.warehouse_id = {warehouses.id}".to_string()) + .build(), + ) + .finish_cube() + .build(); + + let orders_cube = schema.get_cube("orders").unwrap(); + assert_eq!(orders_cube.definition.joins().len(), 3); + assert!(orders_cube.definition.get_join("users").is_some()); + assert!(orders_cube.definition.get_join("products").is_some()); + assert!(orders_cube.definition.get_join("warehouses").is_some()); + } + + #[test] + fn test_schema_with_join_graph_integration() { + use crate::cube_bridge::join_hints::JoinHintItem; + + // Small schema: orders -> users (one join) + let schema = MockSchemaBuilder::new() + .add_cube("users") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .primary_key(Some(true)) + .build(), + ) + .finish_cube() + .add_cube("orders") + .add_dimension( + "id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("id".to_string()) + .primary_key(Some(true)) + .build(), + ) + .add_dimension( + "user_id", + MockDimensionDefinition::builder() + .dimension_type("number".to_string()) + .sql("user_id".to_string()) + .build(), + ) + .add_join( + "users", + MockJoinItemDefinition::builder() + .relationship("many_to_one".to_string()) + .sql("{CUBE}.user_id = {users.id}".to_string()) + .build(), + ) + .finish_cube() + .build(); + + // Verify create_join_graph() succeeds + let join_graph_result = schema.create_join_graph(); + assert!( + join_graph_result.is_ok(), + "create_join_graph should succeed" + ); + + // Verify create_evaluator_with_join_graph() succeeds + let evaluator_result = schema.create_evaluator_with_join_graph(); + assert!( + evaluator_result.is_ok(), + "create_evaluator_with_join_graph should succeed" + ); + let evaluator = evaluator_result.unwrap(); + + // Verify evaluator.join_graph() returns Some(graph) + assert!( + evaluator.join_graph().is_some(), + "Evaluator should have join graph" + ); + let graph = evaluator.join_graph().unwrap(); + + // Verify graph.build_join() works + let cubes = vec![ + JoinHintItem::Single("orders".to_string()), + JoinHintItem::Single("users".to_string()), + ]; + let join_def_result = 
graph.build_join(cubes); + assert!( + join_def_result.is_ok(), + "graph.build_join should succeed for orders -> users" + ); + } } diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mod.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mod.rs index 4296a6e61abf1..63975224378ce 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mod.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/cube_bridge/mod.rs @@ -35,7 +35,6 @@ pub use mock_base_tools::MockBaseTools; pub use mock_case_definition::MockCaseDefinition; pub use mock_case_else_item::MockCaseElseItem; pub use mock_case_item::MockCaseItem; -pub use mock_case_switch_definition::MockCaseSwitchDefinition; pub use mock_case_switch_else_item::MockCaseSwitchElseItem; pub use mock_case_switch_item::MockCaseSwitchItem; pub use mock_cube_definition::MockCubeDefinition; @@ -45,6 +44,7 @@ pub use mock_evaluator::MockCubeEvaluator; pub use mock_expression_struct::MockExpressionStruct; pub use mock_geo_item::MockGeoItem; pub use mock_granularity_definition::MockGranularityDefinition; +pub use mock_join_definition::MockJoinDefinition; pub use mock_join_graph::MockJoinGraph; pub use mock_join_item::MockJoinItem; pub use mock_join_item_definition::MockJoinItemDefinition; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/graph_utils.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/graph_utils.rs new file mode 100644 index 0000000000000..a537a50f53471 --- /dev/null +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/graph_utils.rs @@ -0,0 +1,299 @@ +use petgraph::graph::NodeIndex; +use petgraph::Graph; +use std::collections::HashMap; + +/// Converts a HashMap-based graph representation to a petgraph directed graph. +/// +/// # Input Format +/// +/// The input is a nested HashMap where: +/// - Outer map keys are cube names (all nodes in the graph) +/// - Inner map represents edges: destination cube name -> edge weight +/// +/// +/// # Returns +/// +/// A tuple containing: +/// - Directed graph with cube names as node data and weights as edge data +/// - Mapping from cube name to NodeIndex for quick lookups +/// +/// # Note +/// +/// All cube names that appear in edges must also exist as keys in the outer HashMap. +pub fn build_petgraph_from_hashmap( + nodes: &HashMap>, +) -> (Graph, HashMap) { + let mut graph = Graph::::new(); + let mut node_indices = HashMap::new(); + + // First pass: Add all nodes to the graph + for cube_name in nodes.keys() { + let node_index = graph.add_node(cube_name.clone()); + node_indices.insert(cube_name.clone(), node_index); + } + + // Second pass: Add all edges + for (from_cube, edges) in nodes.iter() { + let from_index = node_indices[from_cube]; + for (to_cube, weight) in edges.iter() { + let to_index = node_indices[to_cube]; + graph.add_edge(from_index, to_index, *weight); + } + } + + (graph, node_indices) +} + +/// Finds the shortest path between two cubes using Dijkstra's algorithm. +/// +/// This function wraps petgraph's A* algorithm with a zero heuristic, which is +/// equivalent to Dijkstra's algorithm. It provides an API similar to node-dijkstra +/// for JavaScript compatibility. 
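+///
+/// A small usage sketch (node names are illustrative), mirroring the unit tests
+/// at the bottom of this file:
+///
+/// ```ignore
+/// let mut nodes: HashMap<String, HashMap<String, u32>> = HashMap::new();
+/// nodes.insert("A".to_string(), HashMap::from([("B".to_string(), 1)]));
+/// nodes.insert("B".to_string(), HashMap::from([("C".to_string(), 1)]));
+/// nodes.insert("C".to_string(), HashMap::new());
+///
+/// let path = find_shortest_path(&nodes, "A", "C");
+/// assert_eq!(
+///     path,
+///     Some(vec!["A".to_string(), "B".to_string(), "C".to_string()])
+/// );
+/// ```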
+/// +/// # Arguments +/// +/// * `nodes` - Graph representation as nested HashMap (see `build_petgraph_from_hashmap`) +/// * `start` - Name of the starting cube +/// * `end` - Name of the destination cube +/// +/// # Returns +/// +/// - `Some(Vec)` - Path from start to end (inclusive) if a path exists +/// - `None` - If no path exists or if start/end nodes don't exist in the graph +/// +/// # Edge Cases +/// +/// - If `start == end`, returns `Some(vec![start])` +/// - If `start` or `end` don't exist in the graph, returns `None` +/// - If nodes are disconnected, returns `None` +/// +/// ``` +pub fn find_shortest_path( + nodes: &HashMap>, + start: &str, + end: &str, +) -> Option> { + if start == end { + return Some(vec![start.to_string()]); + } + + if !nodes.contains_key(start) || !nodes.contains_key(end) { + return None; + } + + let (graph, node_indices) = build_petgraph_from_hashmap(nodes); + + let start_index = node_indices[start]; + let end_index = node_indices[end]; + + let result = petgraph::algo::astar( + &graph, + start_index, + |n| n == end_index, + |e| *e.weight(), + |_| 0, // Zero heuristic makes this equivalent to Dijkstra + ); + + match result { + Some((_cost, path)) => { + let cube_names: Vec = path + .iter() + .map(|&node_index| graph[node_index].clone()) + .collect(); + Some(cube_names) + } + None => None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_simple_path() { + // Graph: A -> B (weight 1) + let mut nodes = HashMap::new(); + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + nodes.insert("A".to_string(), a_edges); + nodes.insert("B".to_string(), HashMap::new()); + + let path = find_shortest_path(&nodes, "A", "B"); + assert_eq!(path, Some(vec!["A".to_string(), "B".to_string()])); + } + + #[test] + fn test_multi_hop_path() { + // Graph: A -> B -> C + let mut nodes = HashMap::new(); + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + nodes.insert("A".to_string(), a_edges); + + let mut b_edges = HashMap::new(); + b_edges.insert("C".to_string(), 1); + nodes.insert("B".to_string(), b_edges); + + nodes.insert("C".to_string(), HashMap::new()); + + let path = find_shortest_path(&nodes, "A", "C"); + assert_eq!( + path, + Some(vec!["A".to_string(), "B".to_string(), "C".to_string()]) + ); + } + + #[test] + fn test_shortest_path_selection() { + // Graph: A -> B -> C (total weight 2) + // A -> D -> C (total weight 5) + let mut nodes = HashMap::new(); + + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + a_edges.insert("D".to_string(), 3); + nodes.insert("A".to_string(), a_edges); + + let mut b_edges = HashMap::new(); + b_edges.insert("C".to_string(), 1); + nodes.insert("B".to_string(), b_edges); + + let mut d_edges = HashMap::new(); + d_edges.insert("C".to_string(), 2); + nodes.insert("D".to_string(), d_edges); + + nodes.insert("C".to_string(), HashMap::new()); + + let path = find_shortest_path(&nodes, "A", "C"); + // Should take the shorter path: A -> B -> C + assert_eq!( + path, + Some(vec!["A".to_string(), "B".to_string(), "C".to_string()]) + ); + } + + #[test] + fn test_disconnected_nodes() { + // Graph: A -> B, C -> D (no connection between them) + let mut nodes = HashMap::new(); + + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + nodes.insert("A".to_string(), a_edges); + + nodes.insert("B".to_string(), HashMap::new()); + + let mut c_edges = HashMap::new(); + c_edges.insert("D".to_string(), 1); + nodes.insert("C".to_string(), c_edges); + + 
nodes.insert("D".to_string(), HashMap::new()); + + // No path from A to D + let path = find_shortest_path(&nodes, "A", "D"); + assert_eq!(path, None); + } + + #[test] + fn test_same_start_and_end() { + // Graph: A -> B + let mut nodes = HashMap::new(); + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + nodes.insert("A".to_string(), a_edges); + nodes.insert("B".to_string(), HashMap::new()); + + // Path from A to A should be just [A] + let path = find_shortest_path(&nodes, "A", "A"); + assert_eq!(path, Some(vec!["A".to_string()])); + } + + #[test] + fn test_nonexistent_node() { + // Graph: A -> B + let mut nodes = HashMap::new(); + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + nodes.insert("A".to_string(), a_edges); + nodes.insert("B".to_string(), HashMap::new()); + + // C doesn't exist + let path = find_shortest_path(&nodes, "A", "C"); + assert_eq!(path, None); + + // Z doesn't exist either + let path = find_shortest_path(&nodes, "Z", "A"); + assert_eq!(path, None); + } + + #[test] + fn test_graph_with_cycles() { + // Graph: A -> B -> C -> A (cycle) + // A -> D -> C (alternate path) + let mut nodes = HashMap::new(); + + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + a_edges.insert("D".to_string(), 5); + nodes.insert("A".to_string(), a_edges); + + let mut b_edges = HashMap::new(); + b_edges.insert("C".to_string(), 1); + nodes.insert("B".to_string(), b_edges); + + let mut c_edges = HashMap::new(); + c_edges.insert("A".to_string(), 1); // Cycle back to A + nodes.insert("C".to_string(), c_edges); + + let mut d_edges = HashMap::new(); + d_edges.insert("C".to_string(), 1); + nodes.insert("D".to_string(), d_edges); + + // Should find shortest path A -> B -> C + let path = find_shortest_path(&nodes, "A", "C"); + assert_eq!( + path, + Some(vec!["A".to_string(), "B".to_string(), "C".to_string()]) + ); + } + + #[test] + fn test_build_petgraph_from_hashmap() { + // Verify graph is constructed correctly + let mut nodes = HashMap::new(); + + let mut a_edges = HashMap::new(); + a_edges.insert("B".to_string(), 1); + a_edges.insert("C".to_string(), 2); + nodes.insert("A".to_string(), a_edges); + + let mut b_edges = HashMap::new(); + b_edges.insert("C".to_string(), 1); + nodes.insert("B".to_string(), b_edges); + + nodes.insert("C".to_string(), HashMap::new()); + + let (graph, node_indices) = build_petgraph_from_hashmap(&nodes); + + // Check node count + assert_eq!(graph.node_count(), 3); + + // Check edge count: A->B, A->C, B->C = 3 edges + assert_eq!(graph.edge_count(), 3); + + // Check that all node names are in the mapping + assert!(node_indices.contains_key("A")); + assert!(node_indices.contains_key("B")); + assert!(node_indices.contains_key("C")); + + // Check that node indices are valid + let a_index = node_indices["A"]; + let b_index = node_indices["B"]; + let c_index = node_indices["C"]; + + assert_eq!(graph[a_index], "A"); + assert_eq!(graph[b_index], "B"); + assert_eq!(graph[c_index], "C"); + } +} diff --git a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/mod.rs b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/mod.rs index 4baca0ca1977f..d3cdba19e4dfc 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/mod.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/test_fixtures/mod.rs @@ -1,2 +1,3 @@ pub mod cube_bridge; +pub mod graph_utils; pub mod schemas; diff --git a/rust/cubesqlplanner/cubesqlplanner/src/tests/cube_evaluator/symbol_evaluator.rs 
b/rust/cubesqlplanner/cubesqlplanner/src/tests/cube_evaluator/symbol_evaluator.rs index 021e00e3b05eb..e7cd72feb450b 100644 --- a/rust/cubesqlplanner/cubesqlplanner/src/tests/cube_evaluator/symbol_evaluator.rs +++ b/rust/cubesqlplanner/cubesqlplanner/src/tests/cube_evaluator/symbol_evaluator.rs @@ -239,7 +239,7 @@ impl SqlEvaluationContext { // Create QueryTools with mocks let security_context = Rc::new(MockSecurityContext); let base_tools = Rc::new(MockBaseTools::builder().build()); - let join_graph = Rc::new(MockJoinGraph); + let join_graph = Rc::new(MockJoinGraph::new()); let query_tools = QueryTools::try_new( evaluator.clone(),