From c98d6aa260c5d1846a78c95b82c49c8fefc96656 Mon Sep 17 00:00:00 2001 From: Alexey Kartashov Date: Thu, 13 Feb 2025 12:52:27 +0100 Subject: [PATCH 1/4] tests: rename cql_types.rs -> serialization.rs --- scylla/tests/integration/main.rs | 2 +- scylla/tests/integration/{cql_types.rs => serialization.rs} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename scylla/tests/integration/{cql_types.rs => serialization.rs} (100%) diff --git a/scylla/tests/integration/main.rs b/scylla/tests/integration/main.rs index a87649ef48..0cedfde103 100644 --- a/scylla/tests/integration/main.rs +++ b/scylla/tests/integration/main.rs @@ -1,7 +1,6 @@ mod batch; mod consistency; mod cql_collections; -mod cql_types; mod cql_value; mod default_policy; mod execution_profiles; @@ -13,6 +12,7 @@ mod new_session; mod query_result; mod retries; mod self_identity; +mod serialization; mod session; mod shards; mod silent_prepare_batch; diff --git a/scylla/tests/integration/cql_types.rs b/scylla/tests/integration/serialization.rs similarity index 100% rename from scylla/tests/integration/cql_types.rs rename to scylla/tests/integration/serialization.rs From 92b0c7fd276a352e4c01ae8a852875788420e529 Mon Sep 17 00:00:00 2001 From: Alexey Kartashov Date: Thu, 13 Feb 2025 12:54:20 +0100 Subject: [PATCH 2/4] tests: move cql_value.rs tests into serialization.rs --- scylla/tests/integration/cql_value.rs | 150 ---------------------- scylla/tests/integration/main.rs | 1 - scylla/tests/integration/serialization.rs | 148 ++++++++++++++++++++- 3 files changed, 147 insertions(+), 152 deletions(-) delete mode 100644 scylla/tests/integration/cql_value.rs diff --git a/scylla/tests/integration/cql_value.rs b/scylla/tests/integration/cql_value.rs deleted file mode 100644 index e0aacd41fe..0000000000 --- a/scylla/tests/integration/cql_value.rs +++ /dev/null @@ -1,150 +0,0 @@ -use assert_matches::assert_matches; - -use scylla::client::session::Session; -use scylla::value::{CqlDuration, CqlValue}; - -use crate::utils::{create_new_session_builder, setup_tracing, unique_keyspace_name, PerformDDL}; - -#[tokio::test] -async fn test_cqlvalue_udt() { - setup_tracing(); - let session: Session = create_new_session_builder().build().await.unwrap(); - let ks = unique_keyspace_name(); - session - .ddl(format!( - "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \ - {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 1}}", - ks - )) - .await - .unwrap(); - session.use_keyspace(&ks, false).await.unwrap(); - - session - .ddl("CREATE TYPE IF NOT EXISTS cqlvalue_udt_type (int_val int, text_val text)") - .await - .unwrap(); - session.ddl("CREATE TABLE IF NOT EXISTS cqlvalue_udt_test (k int, my cqlvalue_udt_type, primary key (k))").await.unwrap(); - - let udt_cql_value = CqlValue::UserDefinedType { - keyspace: ks, - name: "cqlvalue_udt_type".to_string(), - fields: vec![ - ("int_val".to_string(), Some(CqlValue::Int(42))), - ("text_val".to_string(), Some(CqlValue::Text("hi".into()))), - ], - }; - - session - .query_unpaged( - "INSERT INTO cqlvalue_udt_test (k, my) VALUES (5, ?)", - (&udt_cql_value,), - ) - .await - .unwrap(); - - let rows_result = session - .query_unpaged("SELECT my FROM cqlvalue_udt_test", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap(); - - let (received_udt_cql_value,) = rows_result.single_row::<(CqlValue,)>().unwrap(); - - assert_eq!(received_udt_cql_value, udt_cql_value); -} - -#[tokio::test] -async fn test_cqlvalue_duration() { - setup_tracing(); - let session: Session = 
create_new_session_builder().build().await.unwrap(); - - let ks = unique_keyspace_name(); - session - .ddl(format!( - "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \ - {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 1}}", - ks - )) - .await - .unwrap(); - session.use_keyspace(&ks, false).await.unwrap(); - - let duration_cql_value = CqlValue::Duration(CqlDuration { - months: 6, - days: 9, - nanoseconds: 21372137, - }); - - session.ddl("CREATE TABLE IF NOT EXISTS cqlvalue_duration_test (pk int, ck int, v duration, primary key (pk, ck))").await.unwrap(); - let fixture_queries = vec![ - ( - "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 0, ?)", - vec![&duration_cql_value], - ), - ( - "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 1, 89h4m48s)", - vec![], - ), - ( - "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 2, PT89H8M53S)", - vec![], - ), - ( - "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 3, P0000-00-00T89:09:09)", - vec![], - ), - ]; - - for query in fixture_queries { - session.query_unpaged(query.0, query.1).await.unwrap(); - } - - let rows_result = session - .query_unpaged( - "SELECT v FROM cqlvalue_duration_test WHERE pk = ?", - (CqlValue::Int(0),), - ) - .await - .unwrap() - .into_rows_result() - .unwrap(); - - let mut rows_iter = rows_result.rows::<(CqlValue,)>().unwrap(); - - let (first_value,) = rows_iter.next().unwrap().unwrap(); - assert_eq!(first_value, duration_cql_value); - - let (second_value,) = rows_iter.next().unwrap().unwrap(); - assert_eq!( - second_value, - CqlValue::Duration(CqlDuration { - months: 0, - days: 0, - nanoseconds: 320_688_000_000_000, - }) - ); - - let (third_value,) = rows_iter.next().unwrap().unwrap(); - assert_eq!( - third_value, - CqlValue::Duration(CqlDuration { - months: 0, - days: 0, - nanoseconds: 320_933_000_000_000, - }) - ); - - let (fourth_value,) = rows_iter.next().unwrap().unwrap(); - assert_eq!( - fourth_value, - CqlValue::Duration(CqlDuration { - months: 0, - days: 0, - nanoseconds: 320_949_000_000_000, - }) - ); - - assert_matches!(rows_iter.next(), None); -} diff --git a/scylla/tests/integration/main.rs b/scylla/tests/integration/main.rs index 0cedfde103..1a47f01a13 100644 --- a/scylla/tests/integration/main.rs +++ b/scylla/tests/integration/main.rs @@ -1,7 +1,6 @@ mod batch; mod consistency; mod cql_collections; -mod cql_value; mod default_policy; mod execution_profiles; mod history; diff --git a/scylla/tests/integration/serialization.rs b/scylla/tests/integration/serialization.rs index bec26c382c..40d84be2eb 100644 --- a/scylla/tests/integration/serialization.rs +++ b/scylla/tests/integration/serialization.rs @@ -1,7 +1,8 @@ +use assert_matches::assert_matches; use itertools::Itertools; use scylla::client::session::Session; use scylla::serialize::value::SerializeValue; -use scylla::value::{Counter, CqlDate, CqlTime, CqlTimestamp, CqlTimeuuid, CqlValue, CqlVarint}; +use scylla::value::{Counter, CqlDate, CqlDuration, CqlTime, CqlTimestamp, CqlTimeuuid, CqlValue, CqlVarint}; use scylla::{DeserializeValue, SerializeValue}; use std::cmp::PartialEq; use std::fmt::Debug; @@ -1863,3 +1864,148 @@ async fn test_udt_with_missing_field() { ) .await; } + + +#[tokio::test] +async fn test_cqlvalue_udt() { + setup_tracing(); + let session: Session = create_new_session_builder().build().await.unwrap(); + let ks = unique_keyspace_name(); + session + .ddl(format!( + "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \ + {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 
1}}", + ks + )) + .await + .unwrap(); + session.use_keyspace(&ks, false).await.unwrap(); + + session + .ddl("CREATE TYPE IF NOT EXISTS cqlvalue_udt_type (int_val int, text_val text)") + .await + .unwrap(); + session.ddl("CREATE TABLE IF NOT EXISTS cqlvalue_udt_test (k int, my cqlvalue_udt_type, primary key (k))").await.unwrap(); + + let udt_cql_value = CqlValue::UserDefinedType { + keyspace: ks, + name: "cqlvalue_udt_type".to_string(), + fields: vec![ + ("int_val".to_string(), Some(CqlValue::Int(42))), + ("text_val".to_string(), Some(CqlValue::Text("hi".into()))), + ], + }; + + session + .query_unpaged( + "INSERT INTO cqlvalue_udt_test (k, my) VALUES (5, ?)", + (&udt_cql_value,), + ) + .await + .unwrap(); + + let rows_result = session + .query_unpaged("SELECT my FROM cqlvalue_udt_test", &[]) + .await + .unwrap() + .into_rows_result() + .unwrap(); + + let (received_udt_cql_value,) = rows_result.single_row::<(CqlValue,)>().unwrap(); + + assert_eq!(received_udt_cql_value, udt_cql_value); +} + +#[tokio::test] +async fn test_cqlvalue_duration() { + setup_tracing(); + let session: Session = create_new_session_builder().build().await.unwrap(); + + let ks = unique_keyspace_name(); + session + .ddl(format!( + "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \ + {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 1}}", + ks + )) + .await + .unwrap(); + session.use_keyspace(&ks, false).await.unwrap(); + + let duration_cql_value = CqlValue::Duration(CqlDuration { + months: 6, + days: 9, + nanoseconds: 21372137, + }); + + session.ddl("CREATE TABLE IF NOT EXISTS cqlvalue_duration_test (pk int, ck int, v duration, primary key (pk, ck))").await.unwrap(); + let fixture_queries = vec![ + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 0, ?)", + vec![&duration_cql_value], + ), + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 1, 89h4m48s)", + vec![], + ), + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 2, PT89H8M53S)", + vec![], + ), + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 3, P0000-00-00T89:09:09)", + vec![], + ), + ]; + + for query in fixture_queries { + session.query_unpaged(query.0, query.1).await.unwrap(); + } + + let rows_result = session + .query_unpaged( + "SELECT v FROM cqlvalue_duration_test WHERE pk = ?", + (CqlValue::Int(0),), + ) + .await + .unwrap() + .into_rows_result() + .unwrap(); + + let mut rows_iter = rows_result.rows::<(CqlValue,)>().unwrap(); + + let (first_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!(first_value, duration_cql_value); + + let (second_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!( + second_value, + CqlValue::Duration(CqlDuration { + months: 0, + days: 0, + nanoseconds: 320_688_000_000_000, + }) + ); + + let (third_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!( + third_value, + CqlValue::Duration(CqlDuration { + months: 0, + days: 0, + nanoseconds: 320_933_000_000_000, + }) + ); + + let (fourth_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!( + fourth_value, + CqlValue::Duration(CqlDuration { + months: 0, + days: 0, + nanoseconds: 320_949_000_000_000, + }) + ); + + assert_matches!(rows_iter.next(), None); +} From af38b247267ffb22677f5073588980aea5c9d2f7 Mon Sep 17 00:00:00 2001 From: Alexey Kartashov Date: Thu, 13 Feb 2025 13:05:20 +0100 Subject: [PATCH 3/4] tests: refactor and split old serialization tests Generalized most tests and split some of them into distinct tests to see the results easier --- 
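Notes: the refactor below collapses the old per-type test functions into a small set of generic helpers (prepare_test_table, run_foreign_serialize_test, run_literal_and_bound_value_test and friends), each parameterized over the Rust value type and the CQL type name. A minimal sketch of the round-trip pattern those helpers share, kept illustrative only: a concrete i32 helper with a hypothetical name, no NULL/MaybeEmpty handling and no custom error callbacks, using the same query_unpaged / into_rows_result / single_row calls as the code in this diff.

use scylla::client::session::Session;

// Insert a bound value into the single-column test table, read it back, compare.
async fn round_trip_int(session: &Session, table: &str, value: i32) {
    session
        .query_unpaged(
            format!("INSERT INTO {} (id, val) VALUES (1, ?)", table),
            (value,),
        )
        .await
        .unwrap();

    let (read,) = session
        .query_unpaged(format!("SELECT val FROM {} WHERE id = 1", table), &[])
        .await
        .unwrap()
        .into_rows_result()
        .unwrap()
        .single_row::<(i32,)>()
        .unwrap();

    assert_eq!(read, value);
}
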
scylla/tests/integration/serialization.rs | 1521 +++++++++------------ 1 file changed, 628 insertions(+), 893 deletions(-) diff --git a/scylla/tests/integration/serialization.rs b/scylla/tests/integration/serialization.rs index 40d84be2eb..57d8fd1f41 100644 --- a/scylla/tests/integration/serialization.rs +++ b/scylla/tests/integration/serialization.rs @@ -1,27 +1,26 @@ -use assert_matches::assert_matches; -use itertools::Itertools; -use scylla::client::session::Session; -use scylla::serialize::value::SerializeValue; -use scylla::value::{Counter, CqlDate, CqlDuration, CqlTime, CqlTimestamp, CqlTimeuuid, CqlValue, CqlVarint}; -use scylla::{DeserializeValue, SerializeValue}; -use std::cmp::PartialEq; -use std::fmt::Debug; +use core::str; +use std::fmt::{Debug, Display}; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use std::str::FromStr; +use std::vec; use crate::utils::{ create_new_session_builder, scylla_supports_tablets, setup_tracing, unique_keyspace_name, DeserializeOwnedValue, PerformDDL, }; +use assert_matches::assert_matches; +use itertools::Itertools; +use scylla::client::session::Session; +use scylla::deserialize::row::BuiltinDeserializationError; +use scylla::deserialize::value::{Emptiable, MaybeEmpty}; +use scylla::errors::DeserializationError; +use scylla::serialize::value::SerializeValue; +use scylla::value::{ + Counter, CqlDate, CqlDuration, CqlTime, CqlTimestamp, CqlTimeuuid, CqlValue, CqlVarint, +}; +use scylla::{DeserializeValue, SerializeValue}; -// Used to prepare a table for test -// Creates a new keyspace, without tablets if requested and the ScyllaDB instance supports them. -// Drops and creates table {table_name} (id int PRIMARY KEY, val {type_name}) -async fn init_test_maybe_without_tablets( - table_name: &str, - type_name: &str, - supports_tablets: bool, -) -> Session { +async fn prepare_test_table(table_name: &str, type_name: &str, supports_tablets: bool) -> Session { let session: Session = create_new_session_builder().build().await.unwrap(); let ks = unique_keyspace_name(); @@ -54,72 +53,268 @@ async fn init_test_maybe_without_tablets( session } -// Used to prepare a table for test -// Creates a new keyspace -// Drops and creates table {table_name} (id int PRIMARY KEY, val {type_name}) -async fn init_test(table_name: &str, type_name: &str) -> Session { - init_test_maybe_without_tablets(table_name, type_name, true).await +fn assert_error_is_not_expected_non_null(error: &DeserializationError) { + match &error.downcast_ref::().unwrap().kind{ + scylla::deserialize::row::BuiltinDeserializationErrorKind::ColumnDeserializationFailed { err, .. } => match err.downcast_ref::().unwrap().kind { + scylla::deserialize::value::BuiltinDeserializationErrorKind::ExpectedNonNull => { + }, + _ => panic!("Unexpected error: {:?}", error), + } + _ => panic!("Unexpected error: {:?}", error), + } } -// This function tests serialization and deserialization mechanisms by sending insert and select -// queries to running Scylla instance. 
-// To do so, it: -// Prepares a table for tests (by creating test keyspace and table {table_name} using init_test) -// Runs a test that, for every element of `tests`: -// - inserts 2 values (one encoded as string and one as bound values) into table {type_name} -// - selects this 2 values and compares them with expected value -// Expected values and bound values are computed using T::from_str -async fn run_tests(tests: &[&str], type_name: &str) +#[cfg(any(feature = "chrono-04", feature = "time-03",))] +fn assert_error_is_not_overflow_or_expected_non_null(error: &DeserializationError) { + match &error.downcast_ref::().unwrap().kind { + scylla::deserialize::row::BuiltinDeserializationErrorKind::ColumnDeserializationFailed { err, .. } => match err.downcast_ref::().unwrap().kind { + scylla::deserialize::value::BuiltinDeserializationErrorKind::ValueOverflow => { + }, + scylla::deserialize::value::BuiltinDeserializationErrorKind::ExpectedNonNull => { + }, + _ => panic!("Unexpected error: {:?}", error), + } + _ => panic!("Unexpected error: {:?}", error), + } +} + +// Test for types that either don't have the Display trait or foreign types from other libraries +async fn run_foreign_serialize_test(table_name: &str, type_name: &str, values: &[Option]) where - T: SerializeValue + DeserializeOwnedValue + FromStr + Debug + Clone + PartialEq, + T: SerializeValue + DeserializeOwnedValue + Debug + Clone + PartialEq, { - let session: Session = init_test(type_name, type_name).await; - session.await_schema_agreement().await.unwrap(); + let session = prepare_test_table(table_name, type_name, true).await; - for test in tests.iter() { - let insert_string_encoded_value = - format!("INSERT INTO {} (id, val) VALUES (0, {})", type_name, test); + for original_value in values { session - .query_unpaged(insert_string_encoded_value, &[]) + .query_unpaged( + format!("INSERT INTO {} (id, val) VALUES (1, ?)", table_name), + (&original_value,), + ) .await .unwrap(); - let insert_bound_value = format!("INSERT INTO {} (id, val) VALUES (1, ?)", type_name); - let value_to_bound = T::from_str(test).ok().unwrap(); + let selected_value: Vec = session + .query_unpaged(format!("SELECT val FROM {}", table_name), &[]) + .await + .unwrap() + .into_rows_result() + .unwrap() + .rows::<(T,)>() + .unwrap() + .filter_map(|row| match row { + Ok((_,)) => row.ok(), + Err(e) => { + assert_error_is_not_expected_non_null(&e); + None + } + }) + .map(|row| row.0) + .collect::>(); + + for value in selected_value { + assert_eq!(Some(value), *original_value); + } + } +} + +async fn run_literal_and_bound_value_test( + table_name: &str, + type_name: &str, + values: &[(LitVal, BoundVal)], + escape_literal: bool, +) where + LitVal: Display + Clone + PartialEq, + BoundVal: SerializeValue + DeserializeOwnedValue + Debug + Clone + PartialEq, +{ + fn error_handler(error: &DeserializationError) { + assert_error_is_not_expected_non_null(error); + } + + run_literal_and_bound_value_test_with_callback( + table_name, + type_name, + values, + escape_literal, + &error_handler, + ) + .await; +} + +// Test that inserts a raw literal value inside the query and compares it to a boundable type +async fn run_literal_and_bound_value_test_with_callback( + table_name: &str, + type_name: &str, + values: &[(LitVal, BoundVal)], + escape_literal: bool, + err_handler: &ErrHandler, +) where + LitVal: Display + Clone + PartialEq, + BoundVal: SerializeValue + DeserializeOwnedValue + Debug + Clone + PartialEq, + ErrHandler: Fn(&DeserializationError), +{ + let session = 
prepare_test_table(table_name, type_name, true).await; + + for (literal_val, bound_val) in values { session - .query_unpaged(insert_bound_value, (value_to_bound,)) + .query_unpaged( + format!( + "INSERT INTO {} (id, val) VALUES (1, {})", + table_name, + if escape_literal { + format!("'{}'", literal_val) + } else { + literal_val.to_string() + } + ), + &[], + ) .await .unwrap(); - let select_values = format!("SELECT val from {}", type_name); - let read_values: Vec = session - .query_unpaged(select_values, &[]) + let read_literal_val: Vec = session + .query_unpaged(format!("SELECT val FROM {}", table_name), &[]) .await .unwrap() .into_rows_result() .unwrap() - .rows::<(T,)>() + .rows::<(BoundVal,)>() .unwrap() - .map(Result::unwrap) + .filter_map(|row| match row { + Ok((_,)) => row.ok(), + Err(e) => { + err_handler(&e); + None + } + }) + .map(|row| row.0) + .collect::>(); + + session + .query_unpaged( + format!("INSERT INTO {} (id, val) VALUES (1, ?)", table_name), + (&bound_val,), + ) + .await + .unwrap(); + + let read_bound_val: Vec = session + .query_unpaged(format!("SELECT val FROM {}", table_name), &[]) + .await + .unwrap() + .into_rows_result() + .unwrap() + .rows::<(BoundVal,)>() + .unwrap() + .filter_map(|row| match row { + Ok((_,)) => row.ok(), + Err(e) => { + err_handler(&e); + None + } + }) .map(|row| row.0) .collect::>(); - let expected_value = T::from_str(test).ok().unwrap(); - assert_eq!(read_values, vec![expected_value.clone(), expected_value]); + for (read_literal, read_bound) in read_literal_val.iter().zip(read_bound_val.iter()) { + assert_eq!(*read_literal, *bound_val); + assert_eq!(*read_bound, *bound_val); + } } } -#[cfg(any(feature = "num-bigint-03", feature = "num-bigint-04"))] -fn varint_test_cases() -> Vec<&'static str> { +// Special test for types that can return MaybeEmpty containers for certain results +async fn run_literal_input_maybe_empty_output_test( + table_name: &str, + type_name: &str, + values: &[(Option<&str>, Option>)], +) where + X: SerializeValue + DeserializeOwnedValue + Debug + Clone + PartialEq + Emptiable, +{ + let session = prepare_test_table(table_name, type_name, true).await; + + for (input_val, expected_val) in values { + let query = format!( + "INSERT INTO {} (id, val) VALUES (1, '{}')", + table_name, + &input_val.unwrap() + ); + tracing::debug!("Executing query: {}", query); + session.query_unpaged(query, &[]).await.unwrap(); + + let selected_values: Vec> = session + .query_unpaged(format!("SELECT val FROM {}", table_name), &[]) + .await + .unwrap() + .into_rows_result() + .unwrap() + .rows::<(MaybeEmpty,)>() + .unwrap() + .filter_map(|row| match row { + Ok((_,)) => row.ok(), + Err(e) => { + assert_error_is_not_expected_non_null(&e); + None + } + }) + .map(|row| row.0) + .collect::>(); + + for read_value in selected_values { + assert_eq!(Some(read_value), *expected_val); + } + } +} + +// Arbitrary precision types + +#[tokio::test] +async fn test_serialize_deserialize_cql_varint() { + setup_tracing(); + const TABLE_NAME: &str = "varint_serialization_test"; + + let tests = [ + vec![0x00], // 0 + vec![0x01], // 1 + vec![0x00, 0x01], // 1 (with leading zeros) + vec![0x7F], // 127 + vec![0x00, 0x80], // 128 + vec![0x00, 0x81], // 129 + vec![0xFF], // -1 + vec![0x80], // -128 + vec![0xFF, 0x7F], // -129 + vec![ + 0x01, 0x8E, 0xE9, 0x0F, 0xF6, 0xC3, 0x73, 0xE0, 0xEE, 0x4E, 0x3F, 0x0A, 0xD2, + ], // 123456789012345678901234567890 + vec![ + 0xFE, 0x71, 0x16, 0xF0, 0x09, 0x3C, 0x8C, 0x1F, 0x11, 0xB1, 0xC0, 0xF5, 0x2E, + ], // 
-123456789012345678901234567890 + ]; + + let mut test_cases: Vec> = tests + .iter() + .map(|val| Some(CqlVarint::from_signed_bytes_be_slice(val))) + .collect(); + test_cases.push(None); + + run_foreign_serialize_test(TABLE_NAME, "varint", test_cases.as_slice()).await; +} + +#[cfg(any( + feature = "num-bigint-03", + feature = "num-bigint-04", + feature = "bigdecimal-04" +))] +fn test_num_set() -> Vec<&'static str> { vec![ "0", + "-1", "1", "127", "128", - "129", - "-1", + "-127", "-128", - "-129", + "255", "123456789012345678901234567890", "-123456789012345678901234567890", // Test cases for numbers that can't be contained in u/i128. @@ -130,185 +325,250 @@ fn varint_test_cases() -> Vec<&'static str> { #[cfg(feature = "num-bigint-03")] #[tokio::test] -async fn test_varint03() { +async fn test_serialize_deserialize_num_bigint_03_varint() { + use num_bigint_03::BigInt; + setup_tracing(); - let tests = varint_test_cases(); - run_tests::(&tests, "varint").await; + const TABLE_NAME: &str = "bn03_varint_serialization_test"; + + let mut test_cases: Vec> = test_num_set() + .iter() + .map(|val| Some(BigInt::from_str(val).expect("Failed to parse BigInt"))) + .collect(); + test_cases.push(None); + + run_foreign_serialize_test(TABLE_NAME, "varint", test_cases.as_slice()).await; } #[cfg(feature = "num-bigint-04")] #[tokio::test] -async fn test_varint04() { +async fn test_serialize_deserialize_num_bigint_04_varint() { + use num_bigint_04::BigInt; + setup_tracing(); - let tests = varint_test_cases(); - run_tests::(&tests, "varint").await; + const TABLE_NAME: &str = "bn04_varint_serialization_test"; + + let mut test_cases: Vec> = test_num_set() + .iter() + .map(|val| Some(BigInt::from_str(val).expect("Failed to parse BigInt"))) + .collect(); + test_cases.push(None); + + run_foreign_serialize_test(TABLE_NAME, "varint", test_cases.as_slice()).await; } +#[cfg(feature = "bigdecimal-04")] #[tokio::test] -async fn test_cql_varint() { +async fn test_serialize_deserialize_num_bigdecimal_04_varint() { + use bigdecimal_04::BigDecimal; + setup_tracing(); - let tests = [ - vec![0x00], // 0 - vec![0x01], // 1 - vec![0x00, 0x01], // 1 (with leading zeros) - vec![0x7F], // 127 - vec![0x00, 0x80], // 128 - vec![0x00, 0x81], // 129 - vec![0xFF], // -1 - vec![0x80], // -128 - vec![0xFF, 0x7F], // -129 - vec![ - 0x01, 0x8E, 0xE9, 0x0F, 0xF6, 0xC3, 0x73, 0xE0, 0xEE, 0x4E, 0x3F, 0x0A, 0xD2, - ], // 123456789012345678901234567890 - vec![ - 0xFE, 0x71, 0x16, 0xF0, 0x09, 0x3C, 0x8C, 0x1F, 0x11, 0xB1, 0xC0, 0xF5, 0x2E, - ], // -123456789012345678901234567890 - ]; + const TABLE_NAME: &str = "bd04_decimal_serialization_test"; - let table_name = "cql_varint_tests"; - let session: Session = create_new_session_builder().build().await.unwrap(); - let ks = unique_keyspace_name(); + let mut test_cases: Vec> = test_num_set() + .iter() + .map(|val| Some(BigDecimal::from_str(val).expect("Failed to parse BigInt"))) + .collect(); + test_cases.push(None); - session - .ddl(format!( - "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \ - {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 1}}", - ks - )) - .await - .unwrap(); - session.use_keyspace(ks, false).await.unwrap(); + run_foreign_serialize_test(TABLE_NAME, "decimal", test_cases.as_slice()).await; +} - session - .ddl(format!( - "CREATE TABLE IF NOT EXISTS {} (id int PRIMARY KEY, val varint)", - table_name - )) - .await - .unwrap(); +// Special types - let prepared_insert = session - .prepare(format!( - "INSERT INTO {} (id, val) VALUES (0, ?)", - table_name - )) - .await - 
.unwrap(); - let prepared_select = session - .prepare(format!("SELECT val FROM {} WHERE id = 0", table_name)) - .await - .unwrap(); +#[tokio::test] +async fn test_serialize_deserialize_counter() { + setup_tracing(); + const TABLE_NAME: &str = "counter_serialization_test"; + let test_cases = [-1, 0, 1, 127, 1000, i64::MAX, i64::MIN]; + let session: Session = prepare_test_table(TABLE_NAME, "counter", false).await; - for test in tests { - let cql_varint = CqlVarint::from_signed_bytes_be_slice(&test); + for (i, test_value) in test_cases.iter().enumerate() { + let prepared_statement = format!("UPDATE {} SET val = val + ? WHERE id = ?", TABLE_NAME); + let value_to_bind = Counter(*test_value); session - .execute_unpaged(&prepared_insert, (&cql_varint,)) + .query_unpaged(prepared_statement, (value_to_bind, i as i32)) .await .unwrap(); - let read_values: Vec = session - .execute_unpaged(&prepared_select, &[]) + let select_values = format!("SELECT val FROM {} WHERE id = ?", TABLE_NAME); + let read_values: Vec = session + .query_unpaged(select_values, (i as i32,)) .await .unwrap() .into_rows_result() .unwrap() - .rows::<(CqlVarint,)>() + .rows::<(Counter,)>() .unwrap() .map(Result::unwrap) .map(|row| row.0) .collect::>(); - assert_eq!(read_values, vec![cql_varint]) + let expected_value = Counter(*test_value); + assert_eq!(read_values, vec![expected_value]); } } -#[cfg(feature = "bigdecimal-04")] #[tokio::test] -async fn test_decimal() { +async fn test_serialize_deserialize_inet() { setup_tracing(); - let tests = [ - "4.2", - "0", - "1.999999999999999999999999999999999999999", - "997", - "123456789012345678901234567890.1234567890", - "-123456789012345678901234567890.1234567890", - ]; + const TABLE_NAME: &str = "inet_serialization_test"; - run_tests::(&tests, "decimal").await; -} + let mut test_cases: Vec<(Option<&str>, Option>)> = vec![ + ("0.0.0.0", IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0))), + ("127.0.0.1", IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))), + ("10.0.0.1", IpAddr::V4(Ipv4Addr::new(10, 0, 0, 1))), + ( + "255.255.255.255", + IpAddr::V4(Ipv4Addr::new(255, 255, 255, 255)), + ), + ("::0", IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0))), + ("::1", IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1))), + ( + "2001:db8::8a2e:370:7334", + IpAddr::V6(Ipv6Addr::new( + 0x2001, 0x0db8, 0, 0, 0, 0x8a2e, 0x0370, 0x7334, + )), + ), + ( + "2001:0db8:0000:0000:0000:8a2e:0370:7334", + IpAddr::V6(Ipv6Addr::new( + 0x2001, 0x0db8, 0, 0, 0, 0x8a2e, 0x0370, 0x7334, + )), + ), + ( + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + IpAddr::V6(Ipv6Addr::new( + u16::MAX, + u16::MAX, + u16::MAX, + u16::MAX, + u16::MAX, + u16::MAX, + u16::MAX, + u16::MAX, + )), + ), + ] + .iter() + .map(|(str_val, inet_val)| (Some(*str_val), Some(MaybeEmpty::Value(*inet_val)))) + .collect(); -#[tokio::test] -async fn test_bool() { - setup_tracing(); - let tests = ["true", "false"]; + test_cases.push((Some(""), Some(MaybeEmpty::Empty))); - run_tests::(&tests, "boolean").await; + run_literal_input_maybe_empty_output_test(TABLE_NAME, "inet", test_cases.as_slice()).await; } +// Blob type + #[tokio::test] -async fn test_float() { +async fn test_serialize_deserialize_blob() { setup_tracing(); - let max = f32::MAX.to_string(); - let min = f32::MIN.to_string(); - let tests = [ - "3.14", - "997", - "0.1", - "128", - "-128", - max.as_str(), - min.as_str(), - ]; + const TABLE_NAME: &str = "blob_serialization_test"; - run_tests::(&tests, "float").await; -} + let long_blob: Vec = vec![0x11; 1234]; + let mut long_blob_str: String = "0x".to_string(); + 
long_blob_str.extend(std::iter::repeat('1').take(2 * 1234)); -#[tokio::test] -async fn test_counter() { - setup_tracing(); - let big_increment = i64::MAX.to_string(); - let tests = ["1", "997", big_increment.as_str()]; + let test_cases: Vec<(Option<&str>, Option>)> = vec![ + ("0x", vec![]), + ("0x00", vec![0x00]), + ("0x01", vec![0x01]), + ("0xff", vec![0xff]), + ("0x1122", vec![0x11, 0x22]), + ("0x112233", vec![0x11, 0x22, 0x33]), + ("0x11223344", vec![0x11, 0x22, 0x33, 0x44]), + ("0x1122334455", vec![0x11, 0x22, 0x33, 0x44, 0x55]), + ("0x112233445566", vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66]), + ( + "0x11223344556677", + vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77], + ), + ( + "0x1122334455667788", + vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88], + ), + (&long_blob_str, long_blob), + ] + .iter() + .map(|(str_val, blob_val)| (Some(*str_val), Some(blob_val.clone()))) + .collect(); + + let session = prepare_test_table(TABLE_NAME, "blob", true).await; + + for (input_val, expected_val) in test_cases { + let query = format!( + "INSERT INTO {} (id, val) VALUES (1, {})", + TABLE_NAME, + &input_val.unwrap() + ); + tracing::debug!("Executing query: {}", query); + session.query_unpaged(query, &[]).await.unwrap(); + + let selected_values: Vec> = session + .query_unpaged(format!("SELECT val FROM {}", TABLE_NAME), &[]) + .await + .unwrap() + .into_rows_result() + .unwrap() + .rows::<(Vec,)>() + .unwrap() + .filter_map(|row| match row { + Ok((_,)) => row.ok(), + Err(e) => { + assert_error_is_not_expected_non_null(&e); + None + } + }) + .map(|row| row.0) + .collect::>(); - // Can't use run_tests, because counters are special and can't be inserted - let type_name = "counter"; - let session: Session = init_test_maybe_without_tablets(type_name, type_name, false).await; + for read_value in selected_values { + assert_eq!(Some(read_value), expected_val); + } - for (i, test) in tests.iter().enumerate() { - let update_bound_value = format!("UPDATE {} SET val = val + ? 
WHERE id = ?", type_name); - let value_to_bound = Counter(i64::from_str(test).unwrap()); + //as bound value + let query = format!("INSERT INTO {} (id, val) VALUES (1, ?)", TABLE_NAME,); + tracing::debug!("Executing query: {}", query); session - .query_unpaged(update_bound_value, (value_to_bound, i as i32)) + .query_unpaged(query, (expected_val.clone().unwrap(),)) .await .unwrap(); - let select_values = format!("SELECT val FROM {} WHERE id = ?", type_name); - let read_values: Vec = session - .query_unpaged(select_values, (i as i32,)) + let selected_values: Vec> = session + .query_unpaged(format!("SELECT val FROM {}", TABLE_NAME), &[]) .await .unwrap() .into_rows_result() .unwrap() - .rows::<(Counter,)>() + .rows::<(Vec,)>() .unwrap() - .map(Result::unwrap) + .filter_map(|row: Result<(Vec,), DeserializationError>| match row { + Ok((_,)) => row.ok(), + Err(e) => { + assert_error_is_not_expected_non_null(&e); + None + } + }) .map(|row| row.0) .collect::>(); - let expected_value = Counter(i64::from_str(test).unwrap()); - assert_eq!(read_values, vec![expected_value]); + for read_value in selected_values { + assert_eq!(Some(read_value), expected_val); + } } } +// Date, Time, Duration Types + #[cfg(feature = "chrono-04")] #[tokio::test] -async fn test_naive_date_04() { +async fn test_serialize_deserialize_naive_date_04() { setup_tracing(); + const TABLE_NAME: &str = "chrono_04_serialization_test"; use chrono::Datelike; use chrono::NaiveDate; - let session: Session = init_test("chrono_naive_date_tests", "date").await; - let min_naive_date: NaiveDate = NaiveDate::MIN; let min_naive_date_string = min_naive_date.format("%Y-%m-%d").to_string(); let min_naive_date_out_of_range_string = (min_naive_date.year() - 1).to_string() + "-12-31"; @@ -349,63 +609,22 @@ async fn test_naive_date_04() { //("5881580-07-11", None), ]; - for (date_text, date) in tests.iter() { - session - .query_unpaged( - format!( - "INSERT INTO chrono_naive_date_tests (id, val) VALUES (0, '{}')", - date_text - ), - &[], - ) - .await - .unwrap(); - - let read_date: Option = session - .query_unpaged("SELECT val from chrono_naive_date_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .rows::<(NaiveDate,)>() - .unwrap() - .next() - .unwrap() - .ok() - .map(|row| row.0); - - assert_eq!(read_date, *date); - - // If date is representable by NaiveDate try inserting it and reading again - if let Some(naive_date) = date { - session - .query_unpaged( - "INSERT INTO chrono_naive_date_tests (id, val) VALUES (0, ?)", - (naive_date,), - ) - .await - .unwrap(); - - let (read_date,): (NaiveDate,) = session - .query_unpaged("SELECT val from chrono_naive_date_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(NaiveDate,)>() - .unwrap(); - assert_eq!(read_date, *naive_date); - } - } + run_literal_and_bound_value_test_with_callback( + TABLE_NAME, + "date", + &tests, + true, + &assert_error_is_not_overflow_or_expected_non_null, + ) + .await; } #[tokio::test] -async fn test_cql_date() { +async fn test_serialize_deserialize_cql_date() { setup_tracing(); + const TABLE_NAME: &str = "cql_date_serialization_test"; // Tests value::Date which allows to insert dates outside NaiveDate range - let session: Session = init_test("cql_date_tests", "date").await; - let tests = [ ("1970-01-01", CqlDate(2_u32.pow(31))), ("1969-12-02", CqlDate(2_u32.pow(31) - 30)), @@ -415,29 +634,14 @@ async fn test_cql_date() { //("5881580-07-11", Date(u32::MAX)), ]; - for (date_text, date) in &tests { - session - .query_unpaged( - 
format!( - "INSERT INTO cql_date_tests (id, val) VALUES (0, '{}')", - date_text - ), - &[], - ) - .await - .unwrap(); - - let (read_date,): (CqlDate,) = session - .query_unpaged("SELECT val from cql_date_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlDate,)>() - .unwrap(); + run_literal_and_bound_value_test(TABLE_NAME, "date", &tests, true).await; +} - assert_eq!(read_date, *date); - } +#[tokio::test] +async fn test_serialize_deserialize_cql_date_invalid_dates() { + setup_tracing(); + const TABLE_NAME: &str = "cql_date_serialization_test"; + let session = prepare_test_table(TABLE_NAME, "date", true).await; // 1 less/more than min/max values allowed by the database should cause error session @@ -459,11 +663,10 @@ async fn test_cql_date() { #[cfg(feature = "time-03")] #[tokio::test] -async fn test_date_03() { +async fn test_serialize_deserialize_date_03() { setup_tracing(); use time::{Date, Month::*}; - - let session: Session = init_test("time_date_tests", "date").await; + const TABLE_NAME: &str = "date_03_serialization_test"; let tests = [ // Basic test values @@ -500,119 +703,43 @@ async fn test_date_03() { ("-5877641-06-23", None), ]; - for (date_text, date) in tests.iter() { - session - .query_unpaged( - format!( - "INSERT INTO time_date_tests (id, val) VALUES (0, '{}')", - date_text - ), - &[], - ) - .await - .unwrap(); - - let read_date = session - .query_unpaged("SELECT val from time_date_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(Date,)>() - .ok() - .map(|val| val.0); - - assert_eq!(read_date, *date); - - // If date is representable by time::Date try inserting it and reading again - if let Some(date) = date { - session - .query_unpaged( - "INSERT INTO time_date_tests (id, val) VALUES (0, ?)", - (date,), - ) - .await - .unwrap(); - - let (read_date,) = session - .query_unpaged("SELECT val from time_date_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(Date,)>() - .unwrap(); - assert_eq!(read_date, *date); - } - } + run_literal_and_bound_value_test_with_callback( + TABLE_NAME, + "date", + &tests, + true, + &assert_error_is_not_overflow_or_expected_non_null, + ) + .await; } #[tokio::test] -async fn test_cql_time() { +async fn test_serialize_deserialize_cql_time() { setup_tracing(); + const TABLE_NAME: &str = "time_serialization_test"; // CqlTime is an i64 - nanoseconds since midnight // in range 0..=86399999999999 - let session: Session = init_test("cql_time_tests", "time").await; - let max_time: i64 = 24 * 60 * 60 * 1_000_000_000 - 1; - assert_eq!(max_time, 86399999999999); - - let tests = [ - ("00:00:00", CqlTime(0)), - ("01:01:01", CqlTime((60 * 60 + 60 + 1) * 1_000_000_000)), - ("00:00:00.000000000", CqlTime(0)), - ("00:00:00.000000001", CqlTime(1)), - ("23:59:59.999999999", CqlTime(max_time)), - ]; - - for (time_str, time_duration) in &tests { - // Insert time as a string and verify that it matches - session - .query_unpaged( - format!( - "INSERT INTO cql_time_tests (id, val) VALUES (0, '{}')", - time_str - ), - &[], - ) - .await - .unwrap(); - - let (read_time,) = session - .query_unpaged("SELECT val from cql_time_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlTime,)>() - .unwrap(); - - assert_eq!(read_time, *time_duration); + assert_eq!(max_time, 86399999999999); - // Insert time as a bound CqlTime value and verify that it matches - session - .query_unpaged( - "INSERT INTO cql_time_tests (id, val) VALUES (0, ?)", - 
(*time_duration,), - ) - .await - .unwrap(); + let tests = [ + ("00:00:00", CqlTime(0)), + ("01:01:01", CqlTime((60 * 60 + 60 + 1) * 1_000_000_000)), + ("00:00:00.000000000", CqlTime(0)), + ("00:00:00.000000001", CqlTime(1)), + ("23:59:59.999999999", CqlTime(max_time)), + ]; - let (read_time,) = session - .query_unpaged("SELECT val from cql_time_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlTime,)>() - .unwrap(); + run_literal_and_bound_value_test(TABLE_NAME, "time", &tests, true).await; +} - assert_eq!(read_time, *time_duration); - } +#[tokio::test] +async fn test_serialize_deserialize_cql_time_invalid_values() { + setup_tracing(); + const TABLE_NAME: &str = "time_serialization_test"; + let session = prepare_test_table(TABLE_NAME, "time", true).await; - // Tests with invalid time values - // Make sure that database rejects them let invalid_tests = [ "-01:00:00", // "-00:00:01", - actually this gets parsed as 0h 0m 1s, looks like a harmless bug @@ -627,8 +754,8 @@ async fn test_cql_time() { session .query_unpaged( format!( - "INSERT INTO cql_time_tests (id, val) VALUES (0, '{}')", - time_str + "INSERT INTO {} (id, val) VALUES (0, '{}')", + TABLE_NAME, time_str ), &[], ) @@ -639,11 +766,10 @@ async fn test_cql_time() { #[cfg(feature = "chrono-04")] #[tokio::test] -async fn test_naive_time_04() { +async fn test_serialize_deserialize_naive_time_04() { setup_tracing(); use chrono::NaiveTime; - - let session = init_test("chrono_time_tests", "time").await; + const TABLE_NAME: &str = "chrono_04_time_serialization_test"; let tests = [ ("00:00:00", NaiveTime::MIN), @@ -666,55 +792,22 @@ async fn test_naive_time_04() { ), ]; - for (time_text, time) in tests.iter() { - // Insert as string and read it again - session - .query_unpaged( - format!( - "INSERT INTO chrono_time_tests (id, val) VALUES (0, '{}')", - time_text - ), - &[], - ) - .await - .unwrap(); - - let (read_time,) = session - .query_unpaged("SELECT val from chrono_time_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(NaiveTime,)>() - .unwrap(); - - assert_eq!(read_time, *time); - - // Insert as type and read it again - session - .query_unpaged( - "INSERT INTO chrono_time_tests (id, val) VALUES (0, ?)", - (time,), - ) - .await - .unwrap(); + run_literal_and_bound_value_test(TABLE_NAME, "time", &tests, true).await; +} - let (read_time,) = session - .query_unpaged("SELECT val from chrono_time_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(NaiveTime,)>() - .unwrap(); - assert_eq!(read_time, *time); - } +#[cfg(feature = "chrono-04")] +#[tokio::test] +async fn test_serialize_deserialize_naive_time_04_leap_seconds() { + setup_tracing(); + use chrono::NaiveTime; + const TABLE_NAME: &str = "chrono_04_time_serialization_test"; + let session = prepare_test_table(TABLE_NAME, "time", true).await; // chrono can represent leap seconds, this should not panic let leap_second = NaiveTime::from_hms_nano_opt(23, 59, 59, 1_500_000_000); session .query_unpaged( - "INSERT INTO cql_time_tests (id, val) VALUES (0, ?)", + format!("INSERT INTO {} (id, val) VALUES (0, ?)", TABLE_NAME), (leap_second,), ) .await @@ -723,11 +816,10 @@ async fn test_naive_time_04() { #[cfg(feature = "time-03")] #[tokio::test] -async fn test_time_03() { +async fn test_serialize_deserialize_time_03() { setup_tracing(); use time::Time; - - let session = init_test("time_time_tests", "time").await; + const TABLE_NAME: &str = "chrono_03_time_serialization_test"; let tests = [ 
("00:00:00", Time::MIDNIGHT), @@ -750,63 +842,13 @@ async fn test_time_03() { ), ]; - for (time_text, time) in tests.iter() { - // Insert as string and read it again - session - .query_unpaged( - format!( - "INSERT INTO time_time_tests (id, val) VALUES (0, '{}')", - time_text - ), - &[], - ) - .await - .unwrap(); - - let (read_time,) = session - .query_unpaged("SELECT val from time_time_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(Time,)>() - .unwrap(); - - assert_eq!(read_time, *time); - - // Insert as type and read it again - session - .query_unpaged( - "INSERT INTO time_time_tests (id, val) VALUES (0, ?)", - (time,), - ) - .await - .unwrap(); - - let (read_time,) = session - .query_unpaged("SELECT val from time_time_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(Time,)>() - .unwrap(); - assert_eq!(read_time, *time); - } + run_literal_and_bound_value_test(TABLE_NAME, "time", &tests, true).await; } #[tokio::test] -async fn test_cql_timestamp() { +async fn test_serialize_deserialize_cql_timestamp() { setup_tracing(); - let session: Session = init_test("cql_timestamp_tests", "timestamp").await; - - //let epoch_date = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); - - //let before_epoch = NaiveDate::from_ymd_opt(1333, 4, 30).unwrap(); - //let before_epoch_offset = before_epoch.signed_duration_since(epoch_date); - - //let after_epoch = NaiveDate::from_ymd_opt(2020, 3, 8).unwrap(); - //let after_epoch_offset = after_epoch.signed_duration_since(epoch_date); + const TABLE_NAME: &str = "timestamp_serialization_test"; let tests = [ ("0", CqlTimestamp(0)), @@ -825,59 +867,109 @@ async fn test_cql_timestamp() { //("2011-02-03T04:05:00.000+0000", Duration::milliseconds(1299038700000)), ]; - for (timestamp_str, timestamp_duration) in &tests { - // Insert timestamp as a string and verify that it matches - session - .query_unpaged( - format!( - "INSERT INTO cql_timestamp_tests (id, val) VALUES (0, '{}')", - timestamp_str - ), - &[], - ) - .await - .unwrap(); + run_literal_and_bound_value_test(TABLE_NAME, "timestamp", &tests, true).await; +} - let (read_timestamp,) = session - .query_unpaged("SELECT val from cql_timestamp_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlTimestamp,)>() - .unwrap(); +#[tokio::test] +async fn test_serialize_deserialize_cqlvalue_duration() { + setup_tracing(); + let session: Session = create_new_session_builder().build().await.unwrap(); - assert_eq!(read_timestamp, *timestamp_duration); + let ks = unique_keyspace_name(); + session + .ddl(format!( + "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \ + {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 1}}", + ks + )) + .await + .unwrap(); + session.use_keyspace(&ks, false).await.unwrap(); - // Insert timestamp as a bound CqlTimestamp value and verify that it matches - session - .query_unpaged( - "INSERT INTO cql_timestamp_tests (id, val) VALUES (0, ?)", - (*timestamp_duration,), - ) - .await - .unwrap(); + let duration_cql_value = CqlValue::Duration(CqlDuration { + months: 6, + days: 9, + nanoseconds: 21372137, + }); - let (read_timestamp,) = session - .query_unpaged("SELECT val from cql_timestamp_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlTimestamp,)>() - .unwrap(); + session.ddl("CREATE TABLE IF NOT EXISTS cqlvalue_duration_test (pk int, ck int, v duration, primary key (pk, ck))").await.unwrap(); + let fixture_queries = vec![ + ( + "INSERT INTO 
cqlvalue_duration_test (pk, ck, v) VALUES (0, 0, ?)", + vec![&duration_cql_value], + ), + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 1, 89h4m48s)", + vec![], + ), + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 2, PT89H8M53S)", + vec![], + ), + ( + "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 3, P0000-00-00T89:09:09)", + vec![], + ), + ]; - assert_eq!(read_timestamp, *timestamp_duration); + for query in fixture_queries { + session.query_unpaged(query.0, query.1).await.unwrap(); } + + let rows_result = session + .query_unpaged( + "SELECT v FROM cqlvalue_duration_test WHERE pk = ?", + (CqlValue::Int(0),), + ) + .await + .unwrap() + .into_rows_result() + .unwrap(); + + let mut rows_iter = rows_result.rows::<(CqlValue,)>().unwrap(); + + let (first_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!(first_value, duration_cql_value); + + let (second_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!( + second_value, + CqlValue::Duration(CqlDuration { + months: 0, + days: 0, + nanoseconds: 320_688_000_000_000, + }) + ); + + let (third_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!( + third_value, + CqlValue::Duration(CqlDuration { + months: 0, + days: 0, + nanoseconds: 320_933_000_000_000, + }) + ); + + let (fourth_value,) = rows_iter.next().unwrap().unwrap(); + assert_eq!( + fourth_value, + CqlValue::Duration(CqlDuration { + months: 0, + days: 0, + nanoseconds: 320_949_000_000_000, + }) + ); + + assert_matches!(rows_iter.next(), None); } #[cfg(feature = "chrono-04")] #[tokio::test] -async fn test_date_time_04() { +async fn test_serialize_deserialize_date_time_04() { setup_tracing(); - use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc}; - - let session = init_test("chrono_datetime_tests", "timestamp").await; + use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime}; + const TABLE_NAME: &str = "timestamp_serialization_test"; let tests = [ ("0", DateTime::from_timestamp(0, 0).unwrap()), @@ -924,49 +1016,16 @@ async fn test_date_time_04() { ), ]; - for (datetime_text, datetime) in tests.iter() { - // Insert as string and read it again - session - .query_unpaged( - format!( - "INSERT INTO chrono_datetime_tests (id, val) VALUES (0, '{}')", - datetime_text - ), - &[], - ) - .await - .unwrap(); - - let (read_datetime,) = session - .query_unpaged("SELECT val from chrono_datetime_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(DateTime,)>() - .unwrap(); - - assert_eq!(read_datetime, *datetime); - - // Insert as type and read it again - session - .query_unpaged( - "INSERT INTO chrono_datetime_tests (id, val) VALUES (0, ?)", - (datetime,), - ) - .await - .unwrap(); + run_literal_and_bound_value_test(TABLE_NAME, "timestamp", &tests, true).await; +} - let (read_datetime,) = session - .query_unpaged("SELECT val from chrono_datetime_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(DateTime,)>() - .unwrap(); - assert_eq!(read_datetime, *datetime); - } +#[cfg(feature = "chrono-04")] +#[tokio::test] +async fn test_serialize_deserialize_date_time_04_high_precision_round_down() { + setup_tracing(); + use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc}; + const TABLE_NAME: &str = "timestamp_serialization_test"; + let session = prepare_test_table(TABLE_NAME, "timestamp", true).await; // chrono datetime has higher precision, round excessive submillisecond time down let nanosecond_precision_1st_half = NaiveDateTime::new( @@ -981,14 +1040,14 @@ 
async fn test_date_time_04() { .and_utc(); session .query_unpaged( - "INSERT INTO chrono_datetime_tests (id, val) VALUES (0, ?)", + format!("INSERT INTO {} (id, val) VALUES (0, ?)", TABLE_NAME), (nanosecond_precision_1st_half,), ) .await .unwrap(); let (read_datetime,) = session - .query_unpaged("SELECT val from chrono_datetime_tests", &[]) + .query_unpaged(format!("SELECT val from {}", TABLE_NAME), &[]) .await .unwrap() .into_rows_result() @@ -1009,14 +1068,14 @@ async fn test_date_time_04() { .and_utc(); session .query_unpaged( - "INSERT INTO chrono_datetime_tests (id, val) VALUES (0, ?)", + format!("INSERT INTO {} (id, val) VALUES (0, ?)", TABLE_NAME), (nanosecond_precision_2nd_half,), ) .await .unwrap(); let (read_datetime,) = session - .query_unpaged("SELECT val from chrono_datetime_tests", &[]) + .query_unpaged(format!("SELECT val from {}", TABLE_NAME), &[]) .await .unwrap() .into_rows_result() @@ -1033,20 +1092,19 @@ async fn test_date_time_04() { .and_utc(); session .query_unpaged( - "INSERT INTO cql_datetime_tests (id, val) VALUES (0, ?)", + format!("INSERT INTO {} (id, val) VALUES (0, ?)", TABLE_NAME), (leap_second,), ) .await - .unwrap_err(); + .unwrap(); } #[cfg(feature = "time-03")] #[tokio::test] -async fn test_offset_date_time_03() { +async fn test_serialize_deserialize_offset_date_time_03() { setup_tracing(); use time::{Date, Month::*, OffsetDateTime, PrimitiveDateTime, Time, UtcOffset}; - - let session = init_test("time_datetime_tests", "timestamp").await; + const TABLE_NAME: &str = "timestamp_serialization_test"; let tests = [ ("0", OffsetDateTime::UNIX_EPOCH), @@ -1093,49 +1151,16 @@ async fn test_offset_date_time_03() { ), ]; - for (datetime_text, datetime) in tests.iter() { - // Insert as string and read it again - session - .query_unpaged( - format!( - "INSERT INTO time_datetime_tests (id, val) VALUES (0, '{}')", - datetime_text - ), - &[], - ) - .await - .unwrap(); - - let (read_datetime,) = session - .query_unpaged("SELECT val from time_datetime_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(OffsetDateTime,)>() - .unwrap(); - - assert_eq!(read_datetime, *datetime); - - // Insert as type and read it again - session - .query_unpaged( - "INSERT INTO time_datetime_tests (id, val) VALUES (0, ?)", - (datetime,), - ) - .await - .unwrap(); + run_literal_and_bound_value_test(TABLE_NAME, "timestamp", &tests, true).await; +} - let (read_datetime,) = session - .query_unpaged("SELECT val from time_datetime_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .first_row::<(OffsetDateTime,)>() - .unwrap(); - assert_eq!(read_datetime, *datetime); - } +#[cfg(feature = "time-03")] +#[tokio::test] +async fn test_serialize_deserialize_offset_date_time_03_high_precision_rounding_down() { + setup_tracing(); + use time::{Date, Month::*, OffsetDateTime, PrimitiveDateTime, Time}; + const TABLE_NAME: &str = "timestamp_serialization_test"; + let session = prepare_test_table(TABLE_NAME, "timestamp", true).await; // time datetime has higher precision, round excessive submillisecond time down let nanosecond_precision_1st_half = PrimitiveDateTime::new( @@ -1150,14 +1175,14 @@ async fn test_offset_date_time_03() { .assume_utc(); session .query_unpaged( - "INSERT INTO time_datetime_tests (id, val) VALUES (0, ?)", + format!("INSERT INTO {} (id, val) VALUES (0, ?)", TABLE_NAME), (nanosecond_precision_1st_half,), ) .await .unwrap(); let (read_datetime,) = session - .query_unpaged("SELECT val from time_datetime_tests", &[]) + 
.query_unpaged(format!("SELECT val from {}", TABLE_NAME), &[]) .await .unwrap() .into_rows_result() @@ -1178,14 +1203,14 @@ async fn test_offset_date_time_03() { .assume_utc(); session .query_unpaged( - "INSERT INTO time_datetime_tests (id, val) VALUES (0, ?)", + format!("INSERT INTO {} (id, val) VALUES (0, ?)", TABLE_NAME), (nanosecond_precision_2nd_half,), ) .await .unwrap(); let (read_datetime,) = session - .query_unpaged("SELECT val from time_datetime_tests", &[]) + .query_unpaged(format!("SELECT val from {}", TABLE_NAME), &[]) .await .unwrap() .into_rows_result() @@ -1195,82 +1220,40 @@ async fn test_offset_date_time_03() { assert_eq!(read_datetime, nanosecond_precision_2nd_half_rounded); } +//UUID Types + #[tokio::test] -async fn test_timeuuid() { +async fn test_serialize_deserialize_timeuuid() { setup_tracing(); - let session: Session = init_test("timeuuid_tests", "timeuuid").await; + const TABLE_NAME: &str = "timeuuid_serialization_test"; // A few random timeuuids generated manually let tests = [ ( "8e14e760-7fa8-11eb-bc66-000000000001", - [ + CqlTimeuuid::from_bytes([ 0x8e, 0x14, 0xe7, 0x60, 0x7f, 0xa8, 0x11, 0xeb, 0xbc, 0x66, 0, 0, 0, 0, 0, 0x01, - ], + ]), ), ( "9b349580-7fa8-11eb-bc66-000000000001", - [ + CqlTimeuuid::from_bytes([ 0x9b, 0x34, 0x95, 0x80, 0x7f, 0xa8, 0x11, 0xeb, 0xbc, 0x66, 0, 0, 0, 0, 0, 0x01, - ], + ]), ), ( "5d74bae0-7fa3-11eb-bc66-000000000001", - [ + CqlTimeuuid::from_bytes([ 0x5d, 0x74, 0xba, 0xe0, 0x7f, 0xa3, 0x11, 0xeb, 0xbc, 0x66, 0, 0, 0, 0, 0, 0x01, - ], + ]), ), ]; - for (timeuuid_str, timeuuid_bytes) in &tests { - // Insert timeuuid as a string and verify that it matches - session - .query_unpaged( - format!( - "INSERT INTO timeuuid_tests (id, val) VALUES (0, {})", - timeuuid_str - ), - &[], - ) - .await - .unwrap(); - - let (read_timeuuid,): (CqlTimeuuid,) = session - .query_unpaged("SELECT val from timeuuid_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlTimeuuid,)>() - .unwrap(); - - assert_eq!(read_timeuuid.as_bytes(), timeuuid_bytes); - - // Insert timeuuid as a bound value and verify that it matches - let test_uuid: CqlTimeuuid = CqlTimeuuid::from_slice(timeuuid_bytes.as_ref()).unwrap(); - session - .query_unpaged( - "INSERT INTO timeuuid_tests (id, val) VALUES (0, ?)", - (test_uuid,), - ) - .await - .unwrap(); - - let (read_timeuuid,): (CqlTimeuuid,) = session - .query_unpaged("SELECT val from timeuuid_tests", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(CqlTimeuuid,)>() - .unwrap(); - - assert_eq!(read_timeuuid.as_bytes(), timeuuid_bytes); - } + run_literal_and_bound_value_test(TABLE_NAME, "timeuuid", &tests, false).await; } #[tokio::test] -async fn test_timeuuid_ordering() { +async fn test_serialize_deserialize_timeuuid_ordering() { setup_tracing(); let session: Session = create_new_session_builder().build().await.unwrap(); let ks = unique_keyspace_name(); @@ -1347,163 +1330,10 @@ async fn test_timeuuid_ordering() { } } -#[tokio::test] -async fn test_inet() { - setup_tracing(); - let session: Session = init_test("inet_tests", "inet").await; - - let tests = [ - ("0.0.0.0", IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0))), - ("127.0.0.1", IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))), - ("10.0.0.1", IpAddr::V4(Ipv4Addr::new(10, 0, 0, 1))), - ( - "255.255.255.255", - IpAddr::V4(Ipv4Addr::new(255, 255, 255, 255)), - ), - ("::0", IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0))), - ("::1", IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1))), - ( - "2001:db8::8a2e:370:7334", - 
IpAddr::V6(Ipv6Addr::new( - 0x2001, 0x0db8, 0, 0, 0, 0x8a2e, 0x0370, 0x7334, - )), - ), - ( - "2001:0db8:0000:0000:0000:8a2e:0370:7334", - IpAddr::V6(Ipv6Addr::new( - 0x2001, 0x0db8, 0, 0, 0, 0x8a2e, 0x0370, 0x7334, - )), - ), - ( - "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - IpAddr::V6(Ipv6Addr::new( - u16::MAX, - u16::MAX, - u16::MAX, - u16::MAX, - u16::MAX, - u16::MAX, - u16::MAX, - u16::MAX, - )), - ), - ]; - - for (inet_str, inet) in &tests { - // Insert inet as a string and verify that it matches - session - .query_unpaged( - format!( - "INSERT INTO inet_tests (id, val) VALUES (0, '{}')", - inet_str - ), - &[], - ) - .await - .unwrap(); - - let (read_inet,): (IpAddr,) = session - .query_unpaged("SELECT val from inet_tests WHERE id = 0", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(IpAddr,)>() - .unwrap(); - - assert_eq!(read_inet, *inet); - - // Insert inet as a bound value and verify that it matches - session - .query_unpaged("INSERT INTO inet_tests (id, val) VALUES (0, ?)", (inet,)) - .await - .unwrap(); - - let (read_inet,): (IpAddr,) = session - .query_unpaged("SELECT val from inet_tests WHERE id = 0", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(IpAddr,)>() - .unwrap(); - - assert_eq!(read_inet, *inet); - } -} - -#[tokio::test] -async fn test_blob() { - setup_tracing(); - let session: Session = init_test("blob_tests", "blob").await; - - let long_blob: Vec = vec![0x11; 1234]; - let mut long_blob_str: String = "0x".to_string(); - long_blob_str.extend(std::iter::repeat('1').take(2 * 1234)); - - let tests = [ - ("0x", vec![]), - ("0x00", vec![0x00]), - ("0x01", vec![0x01]), - ("0xff", vec![0xff]), - ("0x1122", vec![0x11, 0x22]), - ("0x112233", vec![0x11, 0x22, 0x33]), - ("0x11223344", vec![0x11, 0x22, 0x33, 0x44]), - ("0x1122334455", vec![0x11, 0x22, 0x33, 0x44, 0x55]), - ("0x112233445566", vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66]), - ( - "0x11223344556677", - vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77], - ), - ( - "0x1122334455667788", - vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88], - ), - (&long_blob_str, long_blob), - ]; - - for (blob_str, blob) in &tests { - // Insert blob as a string and verify that it matches - session - .query_unpaged( - format!("INSERT INTO blob_tests (id, val) VALUES (0, {})", blob_str), - &[], - ) - .await - .unwrap(); - - let (read_blob,): (Vec,) = session - .query_unpaged("SELECT val from blob_tests WHERE id = 0", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(Vec,)>() - .unwrap(); - - assert_eq!(read_blob, *blob); - - // Insert blob as a bound value and verify that it matches - session - .query_unpaged("INSERT INTO blob_tests (id, val) VALUES (0, ?)", (blob,)) - .await - .unwrap(); - - let (read_blob,): (Vec,) = session - .query_unpaged("SELECT val from blob_tests WHERE id = 0", &[]) - .await - .unwrap() - .into_rows_result() - .unwrap() - .single_row::<(Vec,)>() - .unwrap(); - - assert_eq!(read_blob, *blob); - } -} +// UDT Types #[tokio::test] -async fn test_udt_after_schema_update() { +async fn test_serialize_deserialize_udt_after_schema_update() { setup_tracing(); let table_name = "udt_tests"; let type_name = "usertype1"; @@ -1631,9 +1461,9 @@ async fn test_udt_after_schema_update() { } #[tokio::test] -async fn test_empty() { +async fn test_serialize_deserialize_empty() { setup_tracing(); - let session: Session = init_test("empty_tests", "int").await; + let session: Session = prepare_test_table("empty_tests", "int", true).await; session 
         .query_unpaged(
@@ -1865,9 +1695,8 @@ async fn test_udt_with_missing_field() {
         .await;
 }
 
-
 #[tokio::test]
-async fn test_cqlvalue_udt() {
+async fn test_serialize_deserialize_cqlvalue_udt() {
     setup_tracing();
     let session: Session = create_new_session_builder().build().await.unwrap();
     let ks = unique_keyspace_name();
@@ -1915,97 +1744,3 @@ async fn test_serialize_deserialize_cqlvalue_udt() {
 
     assert_eq!(received_udt_cql_value, udt_cql_value);
 }
-
-#[tokio::test]
-async fn test_cqlvalue_duration() {
-    setup_tracing();
-    let session: Session = create_new_session_builder().build().await.unwrap();
-
-    let ks = unique_keyspace_name();
-    session
-        .ddl(format!(
-            "CREATE KEYSPACE IF NOT EXISTS {} WITH REPLICATION = \
-            {{'class' : 'NetworkTopologyStrategy', 'replication_factor' : 1}}",
-            ks
-        ))
-        .await
-        .unwrap();
-    session.use_keyspace(&ks, false).await.unwrap();
-
-    let duration_cql_value = CqlValue::Duration(CqlDuration {
-        months: 6,
-        days: 9,
-        nanoseconds: 21372137,
-    });
-
-    session.ddl("CREATE TABLE IF NOT EXISTS cqlvalue_duration_test (pk int, ck int, v duration, primary key (pk, ck))").await.unwrap();
-    let fixture_queries = vec![
-        (
-            "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 0, ?)",
-            vec![&duration_cql_value],
-        ),
-        (
-            "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 1, 89h4m48s)",
-            vec![],
-        ),
-        (
-            "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 2, PT89H8M53S)",
-            vec![],
-        ),
-        (
-            "INSERT INTO cqlvalue_duration_test (pk, ck, v) VALUES (0, 3, P0000-00-00T89:09:09)",
-            vec![],
-        ),
-    ];
-
-    for query in fixture_queries {
-        session.query_unpaged(query.0, query.1).await.unwrap();
-    }
-
-    let rows_result = session
-        .query_unpaged(
-            "SELECT v FROM cqlvalue_duration_test WHERE pk = ?",
-            (CqlValue::Int(0),),
-        )
-        .await
-        .unwrap()
-        .into_rows_result()
-        .unwrap();
-
-    let mut rows_iter = rows_result.rows::<(CqlValue,)>().unwrap();
-
-    let (first_value,) = rows_iter.next().unwrap().unwrap();
-    assert_eq!(first_value, duration_cql_value);
-
-    let (second_value,) = rows_iter.next().unwrap().unwrap();
-    assert_eq!(
-        second_value,
-        CqlValue::Duration(CqlDuration {
-            months: 0,
-            days: 0,
-            nanoseconds: 320_688_000_000_000,
-        })
-    );
-
-    let (third_value,) = rows_iter.next().unwrap().unwrap();
-    assert_eq!(
-        third_value,
-        CqlValue::Duration(CqlDuration {
-            months: 0,
-            days: 0,
-            nanoseconds: 320_933_000_000_000,
-        })
-    );
-
-    let (fourth_value,) = rows_iter.next().unwrap().unwrap();
-    assert_eq!(
-        fourth_value,
-        CqlValue::Duration(CqlDuration {
-            months: 0,
-            days: 0,
-            nanoseconds: 320_949_000_000_000,
-        })
-    );
-
-    assert_matches!(rows_iter.next(), None);
-}

From 8b93622d9d41632e93e217e74c808d26e94396a5 Mon Sep 17 00:00:00 2001
From: Alexey Kartashov
Date: Thu, 13 Feb 2025 13:06:38 +0100
Subject: [PATCH 4/4] tests: Add more scalar type tests, normal uuid and string type tests

This commit adds additional tests for various simple types such as int and
float, as well as one more uuid test and some simple string type tests
---
 scylla/tests/integration/serialization.rs | 239 ++++++++++++++++++++++
 1 file changed, 239 insertions(+)

diff --git a/scylla/tests/integration/serialization.rs b/scylla/tests/integration/serialization.rs
index 57d8fd1f41..7c69bec4a9 100644
--- a/scylla/tests/integration/serialization.rs
+++ b/scylla/tests/integration/serialization.rs
@@ -78,6 +78,63 @@ fn assert_error_is_not_overflow_or_expected_non_null(error: &DeserializationError) {
     }
 }
 
+// Native type test, which expects the value to have the Display trait and be trivially copyable
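+//
+// For every entry, the value is inserted under a fixed key (NULL when the entry is None),
+// read back as T, and compared against the original; a None input is expected to read back
+// as T::default(). Rows that fail to deserialize are checked with
+// assert_error_is_not_expected_non_null and substituted with T::default() before comparison.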
+async fn run_native_serialize_test<T>(table_name: &str, type_name: &str, values: &[Option<T>])
+where
+    T: SerializeValue
+        + DeserializeOwnedValue
+        + FromStr
+        + Debug
+        + Clone
+        + PartialEq
+        + Default
+        + Display
+        + Copy,
+{
+    let session = prepare_test_table(table_name, type_name, true).await;
+    tracing::debug!("Active keyspace: {}", session.get_keyspace().unwrap());
+
+    for original_value in values {
+        session
+            .query_unpaged(
+                format!("INSERT INTO {} (id, val) VALUES (1, ?)", table_name),
+                (&original_value,),
+            )
+            .await
+            .unwrap();
+
+        let selected_value: Vec<T> = session
+            .query_unpaged(format!("SELECT val FROM {}", table_name), &[])
+            .await
+            .unwrap()
+            .into_rows_result()
+            .unwrap()
+            .rows::<(T,)>()
+            .unwrap()
+            .map(|row| match row {
+                Ok((val,)) => val,
+                Err(e) => {
+                    assert_error_is_not_expected_non_null(&e);
+                    T::default()
+                }
+            })
+            .collect::<Vec<T>>();
+
+        for value in selected_value {
+            tracing::debug!(
+                "Received: {} | Expected: {}",
+                value,
+                if original_value.is_none() {
+                    T::default()
+                } else {
+                    original_value.unwrap()
+                }
+            );
+            assert_eq!(value, original_value.unwrap_or(T::default()));
+        }
+    }
+}
+
 // Test for types that either don't have the Display trait or foreign types from other libraries
 async fn run_foreign_serialize_test<T>(table_name: &str, type_name: &str, values: &[Option<T>])
 where
@@ -266,6 +323,146 @@ async fn run_literal_input_maybe_empty_output_test(
     }
 }
 
+// Scalar types
+
+#[tokio::test]
+async fn test_serialize_deserialize_bigint() {
+    setup_tracing();
+    const TABLE_NAME: &str = "bigint_serialization_test";
+
+    let test_cases = [
+        Some(i64::MIN),
+        Some(i64::MAX),
+        Some(1),
+        Some(-1),
+        Some(0),
+        None,
+    ];
+
+    run_native_serialize_test(TABLE_NAME, "bigint", &test_cases).await;
+}
+
+#[tokio::test]
+async fn test_serialize_deserialize_int() {
+    setup_tracing();
+    const TABLE_NAME: &str = "int_serialization_test";
+
+    let test_cases = [
+        Some(i32::MIN),
+        Some(i32::MAX),
+        Some(1),
+        Some(-1),
+        Some(0),
+        None,
+    ];
+
+    run_native_serialize_test(TABLE_NAME, "int", &test_cases).await;
+}
+
+#[tokio::test]
+async fn test_serialize_deserialize_smallint() {
+    setup_tracing();
+    const TABLE_NAME: &str = "smallint_serialization_test";
+
+    let test_cases = [
+        Some(i16::MIN),
+        Some(i16::MAX),
+        Some(1),
+        Some(-1),
+        Some(0),
+        None,
+    ];
+
+    run_native_serialize_test(TABLE_NAME, "smallint", &test_cases).await;
+}
+
+#[tokio::test]
+async fn test_serialize_deserialize_tinyint() {
+    setup_tracing();
+    const TABLE_NAME: &str = "tinyint_serialization_test";
+
+    let test_cases = [
+        Some(i8::MIN),
+        Some(i8::MAX),
+        Some(1),
+        Some(-1),
+        Some(0),
+        None,
+    ];
+
+    run_native_serialize_test(TABLE_NAME, "tinyint", &test_cases).await;
+}
+
+#[tokio::test]
+async fn test_serialize_deserialize_float() {
+    setup_tracing();
+    const TABLE_NAME: &str = "float_serialization_test";
+
+    let test_cases = [
+        Some(f32::MIN),
+        Some(f32::MAX),
+        Some(1.0),
+        Some(-1.0),
+        Some(0.0),
+        None,
+    ];
+
+    run_native_serialize_test(TABLE_NAME, "float", &test_cases).await;
+}
+
+#[tokio::test]
+async fn test_serialize_deserialize_bool() {
+    setup_tracing();
+    const TABLE_NAME: &str = "bool_serialization_test";
+
+    let test_cases = [Some(true), Some(false), None];
+
+    run_native_serialize_test(TABLE_NAME, "boolean", &test_cases).await;
+}
+
+#[tokio::test]
+async fn test_serialize_deserialize_double() {
+    setup_tracing();
+    const TABLE_NAME: &str = "double_serialization_test";
+
+    let test_cases = [
+        Some(f64::MIN),
+        Some(f64::MAX),
+        Some(1.0),
+        Some(-1.0),
+        Some(0.0),
None, + ]; + + run_native_serialize_test(TABLE_NAME, "double", &test_cases).await; +} + +#[tokio::test] +async fn test_serialize_deserialize_double_literal() { + setup_tracing(); + const TABLE_NAME: &str = "double_serialization_test"; + + let test_cases: Vec<(String, f64)> = [f64::MIN, f64::MAX, 1.0, -1.0, 0.0, 0.1] + .iter() + .map(|val| (val.to_string(), *val)) + .collect(); + + run_literal_and_bound_value_test(TABLE_NAME, "double", test_cases.as_slice(), false).await; +} + +#[tokio::test] +async fn test_serialize_deserialize_float_literal() { + setup_tracing(); + const TABLE_NAME: &str = "float_serialization_test"; + + let test_cases: Vec<(String, f32)> = [f32::MIN, f32::MAX, 1.0, -1.0, 0.0, 0.1] + .iter() + .map(|val| (val.to_string(), *val)) + .collect(); + + run_literal_and_bound_value_test(TABLE_NAME, "float", test_cases.as_slice(), false).await; +} + // Arbitrary precision types #[tokio::test] @@ -1222,6 +1419,19 @@ async fn test_serialize_deserialize_offset_date_time_03_high_precision_rounding_ //UUID Types +#[tokio::test] +async fn test_serialize_deserialize_uuid() { + setup_tracing(); + const TABLE_NAME: &str = "uuid_serialization_test"; + + let tests: Vec<(String, uuid::Uuid)> = (0..100) + .map(|_| uuid::Uuid::new_v4()) + .map(|new_uuid| (new_uuid.to_string(), new_uuid)) + .collect(); + + run_literal_and_bound_value_test(TABLE_NAME, "uuid", tests.as_slice(), false).await; +} + #[tokio::test] async fn test_serialize_deserialize_timeuuid() { setup_tracing(); @@ -1330,6 +1540,35 @@ async fn test_serialize_deserialize_timeuuid_ordering() { } } +#[tokio::test] +async fn test_serialize_deserialize_strings_varchar() { + let table_name = "varchar_serialization_test"; + + let test_cases: Vec<(&str, String)> = vec![ + ("Hello, World!", String::from("Hello, World!")), + ("Hello, Мир", String::from("Hello, Мир")), // multi-byte string + ("🦀A🦀B🦀C", String::from("🦀A🦀B🦀C")), + ("おはようございます", String::from("おはようございます")), + ]; + + run_literal_and_bound_value_test(table_name, "varchar", test_cases.as_slice(), true).await; +} + +#[tokio::test] +async fn test_serialize_deserialize_strings_ascii() { + let table_name = "ascii_serialization_test"; + + let test_cases: Vec<(&str, String)> = vec![ + ("Hello, World!", String::from("Hello, World!")), + ( + str::from_utf8(&[0x0, 0x7f]).unwrap(), + String::from_utf8(vec![0x0, 0x7f]).unwrap(), + ), // min/max ASCII values + ]; + + run_literal_and_bound_value_test(table_name, "varchar", test_cases.as_slice(), true).await; +} + // UDT Types #[tokio::test]