Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/expr/src/relation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1707,7 +1707,7 @@ impl MirRelationExpr {
.column_types
.iter()
.zip_eq(typ.column_types.iter())
.all(|(t1, t2)| t1.scalar_type.base_eq(&t2.scalar_type)));
.all(|(t1, t2)| t1.scalar_type.physical_eq(&t2.scalar_type)));
}
let mut typ = typ.unwrap_or_else(|| self.typ());
typ.keys = vec![vec![]];
Expand Down
8 changes: 5 additions & 3 deletions src/expr/src/scalar/func.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ use mz_ore::fmt::FormatBuffer;
use mz_ore::lex::LexBuf;
use mz_ore::option::OptionExt;
use mz_ore::result::ResultExt;
use mz_ore::soft_assert_eq_or_log;
use mz_ore::str::StrExt;
use mz_ore::{soft_assert_eq_or_log, soft_assert_or_log};
use mz_pgrepr::Type;
use mz_pgtz::timezone::{Timezone, TimezoneSpec};
use mz_proto::chrono::any_naive_datetime;
Expand Down Expand Up @@ -8033,8 +8033,10 @@ impl VariadicFunc {
}
.nullable(true),
ArrayCreate { elem_type } => {
debug_assert!(
input_types.iter().all(|t| t.scalar_type.base_eq(elem_type)),
soft_assert_or_log!(
input_types
.iter()
.all(|t| t.scalar_type.physical_eq(elem_type)),
"Args to ArrayCreate should have types that are compatible with the elem_type"
);
match elem_type {
Expand Down
18 changes: 16 additions & 2 deletions src/expr/src/scalar/func/impls/string.rs
Original file line number Diff line number Diff line change
Expand Up @@ -579,7 +579,14 @@ impl<'a> EagerUnaryFunc<'a> for CastStringToChar {

impl fmt::Display for CastStringToChar {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("text_to_char")
match self.length {
Some(length) => {
write!(f, "text_to_char[len={}, fail_on_len={}]", length.into_u32(), self.fail_on_len)
},
None => {
f.write_str("text_to_char[len=None]")
},
}
}
}

Expand Down Expand Up @@ -707,7 +714,14 @@ impl<'a> EagerUnaryFunc<'a> for CastStringToVarChar {

impl fmt::Display for CastStringToVarChar {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("text_to_varchar")
match self.length {
Some(length) => {
write!(f, "text_to_varchar[len={}, fail_on_len={}]", length.into_u32(), self.fail_on_len)
},
None => {
f.write_str("text_to_varchar[len=None]")
},
}
}
}

Expand Down
2 changes: 1 addition & 1 deletion src/repr/src/relation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ impl ColumnType {
nullable: self.nullable || other.nullable,
})
}
(scalar_type, other_scalar_type) if scalar_type.base_eq(&other_scalar_type) => {
(scalar_type, other_scalar_type) if scalar_type.physical_eq(&other_scalar_type) => {
Ok(ColumnType {
scalar_type: scalar_type.without_modifiers(),
nullable: self.nullable || other.nullable,
Expand Down
17 changes: 15 additions & 2 deletions src/repr/src/scalar.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2908,12 +2908,25 @@ impl ScalarType {
self.eq_inner(other, false)
}

// Determines equality among scalar types that ignores any custom OIDs or
// embedded values.
/// Determines equality among scalar types that ignores any custom OIDs or
/// embedded values.
pub fn structural_eq(&self, other: &ScalarType) -> bool {
self.eq_inner(other, true)
}

/// //////////// todo: comment
pub fn physical_eq(&self, other: &ScalarType) -> bool {
use ScalarType::*;
if self.base_eq(other) {
return true;
}
match (self, other) {
(String, VarChar { max_length: None }) => true,
(VarChar { max_length: None }, String) => true,
_ => false,
}
}

pub fn eq_inner(&self, other: &ScalarType, structure_only: bool) -> bool {
use ScalarType::*;
match (self, other) {
Expand Down
44 changes: 28 additions & 16 deletions src/sql/src/plan/lowering.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ use std::iter::repeat;

use itertools::Itertools;
use mz_expr::visit::Visit;
use mz_expr::{AccessStrategy, AggregateFunc, MirRelationExpr, MirScalarExpr};
use mz_expr::{AccessStrategy, AggregateFunc, MirRelationExpr, MirScalarExpr, UnaryFunc};
use mz_ore::collections::CollectionExt;
use mz_ore::stack::maybe_grow;
use mz_repr::*;
Expand Down Expand Up @@ -955,17 +955,23 @@ impl HirScalarExpr {
Literal(row, typ) => SS::Literal(Ok(row), typ),
Parameter(_) => panic!("cannot decorrelate expression with unbound parameters"),
CallUnmaterializable(func) => SS::CallUnmaterializable(func),
CallUnary { func, expr } => SS::CallUnary {
func,
expr: Box::new(expr.applied_to(
id_gen,
col_map,
cte_map,
inner,
subquery_map,
context,
)?),
},
CallUnary { func, expr } => {
if !matches!(func, UnaryFunc::CastVarCharToString(..)) {
SS::CallUnary {
func,
expr: Box::new(expr.applied_to(
id_gen,
col_map,
cte_map,
inner,
subquery_map,
context,
)?),
}
} else {
expr.applied_to(id_gen, col_map, cte_map, inner, subquery_map, context)?
}
}
CallBinary { func, expr1, expr2 } => SS::CallBinary {
func,
expr1: Box::new(expr1.applied_to(
Expand Down Expand Up @@ -1621,10 +1627,16 @@ impl HirScalarExpr {
Column(ColumnRef { level: 0, column }) => SS::Column(column),
Literal(datum, typ) => SS::Literal(Ok(datum), typ),
CallUnmaterializable(func) => SS::CallUnmaterializable(func),
CallUnary { func, expr } => SS::CallUnary {
func,
expr: Box::new(expr.lower_uncorrelated()?),
},
CallUnary { func, expr } => {
if !matches!(func, UnaryFunc::CastVarCharToString(..)) {
SS::CallUnary {
func,
expr: Box::new(expr.lower_uncorrelated()?),
}
} else {
expr.lower_uncorrelated()?
}
}
CallBinary { func, expr1, expr2 } => SS::CallBinary {
func,
expr1: Box::new(expr1.lower_uncorrelated()?),
Expand Down
4 changes: 2 additions & 2 deletions src/transform/src/column_knowledge.rs
Original file line number Diff line number Diff line change
Expand Up @@ -635,7 +635,7 @@ impl DatumKnowledge {
unreachable!();
};

if !s_typ.base_eq(o_typ) {
if !s_typ.physical_eq(o_typ) {
::tracing::error!("Undefined join of non-equal base types {s_typ:?} != {o_typ:?}");
*self = Self::top();
} else if s_val != o_val {
Expand Down Expand Up @@ -735,7 +735,7 @@ impl DatumKnowledge {
unreachable!();
};

if !s_typ.base_eq(o_typ) {
if !s_typ.physical_eq(o_typ) {
soft_panic_or_log!("Undefined meet of non-equal base types {s_typ:?} != {o_typ:?}");
*self = Self::top(); // this really should be Nothing
} else if s_val != o_val {
Expand Down
2 changes: 1 addition & 1 deletion src/transform/src/normalize_lets.rs
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,7 @@ mod support {
.column_types
.iter()
.zip(typ.column_types.iter())
.all(|(t1, t2)| t1.scalar_type.base_eq(&t2.scalar_type))
.all(|(t1, t2)| t1.scalar_type.physical_eq(&t2.scalar_type))
{
Err(crate::TransformError::Internal(format!(
"scalar types do not match: {:?} v {:?}",
Expand Down
21 changes: 3 additions & 18 deletions src/transform/src/typecheck.rs
Original file line number Diff line number Diff line change
Expand Up @@ -397,8 +397,7 @@ pub fn scalar_subtype_difference(sub: &ScalarType, sup: &ScalarType) -> Vec<Colu
}
}
(_, _) => {
// TODO(mgree) confirm that we don't want to allow numeric subtyping
if ScalarBaseType::from(sub) != ScalarBaseType::from(sup) {
if !sub.physical_eq(sup) {
diffs.push(ColumnTypeDifference::NotSubtype {
sub: sub.clone(),
sup: sup.clone(),
Expand All @@ -410,20 +409,6 @@ pub fn scalar_subtype_difference(sub: &ScalarType, sup: &ScalarType) -> Vec<Colu
diffs
}

/// Returns true when it is safe to treat a `sub` row as an `sup` row.
///
/// Both rows must have the same number of columns, each pair of columns must
/// have equal base scalar types, and wherever `sup` declares a column
/// nullable, the corresponding `sub` column must be nullable too. The
/// converse is fine: a non-nullable `sup` column may correspond to a
/// nullable `sub` column.
pub fn is_subtype_of(sub: &[ColumnType], sup: &[ColumnType]) -> bool {
    sub.len() == sup.len()
        && sub.iter().zip_eq(sup.iter()).all(|(got, known)| {
            let nullability_ok = !known.nullable || got.nullable;
            nullability_ok && got.scalar_type.base_eq(&known.scalar_type)
        })
}

/// Check that the visible type of each query has not been changed
#[derive(Debug)]
pub struct Typecheck {
Expand Down Expand Up @@ -459,7 +444,7 @@ impl Typecheck {

/// New non-transient global IDs will be treated as an error
///
/// Only turn this on after the context has been appropraitely populated by, e.g., an earlier run
/// Only turn this on after the context has been appropriately populated by, e.g., an earlier run
pub fn disallow_new_globals(mut self) -> Self {
self.disallow_new_globals = true;
self
Expand Down Expand Up @@ -1362,7 +1347,7 @@ impl ColumnTypeDifference {
let sub = h.humanize_scalar_type(sub);
let sup = h.humanize_scalar_type(sup);

writeln!(f, "{sub} is a not a subtype of {sup}")
writeln!(f, "{sub} is not a subtype of {sup}")
}
Nullability { sub, sup } => {
let sub = h.humanize_column_type(sub);
Expand Down
4 changes: 2 additions & 2 deletions src/transform/tests/test_transforms/typecheck.spec
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ Return
mismatched column types: couldn't compute union of column types in let rec: Can't union types: Bool and Int64
got Int64
expected Bool?
Bool is a not a subtype of Int64
Bool is not a subtype of Int64
Bool? is nullable but Int64 is not
----
----
Expand Down Expand Up @@ -344,7 +344,7 @@ Filter #2
mismatched column types: expected boolean condition
got String
expected Bool?
String is a not a subtype of Bool
String is not a subtype of Bool
----
----

Expand Down
6 changes: 3 additions & 3 deletions test/sqllogictest/freshmart.slt
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@ Explained Query:
Get l6
Return
Project (#0, #13, #11)
Map (case when (#8) IS NULL then null else #7 end, (((((#2 * case when (#3 <= 3) then 1.2 else case when ((#3 <= 10) AND (#3 >= 4)) then 1.1 else 0.9 end end) * coalesce((1 - (#5 / 100)), 1)) * case when (#12 <= 3) then 1.1 else case when ((#12 <= 10) AND (#12 >= 4)) then 1.05 else 1 end end) * case when (#2 > #4) then (1 + ((#2 - #4) / #4)) else (1 - ((#4 - #2) / #4)) end) * case when ilike["%cheap%"](varchar_to_text(#1)) then 0.8 else 1 end))
Map (case when (#8) IS NULL then null else #7 end, (((((#2 * case when (#3 <= 3) then 1.2 else case when ((#3 <= 10) AND (#3 >= 4)) then 1.1 else 0.9 end end) * coalesce((1 - (#5 / 100)), 1)) * case when (#12 <= 3) then 1.1 else case when ((#12 <= 10) AND (#12 >= 4)) then 1.05 else 1 end end) * case when (#2 > #4) then (1 + ((#2 - #4) / #4)) else (1 - ((#4 - #2) / #4)) end) * case when ilike["%cheap%"](#1) then 0.8 else 1 end))
Join on=(#0 = #6 = #9 = #10) type=delta
ArrangeBy keys=[[#0]]
Get l6
Expand Down Expand Up @@ -639,7 +639,7 @@ Explained Query:
Get l7
cte l11 =
Project (#0, #1, #3, #5, #18)
Map (case when (#14) IS NULL then null else #13 end, (((((#8 * case when (#9 <= 3) then 1.2 else case when ((#9 <= 10) AND (#9 >= 4)) then 1.1 else 0.9 end end) * coalesce((1 - (#11 / 100)), 1)) * case when (#17 <= 3) then 1.1 else case when ((#17 <= 10) AND (#17 >= 4)) then 1.05 else 1 end end) * case when (#8 > #10) then (1 + ((#8 - #10) / #10)) else (1 - ((#10 - #8) / #10)) end) * case when ilike["%cheap%"](varchar_to_text(#7)) then 0.8 else 1 end))
Map (case when (#14) IS NULL then null else #13 end, (((((#8 * case when (#9 <= 3) then 1.2 else case when ((#9 <= 10) AND (#9 >= 4)) then 1.1 else 0.9 end end) * coalesce((1 - (#11 / 100)), 1)) * case when (#17 <= 3) then 1.1 else case when ((#17 <= 10) AND (#17 >= 4)) then 1.05 else 1 end end) * case when (#8 > #10) then (1 + ((#8 - #10) / #10)) else (1 - ((#10 - #8) / #10)) end) * case when ilike["%cheap%"](#7) then 0.8 else 1 end))
Join on=(#0 = #2 = #6 = #12 = #15 = #16 AND #3 = #4) type=delta
ArrangeBy keys=[[#0]]
Project (#0, #1)
Expand Down Expand Up @@ -895,7 +895,7 @@ Explained Query:
Get l7
cte l11 =
Project (#0, #1, #3, #5, #18)
Map (case when (#14) IS NULL then null else #13 end, (((((#8 * case when (#9 <= 3) then 1.2 else case when ((#9 <= 10) AND (#9 >= 4)) then 1.1 else 0.9 end end) * coalesce((1 - (#11 / 100)), 1)) * case when (#17 <= 3) then 1.1 else case when ((#17 <= 10) AND (#17 >= 4)) then 1.05 else 1 end end) * case when (#8 > #10) then (1 + ((#8 - #10) / #10)) else (1 - ((#10 - #8) / #10)) end) * case when ilike["%cheap%"](varchar_to_text(#7)) then 0.8 else 1 end))
Map (case when (#14) IS NULL then null else #13 end, (((((#8 * case when (#9 <= 3) then 1.2 else case when ((#9 <= 10) AND (#9 >= 4)) then 1.1 else 0.9 end end) * coalesce((1 - (#11 / 100)), 1)) * case when (#17 <= 3) then 1.1 else case when ((#17 <= 10) AND (#17 >= 4)) then 1.05 else 1 end end) * case when (#8 > #10) then (1 + ((#8 - #10) / #10)) else (1 - ((#10 - #8) / #10)) end) * case when ilike["%cheap%"](#7) then 0.8 else 1 end))
Join on=(#0 = #2 = #6 = #12 = #15 = #16 AND #3 = #4) type=delta
ArrangeBy keys=[[#0]]
Project (#0, #1)
Expand Down
4 changes: 2 additions & 2 deletions test/sqllogictest/github-5536.slt
Original file line number Diff line number Diff line change
Expand Up @@ -44,14 +44,14 @@ Explained Query:
Filter (#0 <= #0) AND (#0 >= #0)
ReadStorage materialize.public.t5
ArrangeBy keys=[[]]
Filter (#1 = text_to_char(text_to_varchar(boolean_to_text(like["0.31161855206970124"](padchar(#1))))))
Filter (#1 = text_to_char[len=None](text_to_varchar[len=None](boolean_to_text(like["0.31161855206970124"](padchar(#1))))))
ReadStorage materialize.public.t3
ArrangeBy keys=[[]]
ReadStorage materialize.public.t5

Source materialize.public.t0
Source materialize.public.t3
filter=((#1 = text_to_char(text_to_varchar(boolean_to_text(like["0.31161855206970124"](padchar(#1)))))))
filter=((#1 = text_to_char[len=None](text_to_varchar[len=None](boolean_to_text(like["0.31161855206970124"](padchar(#1)))))))
Source materialize.public.t5

Target cluster: quickstart
Expand Down
2 changes: 1 addition & 1 deletion test/sqllogictest/ldbc_bi.slt
Original file line number Diff line number Diff line change
Expand Up @@ -2017,7 +2017,7 @@ Explained Query:
ArrangeBy keys=[[#0{rootpostlanguage}]] // { arity: 1 }
Distinct project=[#0{rootpostlanguage}] // { arity: 1 }
Project (#0{rootpostlanguage}) // { arity: 1 }
Filter (#0{rootpostlanguage} = varchar_to_text(#1)) // { arity: 2 }
Filter (#0{rootpostlanguage} = #1) // { arity: 2 }
FlatMap unnest_array({"es", "ta", "pt"}) // { arity: 2 }
Distinct project=[#0{rootpostlanguage}] // { arity: 1 }
Project (#13{rootpostlanguage}) // { arity: 1 }
Expand Down
2 changes: 1 addition & 1 deletion test/sqllogictest/ldbc_bi_eager.slt
Original file line number Diff line number Diff line change
Expand Up @@ -2024,7 +2024,7 @@ Explained Query:
ArrangeBy keys=[[#0{rootpostlanguage}]] // { arity: 1 }
Distinct project=[#0{rootpostlanguage}] // { arity: 1 }
Project (#0{rootpostlanguage}) // { arity: 1 }
Filter (#0{rootpostlanguage} = varchar_to_text(#1)) // { arity: 2 }
Filter (#0{rootpostlanguage} = #1) // { arity: 2 }
FlatMap unnest_array({"es", "ta", "pt"}) // { arity: 2 }
Distinct project=[#0{rootpostlanguage}] // { arity: 1 }
Project (#13{rootpostlanguage}) // { arity: 1 }
Expand Down
2 changes: 1 addition & 1 deletion test/sqllogictest/sqlite
Submodule sqlite updated 0 files
Loading