2 changes: 1 addition & 1 deletion enginetest/enginetests.go
@@ -3046,7 +3046,7 @@ func TestRenameColumn(t *testing.T, harness Harness) {
TestQueryWithContext(t, ctx, e, harness, "ALTER TABLE mydb.tabletest RENAME COLUMN s TO i1", []sql.Row{{types.NewOkResult(0)}}, nil, nil, nil)
TestQueryWithContext(t, ctx, e, harness, "SHOW FULL COLUMNS FROM mydb.tabletest", []sql.Row{
{"i", "int", nil, "NO", "PRI", nil, "", "", ""},
{"i1", "varchar(20)", "utf8mb4_0900_bin", "NO", "", nil, "", "", ""},
{"i1", "text", "utf8mb4_0900_bin", "NO", "", nil, "", "", ""},
}, nil, nil, nil)
})
}
26 changes: 26 additions & 0 deletions enginetest/join_planning_tests.go
@@ -945,6 +945,32 @@ where u in (select * from rec);`,
},
},
},
{
name: "join varchar and text columns",
setup: []string{
"CREATE table varchartable (pk int primary key, s varchar(20));",
"CREATE table texttable (pk int primary key, t text);",
"insert into varchartable values (1,'first'), (2,'second'), (3,'third');",
"insert into texttable values (1,'first'), (2,'second'), (3,'third');",
},
// ensure IN / NOT IN subqueries comparing varchar and text columns still plan as hash joins
tests: []JoinPlanTest{
{
q: "select /*+ HASH_JOIN(varchartable,texttable) */ * from varchartable where s in (select t from texttable) order by pk",
types: []plan.JoinType{plan.JoinTypeHash},
exp: []sql.Row{
{1, "first"},
{2, "second"},
{3, "third"},
},
},
{
q: "select /*+ HASH_JOIN(varchartable,texttable) */ * from varchartable where s not in (select t from texttable) order by pk",
types: []plan.JoinType{plan.JoinTypeLeftOuterHashExcludeNulls},
exp: []sql.Row{},
},
},
},
{
name: "join concat tests",
setup: []string{
20 changes: 20 additions & 0 deletions enginetest/queries/queries.go
@@ -5665,6 +5665,18 @@ SELECT * FROM cte WHERE d = 2;`,
{"third row"},
},
},
{
Query: "select * from mytable intersect select * from tabletest",
Expected: []sql.Row{{1, "first row"}, {2, "second row"}, {3, "third row"}},
},
{
Query: "select * from mytable union distinct select * from tabletest",
Expected: []sql.Row{{1, "first row"}, {2, "second row"}, {3, "third row"}},
},
{
Query: "select * from mytable except select * from tabletest",
Expected: []sql.Row{},
},
{
SkipPrepared: true,
Query: "",
@@ -6772,6 +6784,10 @@ SELECT * FROM cte WHERE d = 2;`,
Query: "select replace(s, 'row', '') from mytable order by i",
Expected: []sql.Row{{"first "}, {"second "}, {"third "}},
},
{
Query: "select replace(s, 'row', '') from tabletest order by i",
Expected: []sql.Row{{"first "}, {"second "}, {"third "}},
},
{
Query: "select rpad(s, 13, ' ') from mytable order by i",
Expected: []sql.Row{{"first row "}, {"second row "}, {"third row "}},
@@ -6780,6 +6796,10 @@ SELECT * FROM cte WHERE d = 2;`,
Query: "select lpad(s, 13, ' ') from mytable order by i",
Expected: []sql.Row{{" first row"}, {" second row"}, {" third row"}},
},
{
Query: "select lpad(s, 13, ' ') from tabletest order by i",
Expected: []sql.Row{{" first row"}, {" second row"}, {" third row"}},
},
{
Query: "select sqrt(i) from mytable order by i",
Expected: []sql.Row{{1.0}, {1.4142135623730951}, {1.7320508075688772}},
2 changes: 1 addition & 1 deletion enginetest/scriptgen/setup/scripts/tabletest
@@ -1,7 +1,7 @@
exec
create table tabletest (
i int primary key,
s varchar(20) not null
s text not null
)
----

2 changes: 1 addition & 1 deletion enginetest/scriptgen/setup/setup_data.sg.go

Some generated files are not rendered by default.

10 changes: 5 additions & 5 deletions memory/table.go
@@ -1971,7 +1971,7 @@ func (t *Table) DropCheck(ctx *sql.Context, chName string) error {
return fmt.Errorf("check '%s' was not found on the table", chName)
}

func (t *Table) createIndex(data *TableData, name string, columns []sql.IndexColumn, constraint sql.IndexConstraint, comment string) (sql.Index, error) {
func (t *Table) createIndex(ctx *sql.Context, data *TableData, name string, columns []sql.IndexColumn, constraint sql.IndexConstraint, comment string) (sql.Index, error) {
if name == "" {
for _, column := range columns {
name += column.Name + "_"
@@ -2005,7 +2005,7 @@ func (t *Table) createIndex(data *TableData, name string, columns []sql.IndexCol
}

if constraint == sql.IndexConstraint_Unique {
err := data.errIfDuplicateEntryExist(colNames, name)
err := data.errIfDuplicateEntryExist(ctx, colNames, name)
if err != nil {
return nil, err
}
@@ -2041,7 +2041,7 @@ func (t *Table) CreateIndex(ctx *sql.Context, idx sql.IndexDef) error {
data.indexes = make(map[string]sql.Index)
}

index, err := t.createIndex(data, idx.Name, idx.Columns, idx.Constraint, idx.Comment)
index, err := t.createIndex(ctx, data, idx.Name, idx.Columns, idx.Constraint, idx.Comment)
if err != nil {
return err
}
@@ -2107,7 +2107,7 @@ func (t *Table) CreateFulltextIndex(ctx *sql.Context, indexDef sql.IndexDef, key
data.indexes = make(map[string]sql.Index)
}

index, err := t.createIndex(data, indexDef.Name, indexDef.Columns, indexDef.Constraint, indexDef.Comment)
index, err := t.createIndex(ctx, data, indexDef.Name, indexDef.Columns, indexDef.Constraint, indexDef.Comment)
if err != nil {
return err
}
@@ -2138,7 +2138,7 @@ func (t *Table) CreateVectorIndex(ctx *sql.Context, idx sql.IndexDef, distanceTy
data.indexes = make(map[string]sql.Index)
}

index, err := t.createIndex(data, idx.Name, idx.Columns, idx.Constraint, idx.Comment)
index, err := t.createIndex(ctx, data, idx.Name, idx.Columns, idx.Constraint, idx.Comment)
if err != nil {
return err
}
5 changes: 3 additions & 2 deletions memory/table_data.go
@@ -15,6 +15,7 @@
package memory

import (
"context"
"fmt"
"sort"
"strconv"
@@ -274,7 +275,7 @@ func (td *TableData) numRows(ctx *sql.Context) (uint64, error) {
}

// throws an error if any two or more rows share the same |cols| values.
func (td *TableData) errIfDuplicateEntryExist(cols []string, idxName string) error {
func (td *TableData) errIfDuplicateEntryExist(ctx context.Context, cols []string, idxName string) error {
columnMapping, err := td.columnIndexes(cols)

// We currently skip validating duplicates on unique virtual columns.
@@ -296,7 +297,7 @@ func (td *TableData) errIfDuplicateEntryExist(cols []string, idxName string) err
if hasNulls(idxPrefixKey) {
continue
}
h, err := sql.HashOf(idxPrefixKey)
h, err := sql.HashOf(ctx, idxPrefixKey)
if err != nil {
return err
}
8 changes: 6 additions & 2 deletions sql/cache.go
@@ -15,6 +15,7 @@
package sql

import (
"context"
"fmt"
"runtime"
"sync"
@@ -25,7 +26,7 @@ import (
)

// HashOf returns a hash of the given value to be used as key in a cache.
func HashOf(v Row) (uint64, error) {
func HashOf(ctx context.Context, v Row) (uint64, error) {
hash := digestPool.Get().(*xxhash.Digest)
hash.Reset()
defer digestPool.Put(hash)
@@ -36,7 +37,10 @@ func HashOf(v Row) (uint64, error) {
return 0, err
}
}

x, err := UnwrapAny(ctx, x)
if err != nil {
return 0, err
}
// TODO: probably much faster to do this with a type switch
// TODO: we don't have the type info necessary to appropriately encode the value of a string with a non-standard
// collation, which means that two strings that differ only in their collations will hash to the same value.
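For context, a minimal sketch of calling the new context-aware HashOf from caller code. Only the HashOf(ctx, row) signature and the UnwrapAny call come from this change; the import path, the main function, and the dedupe loop below are illustrative assumptions, not part of the PR.

package main

import (
	"context"
	"fmt"

	"github.com/dolthub/go-mysql-server/sql" // assumed import path for this repo
)

func main() {
	ctx := context.Background()
	seen := make(map[uint64]struct{})
	rows := []sql.Row{
		sql.NewRow(1, "first"),
		sql.NewRow(1, "first"),
		sql.NewRow(2, "second"),
	}
	for _, r := range rows {
		// HashOf now takes a context so values can be unwrapped
		// (see UnwrapAny in the hunk above) before hashing.
		h, err := sql.HashOf(ctx, r)
		if err != nil {
			panic(err)
		}
		if _, ok := seen[h]; ok {
			continue // duplicate row, skip it
		}
		seen[h] = struct{}{}
		fmt.Println(r)
	}
}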
7 changes: 5 additions & 2 deletions sql/cache_test.go
@@ -15,6 +15,7 @@
package sql

import (
"context"
"errors"
"testing"

@@ -179,10 +180,11 @@ func TestRowsCache(t *testing.T) {
}

func BenchmarkHashOf(b *testing.B) {
ctx := context.Background()
row := NewRow(1, "1")
b.ResetTimer()
for i := 0; i < b.N; i++ {
sum, err := HashOf(row)
sum, err := HashOf(ctx, row)
if err != nil {
b.Fatal(err)
}
@@ -193,11 +195,12 @@ func BenchmarkParallelHashOf(b *testing.B) {
}

func BenchmarkParallelHashOf(b *testing.B) {
ctx := context.Background()
row := NewRow(1, "1")
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
sum, err := HashOf(row)
sum, err := HashOf(ctx, row)
if err != nil {
b.Fatal(err)
}
10 changes: 5 additions & 5 deletions sql/iters/rel_iters.go
@@ -571,7 +571,7 @@ func (di *distinctIter) Next(ctx *sql.Context) (sql.Row, error) {
return nil, err
}

hash, err := sql.HashOf(row)
hash, err := sql.HashOf(ctx, row)
if err != nil {
return nil, err
}
@@ -647,7 +647,7 @@ func (ii *IntersectIter) Next(ctx *sql.Context) (sql.Row, error) {
return nil, err
}

hash, herr := sql.HashOf(res)
hash, herr := sql.HashOf(ctx, res)
if herr != nil {
return nil, herr
}
@@ -669,7 +669,7 @@ func (ii *IntersectIter) Next(ctx *sql.Context) (sql.Row, error) {
return nil, err
}

hash, herr := sql.HashOf(res)
hash, herr := sql.HashOf(ctx, res)
if herr != nil {
return nil, herr
}
@@ -714,7 +714,7 @@ func (ei *ExceptIter) Next(ctx *sql.Context) (sql.Row, error) {
return nil, err
}

hash, herr := sql.HashOf(res)
hash, herr := sql.HashOf(ctx, res)
if herr != nil {
return nil, herr
}
@@ -736,7 +736,7 @@ func (ei *ExceptIter) Next(ctx *sql.Context) (sql.Row, error) {
return nil, err
}

hash, herr := sql.HashOf(res)
hash, herr := sql.HashOf(ctx, res)
if herr != nil {
return nil, herr
}
2 changes: 1 addition & 1 deletion sql/plan/hash_lookup.go
@@ -127,7 +127,7 @@ func (n *HashLookup) GetHashKey(ctx *sql.Context, e sql.Expression, row sql.Row)
return nil, err
}
if s, ok := key.([]interface{}); ok {
return sql.HashOf(s)
return sql.HashOf(ctx, s)
}
// byte slices are not hashable
if k, ok := key.([]byte); ok {
4 changes: 2 additions & 2 deletions sql/plan/insubquery.go
@@ -47,7 +47,7 @@ func NewInSubquery(left sql.Expression, right sql.Expression) *InSubquery {
return &InSubquery{expression.BinaryExpressionStub{LeftChild: left, RightChild: right}}
}

var nilKey, _ = sql.HashOf(sql.NewRow(nil))
var nilKey, _ = sql.HashOf(nil, sql.NewRow(nil))

// Eval implements the Expression interface.
func (in *InSubquery) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
@@ -96,7 +96,7 @@ func (in *InSubquery) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
return false, nil
}

key, err := sql.HashOf(sql.NewRow(nLeft))
key, err := sql.HashOf(ctx, sql.NewRow(nLeft))
if err != nil {
return nil, err
}
2 changes: 1 addition & 1 deletion sql/plan/subquery.go
@@ -477,7 +477,7 @@ func putAllRows(ctx *sql.Context, cache sql.KeyValueCache, vals []interface{}) e
if err != nil {
return err
}
rowKey, err := sql.HashOf(sql.NewRow(val))
rowKey, err := sql.HashOf(ctx, sql.NewRow(val))
if err != nil {
return err
}
8 changes: 4 additions & 4 deletions sql/rowexec/join_iters.go
@@ -462,7 +462,7 @@ func (i *fullJoinIter) Next(ctx *sql.Context) (sql.Row, error) {

rightRow, err := i.r.Next(ctx)
if err == io.EOF {
key, err := sql.HashOf(i.leftRow)
key, err := sql.HashOf(ctx, i.leftRow)
if err != nil {
return nil, err
}
@@ -485,12 +485,12 @@ func (i *fullJoinIter) Next(ctx *sql.Context) (sql.Row, error) {
if !sql.IsTrue(matches) {
continue
}
rkey, err := sql.HashOf(rightRow)
rkey, err := sql.HashOf(ctx, rightRow)
if err != nil {
return nil, err
}
i.seenRight[rkey] = struct{}{}
lKey, err := sql.HashOf(i.leftRow)
lKey, err := sql.HashOf(ctx, i.leftRow)
if err != nil {
return nil, err
}
@@ -517,7 +517,7 @@ func (i *fullJoinIter) Next(ctx *sql.Context) (sql.Row, error) {
return nil, io.EOF
}

key, err := sql.HashOf(rightRow)
key, err := sql.HashOf(ctx, rightRow)
if err != nil {
return nil, err
}
2 changes: 1 addition & 1 deletion sql/rowexec/other_iters.go
@@ -334,7 +334,7 @@ func (ci *concatIter) Next(ctx *sql.Context) (sql.Row, error) {
if err != nil {
return nil, err
}
hash, err := sql.HashOf(res)
hash, err := sql.HashOf(ctx, res)
if err != nil {
return nil, err
}
2 changes: 1 addition & 1 deletion sql/rowexec/rel_iters.go
@@ -446,7 +446,7 @@ func (r *recursiveCteIter) Next(ctx *sql.Context) (sql.Row, error) {

var key uint64
if r.deduplicate {
key, _ = sql.HashOf(row)
key, _ = sql.HashOf(ctx, row)
if k, _ := r.cache.Get(key); k != nil {
// skip duplicate
continue
2 changes: 1 addition & 1 deletion sql/rowexec/update.go
@@ -248,7 +248,7 @@ func (u *updateJoinIter) Next(ctx *sql.Context) (sql.Row, error) {

// Determine whether this row in the table has already been updated
cache := u.getOrCreateCache(ctx, tableName)
hash, err := sql.HashOf(oldTableRow)
hash, err := sql.HashOf(ctx, oldTableRow)
if err != nil {
return nil, err
}