Skip to content

Commit 4886c90

Browse files
committed
pass sqllogictest
1 parent 3023026 commit 4886c90

File tree

13 files changed

+74
-109
lines changed

13 files changed

+74
-109
lines changed

datafusion/sqllogictest/test_files/aggregate.slt

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1098,10 +1098,9 @@ physical_plan
10981098
04)------AggregateExec: mode=Partial, gby=[], aggr=[median(alias1)]
10991099
05)--------AggregateExec: mode=FinalPartitioned, gby=[alias1@0 as alias1], aggr=[]
11001100
06)----------CoalesceBatchesExec: target_batch_size=8192
1101-
07)------------RepartitionExec: partitioning=Hash([alias1@0], 4), input_partitions=4
1102-
08)--------------RepartitionExec: partitioning=RoundRobinBatch(4), input_partitions=1
1103-
09)----------------AggregateExec: mode=Partial, gby=[c@0 as alias1], aggr=[]
1104-
10)------------------DataSourceExec: partitions=1, partition_sizes=[1]
1101+
07)------------RepartitionExec: partitioning=Hash([alias1@0], 4), input_partitions=1
1102+
08)--------------AggregateExec: mode=Partial, gby=[c@0 as alias1], aggr=[]
1103+
09)----------------DataSourceExec: partitions=1, partition_sizes=[1]
11051104

11061105
statement ok
11071106
drop table t;
@@ -6185,10 +6184,9 @@ physical_plan
61856184
02)--FilterExec: max(having_test.v1)@2 = 3, projection=[v1@0, v2@1]
61866185
03)----AggregateExec: mode=FinalPartitioned, gby=[v1@0 as v1, v2@1 as v2], aggr=[max(having_test.v1)]
61876186
04)------CoalesceBatchesExec: target_batch_size=8192
6188-
05)--------RepartitionExec: partitioning=Hash([v1@0, v2@1], 4), input_partitions=4
6189-
06)----------RepartitionExec: partitioning=RoundRobinBatch(4), input_partitions=1
6190-
07)------------AggregateExec: mode=Partial, gby=[v1@0 as v1, v2@1 as v2], aggr=[max(having_test.v1)]
6191-
08)--------------DataSourceExec: partitions=1, partition_sizes=[1]
6187+
05)--------RepartitionExec: partitioning=Hash([v1@0, v2@1], 4), input_partitions=1
6188+
06)----------AggregateExec: mode=Partial, gby=[v1@0 as v1, v2@1 as v2], aggr=[max(having_test.v1)]
6189+
07)------------DataSourceExec: partitions=1, partition_sizes=[1]
61926190

61936191

61946192
query error

datafusion/sqllogictest/test_files/aggregates_topk.slt

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,9 @@ physical_plan
4747
02)--SortExec: TopK(fetch=4), expr=[max(traces.timestamp)@1 DESC], preserve_partitioning=[true]
4848
03)----AggregateExec: mode=FinalPartitioned, gby=[trace_id@0 as trace_id], aggr=[max(traces.timestamp)]
4949
04)------CoalesceBatchesExec: target_batch_size=8192
50-
05)--------RepartitionExec: partitioning=Hash([trace_id@0], 4), input_partitions=4
51-
06)----------RepartitionExec: partitioning=RoundRobinBatch(4), input_partitions=1
52-
07)------------AggregateExec: mode=Partial, gby=[trace_id@0 as trace_id], aggr=[max(traces.timestamp)]
53-
08)--------------DataSourceExec: partitions=1, partition_sizes=[1]
50+
05)--------RepartitionExec: partitioning=Hash([trace_id@0], 4), input_partitions=1
51+
06)----------AggregateExec: mode=Partial, gby=[trace_id@0 as trace_id], aggr=[max(traces.timestamp)]
52+
07)------------DataSourceExec: partitions=1, partition_sizes=[1]
5453

5554
query TI
5655
select * from (select trace_id, MAX(timestamp) max_ts from traces t group by trace_id) where trace_id != 'b' order by max_ts desc limit 3;
@@ -111,10 +110,9 @@ physical_plan
111110
02)--SortExec: TopK(fetch=4), expr=[max(traces.timestamp)@1 DESC], preserve_partitioning=[true]
112111
03)----AggregateExec: mode=FinalPartitioned, gby=[trace_id@0 as trace_id], aggr=[max(traces.timestamp)], lim=[4]
113112
04)------CoalesceBatchesExec: target_batch_size=8192
114-
05)--------RepartitionExec: partitioning=Hash([trace_id@0], 4), input_partitions=4
115-
06)----------RepartitionExec: partitioning=RoundRobinBatch(4), input_partitions=1
116-
07)------------AggregateExec: mode=Partial, gby=[trace_id@0 as trace_id], aggr=[max(traces.timestamp)], lim=[4]
117-
08)--------------DataSourceExec: partitions=1, partition_sizes=[1]
113+
05)--------RepartitionExec: partitioning=Hash([trace_id@0], 4), input_partitions=1
114+
06)----------AggregateExec: mode=Partial, gby=[trace_id@0 as trace_id], aggr=[max(traces.timestamp)], lim=[4]
115+
07)------------DataSourceExec: partitions=1, partition_sizes=[1]
118116

119117
query TT
120118
explain select trace_id, MIN(timestamp) from traces group by trace_id order by MIN(timestamp) desc limit 4;

datafusion/sqllogictest/test_files/count_star_rule.slt

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -50,10 +50,9 @@ physical_plan
5050
01)ProjectionExec: expr=[a@0 as a, count(Int64(1))@1 as count()]
5151
02)--AggregateExec: mode=FinalPartitioned, gby=[a@0 as a], aggr=[count(Int64(1))]
5252
03)----CoalesceBatchesExec: target_batch_size=8192
53-
04)------RepartitionExec: partitioning=Hash([a@0], 4), input_partitions=4
54-
05)--------RepartitionExec: partitioning=RoundRobinBatch(4), input_partitions=1
55-
06)----------AggregateExec: mode=Partial, gby=[a@0 as a], aggr=[count(Int64(1))]
56-
07)------------DataSourceExec: partitions=1, partition_sizes=[1]
53+
04)------RepartitionExec: partitioning=Hash([a@0], 4), input_partitions=1
54+
05)--------AggregateExec: mode=Partial, gby=[a@0 as a], aggr=[count(Int64(1))]
55+
06)----------DataSourceExec: partitions=1, partition_sizes=[1]
5756

5857
query TT
5958
EXPLAIN SELECT t1.a, COUNT() AS cnt FROM t1 GROUP BY t1.a HAVING COUNT() > 0;

datafusion/sqllogictest/test_files/explain_tree.slt

Lines changed: 10 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -312,26 +312,17 @@ physical_plan
312312
20)│ RepartitionExec ││ RepartitionExec │
313313
21)│ -------------------- ││ -------------------- │
314314
22)│ output_partition_count: ││ output_partition_count: │
315-
23)│ 4 ││ 4
315+
23)│ 1 ││ 1
316316
24)│ ││ │
317317
25)│ partitioning_scheme: ││ partitioning_scheme: │
318318
26)│ Hash([int_col@0], 4) ││ Hash([int_col@0], 4) │
319319
27)└─────────────┬─────────────┘└─────────────┬─────────────┘
320320
28)┌─────────────┴─────────────┐┌─────────────┴─────────────┐
321-
29)│ RepartitionExec ││ RepartitionExec
321+
29)│ DataSourceExec ││ DataSourceExec
322322
30)│ -------------------- ││ -------------------- │
323-
31)│ output_partition_count: ││ output_partition_count: │
324-
32)│ 1 ││ 1 │
325-
33)│ ││ │
326-
34)│ partitioning_scheme: ││ partitioning_scheme: │
327-
35)│ RoundRobinBatch(4) ││ RoundRobinBatch(4) │
328-
36)└─────────────┬─────────────┘└─────────────┬─────────────┘
329-
37)┌─────────────┴─────────────┐┌─────────────┴─────────────┐
330-
38)│ DataSourceExec ││ DataSourceExec │
331-
39)│ -------------------- ││ -------------------- │
332-
40)│ files: 1 ││ files: 1 │
333-
41)│ format: csv ││ format: parquet │
334-
42)└───────────────────────────┘└───────────────────────────┘
323+
31)│ files: 1 ││ files: 1 │
324+
32)│ format: csv ││ format: parquet │
325+
33)└───────────────────────────┘└───────────────────────────┘
335326

336327
# 3 Joins
337328
query TT
@@ -387,26 +378,17 @@ physical_plan
387378
41)-----------------------------│ RepartitionExec ││ RepartitionExec │
388379
42)-----------------------------│ -------------------- ││ -------------------- │
389380
43)-----------------------------│ output_partition_count: ││ output_partition_count: │
390-
44)-----------------------------│ 4 ││ 4
381+
44)-----------------------------│ 1 ││ 1
391382
45)-----------------------------│ ││ │
392383
46)-----------------------------│ partitioning_scheme: ││ partitioning_scheme: │
393384
47)-----------------------------│ Hash([int_col@0], 4) ││ Hash([int_col@0], 4) │
394385
48)-----------------------------└─────────────┬─────────────┘└─────────────┬─────────────┘
395386
49)-----------------------------┌─────────────┴─────────────┐┌─────────────┴─────────────┐
396-
50)-----------------------------│ RepartitionExec ││ RepartitionExec
387+
50)-----------------------------│ DataSourceExec ││ DataSourceExec
397388
51)-----------------------------│ -------------------- ││ -------------------- │
398-
52)-----------------------------│ output_partition_count: ││ output_partition_count: │
399-
53)-----------------------------│ 1 ││ 1 │
400-
54)-----------------------------│ ││ │
401-
55)-----------------------------│ partitioning_scheme: ││ partitioning_scheme: │
402-
56)-----------------------------│ RoundRobinBatch(4) ││ RoundRobinBatch(4) │
403-
57)-----------------------------└─────────────┬─────────────┘└─────────────┬─────────────┘
404-
58)-----------------------------┌─────────────┴─────────────┐┌─────────────┴─────────────┐
405-
59)-----------------------------│ DataSourceExec ││ DataSourceExec │
406-
60)-----------------------------│ -------------------- ││ -------------------- │
407-
61)-----------------------------│ files: 1 ││ files: 1 │
408-
62)-----------------------------│ format: csv ││ format: parquet │
409-
63)-----------------------------└───────────────────────────┘└───────────────────────────┘
389+
52)-----------------------------│ files: 1 ││ files: 1 │
390+
53)-----------------------------│ format: csv ││ format: parquet │
391+
54)-----------------------------└───────────────────────────┘└───────────────────────────┘
410392

411393
# Long Filter (demonstrate what happens with wrapping)
412394
query TT

datafusion/sqllogictest/test_files/group_by.slt

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2962,10 +2962,9 @@ physical_plan
29622962
03)----ProjectionExec: expr=[country@0 as country, first_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST]@1 as fv1, last_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST]@2 as fv2]
29632963
04)------AggregateExec: mode=FinalPartitioned, gby=[country@0 as country], aggr=[first_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST], last_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST]]
29642964
05)--------CoalesceBatchesExec: target_batch_size=8192
2965-
06)----------RepartitionExec: partitioning=Hash([country@0], 8), input_partitions=8
2966-
07)------------RepartitionExec: partitioning=RoundRobinBatch(8), input_partitions=1
2967-
08)--------------AggregateExec: mode=Partial, gby=[country@0 as country], aggr=[first_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST], last_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST]]
2968-
09)----------------DataSourceExec: partitions=1, partition_sizes=[1]
2965+
06)----------RepartitionExec: partitioning=Hash([country@0], 8), input_partitions=1
2966+
07)------------AggregateExec: mode=Partial, gby=[country@0 as country], aggr=[first_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST], last_value(sales_global.amount) ORDER BY [sales_global.ts ASC NULLS LAST]]
2967+
08)--------------DataSourceExec: partitions=1, partition_sizes=[1]
29692968

29702969
query TRR
29712970
SELECT country, FIRST_VALUE(amount ORDER BY ts ASC) AS fv1,

datafusion/sqllogictest/test_files/insert.slt

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -71,9 +71,8 @@ physical_plan
7171
05)--------BoundedWindowAggExec: wdw=[sum(aggregate_test_100.c4) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING: Ok(Field { name: "sum(aggregate_test_100.c4) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING", data_type: Int64, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }), frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(1)), end_bound: Following(UInt64(1)), is_causal: false }, count(Int64(1)) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING: Ok(Field { name: "count(Int64(1)) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING", data_type: Int64, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: {} }), frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(1)), end_bound: Following(UInt64(1)), is_causal: false }], mode=[Sorted]
7272
06)----------SortExec: expr=[c1@0 ASC NULLS LAST, c9@2 ASC NULLS LAST], preserve_partitioning=[true]
7373
07)------------CoalesceBatchesExec: target_batch_size=8192
74-
08)--------------RepartitionExec: partitioning=Hash([c1@0], 8), input_partitions=8
75-
09)----------------RepartitionExec: partitioning=RoundRobinBatch(8), input_partitions=1
76-
10)------------------DataSourceExec: file_groups={1 group: [[WORKSPACE_ROOT/testing/data/csv/aggregate_test_100.csv]]}, projection=[c1, c4, c9], file_type=csv, has_header=true
74+
08)--------------RepartitionExec: partitioning=Hash([c1@0], 8), input_partitions=1
75+
09)----------------DataSourceExec: file_groups={1 group: [[WORKSPACE_ROOT/testing/data/csv/aggregate_test_100.csv]]}, projection=[c1, c4, c9], file_type=csv, has_header=true
7776

7877
query I
7978
INSERT INTO table_without_values SELECT

datafusion/sqllogictest/test_files/insert_to_external.slt

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -362,9 +362,8 @@ physical_plan
362362
05)--------BoundedWindowAggExec: wdw=[sum(aggregate_test_100.c4) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING: Ok(Field { name: "sum(aggregate_test_100.c4) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING", data_type: Int64, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }), frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(1)), end_bound: Following(UInt64(1)), is_causal: false }, count(Int64(1)) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING: Ok(Field { name: "count(Int64(1)) PARTITION BY [aggregate_test_100.c1] ORDER BY [aggregate_test_100.c9 ASC NULLS LAST] ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING", data_type: Int64, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: {} }), frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(1)), end_bound: Following(UInt64(1)), is_causal: false }], mode=[Sorted]
363363
06)----------SortExec: expr=[c1@0 ASC NULLS LAST, c9@2 ASC NULLS LAST], preserve_partitioning=[true]
364364
07)------------CoalesceBatchesExec: target_batch_size=8192
365-
08)--------------RepartitionExec: partitioning=Hash([c1@0], 8), input_partitions=8
366-
09)----------------RepartitionExec: partitioning=RoundRobinBatch(8), input_partitions=1
367-
10)------------------DataSourceExec: file_groups={1 group: [[WORKSPACE_ROOT/testing/data/csv/aggregate_test_100.csv]]}, projection=[c1, c4, c9], file_type=csv, has_header=true
365+
08)--------------RepartitionExec: partitioning=Hash([c1@0], 8), input_partitions=1
366+
09)----------------DataSourceExec: file_groups={1 group: [[WORKSPACE_ROOT/testing/data/csv/aggregate_test_100.csv]]}, projection=[c1, c4, c9], file_type=csv, has_header=true
368367

369368
query I
370369
INSERT INTO table_without_values SELECT

datafusion/sqllogictest/test_files/joins.slt

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2742,14 +2742,12 @@ physical_plan
27422742
01)SortMergeJoin: join_type=Inner, on=[(c1@0, c1@0)]
27432743
02)--SortExec: expr=[c1@0 ASC], preserve_partitioning=[true]
27442744
03)----CoalesceBatchesExec: target_batch_size=2
2745-
04)------RepartitionExec: partitioning=Hash([c1@0], 2), input_partitions=2
2746-
05)--------RepartitionExec: partitioning=RoundRobinBatch(2), input_partitions=1
2747-
06)----------DataSourceExec: partitions=1, partition_sizes=[1]
2748-
07)--SortExec: expr=[c1@0 ASC], preserve_partitioning=[true]
2749-
08)----CoalesceBatchesExec: target_batch_size=2
2750-
09)------RepartitionExec: partitioning=Hash([c1@0], 2), input_partitions=2
2751-
10)--------RepartitionExec: partitioning=RoundRobinBatch(2), input_partitions=1
2752-
11)----------DataSourceExec: partitions=1, partition_sizes=[1]
2745+
04)------RepartitionExec: partitioning=Hash([c1@0], 2), input_partitions=1
2746+
05)----------DataSourceExec: partitions=1, partition_sizes=[1]
2747+
06)--SortExec: expr=[c1@0 ASC], preserve_partitioning=[true]
2748+
07)----CoalesceBatchesExec: target_batch_size=2
2749+
08)------RepartitionExec: partitioning=Hash([c1@0], 2), input_partitions=1
2750+
09)--------DataSourceExec: partitions=1, partition_sizes=[1]
27532751

27542752
# sort_merge_join_on_date32 inner sort merge join on data type (Date32)
27552753
query DDRTDDRT rowsort

datafusion/sqllogictest/test_files/limit.slt

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -406,10 +406,9 @@ logical_plan
406406
physical_plan
407407
01)AggregateExec: mode=FinalPartitioned, gby=[i@0 as i], aggr=[]
408408
02)--CoalesceBatchesExec: target_batch_size=8192
409-
03)----RepartitionExec: partitioning=Hash([i@0], 4), input_partitions=4
410-
04)------RepartitionExec: partitioning=RoundRobinBatch(4), input_partitions=1
411-
05)--------AggregateExec: mode=Partial, gby=[i@0 as i], aggr=[]
412-
06)----------DataSourceExec: partitions=1
409+
03)----RepartitionExec: partitioning=Hash([i@0], 4), input_partitions=1
410+
04)--------AggregateExec: mode=Partial, gby=[i@0 as i], aggr=[]
411+
05)----------DataSourceExec: partitions=1
413412

414413
statement ok
415414
set datafusion.explain.show_sizes = true;

datafusion/sqllogictest/test_files/order.slt

Lines changed: 11 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -795,19 +795,17 @@ physical_plan
795795
04)------ProjectionExec: expr=[0 as m, t@0 as t]
796796
05)--------AggregateExec: mode=FinalPartitioned, gby=[t@0 as t], aggr=[]
797797
06)----------CoalesceBatchesExec: target_batch_size=8192
798-
07)------------RepartitionExec: partitioning=Hash([t@0], 2), input_partitions=2
799-
08)--------------RepartitionExec: partitioning=RoundRobinBatch(2), input_partitions=1
800-
09)----------------AggregateExec: mode=Partial, gby=[t@0 as t], aggr=[]
801-
10)------------------ProjectionExec: expr=[column1@0 as t]
802-
11)--------------------DataSourceExec: partitions=1, partition_sizes=[1]
803-
12)------ProjectionExec: expr=[1 as m, t@0 as t]
804-
13)--------AggregateExec: mode=FinalPartitioned, gby=[t@0 as t], aggr=[]
805-
14)----------CoalesceBatchesExec: target_batch_size=8192
806-
15)------------RepartitionExec: partitioning=Hash([t@0], 2), input_partitions=2
807-
16)--------------RepartitionExec: partitioning=RoundRobinBatch(2), input_partitions=1
808-
17)----------------AggregateExec: mode=Partial, gby=[t@0 as t], aggr=[]
809-
18)------------------ProjectionExec: expr=[column1@0 as t]
810-
19)--------------------DataSourceExec: partitions=1, partition_sizes=[1]
798+
07)------------RepartitionExec: partitioning=Hash([t@0], 2), input_partitions=1
799+
08)--------------AggregateExec: mode=Partial, gby=[t@0 as t], aggr=[]
800+
09)----------------ProjectionExec: expr=[column1@0 as t]
801+
10)------------------DataSourceExec: partitions=1, partition_sizes=[1]
802+
11)------ProjectionExec: expr=[1 as m, t@0 as t]
803+
12)--------AggregateExec: mode=FinalPartitioned, gby=[t@0 as t], aggr=[]
804+
13)----------CoalesceBatchesExec: target_batch_size=8192
805+
14)------------RepartitionExec: partitioning=Hash([t@0], 2), input_partitions=1
806+
15)--------------AggregateExec: mode=Partial, gby=[t@0 as t], aggr=[]
807+
16)----------------ProjectionExec: expr=[column1@0 as t]
808+
17)------------------DataSourceExec: partitions=1, partition_sizes=[1]
811809

812810
#####
813811
# Multi column sorting with lists

0 commit comments

Comments (0)