Skip to content

Commit 6aab4ad

Browse files
committed
feat: support literal for top-level ARRAY
1 parent 2148f5d commit 6aab4ad

File tree

3 files changed

+13
-18
lines changed

3 files changed

+13
-18
lines changed

native/spark-expr/src/conversion_funcs/cast.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1021,7 +1021,7 @@ fn is_datafusion_spark_compatible(
10211021
DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 => {
10221022
// note that the cast from Int32/Int64 -> Decimal128 here is actually
10231023
// not compatible with Spark (no overflow checks) but we have tests that
1024-
// rely on this cast working so we have to leave it here for now
1024+
// rely on this cast working, so we have to leave it here for now
10251025
matches!(
10261026
to_type,
10271027
DataType::Boolean

spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2180,7 +2180,9 @@ object QueryPlanSerde extends Logging with CometExprShim {
21802180
op match {
21812181

21822182
// Fully native scan for V1
2183-
case scan: CometScanExec if scan.scanImpl == CometConf.SCAN_NATIVE_DATAFUSION =>
2183+
case scan: CometScanExec
2184+
if scan.scanImpl == CometConf.SCAN_NATIVE_DATAFUSION
2185+
|| scan.scanImpl == CometConf.SCAN_NATIVE_ICEBERG_COMPAT =>
21842186
val nativeScanBuilder = OperatorOuterClass.NativeScan.newBuilder()
21852187
nativeScanBuilder.setSource(op.simpleStringWithNodeId())
21862188

spark/src/test/scala/org/apache/comet/exec/CometNativeReaderSuite.scala

Lines changed: 9 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import org.scalatest.Tag
2424

2525
import org.apache.spark.sql.CometTestBase
2626
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
27+
import org.apache.spark.sql.functions.{array, col}
2728
import org.apache.spark.sql.internal.SQLConf
2829
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
2930

@@ -253,18 +254,11 @@ class CometNativeReaderSuite extends CometTestBase with AdaptiveSparkPlanHelper
253254
}
254255

255256
test("native reader - read a STRUCT subfield - field from second") {
256-
withSQLConf(
257-
CometConf.COMET_EXEC_ENABLED.key -> "true",
258-
SQLConf.USE_V1_SOURCE_LIST.key -> "parquet",
259-
CometConf.COMET_ENABLED.key -> "true",
260-
CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "false",
261-
CometConf.COMET_NATIVE_SCAN_IMPL.key -> "native_datafusion") {
262-
testSingleLineQuery(
263-
"""
257+
testSingleLineQuery(
258+
"""
264259
|select 1 a, named_struct('a', 1, 'b', 'n') c0
265260
|""".stripMargin,
266-
"select c0.b from tbl")
267-
}
261+
"select c0.b from tbl")
268262
}
269263

270264
test("native reader - read a STRUCT subfield from ARRAY of STRUCTS - field from first") {
@@ -557,11 +551,10 @@ class CometNativeReaderSuite extends CometTestBase with AdaptiveSparkPlanHelper
557551
"select array(cast('a' as binary), cast('bc' as binary), cast('def' as binary), null) from tbl")
558552
}
559553

560-
test("native reader - array equality") {
561-
testSingleLineQuery(
562-
"""
563-
| select array(1) a union all select array(2)
564-
|""".stripMargin,
565-
"select * from tbl where a = array(1L)")
554+
test("SPARK-18053: ARRAY equality is broken") {
555+
withTable("array_tbl") {
556+
spark.range(10).select(array(col("id")).as("arr")).write.saveAsTable("array_tbl")
557+
assert(sql("SELECT * FROM array_tbl where arr = ARRAY(1L)").count == 1)
558+
}
566559
}
567560
}

0 commit comments

Comments (0)