@@ -27,12 +27,14 @@ import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.sql.{RandomDataGenerator, Row}
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.ResolveTimeZone
-import org.apache.spark.sql.catalyst.encoders.{ExamplePointUDT, ExpressionEncoder, RowEncoder}
+import org.apache.spark.sql.catalyst.analysis.{ResolveTimeZone, SimpleAnalyzer, UnresolvedDeserializer}
+import org.apache.spark.sql.catalyst.dsl.expressions._
+import org.apache.spark.sql.catalyst.encoders._
 import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
 import org.apache.spark.sql.catalyst.expressions.objects._
-import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.catalyst.util.DateTimeUtils.{SQLDate, SQLTimestamp}
+import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
+import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, GenericArrayData}
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
@@ -162,9 +164,10 @@ class ObjectExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
        "fromPrimitiveArray", ObjectType(classOf[Array[Int]]),
        Array[Int](1, 2, 3), UnsafeArrayData.fromPrimitiveArray(Array[Int](1, 2, 3))),
      (DateTimeUtils.getClass, ObjectType(classOf[Date]),
-       "toJavaDate", ObjectType(classOf[SQLDate]), 77777, DateTimeUtils.toJavaDate(77777)),
+       "toJavaDate", ObjectType(classOf[DateTimeUtils.SQLDate]), 77777,
+       DateTimeUtils.toJavaDate(77777)),
      (DateTimeUtils.getClass, ObjectType(classOf[Timestamp]),
-       "toJavaTimestamp", ObjectType(classOf[SQLTimestamp]),
+       "toJavaTimestamp", ObjectType(classOf[DateTimeUtils.SQLTimestamp]),
        88888888.toLong, DateTimeUtils.toJavaTimestamp(88888888))
    ).foreach { case (cls, dataType, methodName, argType, arg, expected) =>
      checkObjectExprEvaluation(StaticInvoke(cls, dataType, methodName,
@@ -450,6 +453,25 @@ class ObjectExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       }
     }
   }
+
+  implicit private def mapIntStrEncoder = ExpressionEncoder[Map[Int, String]]()
+
+  test("SPARK-23588 CatalystToExternalMap should support interpreted execution") {
+    // To get a resolved `CatalystToExternalMap` expression, we build a deserializer plan
+    // with dummy input, resolve the plan by the analyzer, and replace the dummy input
+    // with a literal for tests.
+    val unresolvedDeser = UnresolvedDeserializer(encoderFor[Map[Int, String]].deserializer)
+    val dummyInputPlan = LocalRelation('value.map(MapType(IntegerType, StringType)))
+    val plan = Project(Alias(unresolvedDeser, "none")() :: Nil, dummyInputPlan)
+
+    val analyzedPlan = SimpleAnalyzer.execute(plan)
+    val Alias(toMapExpr: CatalystToExternalMap, _) = analyzedPlan.expressions.head
+
+    // Replaces the dummy input with a literal for tests here
+    val data = Map[Int, String](0 -> "v0", 1 -> "v1", 2 -> null, 3 -> "v3")
+    val deserializer = toMapExpr.copy(inputData = Literal.create(data))
+    checkObjectExprEvaluation(deserializer, expected = data)
+  }
 }
 
 class TestBean extends Serializable {
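
For readers who want to try the same resolution trick outside this suite, below is a minimal, self-contained sketch of the approach the new test takes: resolve an encoder's deserializer through the analyzer, extract the resulting CatalystToExternalMap, and evaluate it on the interpreted path. It assumes Spark's catalyst internals (the same packages imported in the diff above) are on the classpath; the object name, the alias name "out", and the sample data are illustrative and not part of the PR.

import org.apache.spark.sql.catalyst.analysis.{SimpleAnalyzer, UnresolvedDeserializer}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.encoders._
import org.apache.spark.sql.catalyst.expressions.{Alias, Literal}
import org.apache.spark.sql.catalyst.expressions.objects.CatalystToExternalMap
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
import org.apache.spark.sql.types.{IntegerType, MapType, StringType}

// Illustrative standalone sketch (not part of the PR): resolve a Map deserializer
// via the analyzer and evaluate the resulting CatalystToExternalMap interpreted.
object CatalystToExternalMapSketch {
  def main(args: Array[String]): Unit = {
    implicit val enc: ExpressionEncoder[Map[Int, String]] = ExpressionEncoder[Map[Int, String]]()

    // Wrap the unresolved deserializer in a Project over a dummy relation so the
    // analyzer can resolve it, mirroring the new test above.
    val deser = UnresolvedDeserializer(encoderFor[Map[Int, String]].deserializer)
    val dummy = LocalRelation('value.map(MapType(IntegerType, StringType)))
    val analyzed = SimpleAnalyzer.execute(Project(Alias(deser, "out")() :: Nil, dummy))

    // Extract the resolved CatalystToExternalMap and replace its input with a literal.
    val Alias(toMap: CatalystToExternalMap, _) = analyzed.expressions.head
    val expr = toMap.copy(inputData = Literal.create(Map(0 -> "v0", 1 -> "v1")))

    // With the interpreted support this PR adds, eval() no longer requires codegen.
    println(expr.eval())
  }
}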