This repository was archived by the owner on Aug 31, 2021. It is now read-only.

Commit 92f21b3

Handle an uncaught exception. Thank you, @juanyunism.

1 parent 485b449 · commit 92f21b3

2 files changed: +55 −1 lines changed

src/main/scala/com/audienceproject/spark/dynamodb/datasource/TypeConversion.scala (2 additions, 1 deletion)

@@ -20,7 +20,7 @@
  */
 package com.audienceproject.spark.dynamodb.datasource
 
-import com.amazonaws.services.dynamodbv2.document.Item
+import com.amazonaws.services.dynamodbv2.document.{IncompatibleTypeException, Item}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData}
 import org.apache.spark.sql.types._
@@ -87,6 +87,7 @@ private[dynamodb] object TypeConversion {
     private def nullableGet(getter: Item => String => Any)(attrName: String): Item => Any = {
         case item if item.hasAttribute(attrName) => try getter(item)(attrName) catch {
             case _: NumberFormatException => null
+            case _: IncompatibleTypeException => null
         }
         case _ => null
     }
New file, 53 additions & 0 deletions (adds the test class NullBooleanTest):
package com.audienceproject.spark.dynamodb

import com.amazonaws.services.dynamodbv2.document.Item
import com.amazonaws.services.dynamodbv2.model.{
    AttributeDefinition,
    CreateTableRequest,
    KeySchemaElement,
    ProvisionedThroughput
}
import com.audienceproject.spark.dynamodb.implicits._

class NullBooleanTest extends AbstractInMemoryTest {
    test("Test Null") {
        dynamoDB.createTable(
            new CreateTableRequest()
                .withTableName("TestNullBoolean")
                .withAttributeDefinitions(new AttributeDefinition("Pk", "S"))
                .withKeySchema(new KeySchemaElement("Pk", "HASH"))
                .withProvisionedThroughput(new ProvisionedThroughput(5L, 5L))
        )

        val table = dynamoDB.getTable("TestNullBoolean")

        for ((_pk, _type, _value) <- Seq(
            ("id1", "type1", true),
            ("id2", "type2", null)
        )) {
            if (_type != "type2") {
                table.putItem(
                    new Item()
                        .withString("Pk", _pk)
                        .withString("Type", _type)
                        .withBoolean("Value", _value.asInstanceOf[Boolean])
                )
            } else {
                table.putItem(
                    new Item()
                        .withString("Pk", _pk)
                        .withString("Type", _type)
                        .withNull("Value")
                )
            }
        }

        val df = spark.read.dynamodbAs[BooleanClass]("TestNullBoolean")

        import spark.implicits._
        df.where($"Type" === "type2").show()
        client.deleteTable("TestNullBoolean")
    }
}

case class BooleanClass(Pk: String, Type: String, Value: Boolean)
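The test stores one item with a genuine Boolean "Value" and one where "Value" is DynamoDB Null, then reads the table back through the typed API; before this commit the second item made the scan throw IncompatibleTypeException, and with the fix it is read as a null value instead. From a user's perspective the situation looks roughly like the sketch below (the table name, Reading schema, and SparkSession setup are assumptions for illustration; dynamodbAs is the same read API the test exercises):

    import org.apache.spark.sql.SparkSession
    import com.audienceproject.spark.dynamodb.implicits._

    // Hypothetical row type mirroring BooleanClass above.
    case class Reading(Pk: String, Type: String, Value: Boolean)

    object NullBooleanReadSketch extends App {
        val spark = SparkSession.builder
            .master("local[*]")
            .appName("NullBooleanReadSketch")
            .getOrCreate()
        import spark.implicits._

        // "SomeTable" is a placeholder; region and credentials are assumed to be
        // configured in the environment as usual for this connector.
        val df = spark.read.dynamodbAs[Reading]("SomeTable")

        // With the fix, rows whose "Value" attribute is stored as DynamoDB Null
        // no longer abort the scan with IncompatibleTypeException.
        df.where($"Type" === "type2").show()

        spark.stop()
    }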
