Commit 2fea3c8

[Cosmos DB] Support converting from DateType to Int (Azure#25156)
* Adding case switch
* tests
* Remove old code
1 parent 017c7d0 commit 2fea3c8
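
In plain terms: the row converter dispatches first on the declared Spark DataType and then guards on the runtime class of the value. Before this commit it handled DateType and TimestampType values backed by java.lang.Long (or java.sql.Date / java.sql.Timestamp), but a value arriving as java.lang.Integer fell through to the date/timestamp cast. The diff below adds Integer guards alongside the existing Long ones. Here is a minimal standalone sketch of that dispatch pattern; the object and method names (DateToJsonSketch.toJsonNode) are illustrative, not the connector's API, and a plain Jackson ObjectMapper stands in for the converter's internals:

import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
import org.apache.spark.sql.types.{DataType, DateType, TimestampType}

object DateToJsonSketch {
  private val objectMapper = new ObjectMapper()

  // Dispatch on the declared Spark type, then guard on the runtime class of
  // the value -- the same shape as the match statement in the diff below.
  def toJsonNode(dataType: DataType, rowData: Any): JsonNode = dataType match {
    case DateType if rowData.isInstanceOf[java.lang.Long] =>
      objectMapper.convertValue(rowData.asInstanceOf[java.lang.Long], classOf[JsonNode])
    case DateType if rowData.isInstanceOf[java.lang.Integer] => // new in this commit
      objectMapper.convertValue(rowData.asInstanceOf[java.lang.Integer], classOf[JsonNode])
    case DateType =>
      objectMapper.convertValue(rowData.asInstanceOf[java.sql.Date].getTime, classOf[JsonNode])
    case TimestampType if rowData.isInstanceOf[java.lang.Long] =>
      objectMapper.convertValue(rowData.asInstanceOf[java.lang.Long], classOf[JsonNode])
    case TimestampType if rowData.isInstanceOf[java.lang.Integer] => // new in this commit
      objectMapper.convertValue(rowData.asInstanceOf[java.lang.Integer], classOf[JsonNode])
    case TimestampType =>
      objectMapper.convertValue(rowData.asInstanceOf[java.sql.Timestamp].getTime, classOf[JsonNode])
    case other =>
      throw new IllegalArgumentException(s"type not covered by this sketch: $other")
  }
}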

2 files changed (+21 -3 lines)

sdk/cosmos/azure-cosmos-spark_3-1_2-12/src/main/scala/com/azure/cosmos/spark/CosmosRowConverter.scala

Lines changed: 6 additions & 0 deletions
@@ -244,9 +244,13 @@ private class CosmosRowConverter(
         convertToJsonNodeConditionally(rowData.asInstanceOf[java.math.BigDecimal])
       case DateType if rowData.isInstanceOf[java.lang.Long] =>
         convertToJsonNodeConditionally(rowData.asInstanceOf[Long])
+      case DateType if rowData.isInstanceOf[java.lang.Integer] =>
+        convertToJsonNodeConditionally(rowData.asInstanceOf[Integer])
       case DateType => convertToJsonNodeConditionally(rowData.asInstanceOf[Date].getTime)
       case TimestampType if rowData.isInstanceOf[java.lang.Long] =>
         convertToJsonNodeConditionally(rowData.asInstanceOf[Long])
+      case TimestampType if rowData.isInstanceOf[java.lang.Integer] =>
+        convertToJsonNodeConditionally(rowData.asInstanceOf[Integer])
       case TimestampType => convertToJsonNodeConditionally(rowData.asInstanceOf[Timestamp].getTime)
       case arrayType: ArrayType if rowData.isInstanceOf[ArrayData] =>
         val arrayDataValue = rowData.asInstanceOf[ArrayData]
@@ -310,8 +314,10 @@ private class CosmosRowConverter(
       case DecimalType() if rowData.isInstanceOf[Long] => objectMapper.convertValue(new java.math.BigDecimal(rowData.asInstanceOf[java.lang.Long]), classOf[JsonNode])
       case DecimalType() => objectMapper.convertValue(rowData.asInstanceOf[java.math.BigDecimal], classOf[JsonNode])
       case DateType if rowData.isInstanceOf[java.lang.Long] => objectMapper.convertValue(rowData.asInstanceOf[java.lang.Long], classOf[JsonNode])
+      case DateType if rowData.isInstanceOf[java.lang.Integer] => objectMapper.convertValue(rowData.asInstanceOf[java.lang.Integer], classOf[JsonNode])
       case DateType => objectMapper.convertValue(rowData.asInstanceOf[Date].getTime, classOf[JsonNode])
       case TimestampType if rowData.isInstanceOf[java.lang.Long] => objectMapper.convertValue(rowData.asInstanceOf[java.lang.Long], classOf[JsonNode])
+      case TimestampType if rowData.isInstanceOf[java.lang.Integer] => objectMapper.convertValue(rowData.asInstanceOf[java.lang.Integer], classOf[JsonNode])
       case TimestampType => objectMapper.convertValue(rowData.asInstanceOf[Timestamp].getTime, classOf[JsonNode])
       case arrayType: ArrayType if rowData.isInstanceOf[ArrayData] => convertSparkArrayToArrayNode(arrayType.elementType, arrayType.containsNull, rowData.asInstanceOf[ArrayData])
       case arrayType: ArrayType => convertSparkArrayToArrayNode(arrayType.elementType, arrayType.containsNull, rowData.asInstanceOf[Seq[_]])
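
Why the new guards matter: without them, an Integer payload under DateType or TimestampType falls through to the unguarded case, and the cast to java.sql.Date or java.sql.Timestamp fails at runtime. A tiny standalone repro of that fall-through (illustrative only, not connector code):

// An Integer reaching the unguarded `case DateType => ...` branch is cast
// straight to java.sql.Date, which throws ClassCastException at runtime.
val rowData: Any = java.lang.Integer.valueOf(958827520)
try {
  rowData.asInstanceOf[java.sql.Date].getTime
} catch {
  case e: ClassCastException => println(s"fails as expected: $e")
}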

sdk/cosmos/azure-cosmos-spark_3-1_2-12/src/test/scala/com/azure/cosmos/spark/CosmosRowConverterSpec.scala

Lines changed: 15 additions & 3 deletions
@@ -366,17 +366,25 @@ class CosmosRowConverterSpec extends UnitSpec with BasicLoggingTrait {
   "date and time in spark row" should "translate to ObjectNode" in {
     val colName1 = "testCol1"
     val colName2 = "testCol2"
+    val colName3 = "testCol3"
+    val colName4 = "testCol4"
     val currentMillis = System.currentTimeMillis()
     val colVal1 = new Date(currentMillis)
     val colVal2 = new Timestamp(colVal1.getTime)
+    val colVal3 = currentMillis.toInt

     val row = new GenericRowWithSchema(
-      Array(colVal1, colVal2),
-      StructType(Seq(StructField(colName1, DateType), StructField(colName2, TimestampType))))
+      Array(colVal1, colVal2, colVal3, colVal3),
+      StructType(Seq(StructField(colName1, DateType),
+        StructField(colName2, TimestampType),
+        StructField(colName3, DateType),
+        StructField(colName4, TimestampType))))

     val objectNode = defaultRowConverter.fromRowToObjectNode(row)
     objectNode.get(colName1).asLong() shouldEqual currentMillis
     objectNode.get(colName2).asLong() shouldEqual currentMillis
+    objectNode.get(colName3).asInt() shouldEqual colVal3
+    objectNode.get(colName4).asInt() shouldEqual colVal3
   }

   "numeric types in spark row" should "translate to ObjectNode" in {
@@ -969,6 +977,7 @@ class CosmosRowConverterSpec extends UnitSpec with BasicLoggingTrait {
     val colName2 = "testCol2"
     val colName3 = "testCol3"
     val colName4 = "testCol4"
+    val colName5 = "testCol5"
     val colVal1 = System.currentTimeMillis()
     val colVal1AsTime = new Date(colVal1)
     val colVal2 = System.currentTimeMillis()
@@ -979,17 +988,20 @@ class CosmosRowConverterSpec extends UnitSpec with BasicLoggingTrait {
     val ff = DateTimeFormatter
       .ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneOffset.UTC)
     val colVal4AsTime = Date.valueOf(LocalDateTime.parse(colVal4, ff).toLocalDate)
+    val colVal5 = colVal1.toInt

     val objectNode: ObjectNode = objectMapper.createObjectNode()
     objectNode.put(colName1, colVal1)
     objectNode.put(colName2, colVal2)
     objectNode.put(colName3, colVal3)
     objectNode.put(colName4, colVal4)
+    objectNode.put(colName5, colVal5)
     val schema = StructType(Seq(
       StructField(colName1, DateType),
       StructField(colName2, DateType),
       StructField(colName3, DateType),
-      StructField(colName4, DateType)))
+      StructField(colName4, DateType),
+      StructField(colName5, DateType)))
     val row = defaultRowConverter.fromObjectNodeToRow(schema, objectNode, SchemaConversionModes.Relaxed)
     val asTime = row.get(0).asInstanceOf[Date]
     asTime.compareTo(colVal1AsTime) shouldEqual 0
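
A side note on the fixtures: colVal5 = colVal1.toInt (like currentMillis.toInt in the earlier test) truncates the 64-bit epoch millis to the low 32 bits, so the Int values exercise the Integer branch but are not meaningful dates. A quick illustration of that truncation:

val millis: Long = 1633046400000L // 2021-10-01T00:00:00Z in epoch millis
val truncated: Int = millis.toInt // keeps only the low 32 bits
println(truncated)                // 958827520 -- no longer a meaningful time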
