@@ -19,6 +19,7 @@ package org.apache.spark.sql.jdbc
 
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties
+import java.math.BigDecimal
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.SharedSQLContext
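A note on the new import: `Row.getDecimal` returns `java.math.BigDecimal`, not `scala.math.BigDecimal`, which is why the Java class is pulled in here. A minimal REPL-style sketch of what the tests below rely on (the `Row` value is hypothetical):

```scala
import java.math.{BigDecimal => JBigDecimal}

import org.apache.spark.sql.Row

// Row.getDecimal hands back java.math.BigDecimal, so the assertions in this
// suite compare against java.math.BigDecimal values, not Scala's wrapper type.
val row: Row = Row(new JBigDecimal("4"))
val d: JBigDecimal = row.getDecimal(0)
assert(d.compareTo(JBigDecimal.valueOf(4)) == 0)
```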
@@ -87,8 +88,31 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$jdbcUrl', dbTable 'datetime1', oracle.jdbc.mapDateToTimestamp 'false')
       """.stripMargin.replaceAll("\n", " "))
+
+
+    conn.prepareStatement("CREATE TABLE numerics (b DECIMAL(1), f DECIMAL(3, 2), i DECIMAL(10))").executeUpdate()
+    conn.prepareStatement(
+      "INSERT INTO numerics VALUES (4, 1.23, 9999999999)").executeUpdate()
+    conn.commit()
   }
 
+
+  test("SPARK-16625 : Importing Oracle numeric types") {
+    val df = sqlContext.read.jdbc(jdbcUrl, "numerics", new Properties)
+    val rows = df.collect()
+    assert(rows.size == 1)
+    val row = rows(0)
+    // The main point of the below assertions is not to make sure that these Oracle types are
+    // mapped to decimal types, but to make sure that the returned values are correct.
+    // A value > 1 from DECIMAL(1) is correct:
+    assert(row.getDecimal(0).compareTo(BigDecimal.valueOf(4)) == 0)
+    // A value with fractions from DECIMAL(3, 2) is correct:
+    assert(row.getDecimal(1).compareTo(BigDecimal.valueOf(1.23)) == 0)
+    // A value > Int.MaxValue from DECIMAL(10) is correct:
+    assert(row.getDecimal(2).compareTo(BigDecimal.valueOf(9999999999L)) == 0)
+  }
+
+
   test("SPARK-12941: String datatypes to be mapped to Varchar in Oracle") {
     // create a sample dataframe with string type
     val df1 = sparkContext.parallelize(Seq(("foo"))).toDF("x")
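The new test compares with `compareTo` rather than `equals`, and it is worth spelling out why: `java.math.BigDecimal.equals` is scale-sensitive, and a JDBC driver may return values with a scale of its own choosing. A self-contained illustration:

```scala
import java.math.BigDecimal

val a = new BigDecimal("1.23")   // scale 2
val b = new BigDecimal("1.230")  // scale 3

// Same numeric value, different scale: equals() distinguishes them,
// compareTo() does not. That makes compareTo the robust comparison
// for decimals read back through JDBC.
assert(!a.equals(b))
assert(a.compareTo(b) == 0)
```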
@@ -148,27 +172,28 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
     val dfRead = spark.read.jdbc(jdbcUrl, tableName, props)
     val rows = dfRead.collect()
     // verify the data type is inserted
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.lang.Integer"))
-    assert(types(2).equals("class java.lang.Long"))
-    assert(types(3).equals("class java.lang.Float"))
-    assert(types(4).equals("class java.lang.Float"))
-    assert(types(5).equals("class java.lang.Integer"))
-    assert(types(6).equals("class java.lang.Integer"))
-    assert(types(7).equals("class java.lang.String"))
-    assert(types(8).equals("class [B"))
-    assert(types(9).equals("class java.sql.Date"))
-    assert(types(10).equals("class java.sql.Timestamp"))
+    val types = dfRead.schema.map(field => field.dataType)
+    assert(types(0).equals(DecimalType(1, 0)))
+    assert(types(1).equals(DecimalType(10, 0)))
+    assert(types(2).equals(DecimalType(19, 0)))
+    assert(types(3).equals(DecimalType(19, 4)))
+    assert(types(4).equals(DecimalType(19, 4)))
+    assert(types(5).equals(DecimalType(3, 0)))
+    assert(types(6).equals(DecimalType(5, 0)))
+    assert(types(7).equals(StringType))
+    assert(types(8).equals(BinaryType))
+    assert(types(9).equals(DateType))
+    assert(types(10).equals(TimestampType))
+
     // verify the value is the inserted correct or not
     val values = rows(0)
-    assert(values.getBoolean(0).equals(booleanVal))
-    assert(values.getInt(1).equals(integerVal))
-    assert(values.getLong(2).equals(longVal))
-    assert(values.getFloat(3).equals(floatVal))
-    assert(values.getFloat(4).equals(doubleVal.toFloat))
-    assert(values.getInt(5).equals(byteVal.toInt))
-    assert(values.getInt(6).equals(shortVal.toInt))
+    assert(values.getDecimal(0).compareTo(BigDecimal.valueOf(1)) == 0)
+    assert(values.getDecimal(1).compareTo(BigDecimal.valueOf(integerVal)) == 0)
+    assert(values.getDecimal(2).compareTo(BigDecimal.valueOf(longVal)) == 0)
+    assert(values.getDecimal(3).compareTo(BigDecimal.valueOf(floatVal)) == 0)
+    assert(values.getDecimal(4).compareTo(BigDecimal.valueOf(doubleVal)) == 0)
+    assert(values.getDecimal(5).compareTo(BigDecimal.valueOf(byteVal)) == 0)
+    assert(values.getDecimal(6).compareTo(BigDecimal.valueOf(shortVal)) == 0)
     assert(values.getString(7).equals(stringVal))
     assert(values.getAs[Array[Byte]](8).mkString.equals("678"))
     assert(values.getDate(9).equals(dateVal))
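The expected `DecimalType(p, s)` values above come from the JDBC dialect's type mapping. For readers who want to experiment with that mapping, Spark exposes it through `JdbcDialect.getCatalystType`. The sketch below is a hypothetical dialect (not the built-in `OracleDialect` this suite exercises) that pins every `NUMERIC`/`DECIMAL` column to a single decimal type:

```scala
import java.sql.Types

import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}
import org.apache.spark.sql.types.{DataType, DecimalType, MetadataBuilder}

// Hypothetical dialect: map every NUMERIC/DECIMAL column to DecimalType(38, 10).
// Returning None falls through to Spark's default mapping for that column.
object PinnedDecimalDialect extends JdbcDialect {
  override def canHandle(url: String): Boolean = url.startsWith("jdbc:oracle")

  override def getCatalystType(
      sqlType: Int,
      typeName: String,
      size: Int,
      md: MetadataBuilder): Option[DataType] = sqlType match {
    case Types.NUMERIC | Types.DECIMAL => Some(DecimalType(38, 10))
    case _ => None
  }
}

// Registered dialects take precedence over the built-in ones for matching URLs.
JdbcDialects.registerDialect(PinnedDecimalDialect)
```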
@@ -177,7 +202,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
 
   test("SPARK-19318: connection property keys should be case-sensitive") {
     def checkRow(row: Row): Unit = {
-      assert(row.getInt(0) == 1)
+      assert(row.getDecimal(0).equals(BigDecimal.valueOf(1)))
       assert(row.getDate(1).equals(Date.valueOf("1991-11-09")))
       assert(row.getTimestamp(2).equals(Timestamp.valueOf("1996-01-01 01:23:45")))
     }
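For context on SPARK-19318: connection property keys such as `oracle.jdbc.mapDateToTimestamp` must reach the driver with their casing intact, since Oracle treats them case-sensitively. A hedged usage sketch (assuming the suite's `spark` session and `jdbcUrl`; the table name is illustrative):

```scala
import java.util.Properties

// The key must keep its exact camel-casing; a lower-cased
// "oracle.jdbc.mapdatetotimestamp" would not be recognized by the driver.
val props = new Properties()
props.put("oracle.jdbc.mapDateToTimestamp", "false")

val df = spark.read.jdbc(jdbcUrl, "datetime", props)
```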