
Commit 52671d6

MaxGekk authored and dongjoon-hyun committed
[SPARK-27008][SQL][FOLLOWUP] Fix typo from *_EANBLED to *_ENABLED
## What changes were proposed in this pull request?

This fixes a typo in the SQL config value: DATETIME_JAVA8API_**EANBLED** -> DATETIME_JAVA8API_**ENABLED**.

## How was this patch tested?

This was tested by `RowEncoderSuite` and `LiteralExpressionSuite`.

Closes apache#24194 from MaxGekk/date-localdate-followup.

Authored-by: Maxim Gekk <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
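For context, a minimal sketch of what the renamed flag controls at the session level. This snippet is illustrative, not part of the commit, and assumes a local SparkSession; note that the key string `spark.sql.datetime.java8API.enabled` was always spelled correctly — only the Scala constant had the typo.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[*]")
  .appName("java8-datetime-demo")
  // The raw key is unchanged; the commit only renames the Scala constant.
  .config("spark.sql.datetime.java8API.enabled", "true")
  .getOrCreate()

// With the flag enabled, DATE values are returned as java.time.LocalDate
// rather than java.sql.Date.
val d = spark.sql("SELECT DATE '2019-02-26' AS d").head().get(0)
assert(d.isInstanceOf[java.time.LocalDate])
```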
1 parent a6c207c commit 52671d6

6 files changed, +15 -15 lines changed


sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 2 additions & 2 deletions
@@ -1710,7 +1710,7 @@ object SQLConf {
     .booleanConf
     .createWithDefault(true)
 
-  val DATETIME_JAVA8API_EANBLED = buildConf("spark.sql.datetime.java8API.enabled")
+  val DATETIME_JAVA8API_ENABLED = buildConf("spark.sql.datetime.java8API.enabled")
     .doc("If the configuration property is set to true, java.time.Instant and " +
       "java.time.LocalDate classes of Java 8 API are used as external types for " +
       "Catalyst's TimestampType and DateType. If it is set to false, java.sql.Timestamp " +
@@ -1906,7 +1906,7 @@ class SQLConf extends Serializable with Logging {
 
   def fastHashAggregateRowMaxCapacityBit: Int = getConf(FAST_HASH_AGGREGATE_MAX_ROWS_CAPACITY_BIT)
 
-  def datetimeJava8ApiEnabled: Boolean = getConf(DATETIME_JAVA8API_EANBLED)
+  def datetimeJava8ApiEnabled: Boolean = getConf(DATETIME_JAVA8API_ENABLED)
 
   /**
    * Returns the [[Resolver]] for the current configuration, which can be used to determine if two
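The `.doc()` text in the hunk above describes the behavior this flag toggles. As a minimal sketch of how code can branch on the renamed typed accessor instead of the raw key string — the `externalDateClass` helper is hypothetical, for illustration only:

```scala
import org.apache.spark.sql.internal.SQLConf

// Hypothetical helper: pick the external Java class used for Catalyst's
// DateType, reading the flag through the accessor shown in the diff above.
def externalDateClass(conf: SQLConf): Class[_] =
  if (conf.datetimeJava8ApiEnabled) classOf[java.time.LocalDate]
  else classOf[java.sql.Date]
```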

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -169,7 +169,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
   }
 
   test("converting TimestampType to java.time.Instant") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       Seq(
         -9463427405253013L,
         -244000001L,
@@ -199,7 +199,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
   }
 
   test("converting DateType to java.time.LocalDate") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       Seq(
         -701265,
         -371419,

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -283,7 +283,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
   }
 
   test("encoding/decoding TimestampType to/from java.time.Instant") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       val schema = new StructType().add("t", TimestampType)
       val encoder = RowEncoder(schema).resolveAndBind()
       val instant = java.time.Instant.parse("2019-02-26T16:56:00Z")
@@ -295,7 +295,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
   }
 
   test("encoding/decoding DateType to/from java.time.LocalDate") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       val schema = new StructType().add("d", DateType)
       val encoder = RowEncoder(schema).resolveAndBind()
       val localDate = java.time.LocalDate.parse("2019-02-27")

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala

Lines changed: 4 additions & 4 deletions
@@ -66,11 +66,11 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(Literal.default(BinaryType), "".getBytes(StandardCharsets.UTF_8))
     checkEvaluation(Literal.default(DecimalType.USER_DEFAULT), Decimal(0))
     checkEvaluation(Literal.default(DecimalType.SYSTEM_DEFAULT), Decimal(0))
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "false") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "false") {
       checkEvaluation(Literal.default(DateType), DateTimeUtils.toJavaDate(0))
       checkEvaluation(Literal.default(TimestampType), DateTimeUtils.toJavaTimestamp(0L))
     }
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       checkEvaluation(Literal.default(DateType), LocalDate.ofEpochDay(0))
       checkEvaluation(Literal.default(TimestampType), Instant.ofEpochSecond(0))
     }
@@ -251,7 +251,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("construct literals from arrays of java.time.LocalDate") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       val localDate0 = LocalDate.of(2019, 3, 20)
       checkEvaluation(Literal(Array(localDate0)), Array(localDate0))
       val localDate1 = LocalDate.of(2100, 4, 22)
@@ -272,7 +272,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("construct literals from arrays of java.time.Instant") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       val instant0 = Instant.ofEpochMilli(0)
       checkEvaluation(Literal(Array(instant0)), Array(instant0))
       val instant1 = Instant.parse("2019-03-20T10:15:30Z")

sql/core/src/test/java/test/org/apache/spark/sql/JavaUDFSuite.java

Lines changed: 3 additions & 3 deletions
@@ -127,16 +127,16 @@ public void udf6Test() {
   @SuppressWarnings("unchecked")
   @Test
   public void udf7Test() {
-    String originConf = spark.conf().get(SQLConf.DATETIME_JAVA8API_EANBLED().key());
+    String originConf = spark.conf().get(SQLConf.DATETIME_JAVA8API_ENABLED().key());
     try {
-      spark.conf().set(SQLConf.DATETIME_JAVA8API_EANBLED().key(), "true");
+      spark.conf().set(SQLConf.DATETIME_JAVA8API_ENABLED().key(), "true");
       spark.udf().register(
         "plusDay",
         (java.time.LocalDate ld) -> ld.plusDays(1), DataTypes.DateType);
       Row result = spark.sql("SELECT plusDay(DATE '2019-02-26')").head();
       Assert.assertEquals(LocalDate.parse("2019-02-27"), result.get(0));
     } finally {
-      spark.conf().set(SQLConf.DATETIME_JAVA8API_EANBLED().key(), originConf);
+      spark.conf().set(SQLConf.DATETIME_JAVA8API_ENABLED().key(), originConf);
     }
   }
 }

sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -496,7 +496,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
   }
 
   test("Using java.time.Instant in UDF") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       val expected = java.time.Instant.parse("2019-02-27T00:00:00Z")
      val plusSec = udf((i: java.time.Instant) => i.plusSeconds(1))
       val df = spark.sql("SELECT TIMESTAMP '2019-02-26 23:59:59Z' as t")
@@ -506,7 +506,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
   }
 
   test("Using java.time.LocalDate in UDF") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
       val expected = java.time.LocalDate.parse("2019-02-27")
       val plusDay = udf((i: java.time.LocalDate) => i.plusDays(1))
       val df = spark.sql("SELECT DATE '2019-02-26' as d")
