Skip to content

Commit e7821c8

Browse files
beliefer authored and Max Gekk committed
[SPARK-50993][SQL] Move nullDataSourceOption from QueryCompilationErrors into QueryExecutionErrors
### What changes were proposed in this pull request?

This PR proposes to move `nullDataSourceOption` from `QueryCompilationErrors` into `QueryExecutionErrors`.

### Why are the changes needed?

Currently, `nullDataSourceOption` is placed into `QueryCompilationErrors`. In fact, it is an execution error.

### Does this PR introduce _any_ user-facing change?

Yes. The type of the error will be changed.

### How was this patch tested?

GA.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #49677 from beliefer/SPARK-50993.

Authored-by: beliefer <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
1 parent 7c12ff6 commit e7821c8

File tree

4 files changed

+11
-11
lines changed

4 files changed

+11
-11
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -224,13 +224,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
224224
"invalidValue" -> toSQLExpr(invalidValue)))
225225
}
226226

227-
def nullDataSourceOption(option: String): Throwable = {
228-
new AnalysisException(
229-
errorClass = "NULL_DATA_SOURCE_OPTION",
230-
messageParameters = Map("option" -> option)
231-
)
232-
}
233-
234227
def unorderablePivotColError(pivotCol: Expression): Throwable = {
235228
new AnalysisException(
236229
errorClass = "INCOMPARABLE_PIVOT_COLUMN",

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2937,4 +2937,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
29372937
)
29382938
)
29392939
}
2940+
2941+
def nullDataSourceOption(option: String): Throwable = {
2942+
new SparkIllegalArgumentException(
2943+
errorClass = "NULL_DATA_SOURCE_OPTION",
2944+
messageParameters = Map("option" -> option)
2945+
)
2946+
}
29402947
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ import org.apache.commons.io.FilenameUtils
2525
import org.apache.spark.SparkFiles
2626
import org.apache.spark.internal.Logging
2727
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
28-
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
28+
import org.apache.spark.sql.errors.QueryExecutionErrors
2929
import org.apache.spark.sql.internal.SQLConf
3030
import org.apache.spark.sql.types.TimestampNTZType
3131
import org.apache.spark.util.Utils
@@ -56,7 +56,7 @@ class JDBCOptions(
5656
// If an option value is `null`, throw a user-friendly error. Keys here cannot be null, as
5757
// scala's implementation of Maps prohibits null keys.
5858
if (v == null) {
59-
throw QueryCompilationErrors.nullDataSourceOption(k)
59+
throw QueryExecutionErrors.nullDataSourceOption(k)
6060
}
6161
properties.setProperty(k, v)
6262
}

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import scala.util.control.NonFatal
2424

2525
import test.org.apache.spark.sql.connector.catalog.functions.JavaStrLen.JavaStrLenStaticMagic
2626

27-
import org.apache.spark.{SparkConf, SparkException}
27+
import org.apache.spark.{SparkConf, SparkException, SparkIllegalArgumentException}
2828
import org.apache.spark.sql.{AnalysisException, DataFrame, ExplainSuiteHelper, QueryTest, Row}
2929
import org.apache.spark.sql.catalyst.InternalRow
3030
import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, IndexAlreadyExistsException, NoSuchIndexException}
@@ -396,7 +396,7 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
396396
.option("pushDownOffset", null)
397397
.table("h2.test.employee")
398398
checkError(
399-
exception = intercept[AnalysisException] {
399+
exception = intercept[SparkIllegalArgumentException] {
400400
df.collect()
401401
},
402402
condition = "NULL_DATA_SOURCE_OPTION",

0 commit comments

Comments (0)