Skip to content

Commit 942753a

Browse files
committed
[SPARK-29753][SQL] refine the default catalog config
### What changes were proposed in this pull request?

Rename the config to address the comment: apache#24594 (comment). Improve the config description, and provide a default value to simplify the code.

### Why are the changes needed?

Make the config more understandable.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

Existing tests.

Closes apache#26395 from cloud-fan/config.

Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent 0c8d3d2 commit 942753a

File tree

4 files changed

+12
-28
lines changed

4 files changed

+12
-28
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogManager.scala

Lines changed: 1 addition & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -53,18 +53,6 @@ class CatalogManager(
5353
}
5454
}
5555

56-
private def defaultCatalog: Option[CatalogPlugin] = {
57-
conf.defaultV2Catalog.flatMap { catalogName =>
58-
try {
59-
Some(catalog(catalogName))
60-
} catch {
61-
case NonFatal(e) =>
62-
logError(s"Cannot load default v2 catalog: $catalogName", e)
63-
None
64-
}
65-
}
66-
}
67-
6856
private def loadV2SessionCatalog(): CatalogPlugin = {
6957
Catalogs.load(SESSION_CATALOG_NAME, conf) match {
7058
case extension: CatalogExtension =>
@@ -127,9 +115,7 @@ class CatalogManager(
127115
private var _currentCatalogName: Option[String] = None
128116

129117
def currentCatalog: CatalogPlugin = synchronized {
130-
_currentCatalogName.map(catalogName => catalog(catalogName))
131-
.orElse(defaultCatalog)
132-
.getOrElse(v2SessionCatalog)
118+
catalog(_currentCatalogName.getOrElse(conf.getConf(SQLConf.DEFAULT_CATALOG)))
133119
}
134120

135121
def setCurrentCatalog(catalogName: String): Unit = synchronized {

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 4 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -37,7 +37,6 @@ import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
3737
import org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator
3838
import org.apache.spark.sql.catalyst.plans.logical.HintErrorHandler
3939
import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
40-
import org.apache.spark.sql.internal.SQLConf.StoreAssignmentPolicy
4140
import org.apache.spark.unsafe.array.ByteArrayMethods
4241
import org.apache.spark.util.Utils
4342

@@ -2019,10 +2018,11 @@ object SQLConf {
20192018
.booleanConf
20202019
.createWithDefault(false)
20212020

2022-
val DEFAULT_V2_CATALOG = buildConf("spark.sql.default.catalog")
2023-
.doc("Name of the default v2 catalog, used when a catalog is not identified in queries")
2021+
val DEFAULT_CATALOG = buildConf("spark.sql.defaultCatalog")
2022+
.doc("Name of the default catalog. This will be the current catalog if users have not " +
2023+
"explicitly set the current catalog yet.")
20242024
.stringConf
2025-
.createOptional
2025+
.createWithDefault(SESSION_CATALOG_NAME)
20262026

20272027
val V2_SESSION_CATALOG_IMPLEMENTATION =
20282028
buildConf(s"spark.sql.catalog.$SESSION_CATALOG_NAME")
@@ -2556,8 +2556,6 @@ class SQLConf extends Serializable with Logging {
25562556

25572557
def castDatetimeToString: Boolean = getConf(SQLConf.LEGACY_CAST_DATETIME_TO_STRING)
25582558

2559-
def defaultV2Catalog: Option[String] = getConf(DEFAULT_V2_CATALOG)
2560-
25612559
def ignoreDataLocality: Boolean = getConf(SQLConf.IGNORE_DATA_LOCALITY)
25622560

25632561
/** ********************** SQLConf functionality methods ************ */

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogManagerSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -43,7 +43,7 @@ class CatalogManagerSuite extends SparkFunSuite {
4343
assert(catalogManager.currentNamespace.sameElements(Array("default")))
4444

4545
conf.setConfString("spark.sql.catalog.dummy", classOf[DummyCatalog].getName)
46-
conf.setConfString(SQLConf.DEFAULT_V2_CATALOG.key, "dummy")
46+
conf.setConfString(SQLConf.DEFAULT_CATALOG.key, "dummy")
4747

4848
// The current catalog should be changed if the default catalog is set.
4949
assert(catalogManager.currentCatalog.name() == "dummy")
@@ -60,7 +60,7 @@ class CatalogManagerSuite extends SparkFunSuite {
6060
assert(catalogManager.currentNamespace.sameElements(Array("a", "b")))
6161

6262
conf.setConfString("spark.sql.catalog.dummy2", classOf[DummyCatalog].getName)
63-
conf.setConfString(SQLConf.DEFAULT_V2_CATALOG.key, "dummy2")
63+
conf.setConfString(SQLConf.DEFAULT_CATALOG.key, "dummy2")
6464
// The current catalog shouldn't be changed if it's set before.
6565
assert(catalogManager.currentCatalog.name() == "dummy")
6666
}

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -222,7 +222,7 @@ class DataSourceV2SQLSuite
222222
}
223223

224224
test("CreateTable: use default catalog for v2 sources when default catalog is set") {
225-
spark.conf.set("spark.sql.default.catalog", "testcat")
225+
spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat")
226226
spark.sql(s"CREATE TABLE table_name (id bigint, data string) USING foo")
227227

228228
val testCatalog = catalog("testcat").asTableCatalog
@@ -489,7 +489,7 @@ class DataSourceV2SQLSuite
489489
}
490490

491491
test("CreateTableAsSelect: use default catalog for v2 sources when default catalog is set") {
492-
spark.conf.set("spark.sql.default.catalog", "testcat")
492+
spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat")
493493

494494
val df = spark.createDataFrame(Seq((1L, "a"), (2L, "b"), (3L, "c"))).toDF("id", "data")
495495
df.createOrReplaceTempView("source")
@@ -710,7 +710,7 @@ class DataSourceV2SQLSuite
710710
}
711711

712712
test("ShowTables: namespace is not specified and default v2 catalog is set") {
713-
spark.conf.set("spark.sql.default.catalog", "testcat")
713+
spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat")
714714
spark.sql("CREATE TABLE testcat.table (id bigint, data string) USING foo")
715715

716716
// v2 catalog is used where default namespace is empty for TestInMemoryTableCatalog.
@@ -872,7 +872,7 @@ class DataSourceV2SQLSuite
872872
}
873873

874874
test("ShowNamespaces: show root namespaces with default v2 catalog") {
875-
spark.conf.set("spark.sql.default.catalog", "testcat")
875+
spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat")
876876

877877
testShowNamespaces("SHOW NAMESPACES", Seq())
878878

@@ -915,7 +915,7 @@ class DataSourceV2SQLSuite
915915
spark.conf.set(
916916
"spark.sql.catalog.testcat_no_namspace",
917917
classOf[BasicInMemoryTableCatalog].getName)
918-
spark.conf.set("spark.sql.default.catalog", "testcat_no_namspace")
918+
spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat_no_namspace")
919919

920920
val exception = intercept[AnalysisException] {
921921
sql("SHOW NAMESPACES")

0 commit comments

Comments (0)