@@ -38,6 +38,8 @@ val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
 val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
 val sparkVersion = settingKey[String]("Spark version")
 spark / sparkVersion := getSparkVersion()
+kernelDefaults / sparkVersion := getSparkVersion()
+goldenTables / sparkVersion := getSparkVersion()
 
 // Dependent library versions
 val defaultSparkVersion = LATEST_RELEASED_SPARK_VERSION
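For context, the helper below is a minimal sketch of how `getSparkVersion()` could resolve the version that this hunk wires into each project. The `sparkVersion` system-property name and the fallback behavior are assumptions for illustration, not the repository's actual implementation:

```scala
// Hypothetical sketch, not the actual build.sbt implementation: resolve the
// Spark version from a JVM system property, defaulting to the latest release.
def getSparkVersion(): String = {
  // Assumed property name; the real build may use a different mechanism.
  val v = sys.props.getOrElse("sparkVersion", LATEST_RELEASED_SPARK_VERSION)
  require(
    v == LATEST_RELEASED_SPARK_VERSION || v == SPARK_MASTER_VERSION,
    s"Unsupported Spark version: $v")
  v
}
```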
@@ -126,6 +128,25 @@ lazy val commonSettings = Seq(
   unidocSourceFilePatterns := Nil,
 )
 
+/**
+ * Java-/Scala-/Uni-Doc settings aren't working yet against Spark Master:
+ * 1) delta-spark on Spark Master uses JDK 17, while delta-iceberg uses JDK 8 or 11. For some
+ *    reason, generating delta-spark unidoc also compiles delta-iceberg.
+ * 2) delta-spark unidoc fails to compile with Spark 3.5 on its classpath, likely due to the
+ *    iceberg issue above.
+ */
+def crossSparkProjectSettings(): Seq[Setting[_]] = getSparkVersion() match {
+  case LATEST_RELEASED_SPARK_VERSION => Seq(
+    // Java-/Scala-/Uni-Doc Settings
+    scalacOptions ++= Seq(
+      "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
+    ),
+    unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
+  )
+
+  case SPARK_MASTER_VERSION => Seq()
+}
+
 /**
  * Note: we cannot access sparkVersion.value here, since that can only be used within a task or
  * setting macro.
@@ -140,12 +161,6 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
     Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "main" / "scala-spark-3.5",
     Test / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "test" / "scala-spark-3.5",
     Antlr4 / antlr4Version := "4.9.3",
-
-    // Java-/Scala-/Uni-Doc Settings
-    scalacOptions ++= Seq(
-      "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
-    ),
-    unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
   )
 
   case SPARK_MASTER_VERSION => Seq(
@@ -170,13 +185,6 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
170185 " --add-opens=java.base/sun.security.action=ALL-UNNAMED" ,
171186 " --add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
172187 )
173-
174- // Java-/Scala-/Uni-Doc Settings
175- // This isn't working yet against Spark Master.
176- // 1) delta-spark on Spark Master uses JDK 17. delta-iceberg uses JDK 8 or 11. For some reason,
177- // generating delta-spark unidoc compiles delta-iceberg
178- // 2) delta-spark unidoc fails to compile. spark 3.5 is on its classpath. likely due to iceberg
179- // issue above.
180188 )
181189}
182190
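The note at the top of `crossSparkSettings()` is worth unpacking: `sparkVersion` is a `SettingKey[String]`, so `.value` is only legal inside another setting or task macro, while top-level helpers like `crossSparkSettings()` run when the build is loaded and must call `getSparkVersion()` directly. A minimal illustration of the two access patterns (the `isSparkMaster` helper is hypothetical, added here only to show the load-time form):

```scala
// Inside a setting body, sbt's macro expands .value, so this is legal:
libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion.value % "provided"

// At build-load time there is no macro context, so helpers that return
// Seq[Setting[_]] must branch on the plain function instead:
def isSparkMaster(): Boolean = getSparkVersion() == SPARK_MASTER_VERSION
```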
@@ -190,6 +198,7 @@ lazy val spark = (project in file("spark"))
     sparkMimaSettings,
     releaseSettings,
     crossSparkSettings(),
+    crossSparkProjectSettings(),
     libraryDependencies ++= Seq(
       // Adding test classifier seems to break transitive resolution of the core dependencies
       "org.apache.spark" %% "spark-hive" % sparkVersion.value % "provided",
@@ -357,6 +366,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
     scalaStyleSettings,
     javaOnlyReleaseSettings,
     Test / javaOptions ++= Seq("-ea"),
+    crossSparkSettings(),
     libraryDependencies ++= Seq(
       "org.apache.hadoop" % "hadoop-client-runtime" % hadoopVersion,
       "com.fasterxml.jackson.core" % "jackson-databind" % "2.13.5",
@@ -373,10 +383,10 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
373383 " org.openjdk.jmh" % " jmh-core" % " 1.37" % " test" ,
374384 " org.openjdk.jmh" % " jmh-generator-annprocess" % " 1.37" % " test" ,
375385
376- " org.apache.spark" %% " spark-hive" % defaultSparkVersion % " test" classifier " tests" ,
377- " org.apache.spark" %% " spark-sql" % defaultSparkVersion % " test" classifier " tests" ,
378- " org.apache.spark" %% " spark-core" % defaultSparkVersion % " test" classifier " tests" ,
379- " org.apache.spark" %% " spark-catalyst" % defaultSparkVersion % " test" classifier " tests" ,
386+ " org.apache.spark" %% " spark-hive" % sparkVersion.value % " test" classifier " tests" ,
387+ " org.apache.spark" %% " spark-sql" % sparkVersion.value % " test" classifier " tests" ,
388+ " org.apache.spark" %% " spark-core" % sparkVersion.value % " test" classifier " tests" ,
389+ " org.apache.spark" %% " spark-catalyst" % sparkVersion.value % " test" classifier " tests" ,
380390 ),
381391 javaCheckstyleSettings(" kernel/dev/checkstyle.xml" ),
382392 // Unidoc settings
@@ -1071,14 +1081,15 @@ lazy val goldenTables = (project in file("connectors/golden-tables"))
     name := "golden-tables",
     commonSettings,
     skipReleaseSettings,
+    crossSparkSettings(),
     libraryDependencies ++= Seq(
       // Test Dependencies
       "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
       "commons-io" % "commons-io" % "2.8.0" % "test",
-      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test",
-      "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests"
+      "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test",
+      "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
+      "org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests"
     )
   )
 
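One observation on the result: the Spark test-jar dependency block now appears nearly verbatim in both `kernelDefaults` and `goldenTables`. A hypothetical follow-up, not part of this patch, could factor it into a shared setting; the module lists differ slightly per project (e.g. `goldenTables` omits `spark-hive`), so a real refactor would parameterize them:

```scala
// Hypothetical refactor: one shared Def.setting for the test-jar dependencies,
// reading each project's own sparkVersion at setting-evaluation time.
def sparkTestJarDeps(modules: Seq[String]) = Def.setting {
  modules.map { m =>
    "org.apache.spark" %% m % sparkVersion.value % "test" classifier "tests"
  }
}
// usage inside a project's settings:
//   libraryDependencies ++=
//     sparkTestJarDeps(Seq("spark-sql", "spark-core", "spark-catalyst")).value
```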