@@ -38,6 +38,8 @@ val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
val sparkVersion = settingKey[String]("Spark version")
spark / sparkVersion := getSparkVersion()
+ kernelDefaults / sparkVersion := getSparkVersion()
+ goldenTables / sparkVersion := getSparkVersion()

// Dependent library versions
val defaultSparkVersion = LATEST_RELEASED_SPARK_VERSION
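The two added lines give kernelDefaults and goldenTables their own sparkVersion setting, resolved by the same getSparkVersion() helper the spark project already uses. That helper is defined elsewhere in build.sbt; as a hedged sketch only, it could resolve the cross-build version from a system property along these lines (the property name and fallback are assumptions, not the actual implementation):

// Hypothetical sketch -- the real getSparkVersion() lives elsewhere in build.sbt.
// Assumes selection via -DsparkVersion=...; defaults to the latest released Spark.
def getSparkVersion(): String = {
  val v = sys.props.getOrElse("sparkVersion", LATEST_RELEASED_SPARK_VERSION)
  require(
    v == LATEST_RELEASED_SPARK_VERSION || v == SPARK_MASTER_VERSION,
    s"Unsupported Spark version: $v")
  v // "3.5.0" or "4.0.0-SNAPSHOT"
}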
@@ -126,6 +128,25 @@ lazy val commonSettings = Seq(
  unidocSourceFilePatterns := Nil,
)

+ /**
+  * Java-/Scala-/Uni-Doc settings aren't working yet against Spark Master.
+  * 1) delta-spark on Spark Master uses JDK 17. delta-iceberg uses JDK 8 or 11. For some reason,
+  *    generating delta-spark unidoc compiles delta-iceberg.
+  * 2) delta-spark unidoc fails to compile. Spark 3.5 is on its classpath, likely due to the
+  *    iceberg issue above.
+  */
+ def crossSparkProjectSettings(): Seq[Setting[_]] = getSparkVersion() match {
+   case LATEST_RELEASED_SPARK_VERSION => Seq(
+     // Java-/Scala-/Uni-Doc Settings
+     scalacOptions ++= Seq(
+       "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
+     ),
+     unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
+   )
+
+   case SPARK_MASTER_VERSION => Seq()
+ }
+
/**
 * Note: we cannot access sparkVersion.value here, since that can only be used within a task or
 * setting macro.
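One Scala detail worth noting in the match above: because LATEST_RELEASED_SPARK_VERSION and SPARK_MASTER_VERSION are vals whose names start with an uppercase letter, the patterns are stable identifiers, i.e. equality checks against those vals rather than fresh variable bindings. A minimal standalone illustration:

// Uppercase-named vals act as stable identifiers in patterns (equality checks),
// which is exactly what crossSparkProjectSettings() relies on.
val Released = "3.5.0"

"3.5.0" match {
  case Released => println("matched the released version") // compares against Released
  case other    => println(s"unexpected version: $other")  // lowercase name binds instead
}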
@@ -140,12 +161,6 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
    Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "main" / "scala-spark-3.5",
    Test / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "test" / "scala-spark-3.5",
    Antlr4 / antlr4Version := "4.9.3",
-
-   // Java-/Scala-/Uni-Doc Settings
-   scalacOptions ++= Seq(
-     "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
-   ),
-   unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
  )

  case SPARK_MASTER_VERSION => Seq(
@@ -170,13 +185,6 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
      "--add-opens=java.base/sun.security.action=ALL-UNNAMED",
      "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
    )
-
-   // Java-/Scala-/Uni-Doc Settings
-   // This isn't working yet against Spark Master.
-   // 1) delta-spark on Spark Master uses JDK 17. delta-iceberg uses JDK 8 or 11. For some reason,
-   //    generating delta-spark unidoc compiles delta-iceberg
-   // 2) delta-spark unidoc fails to compile. spark 3.5 is on its classpath. likely due to iceberg
-   //    issue above.
  )
}
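The --add-opens flags in the Spark Master branch are JPMS options needed on JDK 17, and they only take effect on a forked JVM. As a hedged sketch of the general pattern (fork := true and the exact scoping are assumptions here, not taken from this diff):

// Sketch: JPMS --add-opens flags apply only when sbt forks the test JVM.
// fork := true is an assumption about how this build runs tests.
Test / fork := true
Test / javaOptions ++= Seq(
  "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" // one of the flags above
)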
@@ -190,6 +198,7 @@ lazy val spark = (project in file("spark"))
  sparkMimaSettings,
  releaseSettings,
  crossSparkSettings(),
+ crossSparkProjectSettings(),
  libraryDependencies ++= Seq(
    // Adding test classifier seems to break transitive resolution of the core dependencies
    "org.apache.spark" %% "spark-hive" % sparkVersion.value % "provided",
@@ -357,6 +366,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
  scalaStyleSettings,
  javaOnlyReleaseSettings,
  Test / javaOptions ++= Seq("-ea"),
+ crossSparkSettings(),
  libraryDependencies ++= Seq(
    "org.apache.hadoop" % "hadoop-client-runtime" % hadoopVersion,
    "com.fasterxml.jackson.core" % "jackson-databind" % "2.13.5",
@@ -373,10 +383,10 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
    "org.openjdk.jmh" % "jmh-core" % "1.37" % "test",
    "org.openjdk.jmh" % "jmh-generator-annprocess" % "1.37" % "test",

-   "org.apache.spark" %% "spark-hive" % defaultSparkVersion % "test" classifier "tests",
-   "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests",
-   "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
-   "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
+   "org.apache.spark" %% "spark-hive" % sparkVersion.value % "test" classifier "tests",
+   "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests",
+   "org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
+   "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
  ),
  javaCheckstyleSettings("kernel/dev/checkstyle.xml"),
  // Unidoc settings
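Switching these test dependencies from the fixed defaultSparkVersion to sparkVersion.value ties them to the per-project setting declared in the first hunk, so kernelDefaults (and goldenTables below) now test against whichever Spark version the cross-build selects. A minimal self-contained sketch of the same sbt pattern, with illustrative project and property names:

// Illustrative build.sbt fragment: a per-project settingKey consumed via .value.
val sparkVersion = settingKey[String]("Spark version")

lazy val example = (project in file("example"))
  .settings(
    sparkVersion := sys.props.getOrElse("sparkVersion", "3.5.0"),
    // The dependency tracks whatever this project's sparkVersion resolves to.
    libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test"
  )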
@@ -1071,14 +1081,15 @@ lazy val goldenTables = (project in file("connectors/golden-tables"))
  name := "golden-tables",
  commonSettings,
  skipReleaseSettings,
+ crossSparkSettings(),
  libraryDependencies ++= Seq(
    // Test Dependencies
    "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
    "commons-io" % "commons-io" % "2.8.0" % "test",
-   "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test",
-   "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
-   "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
-   "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests"
+   "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test",
+   "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
+   "org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
+   "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests"
  )
)