@@ -38,6 +38,8 @@ val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
val sparkVersion = settingKey[String]("Spark version")
spark / sparkVersion := getSparkVersion()
+ kernelDefaults / sparkVersion := getSparkVersion()
+ goldenTables / sparkVersion := getSparkVersion()

// Dependent library versions
val defaultSparkVersion = LATEST_RELEASED_SPARK_VERSION
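For context: `getSparkVersion()` is not shown in this diff. A minimal sketch of what such a selector could look like, assuming (hypothetically) that the build picks the Spark line from a `sparkVersion` JVM system property; the property name and fallback are illustrative, not taken from this file:

```scala
// Hypothetical sketch only: choose the Spark version for the whole build,
// defaulting to the latest released line unless e.g. -DsparkVersion=master is set.
def getSparkVersion(): String =
  sys.props.get("sparkVersion") match {
    case Some("master") => SPARK_MASTER_VERSION          // "4.0.0-SNAPSHOT"
    case Some(explicit) => explicit                      // use the requested version as-is
    case None           => LATEST_RELEASED_SPARK_VERSION // "3.5.0"
  }
```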
@@ -126,6 +128,25 @@ lazy val commonSettings = Seq(
unidocSourceFilePatterns := Nil,
)

+ /**
+  * Java-/Scala-/Uni-Doc settings aren't working yet against Spark Master:
+  * 1) delta-spark on Spark Master uses JDK 17, while delta-iceberg uses JDK 8 or 11. For some
+  *    reason, generating the delta-spark unidoc also compiles delta-iceberg.
+  * 2) The delta-spark unidoc fails to compile with Spark 3.5 on its classpath, likely due to
+  *    the iceberg issue above.
+  */
+ def crossSparkProjectSettings(): Seq[Setting[_]] = getSparkVersion() match {
+   case LATEST_RELEASED_SPARK_VERSION => Seq(
+     // Java-/Scala-/Uni-Doc Settings
+     scalacOptions ++= Seq(
+       "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
+     ),
+     unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
+   )
+
+   case SPARK_MASTER_VERSION => Seq()
+ }
+
/**
* Note: we cannot access sparkVersion.value here, since that can only be used within a task or
* setting macro.
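The context comment above records a real sbt constraint: `.value` can only be called inside a setting or task macro such as `:=`, `+=`, or `Def.task`, which is why these helpers branch on the plain function `getSparkVersion()` instead. A small illustration of the distinction (this fragment is hypothetical, not part of the diff):

```scala
// Fine: .value is expanded inside the += setting macro.
libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion.value

// Won't compile: `value` can only be used within a task or setting macro.
// val chosen = sparkVersion.value

// So version-dependent Seq[Setting[_]] helpers call the plain function instead:
def exampleCrossSettings(): Seq[Setting[_]] = getSparkVersion() match {
  case LATEST_RELEASED_SPARK_VERSION => Seq() // settings for the released Spark line
  case SPARK_MASTER_VERSION          => Seq() // settings for Spark master
}
```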
@@ -140,12 +161,6 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "main" / "scala-spark-3.5",
Test / unmanagedSourceDirectories += (Test / baseDirectory).value / "src" / "test" / "scala-spark-3.5",
Antlr4 / antlr4Version := "4.9.3",
-
- // Java-/Scala-/Uni-Doc Settings
- scalacOptions ++= Seq(
-   "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
- ),
- unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
)

case SPARK_MASTER_VERSION => Seq(
@@ -170,13 +185,6 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
)
-
- // Java-/Scala-/Uni-Doc Settings
- // This isn't working yet against Spark Master.
- // 1) delta-spark on Spark Master uses JDK 17. delta-iceberg uses JDK 8 or 11. For some reason,
- //    generating delta-spark unidoc compiles delta-iceberg
- // 2) delta-spark unidoc fails to compile. spark 3.5 is on its classpath. likely due to iceberg
- //    issue above.
)
}
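Net effect of the hunks above: the doc-generation settings leave `crossSparkSettings()`, which is about to be shared by more projects, and land in the new `crossSparkProjectSettings()`, which only delta-spark applies. An illustrative wiring under that assumption (project names are hypothetical; the real wiring appears in the hunks below):

```scala
// Shared by every cross-built project: per-version source dirs, antlr, JVM opens.
lazy val sharedExample = (project in file("example-shared"))
  .settings(crossSparkSettings())

// Only the project that publishes unidoc also picks up the doc settings,
// which are empty when building against Spark master.
lazy val docExample = (project in file("example-doc"))
  .settings(crossSparkSettings(), crossSparkProjectSettings())
```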
@@ -190,6 +198,7 @@ lazy val spark = (project in file("spark"))
sparkMimaSettings,
releaseSettings,
crossSparkSettings(),
+ crossSparkProjectSettings(),
libraryDependencies ++= Seq(
  // Adding test classifier seems to break transitive resolution of the core dependencies
  "org.apache.spark" %% "spark-hive" % sparkVersion.value % "provided",
@@ -358,6 +367,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
scalaStyleSettings,
javaOnlyReleaseSettings,
Test / javaOptions ++= Seq("-ea"),
+ crossSparkSettings(),
libraryDependencies ++= Seq(
  "org.apache.hadoop" % "hadoop-client-runtime" % hadoopVersion,
  "com.fasterxml.jackson.core" % "jackson-databind" % "2.13.5",
@@ -374,10 +384,10 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
  "org.openjdk.jmh" % "jmh-core" % "1.37" % "test",
  "org.openjdk.jmh" % "jmh-generator-annprocess" % "1.37" % "test",

- "org.apache.spark" %% "spark-hive" % defaultSparkVersion % "test" classifier "tests",
- "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests",
- "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
- "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
+ "org.apache.spark" %% "spark-hive" % sparkVersion.value % "test" classifier "tests",
+ "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests",
+ "org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
+ "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
),
javaCheckstyleSettings("kernel/dev/checkstyle.xml"),
// Unidoc settings
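The `defaultSparkVersion` to `sparkVersion.value` swap means these test dependencies now follow whichever Spark version the build selected, rather than staying pinned to the latest release. A hedged sketch of the resulting behavior, assuming the per-project wiring from the first hunk (`kernelDefaults / sparkVersion := getSparkVersion()`):

```scala
// Illustrative fragment: the same dependency line now resolves to 3.5.0 on a
// default build and to 4.0.0-SNAPSHOT on a Spark-master build.
lazy val exampleTestDeps: Seq[Setting[_]] = Seq(
  libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test"
)
```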
@@ -1072,14 +1082,15 @@ lazy val goldenTables = (project in file("connectors/golden-tables"))
name := "golden-tables",
commonSettings,
skipReleaseSettings,
+ crossSparkSettings(),
libraryDependencies ++= Seq(
  // Test Dependencies
  "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
  "commons-io" % "commons-io" % "2.8.0" % "test",
- "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test",
- "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
- "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
- "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests"
+ "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test",
+ "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
+ "org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
+ "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests"
)
)