@@ -91,6 +91,20 @@ lazy val commonSettings = Seq(
91
91
unidocSourceFilePatterns := Nil ,
92
92
)
93
93
94
// Copied from SparkBuild.scala to support Java 17 for unit tests (see apache/spark#34153).
// Since JDK 9, JPMS strong encapsulation blocks the reflective access Spark performs into
// these java.base packages; each package must therefore be opened explicitly to the
// unnamed module of the forked test JVM. Shared so every test project can reuse the list.
// NOTE: no leading whitespace inside the option strings — a flag like " --add-opens=…"
// (with a leading space) would not be recognized by the JVM launcher.
val extraJavaTestArgs = Seq(
  "--add-opens=java.base/java.lang=ALL-UNNAMED",
  "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
  "--add-opens=java.base/java.io=ALL-UNNAMED",
  "--add-opens=java.base/java.net=ALL-UNNAMED",
  "--add-opens=java.base/java.nio=ALL-UNNAMED",
  "--add-opens=java.base/java.util=ALL-UNNAMED",
  "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
  "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
  "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
  "--add-opens=java.base/sun.security.action=ALL-UNNAMED",
  "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED")

94
108
lazy val spark = (project in file(" spark" ))
95
109
.dependsOn(storage)
96
110
.enablePlugins(Antlr4Plugin )
@@ -129,20 +143,7 @@ lazy val spark = (project in file("spark"))
129
143
130
144
Test / testOptions += Tests .Argument (" -oDF" ),
131
145
Test / testOptions += Tests .Argument (TestFrameworks .JUnit , " -v" , " -a" ),
132
-
133
- // Copied from SparkBuild.scala to support Java 17 (see apache/spark#34153)
134
- (Test / javaOptions) ++= Seq (
135
- " --add-opens=java.base/java.lang=ALL-UNNAMED" ,
136
- " --add-opens=java.base/java.lang.invoke=ALL-UNNAMED" ,
137
- " --add-opens=java.base/java.io=ALL-UNNAMED" ,
138
- " --add-opens=java.base/java.net=ALL-UNNAMED" ,
139
- " --add-opens=java.base/java.nio=ALL-UNNAMED" ,
140
- " --add-opens=java.base/java.util=ALL-UNNAMED" ,
141
- " --add-opens=java.base/java.util.concurrent=ALL-UNNAMED" ,
142
- " --add-opens=java.base/sun.nio.ch=ALL-UNNAMED" ,
143
- " --add-opens=java.base/sun.nio.cs=ALL-UNNAMED" ,
144
- " --add-opens=java.base/sun.security.action=ALL-UNNAMED" ,
145
- " --add-opens=java.base/sun.util.calendar=ALL-UNNAMED" ),
146
+ Test / javaOptions ++= extraJavaTestArgs, // Required for UTs with Java 17
146
147
147
148
// Don't execute in parallel since we can't have multiple Sparks in the same JVM
148
149
Test / parallelExecution := false ,
@@ -253,7 +254,8 @@ lazy val sharing = (project in file("sharing"))
253
254
" org.apache.spark" %% " spark-core" % sparkVersion % " test" classifier " tests" ,
254
255
" org.apache.spark" %% " spark-sql" % sparkVersion % " test" classifier " tests" ,
255
256
" org.apache.spark" %% " spark-hive" % sparkVersion % " test" classifier " tests" ,
256
- )
257
+ ),
258
+ Test / javaOptions ++= extraJavaTestArgs // Required for UTs with Java 17
257
259
).configureUnidoc()
258
260
259
261
lazy val kernelApi = (project in file(" kernel/kernel-api" ))
0 commit comments