@@ -157,6 +157,7 @@ abstract class SparkTestDefinitions extends ScalaCliSuite with TestScalaVersionA
   for {
     withTestScope <- Seq(true, false)
     scopeDescription = if (withTestScope) "test scope" else "main scope"
+    if !Properties.isMac // TODO: https://github.com/VirtusLab/scala-cli/issues/3841
   } test(s"run spark 3.3 standalone ($scopeDescription)") {
     simpleRunStandaloneSparkJobTest(
       actualScalaVersion,
@@ -166,51 +167,52 @@ abstract class SparkTestDefinitions extends ScalaCliSuite with TestScalaVersionA
     )
   }
 
-  test("run spark spark-submit args") {
-    val jobName = "the test spark job"
-    val inputs = TestInputs(
-      os.rel / "SparkJob.scala" ->
-        s"""//> using dep org.apache.spark::spark-sql:3.3.0
-           |
-           |import org.apache.spark._
-           |import org.apache.spark.sql._
-           |
-           |object SparkJob {
-           |  def main(args: Array[String]): Unit = {
-           |    val spark = SparkSession.builder().getOrCreate()
-           |    val name = spark.conf.get("spark.app.name")
-           |    assert(name == "$jobName")
-           |    import spark.implicits._
-           |    def sc = spark.sparkContext
-           |    val accum = sc.longAccumulator
-           |    sc.parallelize(1 to 10).foreach(x => accum.add(x))
-           |    println("Result: " + accum.value)
-           |  }
-           |}
-           |""".stripMargin
-    )
-    inputs.fromRoot { root =>
-      val extraEnv = maybeHadoopHomeForWinutils(root / "hadoop-home")
-      val res = os.proc(
-        TestUtil.cli,
-        "--power",
-        "run",
-        extraOptions,
-        "--spark-standalone",
-        ".",
-        "--submit-arg",
-        "--name",
-        "--submit-arg",
-        jobName
+  if (!Properties.isMac) // TODO: https://github.com/VirtusLab/scala-cli/issues/3841
+    test("run spark spark-submit args") {
+      val jobName = "the test spark job"
+      val inputs = TestInputs(
+        os.rel / "SparkJob.scala" ->
+          s"""//> using dep org.apache.spark::spark-sql:3.3.0
+             |
+             |import org.apache.spark._
+             |import org.apache.spark.sql._
+             |
+             |object SparkJob {
+             |  def main(args: Array[String]): Unit = {
+             |    val spark = SparkSession.builder().getOrCreate()
+             |    val name = spark.conf.get("spark.app.name")
+             |    assert(name == "$jobName")
+             |    import spark.implicits._
+             |    def sc = spark.sparkContext
+             |    val accum = sc.longAccumulator
+             |    sc.parallelize(1 to 10).foreach(x => accum.add(x))
+             |    println("Result: " + accum.value)
+             |  }
+             |}
+             |""".stripMargin
       )
-        .call(cwd = root, env = extraEnv)
-
-      val expectedOutput = "Result: 55"
-
-      val output = res.out.trim().linesIterator.toVector
-
-      expect(output.contains(expectedOutput))
+      inputs.fromRoot { root =>
+        val extraEnv = maybeHadoopHomeForWinutils(root / "hadoop-home")
+        val res = os.proc(
+          TestUtil.cli,
+          "--power",
+          "run",
+          extraOptions,
+          "--spark-standalone",
+          ".",
+          "--submit-arg",
+          "--name",
+          "--submit-arg",
+          jobName
+        )
+          .call(cwd = root, env = extraEnv)
+
+        val expectedOutput = "Result: 55"
+
+        val output = res.out.trim().linesIterator.toVector
+
+        expect(output.contains(expectedOutput))
+      }
     }
-  }
 
 }