Skip to content

Commit b896d4b

Browse files
authored
Temporarily disable flaky Spark tests on Mac CI (VirtusLab#3842)
1 parent e1ae122 commit b896d4b

File tree

2 files changed

+54
-49
lines changed

2 files changed

+54
-49
lines changed

modules/integration/src/test/scala/scala/cli/integration/SparkTestDefinitions.scala

Lines changed: 46 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -157,6 +157,7 @@ abstract class SparkTestDefinitions extends ScalaCliSuite with TestScalaVersionA
157157
for {
158158
withTestScope <- Seq(true, false)
159159
scopeDescription = if (withTestScope) "test scope" else "main scope"
160+
if !Properties.isMac // TODO: https://github.com/VirtusLab/scala-cli/issues/3841
160161
} test(s"run spark 3.3 standalone ($scopeDescription)") {
161162
simpleRunStandaloneSparkJobTest(
162163
actualScalaVersion,
@@ -166,51 +167,52 @@ abstract class SparkTestDefinitions extends ScalaCliSuite with TestScalaVersionA
166167
)
167168
}
168169

169-
test("run spark spark-submit args") {
170-
val jobName = "the test spark job"
171-
val inputs = TestInputs(
172-
os.rel / "SparkJob.scala" ->
173-
s"""//> using dep org.apache.spark::spark-sql:3.3.0
174-
|
175-
|import org.apache.spark._
176-
|import org.apache.spark.sql._
177-
|
178-
|object SparkJob {
179-
| def main(args: Array[String]): Unit = {
180-
| val spark = SparkSession.builder().getOrCreate()
181-
| val name = spark.conf.get("spark.app.name")
182-
| assert(name == "$jobName")
183-
| import spark.implicits._
184-
| def sc = spark.sparkContext
185-
| val accum = sc.longAccumulator
186-
| sc.parallelize(1 to 10).foreach(x => accum.add(x))
187-
| println("Result: " + accum.value)
188-
| }
189-
|}
190-
|""".stripMargin
191-
)
192-
inputs.fromRoot { root =>
193-
val extraEnv = maybeHadoopHomeForWinutils(root / "hadoop-home")
194-
val res = os.proc(
195-
TestUtil.cli,
196-
"--power",
197-
"run",
198-
extraOptions,
199-
"--spark-standalone",
200-
".",
201-
"--submit-arg",
202-
"--name",
203-
"--submit-arg",
204-
jobName
170+
if (!Properties.isMac) // TODO: https://github.com/VirtusLab/scala-cli/issues/3841
171+
test("run spark spark-submit args") {
172+
val jobName = "the test spark job"
173+
val inputs = TestInputs(
174+
os.rel / "SparkJob.scala" ->
175+
s"""//> using dep org.apache.spark::spark-sql:3.3.0
176+
|
177+
|import org.apache.spark._
178+
|import org.apache.spark.sql._
179+
|
180+
|object SparkJob {
181+
| def main(args: Array[String]): Unit = {
182+
| val spark = SparkSession.builder().getOrCreate()
183+
| val name = spark.conf.get("spark.app.name")
184+
| assert(name == "$jobName")
185+
| import spark.implicits._
186+
| def sc = spark.sparkContext
187+
| val accum = sc.longAccumulator
188+
| sc.parallelize(1 to 10).foreach(x => accum.add(x))
189+
| println("Result: " + accum.value)
190+
| }
191+
|}
192+
|""".stripMargin
205193
)
206-
.call(cwd = root, env = extraEnv)
207-
208-
val expectedOutput = "Result: 55"
209-
210-
val output = res.out.trim().linesIterator.toVector
211-
212-
expect(output.contains(expectedOutput))
194+
inputs.fromRoot { root =>
195+
val extraEnv = maybeHadoopHomeForWinutils(root / "hadoop-home")
196+
val res = os.proc(
197+
TestUtil.cli,
198+
"--power",
199+
"run",
200+
extraOptions,
201+
"--spark-standalone",
202+
".",
203+
"--submit-arg",
204+
"--name",
205+
"--submit-arg",
206+
jobName
207+
)
208+
.call(cwd = root, env = extraEnv)
209+
210+
val expectedOutput = "Result: 55"
211+
212+
val output = res.out.trim().linesIterator.toVector
213+
214+
expect(output.contains(expectedOutput))
215+
}
213216
}
214-
}
215217

216218
}

modules/integration/src/test/scala/scala/cli/integration/SparkTests212.scala

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -110,17 +110,20 @@ class SparkTests212 extends SparkTestDefinitions with Test212 {
110110
expect(output.contains(expectedOutput))
111111
}
112112

113-
test("package spark 2.4") {
114-
simplePackageSparkJobTest(spark24)
115-
}
113+
if (!Properties.isMac) { // TODO: https://github.com/VirtusLab/scala-cli/issues/3841
114+
test("package spark 2.4") {
115+
simplePackageSparkJobTest(spark24)
116+
}
116117

117-
test("package spark 3.0") {
118-
simplePackageSparkJobTest(spark30)
118+
test("package spark 3.0") {
119+
simplePackageSparkJobTest(spark30)
120+
}
119121
}
120122

121123
for {
122124
withTestScope <- Seq(true, false)
123125
scopeDescription = if (withTestScope) "test scope" else "main scope"
126+
if !Properties.isMac // TODO: https://github.com/VirtusLab/scala-cli/issues/3841
124127
} {
125128
test(s"run spark 2.4 ($scopeDescription)") {
126129
simpleRunSparkJobTest(spark24, withTestScope = withTestScope)

0 commit comments

Comments (0)