Skip to content

Commit 06290c1

Browse files
authored
Ignore flaky scala tests as well as hive tests (apache-spark-on-k8s#335)
1 parent 7827060 commit 06290c1

File tree

8 files changed

+76
-11
lines changed

8 files changed

+76
-11
lines changed
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.tags;

import org.scalatest.TagAnnotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * ScalaTest tag annotation marking a single test method or an entire suite as flaky.
 *
 * <p>Tests carrying this annotation (or the companion Scala {@code Flaky} tag, which
 * resolves to this annotation's canonical name) can be excluded from a run by passing
 * {@code org.apache.spark.tags.FlakyTest} to the test runner's excluded-tags option.
 *
 * <p>{@code RUNTIME} retention is required so the test framework can discover the tag
 * reflectively; both {@code METHOD} and {@code TYPE} targets are allowed so it can be
 * applied per-test or per-suite.
 */
@TagAnnotation
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface FlakyTest { }
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.tags

import org.scalatest.Tag

/**
 * ScalaTest [[org.scalatest.Tag]] for marking flaky tests from Scala code, e.g.
 * `test("name", Flaky) { ... }`.
 *
 * The tag name is the canonical name of the [[FlakyTest]] Java annotation, so tests
 * tagged either way (Scala `Tag` argument or Java annotation) are excluded by the
 * same `org.apache.spark.tags.FlakyTest` exclusion entry.
 */
object Flaky extends Tag(classOf[FlakyTest].getCanonicalName)

dev/run-scala-tests.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,5 +37,9 @@ def java_version():
3737
# Determine the build environment and which modules' tests need to run.
env = get_build_environment()
mtt = modules_to_test(env)

# Exclude tests tagged as flaky (org.apache.spark.tags.FlakyTest) in addition to
# whatever exclusions the module selection already computed. Note: build a new
# list rather than mutating mtt.excluded_tags in place.
excluded_tags = mtt.excluded_tags + [
    "org.apache.spark.tags.FlakyTest"
]

# run the test suites
run_scala_tests(env.build_tool, env.hadoop_version, mtt.test_modules, excluded_tags)

project/CirclePlugin.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,9 @@ object CirclePlugin extends AutoPlugin {
4646
override def trigger: PluginTrigger = allRequirements
4747

4848
// Collects this project's defined tests, paired with the project reference.
// We can use Keys.definedTests because we resolve this task 'in Test' but later
// define it 'in Circle' so there's no cycle (unlike depending on a key we override).
private[this] lazy val testsByProject = Def.task {
  ProjectTests(thisProjectRef.value, (definedTests in Test).value)
}
5353

5454
private[this] lazy val mapper = new ObjectMapper() with ScalaObjectMapper

project/SparkBuild.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -559,6 +559,8 @@ object SQL {
559559
object Hive {
560560

561561
lazy val settings = Seq(
562+
// Skip all hive tests because we don't use hive.
563+
definedTests in Test := Nil,
562564
// Specially disable assertions since some Hive tests fail them
563565
javaOptions in Test := (javaOptions in Test).value.filterNot(_ == "-ea"),
564566
// Supporting all SerDes requires us to depend on deprecated APIs, so we turn off the warnings

sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ import org.scalatest.BeforeAndAfterAll
2525
import org.apache.spark.SparkException
2626
import org.apache.spark.sql.internal.SQLConf
2727
import org.apache.spark.sql.test.SharedSQLContext
28+
import org.apache.spark.tags.Flaky
2829

2930

3031
class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext with BeforeAndAfterAll {
@@ -115,7 +116,11 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext with Befo
115116
}
116117

117118
allFileBasedDataSources.foreach { format =>
118-
testQuietly(s"Enabling/disabling ignoreMissingFiles using $format") {
119+
val tags = format match {
120+
case "orc" => List(Flaky)
121+
case _ => List()
122+
}
123+
testQuietly(s"Enabling/disabling ignoreMissingFiles using $format", tags: _*) {
119124
def testIgnoreMissingFiles(): Unit = {
120125
withTempDir { dir =>
121126
val basePath = dir.getCanonicalPath

sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ import org.apache.spark.sql.execution.streaming.{MemoryStream, StatefulOperatorS
3535
import org.apache.spark.sql.execution.streaming.state.{StateStore, StateStoreProviderId}
3636
import org.apache.spark.sql.functions._
3737
import org.apache.spark.sql.types._
38+
import org.apache.spark.tags.Flaky
3839
import org.apache.spark.util.Utils
3940

4041

@@ -446,7 +447,7 @@ class StreamingOuterJoinSuite extends StreamTest with StateStoreMetricsTest with
446447
(input1, input2, joined)
447448
}
448449

449-
test("left outer early state exclusion on left") {
450+
test("left outer early state exclusion on left", Flaky) {
450451
val (leftInput, df1) = setupStream("left", 2)
451452
val (rightInput, df2) = setupStream("right", 3)
452453
// Use different schemas to ensure the null row is being generated from the correct side.
@@ -477,7 +478,7 @@ class StreamingOuterJoinSuite extends StreamTest with StateStoreMetricsTest with
477478
)
478479
}
479480

480-
test("left outer early state exclusion on right") {
481+
test("left outer early state exclusion on right", Flaky) {
481482
val (leftInput, df1) = setupStream("left", 2)
482483
val (rightInput, df2) = setupStream("right", 3)
483484
// Use different schemas to ensure the null row is being generated from the correct side.
@@ -508,7 +509,7 @@ class StreamingOuterJoinSuite extends StreamTest with StateStoreMetricsTest with
508509
)
509510
}
510511

511-
test("right outer early state exclusion on left") {
512+
test("right outer early state exclusion on left", Flaky) {
512513
val (leftInput, df1) = setupStream("left", 2)
513514
val (rightInput, df2) = setupStream("right", 3)
514515
// Use different schemas to ensure the null row is being generated from the correct side.
@@ -539,7 +540,7 @@ class StreamingOuterJoinSuite extends StreamTest with StateStoreMetricsTest with
539540
)
540541
}
541542

542-
test("right outer early state exclusion on right") {
543+
test("right outer early state exclusion on right", Flaky) {
543544
val (leftInput, df1) = setupStream("left", 2)
544545
val (rightInput, df2) = setupStream("right", 3)
545546
// Use different schemas to ensure the null row is being generated from the correct side.

sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ import scala.util.control.NonFatal
2828

2929
import org.apache.hadoop.fs.Path
3030
import org.scalatest.{BeforeAndAfterAll, Suite}
31+
import org.scalatest.Tag
3132
import org.scalatest.concurrent.Eventually
3233

3334
import org.apache.spark.SparkFunSuite
@@ -80,8 +81,8 @@ private[sql] trait SQLTestUtils extends SparkFunSuite with SQLTestUtilsBase with
8081
* System.out or System.err. Otherwise, ConsoleAppender will still output to the console even if
8182
* we change System.out and System.err.
8283
*/
83-
protected def testQuietly(name: String)(f: => Unit): Unit = {
84-
test(name) {
84+
protected def testQuietly(name: String, tags: Tag*)(f: => Unit): Unit = {
85+
test(name, tags: _*) {
8586
quietly {
8687
f
8788
}

0 commit comments

Comments
 (0)