
Commit 10f45b3

[SPARK-22047][FLAKY TEST] HiveExternalCatalogVersionsSuite

## What changes were proposed in this pull request?

This PR downloads Spark afresh for each test run, to make sure each run is fully isolated from the others.

## How was this patch tested?

N/A

Author: Wenchen Fan <[email protected]>

Closes apache#19265 from cloud-fan/test.
1 parent 94f7e04 commit 10f45b3

File tree: 1 file changed (+5, -3 lines)


sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala

Lines changed: 5 additions & 3 deletions
@@ -35,16 +35,18 @@ import org.apache.spark.util.Utils
  * expected version under this local directory, e.g. `/tmp/spark-test/spark-2.0.3`, we will skip the
  * downloading for this spark version.
  */
-@org.scalatest.Ignore
 class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
   private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse")
   private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data")
-  private val sparkTestingDir = "/tmp/spark-test"
+  // For local test, you can set `sparkTestingDir` to a static value like `/tmp/test-spark`, to
+  // avoid downloading Spark of different versions in each run.
+  private val sparkTestingDir = Utils.createTempDir(namePrefix = "test-spark")
   private val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
 
   override def afterAll(): Unit = {
     Utils.deleteRecursively(wareHousePath)
     Utils.deleteRecursively(tmpDataDir)
+    Utils.deleteRecursively(sparkTestingDir)
     super.afterAll()
   }
 
@@ -53,7 +55,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
 
     val url = s"https://d3kbcqa49mib13.cloudfront.net/spark-$version-bin-hadoop2.7.tgz"
 
-    Seq("wget", url, "-q", "-P", sparkTestingDir).!
+    Seq("wget", url, "-q", "-P", sparkTestingDir.getCanonicalPath).!
 
     val downloaded = new File(sparkTestingDir, s"spark-$version-bin-hadoop2.7.tgz").getCanonicalPath
     val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath
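For readers skimming the diff, here is a minimal, self-contained Scala sketch of the isolation pattern the patch adopts: create a fresh temporary directory per run, download into it, and delete it during cleanup. The object name, the version string, and the `deleteRecursively` helper are illustrative assumptions; the real suite uses Spark's internal `Utils.createTempDir` and `Utils.deleteRecursively`.

```scala
import java.io.File
import java.nio.file.Files

import scala.sys.process._

object PerRunDownloadSketch {

  // Stand-in for Spark's Utils.deleteRecursively (hypothetical helper).
  private def deleteRecursively(f: File): Unit = {
    Option(f.listFiles()).foreach(_.foreach(deleteRecursively))
    f.delete()
  }

  def main(args: Array[String]): Unit = {
    // A fresh directory per run, so repeated or concurrent runs never share state.
    val sparkTestingDir: File = Files.createTempDirectory("test-spark").toFile
    try {
      val version = "2.2.0" // illustrative version, not taken from the suite
      val url = s"https://d3kbcqa49mib13.cloudfront.net/spark-$version-bin-hadoop2.7.tgz"
      // wget's -P flag takes a directory path string, which is why the patch
      // switches to sparkTestingDir.getCanonicalPath once the val becomes a File.
      Seq("wget", url, "-q", "-P", sparkTestingDir.getCanonicalPath).!
    } finally {
      // Mirrors the suite's afterAll: remove the per-run directory.
      deleteRecursively(sparkTestingDir)
    }
  }
}
```

Note that the second hunk's `getCanonicalPath` change follows from the first: once `sparkTestingDir` is a `File` rather than a `String`, `wget` needs its path string.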
