Skip to content

Commit baa6b3b

Browse files
Marcelo Vanzin authored and Robert Kruszewski committed
[SPARK-19964][CORE] Avoid reading from remote repos in SparkSubmitSuite.
These tests can fail with a timeout if the remote repos are not responding, or slow. The tests don't need anything from those repos, so use an empty ivy config file to avoid setting up the defaults. The tests are passing reliably for me locally now, and failing more often than not today without this change since http://dl.bintray.com/spark-packages/maven doesn't seem to be loading from my machine. Author: Marcelo Vanzin <[email protected]> Closes apache#20916 from vanzin/SPARK-19964.
1 parent f9927a5 commit baa6b3b

File tree

5 files changed

+27
-13
lines changed

5 files changed

+27
-13
lines changed

core/src/main/scala/org/apache/spark/deploy/DependencyUtils.scala

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -33,18 +33,21 @@ private[deploy] object DependencyUtils {
3333
packagesExclusions: String,
3434
packages: String,
3535
repositories: String,
36-
ivyRepoPath: String): String = {
36+
ivyRepoPath: String,
37+
ivySettingsPath: Option[String]): String = {
3738
val exclusions: Seq[String] =
3839
if (!StringUtils.isBlank(packagesExclusions)) {
3940
packagesExclusions.split(",")
4041
} else {
4142
Nil
4243
}
4344
// Create the IvySettings, either load from file or build defaults
44-
val ivySettings = sys.props.get("spark.jars.ivySettings").map { ivySettingsFile =>
45-
SparkSubmitUtils.loadIvySettings(ivySettingsFile, Option(repositories), Option(ivyRepoPath))
46-
}.getOrElse {
47-
SparkSubmitUtils.buildIvySettings(Option(repositories), Option(ivyRepoPath))
45+
val ivySettings = ivySettingsPath match {
46+
case Some(path) =>
47+
SparkSubmitUtils.loadIvySettings(path, Option(repositories), Option(ivyRepoPath))
48+
49+
case None =>
50+
SparkSubmitUtils.buildIvySettings(Option(repositories), Option(ivyRepoPath))
4851
}
4952

5053
SparkSubmitUtils.resolveMavenCoordinates(packages, ivySettings, exclusions = exclusions)

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -359,7 +359,8 @@ object SparkSubmit extends CommandLineUtils with Logging {
359359
// Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
360360
// too for packages that include Python code
361361
val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(
362-
args.packagesExclusions, args.packages, args.repositories, args.ivyRepoPath)
362+
args.packagesExclusions, args.packages, args.repositories, args.ivyRepoPath,
363+
args.ivySettingsPath)
363364

364365
if (!StringUtils.isBlank(resolvedMavenCoordinates)) {
365366
args.jars = mergeFileLists(args.jars, resolvedMavenCoordinates)

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
6363
var packages: String = null
6464
var repositories: String = null
6565
var ivyRepoPath: String = null
66+
var ivySettingsPath: Option[String] = None
6667
var packagesExclusions: String = null
6768
var verbose: Boolean = false
6869
var isPython: Boolean = false
@@ -184,6 +185,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
184185
jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
185186
files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
186187
ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
188+
ivySettingsPath = sparkProperties.get("spark.jars.ivySettings")
187189
packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
188190
packagesExclusions = Option(packagesExclusions)
189191
.orElse(sparkProperties.get("spark.jars.excludes")).orNull

core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,12 +79,17 @@ object DriverWrapper extends Logging {
7979
val secMgr = new SecurityManager(sparkConf)
8080
val hadoopConf = SparkHadoopUtil.newConfiguration(sparkConf)
8181

82-
val Seq(packagesExclusions, packages, repositories, ivyRepoPath) =
83-
Seq("spark.jars.excludes", "spark.jars.packages", "spark.jars.repositories", "spark.jars.ivy")
84-
.map(sys.props.get(_).orNull)
82+
val Seq(packagesExclusions, packages, repositories, ivyRepoPath, ivySettingsPath) =
83+
Seq(
84+
"spark.jars.excludes",
85+
"spark.jars.packages",
86+
"spark.jars.repositories",
87+
"spark.jars.ivy",
88+
"spark.jars.ivySettings"
89+
).map(sys.props.get(_).orNull)
8590

8691
val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(packagesExclusions,
87-
packages, repositories, ivyRepoPath)
92+
packages, repositories, ivyRepoPath, Option(ivySettingsPath))
8893
val jars = {
8994
val jarsProp = sys.props.get("spark.jars").orNull
9095
if (!StringUtils.isBlank(resolvedMavenCoordinates)) {

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,9 @@ class SparkSubmitSuite
106106
// Necessary to make ScalaTest 3.x interrupt a thread on the JVM like ScalaTest 2.2.x
107107
implicit val defaultSignaler: Signaler = ThreadSignaler
108108

109+
private val emptyIvySettings = File.createTempFile("ivy", ".xml")
110+
FileUtils.write(emptyIvySettings, "<ivysettings />", StandardCharsets.UTF_8)
111+
109112
override def beforeEach() {
110113
super.beforeEach()
111114
}
@@ -520,6 +523,7 @@ class SparkSubmitSuite
520523
"--repositories", repo,
521524
"--conf", "spark.ui.enabled=false",
522525
"--conf", "spark.master.rest.enabled=false",
526+
"--conf", s"spark.jars.ivySettings=${emptyIvySettings.getAbsolutePath()}",
523527
unusedJar.toString,
524528
"my.great.lib.MyLib", "my.great.dep.MyLib")
525529
runSparkSubmit(args)
@@ -530,7 +534,6 @@ class SparkSubmitSuite
530534
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
531535
val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
532536
val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
533-
// Test using "spark.jars.packages" and "spark.jars.repositories" configurations.
534537
IvyTestUtils.withRepository(main, Some(dep.toString), None) { repo =>
535538
val args = Seq(
536539
"--class", JarCreationTest.getClass.getName.stripSuffix("$"),
@@ -540,6 +543,7 @@ class SparkSubmitSuite
540543
"--conf", s"spark.jars.repositories=$repo",
541544
"--conf", "spark.ui.enabled=false",
542545
"--conf", "spark.master.rest.enabled=false",
546+
"--conf", s"spark.jars.ivySettings=${emptyIvySettings.getAbsolutePath()}",
543547
unusedJar.toString,
544548
"my.great.lib.MyLib", "my.great.dep.MyLib")
545549
runSparkSubmit(args)
@@ -550,7 +554,6 @@ class SparkSubmitSuite
550554
// See https://gist.github.com/shivaram/3a2fecce60768a603dac for a error log
551555
ignore("correctly builds R packages included in a jar with --packages") {
552556
assume(RUtils.isRInstalled, "R isn't installed on this machine.")
553-
// Check if the SparkR package is installed
554557
assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
555558
val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
556559
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
@@ -563,6 +566,7 @@ class SparkSubmitSuite
563566
"--master", "local-cluster[2,1,1024]",
564567
"--packages", main.toString,
565568
"--repositories", repo,
569+
"--conf", s"spark.jars.ivySettings=${emptyIvySettings.getAbsolutePath()}",
566570
"--verbose",
567571
"--conf", "spark.ui.enabled=false",
568572
rScriptDir)
@@ -573,7 +577,6 @@ class SparkSubmitSuite
573577
test("include an external JAR in SparkR") {
574578
assume(RUtils.isRInstalled, "R isn't installed on this machine.")
575579
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
576-
// Check if the SparkR package is installed
577580
assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
578581
val rScriptDir =
579582
Seq(sparkHome, "R", "pkg", "tests", "fulltests", "jarTest.R").mkString(File.separator)

0 commit comments

Comments (0)