@@ -20,7 +20,6 @@ package org.apache.spark.deploy
 import java.io._
 import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
 import java.net.URL
-import java.nio.file.Files
 import java.security.{KeyStore, PrivilegedExceptionAction}
 import java.security.cert.X509Certificate
 import java.text.ParseException
@@ -31,7 +30,6 @@ import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
 import scala.util.Properties
 
 import com.google.common.io.ByteStreams
-import org.apache.commons.io.FileUtils
 import org.apache.commons.lang3.StringUtils
 import org.apache.hadoop.conf.{Configuration => HadoopConfiguration}
 import org.apache.hadoop.fs.{FileSystem, Path}
@@ -300,28 +298,13 @@ object SparkSubmit extends CommandLineUtils {
     }
     val isYarnCluster = clusterManager == YARN && deployMode == CLUSTER
     val isMesosCluster = clusterManager == MESOS && deployMode == CLUSTER
+    val isStandAloneCluster = clusterManager == STANDALONE && deployMode == CLUSTER
 
-    if (!isMesosCluster) {
+    if (!isMesosCluster && !isStandAloneCluster) {
       // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
       // too for packages that include Python code
-      val exclusions: Seq[String] =
-        if (!StringUtils.isBlank(args.packagesExclusions)) {
-          args.packagesExclusions.split(",")
-        } else {
-          Nil
-        }
-
-      // Create the IvySettings, either load from file or build defaults
-      val ivySettings = args.sparkProperties.get("spark.jars.ivySettings").map { ivySettingsFile =>
-        SparkSubmitUtils.loadIvySettings(ivySettingsFile, Option(args.repositories),
-          Option(args.ivyRepoPath))
-      }.getOrElse {
-        SparkSubmitUtils.buildIvySettings(Option(args.repositories), Option(args.ivyRepoPath))
-      }
-
-      val resolvedMavenCoordinates = SparkSubmitUtils.resolveMavenCoordinates(args.packages,
-        ivySettings, exclusions = exclusions)
-
+      val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(
+        args.packagesExclusions, args.packages, args.repositories, args.ivyRepoPath)
 
       if (!StringUtils.isBlank(resolvedMavenCoordinates)) {
         args.jars = mergeFileLists(args.jars, resolvedMavenCoordinates)
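
The call site now hands the four string arguments to `DependencyUtils.resolveMavenDependencies`. A minimal sketch of what the extracted helper looks like, reconstructed from the inlined logic removed above; since the helper no longer sees `args.sparkProperties`, the `sys.props` lookup for `spark.jars.ivySettings` is an assumption:

```scala
import org.apache.commons.lang3.StringUtils
import org.apache.spark.deploy.SparkSubmitUtils

object DependencyUtils {
  // Reconstructed from the removed inline block; the real helper may differ in details.
  def resolveMavenDependencies(
      packagesExclusions: String,
      packages: String,
      repositories: String,
      ivyRepoPath: String): String = {
    val exclusions: Seq[String] =
      if (!StringUtils.isBlank(packagesExclusions)) {
        packagesExclusions.split(",")
      } else {
        Nil
      }
    // Create the IvySettings, either load from file or build defaults
    val ivySettings = sys.props.get("spark.jars.ivySettings").map { ivySettingsFile =>
      SparkSubmitUtils.loadIvySettings(ivySettingsFile, Option(repositories), Option(ivyRepoPath))
    }.getOrElse {
      SparkSubmitUtils.buildIvySettings(Option(repositories), Option(ivyRepoPath))
    }
    SparkSubmitUtils.resolveMavenCoordinates(packages, ivySettings, exclusions = exclusions)
  }
}
```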
@@ -338,14 +321,7 @@ object SparkSubmit extends CommandLineUtils {
     }
 
     val hadoopConf = new HadoopConfiguration()
-    val targetDir = Files.createTempDirectory("tmp").toFile
-    // scalastyle:off runtimeaddshutdownhook
-    Runtime.getRuntime.addShutdownHook(new Thread() {
-      override def run(): Unit = {
-        FileUtils.deleteQuietly(targetDir)
-      }
-    })
-    // scalastyle:on runtimeaddshutdownhook
+    val targetDir = DependencyUtils.createTempDir()
 
     // Resolve glob path for different resources.
     args.jars = Option(args.jars).map(resolveGlobPaths(_, hadoopConf)).orNull
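
The temp-directory setup with its shutdown-hook cleanup moves into the same helper object. A sketch assembled from the removed lines; the method name comes from the call site and its placement in `DependencyUtils` is given, but everything else mirrors the deleted code:

```scala
import java.io.File
import java.nio.file.Files
import org.apache.commons.io.FileUtils

// Mirrors the deleted inline block: create a temp dir and delete it quietly on JVM exit.
def createTempDir(): File = {
  val targetDir = Files.createTempDirectory("tmp").toFile
  // scalastyle:off runtimeaddshutdownhook
  Runtime.getRuntime.addShutdownHook(new Thread() {
    override def run(): Unit = {
      FileUtils.deleteQuietly(targetDir)
    }
  })
  // scalastyle:on runtimeaddshutdownhook
  targetDir
}
```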
@@ -473,11 +449,13 @@ object SparkSubmit extends CommandLineUtils {
       OptionAssigner(args.driverExtraLibraryPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
         sysProp = "spark.driver.extraLibraryPath"),
 
-      // Mesos only - propagate attributes for dependency resolution at the driver side
-      OptionAssigner(args.packages, MESOS, CLUSTER, sysProp = "spark.jars.packages"),
-      OptionAssigner(args.repositories, MESOS, CLUSTER, sysProp = "spark.jars.repositories"),
-      OptionAssigner(args.ivyRepoPath, MESOS, CLUSTER, sysProp = "spark.jars.ivy"),
-      OptionAssigner(args.packagesExclusions, MESOS, CLUSTER, sysProp = "spark.jars.excludes"),
+      // Propagate attributes for dependency resolution at the driver side
+      OptionAssigner(args.packages, STANDALONE | MESOS, CLUSTER, sysProp = "spark.jars.packages"),
+      OptionAssigner(args.repositories, STANDALONE | MESOS, CLUSTER,
+        sysProp = "spark.jars.repositories"),
+      OptionAssigner(args.ivyRepoPath, STANDALONE | MESOS, CLUSTER, sysProp = "spark.jars.ivy"),
+      OptionAssigner(args.packagesExclusions, STANDALONE | MESOS,
+        CLUSTER, sysProp = "spark.jars.excludes"),
 
       // Yarn only
       OptionAssigner(args.queue, YARN, ALL_DEPLOY_MODES, sysProp = "spark.yarn.queue"),
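
`STANDALONE | MESOS` reads naturally because the cluster-manager constants in `SparkSubmit` are bit flags that are matched against the active manager with a bitwise AND. The concrete values below are assumptions consistent with the `ALL_CLUSTER_MGRS` convention in this file:

```scala
// Assumed flag values; each cluster manager occupies one bit.
val YARN = 1
val STANDALONE = 2
val MESOS = 4
val LOCAL = 8
val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | LOCAL

// An assigner applies when the active manager's bit is set in its mask:
def applies(clusterManager: Int, mask: Int): Boolean = (mask & clusterManager) != 0
```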
@@ -780,7 +758,7 @@ object SparkSubmit extends CommandLineUtils {
     }
   }
 
-  private def addJarToClasspath(localJar: String, loader: MutableURLClassLoader) {
+  private[deploy] def addJarToClasspath(localJar: String, loader: MutableURLClassLoader) {
     val uri = Utils.resolveURI(localJar)
     uri.getScheme match {
       case "file" | "local" =>
@@ -845,7 +823,7 @@ object SparkSubmit extends CommandLineUtils {
   * Merge a sequence of comma-separated file lists, some of which may be null to indicate
   * no files, into a single comma-separated string.
   */
-  private def mergeFileLists(lists: String*): String = {
+  private[deploy] def mergeFileLists(lists: String*): String = {
     val merged = lists.filterNot(StringUtils.isBlank)
       .flatMap(_.split(","))
       .mkString(",")
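
A hypothetical check of the merging behavior visible above; blanks and nulls drop out and order is preserved (the truncated tail of the method may additionally map an all-blank result to null):

```scala
mergeFileLists("a.jar,b.jar", null, "c.jar")  // "a.jar,b.jar,c.jar"
mergeFileLists(null, "")                      // "" from the visible lines alone
```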
@@ -968,7 +946,7 @@ object SparkSubmit extends CommandLineUtils {
     }
   }
 
-  private def resolveGlobPaths(paths: String, hadoopConf: HadoopConfiguration): String = {
+  private[deploy] def resolveGlobPaths(paths: String, hadoopConf: HadoopConfiguration): String = {
     require(paths != null, "paths cannot be null.")
     paths.split(",").map(_.trim).filter(_.nonEmpty).flatMap { path =>
       val uri = Utils.resolveURI(path)
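
From the visible lines, `resolveGlobPaths` splits the comma-separated list, trims and drops empty entries, and resolves each to a URI; per its name, each entry is then presumably expanded through the Hadoop `FileSystem` for its scheme. An illustrative call with an assumed output shape:

```scala
resolveGlobPaths("/tmp/jars/*.jar,hdfs:///libs/dep.jar", hadoopConf)
// e.g. "file:/tmp/jars/a.jar,file:/tmp/jars/b.jar,hdfs:///libs/dep.jar"
```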