Skip to content
This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit d6fec87

Browse files
committed
Minor styling
1 parent eba1cb2 commit d6fec87

File tree

4 files changed

+16
-25
lines changed

4 files changed

+16
-25
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/HadoopConfBootstrap.scala

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -18,21 +18,19 @@ package org.apache.spark.deploy.kubernetes
1818

1919
import java.io.File
2020

21-
import org.apache.spark.deploy.kubernetes.constants._
22-
import io.fabric8.kubernetes.api.model.{ConfigMapBuilder, ContainerBuilder, KeyToPathBuilder, PodBuilder}
23-
24-
import collection.JavaConverters._
21+
import io.fabric8.kubernetes.api.model.{ContainerBuilder, KeyToPathBuilder, PodBuilder}
2522

23+
import org.apache.spark.deploy.kubernetes.constants._
2624

2725
/**
28-
* This is separated out from the HadoopConf steps API because this component can be reused to
29-
* set up the hadoop-conf for executors as well.
30-
*/
26+
* This is separated out from the HadoopConf steps API because this component can be reused to
27+
* set up the hadoop-conf for executors as well.
28+
*/
3129
private[spark] trait HadoopConfBootstrap {
32-
/**
33-
* Bootstraps a main container with the ConfigMaps mounted as volumes and an ENV variable
34-
* pointing to the mounted file.
35-
*/
30+
/**
31+
* Bootstraps a main container with the ConfigMaps mounted as volumes and an ENV variable
32+
* pointing to the mounted file.
33+
*/
3634
def bootstrapMainContainerAndVolumes(
3735
originalPodWithMainContainer: PodWithMainContainer)
3836
: PodWithMainContainer
@@ -45,6 +43,7 @@ private[spark] class HadoopConfBootstrapImpl(
4543
override def bootstrapMainContainerAndVolumes(
4644
originalPodWithMainContainer: PodWithMainContainer)
4745
: PodWithMainContainer = {
46+
import collection.JavaConverters._
4847
val fileContents = hadoopConfigFiles.map(file => (file.getPath, file.toString)).toMap
4948
val keyPaths = hadoopConfigFiles.map(file =>
5049
new KeyToPathBuilder().withKey(file.getPath).withPath(file.getAbsolutePath).build())
@@ -75,4 +74,4 @@ private[spark] class HadoopConfBootstrapImpl(
7574
mainContainerWithMountedHadoopConf
7675
)
7776
}
78-
}
77+
}

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/DriverConfigurationStepsOrchestrator.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ import org.apache.spark.SparkConf
2222
import org.apache.spark.deploy.kubernetes.ConfigurationUtils
2323
import org.apache.spark.deploy.kubernetes.config._
2424
import org.apache.spark.deploy.kubernetes.constants._
25-
import org.apache.spark.deploy.kubernetes.submit.submitsteps.hadoopsteps.HadoopStepsOrchestrator
2625
import org.apache.spark.deploy.kubernetes.submit.submitsteps._
26+
import org.apache.spark.deploy.kubernetes.submit.submitsteps.hadoopsteps.HadoopStepsOrchestrator
2727
import org.apache.spark.deploy.kubernetes.submit.submitsteps.initcontainer.InitContainerConfigurationStepsOrchestrator
2828
import org.apache.spark.launcher.SparkLauncher
2929
import org.apache.spark.util.Utils

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/hadoopsteps/HadoopConfMounterStep.scala

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,15 +16,7 @@
1616
*/
1717
package org.apache.spark.deploy.kubernetes.submit.submitsteps.hadoopsteps
1818

19-
import java.io.File
20-
21-
import io.fabric8.kubernetes.api.model._
2219
import org.apache.spark.deploy.kubernetes.{HadoopConfBootstrap, PodWithMainContainer}
23-
import org.apache.spark.deploy.kubernetes.config._
24-
import org.apache.spark.deploy.kubernetes.constants._
25-
import org.apache.spark.deploy.kubernetes.submit.KubernetesFileUtils
26-
import org.apache.spark.deploy.kubernetes.submit.submitsteps.{DriverKubernetesCredentialsStep, KubernetesDriverSpec}
27-
import scala.collection.JavaConverters._
2820

2921
/**
3022
* Step that configures the ConfigMap + Volumes for the driver
@@ -46,4 +38,4 @@ private[spark] class HadoopConfMounterStep(
4638
driverContainer = bootstrappedPodAndMainContainer.mainContainer
4739
)
4840
}
49-
}
41+
}

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterManager.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,15 @@ package org.apache.spark.scheduler.cluster.kubernetes
1919
import java.io.File
2020

2121
import io.fabric8.kubernetes.client.Config
22-
import org.apache.spark.SparkContext
23-
import org.apache.spark.deploy.kubernetes.{HadoopConfBootstrapImpl, InitContainerResourceStagingServerSecretPluginImpl, SparkKubernetesClientFactory, SparkPodInitContainerBootstrapImpl}
22+
23+
import org.apache.spark.deploy.kubernetes.{InitContainerResourceStagingServerSecretPluginImpl, SparkKubernetesClientFactory, SparkPodInitContainerBootstrapImpl}
2424
import org.apache.spark.deploy.kubernetes.config._
2525
import org.apache.spark.deploy.kubernetes.constants._
2626
import org.apache.spark.internal.Logging
2727
import org.apache.spark.scheduler.{ExternalClusterManager, SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
2828

2929
private[spark] class KubernetesClusterManager extends ExternalClusterManager with Logging {
30-
30+
import org.apache.spark.SparkContext
3131
override def canCreate(masterURL: String): Boolean = masterURL.startsWith("k8s")
3232

3333
override def createTaskScheduler(sc: SparkContext, masterURL: String): TaskScheduler = {

0 commit comments

Comments (0)