Skip to content

Commit ffe7891

Browse files
committed
address initial comments and scalastyle issues
1 parent 499b037 commit ffe7891

File tree

12 files changed

+12
-15
lines changed

12 files changed

+12
-15
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/initcontainer/InitContainerConfigurationStepsOrchestrator.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,6 @@ private[spark] class InitContainerConfigurationStepsOrchestrator(
6262
submissionSparkConf.get(RESOURCE_STAGING_SERVER_INTERNAL_SSL_ENABLED)
6363
.orElse(submissionSparkConf.get(RESOURCE_STAGING_SERVER_SSL_ENABLED))
6464
.getOrElse(false)
65-
6665
OptionRequirements.requireNandDefined(
6766
maybeResourceStagingServerInternalClientCert,
6867
maybeResourceStagingServerInternalTrustStore,

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterManager.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ import org.apache.spark.deploy.kubernetes.submit.MountSmallFilesBootstrapImpl
2727
import org.apache.spark.internal.Logging
2828
import org.apache.spark.scheduler.{ExternalClusterManager, SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
2929
import org.apache.spark.util.Utils
30+
import org.apache.spark.SparkContext
3031

3132
private[spark] class KubernetesClusterManager extends ExternalClusterManager with Logging {
32-
import org.apache.spark.SparkContext
3333
override def canCreate(masterURL: String): Boolean = masterURL.startsWith("k8s")
3434

3535
override def createTaskScheduler(sc: SparkContext, masterURL: String): TaskScheduler = {

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/InitContainerResourceStagingServerSecretPluginSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,8 @@ import scala.collection.JavaConverters._
2323
import org.apache.spark.deploy.kubernetes.constants._
2424
import org.apache.spark.SparkFunSuite
2525

26-
private[spark] class InitContainerResourceStagingServerSecretPluginSuite extends SparkFunSuite with BeforeAndAfter{
26+
class InitContainerResourceStagingServerSecretPluginSuite
27+
extends SparkFunSuite with BeforeAndAfter{
2728
private val INIT_CONTAINER_SECRET_NAME = "init-secret"
2829
private val INIT_CONTAINER_SECRET_MOUNT = "/tmp/secret"
2930

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/ClientSuite.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,6 @@ private[spark] class ClientSuite extends SparkFunSuite with BeforeAndAfter {
156156
val driverJvmOptsEnvs = driverContainer.getEnv.asScala.filter { env =>
157157
env.getName.startsWith(ENV_JAVA_OPT_PREFIX)
158158
}.sortBy(_.getName)
159-
logInfo(s"driverJVM Options $driverJvmOptsEnvs")
160159
assert(driverJvmOptsEnvs.size === 6)
161160

162161
val expectedJvmOptsValues = Seq(

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/InitContainerBootstrapStepSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ import org.apache.spark.deploy.kubernetes.constants._
3131
import org.apache.spark.deploy.kubernetes.submit.submitsteps.initcontainer.{InitContainerConfigurationStep, InitContainerSpec}
3232
import org.apache.spark.util.Utils
3333

34-
private[spark] class InitContainerBootstrapStepSuite extends SparkFunSuite {
34+
class InitContainerBootstrapStepSuite extends SparkFunSuite {
3535

3636
private val OBJECT_MAPPER = new ObjectMapper().registerModule(DefaultScalaModule)
3737
private val CONFIG_MAP_NAME = "spark-init-config-map"

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/PythonStepSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import scala.collection.JavaConverters._
2222

2323
import org.apache.spark.{SparkConf, SparkFunSuite}
2424

25-
private[spark] class PythonStepSuite extends SparkFunSuite with BeforeAndAfter {
25+
class PythonStepSuite extends SparkFunSuite with BeforeAndAfter {
2626
private val FILE_DOWNLOAD_PATH = "/var/data/spark-files"
2727
private val PYSPARK_FILES = Seq(
2828
"hdfs://localhost:9000/app/files/file1.py",

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KerberizedHadoopClusterLauncher.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,4 +54,4 @@ private[spark] class KerberizedHadoopClusterLauncher(
5454
Thread.sleep(500)
5555
}
5656
}
57-
}
57+
}

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KerberosTestPodLauncher.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,4 +118,4 @@ private[spark] class KerberosTestPodLauncher(
118118
kubernetesClient.extensions().deployments()
119119
.inNamespace(namespace).create(deploymentWithEnv)}
120120
}
121-
}
121+
}

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesTestComponents.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,12 @@ package org.apache.spark.deploy.kubernetes.integrationtest
1919
import java.util.UUID
2020

2121
import io.fabric8.kubernetes.client.DefaultKubernetesClient
22+
import org.scalatest.concurrent.Eventually
2223
import scala.collection.JavaConverters._
2324

2425
import org.apache.spark.SparkConf
2526
import org.apache.spark.deploy.kubernetes.config._
2627

27-
import org.scalatest.concurrent.Eventually
28-
2928
private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesClient) {
3029

3130
val namespace = UUID.randomUUID().toString.replaceAll("-", "")

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,13 +24,12 @@ import scala.collection.JavaConverters._
2424

2525
import com.spotify.docker.client.{DefaultDockerClient, DockerCertificates, LoggingBuildHandler}
2626
import org.apache.http.client.utils.URIBuilder
27+
import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
28+
import org.scalatest.time.{Minutes, Seconds, Span}
2729

2830
import org.apache.spark.internal.Logging
29-
3031
import org.apache.spark.util.RedirectThread
3132

32-
import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
33-
import org.scalatest.time.{Minutes, Seconds, Span}
3433

3534
private[spark] class SparkDockerImageBuilder
3635
(private val dockerEnv: Map[String, String]) extends Logging{

0 commit comments

Comments (0)