This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit 50f47d0

fixing build and unit test issues
1 parent 7612bf5

File tree: 7 files changed, 62 additions & 20 deletions


resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/HadoopUGIUtil.scala

Lines changed: 3 additions & 3 deletions
@@ -45,7 +45,7 @@ private[spark] class HadoopUGIUtil{
   def getCurrentTime: Long = System.currentTimeMillis()

   // Functions that should be in Core with Rebase to 2.3
-  @deprecated("Moved to core in 2.2", "2.2")
+  @deprecated("Moved to core in 2.3", "2.3")
   def getTokenRenewalInterval(
     renewedTokens: Iterable[Token[_ <: TokenIdentifier]],
     hadoopConf: Configuration): Option[Long] = {
@@ -62,15 +62,15 @@ private[spark] class HadoopUGIUtil{
     renewIntervals.reduceLeftOption(_ min _)
   }

-  @deprecated("Moved to core in 2.2", "2.2")
+  @deprecated("Moved to core in 2.3", "2.3")
   def serialize(creds: Credentials): Array[Byte] = {
     val byteStream = new ByteArrayOutputStream
     val dataStream = new DataOutputStream(byteStream)
     creds.writeTokenStorageToStream(dataStream)
     byteStream.toByteArray
   }

-  @deprecated("Moved to core in 2.2", "2.2")
+  @deprecated("Moved to core in 2.3", "2.3")
   def deserialize(tokenBytes: Array[Byte]): Credentials = {
     val creds = new Credentials()
     creds.readTokenStorageStream(new DataInputStream(new ByteArrayInputStream(tokenBytes)))
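
The three hunks above only bump the @deprecated metadata from 2.2 to 2.3. For context, serialize and deserialize round-trip Hadoop Credentials through a byte array. A minimal sketch of that round-trip, assuming code that lives inside the org.apache.spark package (HadoopUGIUtil is private[spark]) and the no-arg constructor declared above:

    import org.apache.hadoop.security.Credentials

    // Round-trip an (empty) Credentials object the way the two helpers do:
    // Credentials -> Array[Byte] -> Credentials.
    val ugiUtil = new HadoopUGIUtil
    val creds = new Credentials()   // normally populated with delegation tokens first
    val bytes: Array[Byte] = ugiUtil.serialize(creds)
    val restored: Credentials = ugiUtil.deserialize(bytes)
    assert(restored.numberOfTokens == creds.numberOfTokens)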

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KerberosTokenConfBootstrap.scala

Lines changed: 1 addition & 1 deletion
@@ -73,4 +73,4 @@ private[spark] class KerberosTokenConfBootstrapImpl(
       pod = dtMountedPod,
       mainContainer = mainContainerWithMountedKerberos)
   }
-}
+}

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/hadoopsteps/HadoopStepsOrchestrator.scala

Lines changed: 1 addition & 1 deletion
@@ -101,4 +101,4 @@ private[spark] class HadoopStepsOrchestrator(
       Seq.empty[File]
     }
   }
-}
+}

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala

Lines changed: 0 additions & 1 deletion
@@ -47,7 +47,6 @@ private[spark] class ExecutorPodFactoryImpl(
     executorInitContainerBootstrap: Option[SparkPodInitContainerBootstrap],
     executorMountInitContainerSecretPlugin: Option[InitContainerResourceStagingServerSecretPlugin],
     executorLocalDirVolumeProvider: ExecutorLocalDirVolumeProvider,
-    shuffleManager: Option[KubernetesExternalShuffleManager],
     hadoopBootStrap: Option[HadoopConfBootstrap],
     kerberosBootstrap: Option[KerberosTokenConfBootstrap])
   extends ExecutorPodFactory {

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterManager.scala

Lines changed: 0 additions & 1 deletion
@@ -164,7 +164,6 @@ private[spark] class KubernetesClusterManager extends ExternalClusterManager wit
       executorInitContainerBootstrap,
       executorInitContainerSecretVolumePlugin,
       executorLocalDirVolumeProvider,
-      kubernetesShuffleManager,
       hadoopBootStrap,
       kerberosBootstrap)
     val allocatorExecutor = ThreadUtils
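
These two hunks are the build fix named in the commit message: with the shuffleManager parameter gone from ExecutorPodFactoryImpl, the positional kubernetesShuffleManager argument at this call site has to be dropped in the same commit or compilation fails. In miniature (hypothetical names, not the repo's classes):

    // Dropping a constructor parameter forces every positional call site
    // to shed the matching argument in the same change.
    class Factory(a: Int, b: Int)   // previously: Factory(a: Int, shuffle: Option[String], b: Int)

    val f = new Factory(1, 2)       // new Factory(1, None, 2) no longer compiles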

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala

Lines changed: 36 additions & 6 deletions
@@ -18,7 +18,7 @@ package org.apache.spark.deploy.k8s.submit

 import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.k8s.config._
-import org.apache.spark.deploy.k8s.submit.submitsteps.{BaseDriverConfigurationStep, DependencyResolutionStep, DriverConfigurationStep, DriverKubernetesCredentialsStep, DriverServiceBootstrapStep, InitContainerBootstrapStep, LocalDirectoryMountConfigurationStep, MountSecretsStep, MountSmallLocalFilesStep, PythonStep, RStep}
+import org.apache.spark.deploy.k8s.submit.submitsteps.{BaseDriverConfigurationStep, DependencyResolutionStep, DriverConfigurationStep, DriverKubernetesCredentialsStep, DriverServiceBootstrapStep, HadoopConfigBootstrapStep, InitContainerBootstrapStep, LocalDirectoryMountConfigurationStep, MountSecretsStep, MountSmallLocalFilesStep, PythonStep, RStep}

 private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {

@@ -45,7 +45,8 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       APP_NAME,
       MAIN_CLASS,
       APP_ARGS,
-      Seq.empty[String],
+      ADDITIONAL_PYTHON_FILES,
+      None,
       sparkConf)
     validateStepTypes(
       orchestrator,
@@ -69,7 +70,8 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       APP_NAME,
       MAIN_CLASS,
       APP_ARGS,
-      Seq.empty[String],
+      ADDITIONAL_PYTHON_FILES,
+      None,
       sparkConf)
     validateStepTypes(
       orchestrator,
@@ -93,6 +95,7 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       MAIN_CLASS,
       APP_ARGS,
       ADDITIONAL_PYTHON_FILES,
+      None,
       sparkConf)
     validateStepTypes(
       orchestrator,
@@ -116,6 +119,7 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       MAIN_CLASS,
       APP_ARGS,
       Seq.empty[String],
+      None,
       sparkConf)
     validateStepTypes(
       orchestrator,
@@ -127,7 +131,6 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       classOf[RStep])
   }

-
   test("Only local files without a resource staging server.") {
     val sparkConf = new SparkConf(false).set("spark.files", "/var/spark/file1.txt")
     val mainAppResource = JavaMainAppResource("local:///var/apps/jars/main.jar")
@@ -139,7 +142,8 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       APP_NAME,
       MAIN_CLASS,
       APP_ARGS,
-      Seq.empty[String],
+      ADDITIONAL_PYTHON_FILES,
+      None,
       sparkConf)
     validateStepTypes(
       orchestrator,
@@ -164,7 +168,8 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       APP_NAME,
       MAIN_CLASS,
       APP_ARGS,
-      Seq.empty[String],
+      ADDITIONAL_PYTHON_FILES,
+      None,
       sparkConf)
     validateStepTypes(
       orchestrator,
@@ -175,6 +180,31 @@ private[spark] class DriverConfigurationStepsOrchestratorSuite extends SparkFunS
       classOf[LocalDirectoryMountConfigurationStep],
       classOf[MountSecretsStep])
   }
+
+  test("Submission steps with hdfs interaction and HADOOP_CONF_DIR defined") {
+    val sparkConf = new SparkConf(false)
+    val mainAppResource = JavaMainAppResource("local:///var/apps/jars/main.jar")
+    val hadoopConf = Some("/etc/hadoop/conf")
+    val orchestrator = new DriverConfigurationStepsOrchestrator(
+      NAMESPACE,
+      APP_ID,
+      LAUNCH_TIME,
+      mainAppResource,
+      APP_NAME,
+      MAIN_CLASS,
+      APP_ARGS,
+      ADDITIONAL_PYTHON_FILES,
+      hadoopConf,
+      sparkConf)
+    val steps = orchestrator.getAllConfigurationSteps()
+    validateStepTypes(
+      orchestrator,
+      classOf[BaseDriverConfigurationStep],
+      classOf[DriverServiceBootstrapStep],
+      classOf[DriverKubernetesCredentialsStep],
+      classOf[DependencyResolutionStep],
+      classOf[LocalDirectoryMountConfigurationStep],
+      classOf[HadoopConfigBootstrapStep])
+  }

   private def validateStepTypes(
       orchestrator: DriverConfigurationStepsOrchestrator,
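
The new test hands the orchestrator hadoopConf = Some("/etc/hadoop/conf") and expects a HadoopConfigBootstrapStep at the end of the step list, while the earlier tests pass None and expect no such step. A self-contained sketch of that Option-to-step pattern, using hypothetical names rather than the orchestrator's actual internals:

    // An Option-typed setting contributes zero or one configuration steps.
    trait ConfigStep
    case object BaseStep extends ConfigStep
    case class HadoopConfStep(confDir: String) extends ConfigStep

    def buildSteps(hadoopConfDir: Option[String]): Seq[ConfigStep] =
      Seq(BaseStep) ++ hadoopConfDir.map(dir => HadoopConfStep(dir)).toSeq

    assert(buildSteps(None) == Seq(BaseStep))
    assert(buildSteps(Some("/etc/hadoop/conf")) ==
      Seq(BaseStep, HadoopConfStep("/etc/hadoop/conf")))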

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala

Lines changed: 21 additions & 7 deletions
@@ -77,7 +77,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       None,
       None,
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)](), driverPod, Map[String, Int]())

@@ -117,7 +119,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       None,
       None,
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)](), driverPod, Map[String, Int]())

@@ -138,7 +142,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       None,
       None,
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)](), driverPod, Map[String, Int]())

@@ -172,7 +178,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       None,
       Some(initContainerBootstrap),
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)](), driverPod, Map[String, Int]())

@@ -202,7 +210,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       None,
       None,
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)](), driverPod, Map[String, Int]())
     assert(executor.getSpec.getVolumes.size === 1)
@@ -223,7 +233,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       Some(smallFiles),
       None,
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)](), driverPod, Map[String, Int]())

@@ -256,7 +268,9 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
       None,
       None,
       None,
-      executorLocalDirVolumeProvider)
+      executorLocalDirVolumeProvider,
+      None,
+      None)
     val executor = factory.createExecutorPod(
       "1", "dummy", "dummy", Seq[(String, String)]("qux" -> "quux"), driverPod, Map[String, Int]())
