This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit e394eda

setting HADOOP_CONF_DIR env variables

1 parent 55d12b5 commit e394eda

File tree

1 file changed: 21 additions, 3 deletions

  • resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 21 additions & 3 deletions
@@ -28,15 +28,15 @@ import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
 import org.scalatest.time.{Minutes, Seconds, Span}
 import scala.collection.JavaConverters._
 
-import org.apache.spark.{SparkConf, SparkFunSuite, SSLOptions}
 import org.apache.spark.deploy.kubernetes.SSLUtils
+import org.apache.spark.{SSLOptions, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.kubernetes.config._
 import org.apache.spark.deploy.kubernetes.integrationtest.backend.IntegrationTestBackendFactory
 import org.apache.spark.deploy.kubernetes.integrationtest.backend.minikube.Minikube
 import org.apache.spark.deploy.kubernetes.integrationtest.constants.MINIKUBE_TEST_BACKEND
 import org.apache.spark.deploy.kubernetes.submit.{Client, ClientArguments, JavaMainAppResource, KeyAndCertPem, MainAppResource, PythonMainAppResource}
 import org.apache.spark.launcher.SparkLauncher
-import org.apache.spark.util.Utils
+import org.apache.spark.util.{RedirectThread, Utils}
 
 private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
   import KubernetesSuite._
@@ -74,13 +74,31 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
 
   test("Include HADOOP_CONF for HDFS based jobs ") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
+    // Ensuring that HADOOP_CONF_DIR env variable is set
+    val builder = new ProcessBuilder(
+      Seq("/bin/bash", "-c", "export HADOOP_CONF_DIR=" +
+        "test-data/hadoop-conf-files && exec").asJava)
+    builder.redirectErrorStream(true) // Ugly but needed for stdout and stderr to synchronize
+    val process = builder.start()
+    new RedirectThread(process.getInputStream, System.out, "redirect output").start()
+    val exitCode = process.waitFor()
+    if (exitCode != 0) {
+      logInfo(s"exitCode: $exitCode")
+    }
     sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }
 
   test("Run PySpark Job on file from SUBMITTER with --py-files") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
-
+    // Ensuring that HADOOP_CONF_DIR env variable is unset
+    val builder = new ProcessBuilder(
+      Seq("/bin/bash", "-c", "export HADOOP_CONF_DIR=" +
+        " && exec").asJava)
+    builder.redirectErrorStream(true) // Ugly but needed for stdout and stderr to synchronize
+    val process = builder.start()
+    new RedirectThread(process.getInputStream, System.out, "redirect output").start()
+    val exitCode = process.waitFor()
     launchStagingServer(SSLOptions(), None)
     sparkConf
       .set(DRIVER_DOCKER_IMAGE,
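
A note on the pattern this commit adds: `export HADOOP_CONF_DIR=... && exec` runs inside a short-lived bash child, so the variable is set only for that child process; it cannot change the environment of the test JVM itself or of later tests. As a minimal standalone sketch (not part of the commit; the object name and the echo command are hypothetical), the same child process can be given the variable directly through ProcessBuilder.environment(), without the shell round-trip:

import scala.collection.JavaConverters._
import scala.io.Source

object HadoopConfEnvSketch {
  def main(args: Array[String]): Unit = {
    // Spawn a shell that reports the variable as the child process sees it.
    val builder = new ProcessBuilder(
      Seq("/bin/bash", "-c", "echo HADOOP_CONF_DIR=$HADOOP_CONF_DIR").asJava)
    // Set the variable for the child process only; a bash `export` in a
    // subshell cannot modify the parent JVM's environment.
    builder.environment().put("HADOOP_CONF_DIR", "test-data/hadoop-conf-files")
    builder.redirectErrorStream(true) // merge stderr into stdout
    val process = builder.start()
    Source.fromInputStream(process.getInputStream).getLines().foreach(println)
    val exitCode = process.waitFor()
    if (exitCode != 0) {
      println(s"exitCode: $exitCode")
    }
  }
}

For the "unset" variant used in the second test, builder.environment().remove("HADOOP_CONF_DIR") drops the variable from the child's environment instead of setting it to an empty string.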

0 commit comments