Skip to content

Commit 0abac18

Browse files
authored
Merge pull request spark-jobserver#838 from noorul/noorul_fix_spark_version_used_by_ci
fix(ci): Use spark version 2.1.0 in ci tests
2 parents 7126ee2 + 4e7fd48 commit 0abac18

File tree

6 files changed

+7
-7
lines changed

6 files changed

+7
-7
lines changed

Dockerfile.test

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ RUN sbt update
2424
# add the rest of the code
2525
COPY . .
2626

27-
ENV SPARK_HOME /tmp/spark-2.0.1-bin-hadoop2.7
27+
ENV SPARK_HOME /tmp/spark-2.1.0-bin-hadoop2.7
2828
ENV JAVA_OPTIONS "-Xmx1500m -XX:MaxPermSize=512m -Dakka.test.timefactor=3"
2929

3030
CMD ["/usr/src/app/run_tests.sh"]

ci/install-spark.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
#!/usr/bin/env bash
22
set -e
3-
curl -L -o /tmp/spark.tgz http://d3kbcqa49mib13.cloudfront.net/spark-2.0.1-bin-hadoop2.7.tgz
3+
curl -L -o /tmp/spark.tgz http://d3kbcqa49mib13.cloudfront.net/spark-2.1.0-bin-hadoop2.7.tgz
44
tar -xvzf /tmp/spark.tgz -C /tmp

job-server-extras/src/test/scala/spark/jobserver/python/PythonSparkContextFactorySpec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ object PythonSparkContextFactorySpec {
7474
lazy val jobServerAPIExamplePath = jobServerPaths.find(_.getAbsolutePath.contains("examples"))
7575

7676
lazy val pysparkPath = sys.env.get("SPARK_HOME").map(d => s"$d/python/lib/pyspark.zip")
77-
lazy val py4jPath = sys.env.get("SPARK_HOME").map(d => s"$d/python/lib/py4j-0.10.3-src.zip")
77+
lazy val py4jPath = sys.env.get("SPARK_HOME").map(d => s"$d/python/lib/py4j-0.10.4-src.zip")
7878
lazy val originalPythonPath = sys.env.get("PYTHONPATH")
7979

8080
case object DummyJobCache extends JobCache {

job-server-python/src/python/run-tests.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env bash
2-
PYTHONPATH=.:$SPARK_HOME/python/lib/pyspark.zip:$SPARK_HOME/python/lib/py4j-0.10.3-src.zip:$PYTHONPATH python test/apitests.py
2+
PYTHONPATH=.:$SPARK_HOME/python/lib/pyspark.zip:$SPARK_HOME/python/lib/py4j-0.10.4-src.zip:$PYTHONPATH python test/apitests.py
33
exitCode=$?
44
#This sleep is here so that all of Spark's shutdown stdout if written before we exit,
55
#so that we return cleanly to the command prompt.

job-server-python/src/test/scala/spark/jobserver/python/SubprocessSpec.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,10 +34,10 @@ object SubprocessSpec {
3434
lazy val jobServerPath = getPythonDir("src/python")
3535

3636
lazy val pysparkPath = sys.env.get("SPARK_HOME").map(d => s"$d/python/lib/pyspark.zip")
37-
lazy val py4jPath = sys.env.get("SPARK_HOME").map(d => s"$d/python/lib/py4j-0.10.3-src.zip")
37+
lazy val py4jPath = sys.env.get("SPARK_HOME").map(d => s"$d/python/lib/py4j-0.10.4-src.zip")
3838
lazy val sparkPaths = sys.env.get("SPARK_HOME").map{sh =>
3939
val pysparkPath = s"$sh/python/lib/pyspark.zip"
40-
val py4jPath = s"$sh/python/lib/py4j-0.10.3-src.zip"
40+
val py4jPath = s"$sh/python/lib/py4j-0.10.4-src.zip"
4141
Seq(pysparkPath, py4jPath)
4242
}.getOrElse(Seq())
4343
lazy val originalPythonPath = sys.env.get("PYTHONPATH")

project/Versions.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ object Versions {
1717
lazy val metrics = "2.2.0"
1818
lazy val netty = "4.0.42.Final"
1919
lazy val postgres = "9.4.1209"
20-
lazy val py4j = "0.10.3"
20+
lazy val py4j = "0.10.4"
2121
lazy val scalaTest = "2.2.6"
2222
lazy val scalatic = "2.2.6"
2323
lazy val shiro = "1.2.4"

0 commit comments

Comments (0)