
Commit 18ec021

fix(core): Update spark version to 2.2.0 (spark-jobserver#845)
* Explicitly add hadoop-client dependency
* Update netty version
* Update C* driver version
* Remove metastore_db lock across tests
1 parent 2848433

File tree: 6 files changed (+25, -6 lines)


Dockerfile.test

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ RUN sbt update
 # add the rest of the code
 COPY . .
 
-ENV SPARK_HOME /tmp/spark-2.1.0-bin-hadoop2.7
+ENV SPARK_HOME /tmp/spark-2.2.0-bin-hadoop2.7
 ENV JAVA_OPTIONS "-Xmx1500m -XX:MaxPermSize=512m -Dakka.test.timefactor=3"
 
 CMD ["/usr/src/app/run_tests.sh"]

README.md

Lines changed: 1 addition & 1 deletion
@@ -119,7 +119,7 @@ Spark Job Server is now included in Datastax Enterprise 4.8!
 | 0.6.1 | 1.5.2 |
 | 0.6.2 | 1.6.1 |
 | 0.7.0 | 1.6.2 |
-| 0.8.0-SNAPSHOT | 2.1.0 |
+| 0.8.0-SNAPSHOT | 2.2.0 |
 
 For release notes, look in the `notes/` directory.

ci/install-spark.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 #!/usr/bin/env bash
 set -e
-curl -L -o /tmp/spark.tgz http://d3kbcqa49mib13.cloudfront.net/spark-2.1.0-bin-hadoop2.7.tgz
+curl -L -o /tmp/spark.tgz http://d3kbcqa49mib13.cloudfront.net/spark-2.2.0-bin-hadoop2.7.tgz
 tar -xvzf /tmp/spark.tgz -C /tmp

job-server-python/src/python/test/apitests.py

Lines changed: 15 additions & 0 deletions
@@ -1,3 +1,5 @@
+import errno
+import os
 import unittest
 from pyhocon import ConfigFactory
 from pyspark import SparkConf, SparkContext
@@ -6,6 +8,14 @@
 from py4j.java_gateway import java_import
 
 
+def silentremove(filename):
+    try:
+        os.remove(filename)
+    except OSError as exc:
+        if exc.errno != errno.ENOENT:
+            raise
+
+
 class WordCountSparkJob(SparkJob):
     """
     Simple example of a SparkContext job for use in tests
@@ -56,6 +66,11 @@ class TestSJSApi(unittest.TestCase):
 
     def setUp(self):
         conf = SparkConf().setAppName('test').setMaster('local[*]')
+        pwd = os.path.dirname(os.path.realpath(__file__))
+        metastore_dir = os.path.abspath(os.path.join(pwd, '..',
+                                                     'metastore_db'))
+        silentremove(os.path.join(metastore_dir, "dbex.lck"))
+        silentremove(os.path.join(metastore_dir, "db.lck"))
         self.sc = SparkContext(conf=conf)
         self.jvm = self.sc._gateway.jvm
         java_import(self.jvm, "org.apache.spark.sql.*")
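Background on the setUp change above: an embedded Derby metastore allows only one connection at a time and enforces this with db.lck/dbex.lck files inside metastore_db, so a test run that dies without a clean shutdown leaves stale locks that make every subsequent run fail at startup. The new code simply deletes the locks when present and ignores them when absent. For reference, a minimal Scala sketch of the same idempotent cleanup (hypothetical helper, not part of this commit; the commit's actual fix is the Python code above):

// Hypothetical Scala counterpart of the Python silentremove helper above.
// Files.deleteIfExists returns false (rather than throwing) when the file is absent.
import java.nio.file.{Files, Paths}

def silentRemove(path: String): Unit = {
  Files.deleteIfExists(Paths.get(path))  // removes a stale Derby lock if one exists
}

silentRemove("metastore_db/db.lck")
silentRemove("metastore_db/dbex.lck")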

project/Dependencies.scala

Lines changed: 2 additions & 0 deletions
@@ -34,6 +34,8 @@ object Dependencies {
   )
 
   lazy val sparkExtraDeps = Seq(
+    "org.apache.derby" % "derby" % derby % Provided excludeAll(excludeNettyIo, excludeQQ),
+    "org.apache.hadoop" % "hadoop-client" % hadoop % Provided excludeAll(excludeNettyIo, excludeQQ),
     "org.apache.spark" %% "spark-mllib" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
     "org.apache.spark" %% "spark-sql" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
     "org.apache.spark" %% "spark-streaming" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),

project/Versions.scala

Lines changed: 5 additions & 3 deletions
@@ -1,21 +1,23 @@
 import scala.util.Properties.isJavaAtLeast
 
 object Versions {
-  lazy val spark = sys.env.getOrElse("SPARK_VERSION", "2.1.0")
+  lazy val spark = sys.env.getOrElse("SPARK_VERSION", "2.2.0")
 
   lazy val akka = "2.4.9"
-  lazy val cassandra = "3.0.3"
+  lazy val cassandra = "3.3.0"
   lazy val cassandraUnit = "2.2.2.1"
   lazy val commons = "1.4"
+  lazy val derby = "10.12.1.1"
   lazy val flyway = "3.2.1"
+  lazy val hadoop = "2.7.3"
   lazy val h2 = "1.3.176"
   lazy val java = sys.env.getOrElse("JAVA_VERSION", "8-jdk")
   lazy val jodaConvert = "1.8.1"
   lazy val jodaTime = "2.9.3"
   lazy val logback = "1.0.7"
   lazy val mesos = sys.env.getOrElse("MESOS_VERSION", "1.0.0-2.0.89.ubuntu1404")
   lazy val metrics = "2.2.0"
-  lazy val netty = "4.0.42.Final"
+  lazy val netty = "4.0.44.Final"
   lazy val postgres = "9.4.1209"
   lazy val mysql = "5.1.42"
   lazy val py4j = "0.10.4"
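Note that spark, java, and mesos are read via sys.env.getOrElse, so the numbers above are only defaults: the bump to 2.2.0 can still be overridden per build by exporting SPARK_VERSION. A minimal sketch of that fallback behavior (standalone demo object, not part of the build):

// Demonstrates the override mechanism used in Versions.scala: the environment
// variable, when set (e.g. SPARK_VERSION=2.1.1), wins over the pinned default.
object SparkVersionDemo extends App {
  val spark = sys.env.getOrElse("SPARK_VERSION", "2.2.0")
  println(s"building against Spark $spark")
}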
