Skip to content

Commit 98bdee0

Browse files
committed
DSP-14152 netty 4.1.13 support for SJS
This changes the OSS Spark dependency to our Spark fork. This dependency drags in netty 4.1.13.
1 parent a4e51a2 commit 98bdee0

File tree

5 files changed

+16
-15
lines changed

5 files changed

+16
-15
lines changed

build.sbt

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -265,6 +265,7 @@ lazy val commonSettings = Defaults.coreDefaultSettings ++ dirSettings ++ implici
265265
// For Building on Encrypted File Systems...
266266
scalacOptions ++= Seq("-Xmax-classfile-name", "128"),
267267
resolvers ++= Dependencies.repos,
268+
credentials += Credentials(Path.userHome / ".sbt" / ".credentials"),
268269
libraryDependencies ++= apiDeps,
269270
parallelExecution in Test := false,
270271
testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, "-oDF"),

project/Assembly.scala

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -10,7 +10,9 @@ object Assembly {
1010
assemblyExcludedJars in assembly <<= (fullClasspath in assembly) map { _ filter { cp =>
1111
List("servlet-api", "guice-all", "junit", "uuid",
1212
"jetty", "jsp-api-2.0", "antlr", "avro", "slf4j-log4j", "log4j-1.2",
13-
"scala-actors", "spark", "commons-cli", "stax-api", "mockito").exists(cp.data.getName.startsWith(_))
13+
"scala-actors", "spark", "commons-cli", "stax-api", "mockito",
14+
// we rely on whatever version DSE has:
15+
"netty", "dse-java-driver").exists(cp.data.getName.startsWith(_))
1416
} },
1517
// We don't need the Scala library, Spark already includes it
1618
assembleArtifact in assemblyPackageScala := false,

project/Dependencies.scala

Lines changed: 9 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -28,18 +28,16 @@ object Dependencies {
2828
)
2929

3030
lazy val sparkDeps = Seq(
31-
"org.apache.spark" %% "spark-core" % spark % "provided" excludeAll(excludeNettyIo, excludeQQ),
32-
// Force netty version. This avoids some Spark netty dependency problem.
33-
"io.netty" % "netty-all" % netty
31+
"com.datastax.spark" %% "spark-core" % spark % Provided excludeAll(excludeQQ)
3432
)
3533

3634
lazy val sparkExtraDeps = Seq(
37-
"org.apache.spark" %% "spark-mllib" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
38-
"org.apache.spark" %% "spark-sql" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
39-
"org.apache.spark" %% "spark-streaming" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
40-
"org.apache.spark" %% "spark-hive" % spark % Provided excludeAll(
35+
"com.datastax.spark" %% "spark-mllib" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
36+
"com.datastax.spark" %% "spark-sql" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
37+
"com.datastax.spark" %% "spark-streaming" % spark % Provided excludeAll(excludeNettyIo, excludeQQ),
38+
"com.datastax.spark" %% "spark-hive" % spark % Provided excludeAll(
4139
excludeNettyIo, excludeQQ, excludeScalaTest
42-
)
40+
)
4341
)
4442

4543
lazy val sparkExtraDepsTest = Seq(
@@ -60,8 +58,8 @@ object Dependencies {
6058
)
6159

6260
lazy val cassandraDeps = Seq(
63-
"com.datastax.cassandra" % "cassandra-driver-core" % cassandra,
64-
"com.datastax.cassandra" % "cassandra-driver-mapping" % cassandra
61+
"com.datastax.dse" % "dse-java-driver-core" % dseDriver % Provided excludeAll excludeNettyIo,
62+
"com.datastax.dse" % "dse-java-driver-mapping" % dseDriver % Provided excludeAll excludeNettyIo
6563
)
6664

6765
lazy val logbackDeps = Seq(
@@ -85,6 +83,7 @@ object Dependencies {
8583
lazy val apiDeps = sparkDeps ++ miscDeps :+ typeSafeConfigDeps :+ scalaTestDep
8684

8785
val repos = Seq(
86+
"datastax-release" at "http://datastax.artifactoryonline.com/datastax/datastax-releases-local",
8887
"Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/",
8988
"sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/",
9089
"spray repo" at "http://repo.spray.io"

project/ExclusionRules.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,7 @@ object ExclusionRules {
55
val excludeJackson = ExclusionRule(organization = "org.codehaus.jackson")
66
val excludeScalaTest = ExclusionRule(organization = "org.scalatest")
77
val excludeScala = ExclusionRule(organization = "org.scala-lang")
8-
val excludeNettyIo = ExclusionRule(organization = "io.netty", artifact = "netty-all")
8+
val excludeNettyIo = ExclusionRule(organization = "io.netty")
99
val excludeAsm = ExclusionRule(organization = "asm")
1010
val excludeQQ = ExclusionRule(organization = "org.scalamacros")
1111
}

project/Versions.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,10 +1,10 @@
11
import scala.util.Properties.isJavaAtLeast
22

33
object Versions {
4-
lazy val spark = sys.env.getOrElse("SPARK_VERSION", "2.2.0")
4+
lazy val spark = sys.env.getOrElse("SPARK_VERSION", "2.2.0.0-e9faedf")
55

66
lazy val akka = "2.4.9"
7-
lazy val cassandra = "3.2.0"
7+
lazy val dseDriver = "1.4.0"
88
lazy val cassandraUnit = "2.2.2.1"
99
lazy val commons = "1.4"
1010
lazy val flyway = "3.2.1"
@@ -15,7 +15,6 @@ object Versions {
1515
lazy val logback = "1.0.7"
1616
lazy val mesos = sys.env.getOrElse("MESOS_VERSION", "1.0.0-2.0.89.ubuntu1404")
1717
lazy val metrics = "2.2.0"
18-
lazy val netty = "4.0.44.Final"
1918
lazy val postgres = "9.4.1209"
2019
lazy val py4j = "0.10.4"
2120
lazy val scalaTest = "2.2.6"

0 commit comments

Comments (0)