Commit 425c4ad

srowen authored and cloud-fan committed
[SPARK-19810][BUILD][CORE] Remove support for Scala 2.10
## What changes were proposed in this pull request?

- Remove Scala 2.10 build profiles and support
- Replace some 2.10 support in scripts with commented placeholders for 2.12 later
- Remove deprecated API calls from 2.10 support
- Remove usages of deprecated context bounds where possible
- Remove Scala 2.10 workarounds like ScalaReflectionLock
- Other minor Scala warning fixes

## How was this patch tested?

Existing tests

Author: Sean Owen <[email protected]>

Closes apache#17150 from srowen/SPARK-19810.
1 parent e08d06b commit 425c4ad
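
For context on the "deprecated context bounds" item in the description above: Scala's view bound syntax (A <% B) is deprecated, and the diffs below replace it either with a plain upper type bound or with an explicit implicit-conversion parameter, as in GrowableAccumulableParam. A minimal, self-contained sketch of the second rewrite, with illustrative names that are not code from this commit:

object ViewBoundRewrite {
  // Scala 2.10-era style, now deprecated:
  //   def firstChar[A <% Seq[Char]](a: A): Char = a.head
  // Equivalent explicit form: the view bound becomes an implicit conversion parameter.
  def firstChar[A](a: A)(implicit toSeq: A => Seq[Char]): Char = toSeq(a).head

  def main(args: Array[String]): Unit = {
    // Predef.wrapString supplies the implicit String => Seq[Char] conversion here.
    println(firstChar("spark")) // prints 's'
  }
}

Private classes such as GrowableAccumulableParam are rewritten this way directly, while the still-public accumulableCollection and SequenceFileRDDFunctions signatures keep their <% bounds behind TODO comments, since (per those TODOs) changing a public signature is a breaking change deferred to Spark 3.x.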

101 files changed (+311, -5231 lines)

R/pkg/R/sparkR.R

Lines changed: 2 additions & 2 deletions
@@ -113,7 +113,7 @@ sparkR.stop <- function() {
 #' list(spark.executor.memory="4g"),
 #' list(LD_LIBRARY_PATH="/directory of JVM libraries (libjvm.so) on workers/"),
 #' c("one.jar", "two.jar", "three.jar"),
-#' c("com.databricks:spark-avro_2.10:2.0.1"))
+#' c("com.databricks:spark-avro_2.11:2.0.1"))
 #'}
 #' @note sparkR.init since 1.4.0
 sparkR.init <- function(
@@ -357,7 +357,7 @@ sparkRHive.init <- function(jsc = NULL) {
 #' sparkR.session("yarn-client", "SparkR", "/home/spark",
 #' list(spark.executor.memory="4g"),
 #' c("one.jar", "two.jar", "three.jar"),
-#' c("com.databricks:spark-avro_2.10:2.0.1"))
+#' c("com.databricks:spark-avro_2.11:2.0.1"))
 #' sparkR.session(spark.master = "yarn-client", spark.executor.memory = "4g")
 #'}
 #' @note sparkR.session since 2.0.0

R/pkg/tests/fulltests/test_client.R

Lines changed: 2 additions & 2 deletions
@@ -37,7 +37,7 @@ test_that("multiple packages don't produce a warning", {

 test_that("sparkJars sparkPackages as character vectors", {
   args <- generateSparkSubmitArgs("", "", c("one.jar", "two.jar", "three.jar"), "",
-                                  c("com.databricks:spark-avro_2.10:2.0.1"))
+                                  c("com.databricks:spark-avro_2.11:2.0.1"))
   expect_match(args, "--jars one.jar,two.jar,three.jar")
-  expect_match(args, "--packages com.databricks:spark-avro_2.10:2.0.1")
+  expect_match(args, "--packages com.databricks:spark-avro_2.11:2.0.1")
 })

bin/load-spark-env.cmd

Lines changed: 11 additions & 11 deletions
@@ -35,21 +35,21 @@ if [%SPARK_ENV_LOADED%] == [] (

 rem Setting SPARK_SCALA_VERSION if not already set.

-set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
-set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10"
+rem set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
+rem set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.12"

 if [%SPARK_SCALA_VERSION%] == [] (

-  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
-    echo "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected."
-    echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd."
-    exit 1
-  )
-  if exist %ASSEMBLY_DIR2% (
+  rem if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
+  rem   echo "Presence of build for multiple Scala versions detected."
+  rem   echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd."
+  rem   exit 1
+  rem )
+  rem if exist %ASSEMBLY_DIR2% (
     set SPARK_SCALA_VERSION=2.11
-  ) else (
-    set SPARK_SCALA_VERSION=2.10
-  )
+  rem ) else (
+  rem   set SPARK_SCALA_VERSION=2.12
+  rem )
 )
 exit /b 0

bin/load-spark-env.sh

Lines changed: 11 additions & 11 deletions
@@ -46,18 +46,18 @@ fi

 if [ -z "$SPARK_SCALA_VERSION" ]; then

-  ASSEMBLY_DIR2="${SPARK_HOME}/assembly/target/scala-2.11"
-  ASSEMBLY_DIR1="${SPARK_HOME}/assembly/target/scala-2.10"
+  #ASSEMBLY_DIR2="${SPARK_HOME}/assembly/target/scala-2.11"
+  #ASSEMBLY_DIR1="${SPARK_HOME}/assembly/target/scala-2.12"

-  if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
-    echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
-    echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
-    exit 1
-  fi
+  #if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
+  #  echo -e "Presence of build for multiple Scala versions detected." 1>&2
+  #  echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
+  #  exit 1
+  #fi

-  if [ -d "$ASSEMBLY_DIR2" ]; then
+  #if [ -d "$ASSEMBLY_DIR2" ]; then
     export SPARK_SCALA_VERSION="2.11"
-  else
-    export SPARK_SCALA_VERSION="2.10"
-  fi
+  #else
+  #  export SPARK_SCALA_VERSION="2.12"
+  #fi
 fi

build/mvn

Lines changed: 3 additions & 3 deletions
@@ -91,13 +91,13 @@ install_mvn() {

 # Install zinc under the build/ folder
 install_zinc() {
-  local zinc_path="zinc-0.3.11/bin/zinc"
+  local zinc_path="zinc-0.3.15/bin/zinc"
   [ ! -f "${_DIR}/${zinc_path}" ] && ZINC_INSTALL_FLAG=1
   local TYPESAFE_MIRROR=${TYPESAFE_MIRROR:-https://downloads.typesafe.com}

   install_app \
-    "${TYPESAFE_MIRROR}/zinc/0.3.11" \
-    "zinc-0.3.11.tgz" \
+    "${TYPESAFE_MIRROR}/zinc/0.3.15" \
+    "zinc-0.3.15.tgz" \
     "${zinc_path}"
   ZINC_BIN="${_DIR}/${zinc_path}"
 }

core/src/main/scala/org/apache/spark/Accumulable.scala

Lines changed: 2 additions & 1 deletion
@@ -201,7 +201,8 @@ trait AccumulableParam[R, T] extends Serializable {

 @deprecated("use AccumulatorV2", "2.0.0")
 private[spark] class
-GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Serializable: ClassTag, T]
+GrowableAccumulableParam[R : ClassTag, T]
+    (implicit rg: R => Growable[T] with TraversableOnce[T] with Serializable)
   extends AccumulableParam[R, T] {

   def addAccumulator(growable: R, elem: T): R = {

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 4 additions & 11 deletions
@@ -183,8 +183,6 @@ class SparkContext(config: SparkConf) extends Logging {
   // log out Spark Version in Spark driver log
   logInfo(s"Running Spark version $SPARK_VERSION")

-  warnDeprecatedVersions()
-
   /* ------------------------------------------------------------------------------------- *
    | Private variables. These variables keep the internal state of the context, and are    |
    | not accessible by the outside world. They're mutable since we want to initialize all  |
@@ -349,13 +347,6 @@ class SparkContext(config: SparkConf) extends Logging {
     value
   }

-  private def warnDeprecatedVersions(): Unit = {
-    val javaVersion = System.getProperty("java.version").split("[+.\\-]+", 3)
-    if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.10"))) {
-      logWarning("Support for Scala 2.10 is deprecated as of Spark 2.1.0")
-    }
-  }
-
   /** Control our logLevel. This overrides any user-defined log settings.
    * @param logLevel The desired log level as a string.
    * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
@@ -1396,6 +1387,8 @@ class SparkContext(config: SparkConf) extends Logging {
   @deprecated("use AccumulatorV2", "2.0.0")
   def accumulableCollection[R <% Growable[T] with TraversableOnce[T] with Serializable: ClassTag, T]
       (initialValue: R): Accumulable[R, T] = {
+    // TODO the context bound (<%) above should be replaced with simple type bound and implicit
+    // conversion but is a breaking change. This should be fixed in Spark 3.x.
     val param = new GrowableAccumulableParam[R, T]
     val acc = new Accumulable(initialValue, param)
     cleaner.foreach(_.registerAccumulatorForCleanup(acc.newAcc))
@@ -2605,9 +2598,9 @@ object SparkContext extends Logging {
    */
   private[spark] val LEGACY_DRIVER_IDENTIFIER = "<driver>"

-  private implicit def arrayToArrayWritable[T <% Writable: ClassTag](arr: Traversable[T])
+  private implicit def arrayToArrayWritable[T <: Writable : ClassTag](arr: Traversable[T])
       : ArrayWritable = {
-    def anyToWritable[U <% Writable](u: U): Writable = u
+    def anyToWritable[U <: Writable](u: U): Writable = u

     new ArrayWritable(classTag[T].runtimeClass.asInstanceOf[Class[Writable]],
       arr.map(x => anyToWritable(x)).toArray)

core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala

Lines changed: 11 additions & 43 deletions
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.rdd

-import scala.reflect.{classTag, ClassTag}
+import scala.reflect.ClassTag

 import org.apache.hadoop.io.Writable
 import org.apache.hadoop.io.compress.CompressionCodec
@@ -39,40 +39,8 @@ class SequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable : ClassTag
   extends Logging
   with Serializable {

-  private val keyWritableClass =
-    if (_keyWritableClass == null) {
-      // pre 1.3.0, we need to use Reflection to get the Writable class
-      getWritableClass[K]()
-    } else {
-      _keyWritableClass
-    }
-
-  private val valueWritableClass =
-    if (_valueWritableClass == null) {
-      // pre 1.3.0, we need to use Reflection to get the Writable class
-      getWritableClass[V]()
-    } else {
-      _valueWritableClass
-    }
-
-  private def getWritableClass[T <% Writable: ClassTag](): Class[_ <: Writable] = {
-    val c = {
-      if (classOf[Writable].isAssignableFrom(classTag[T].runtimeClass)) {
-        classTag[T].runtimeClass
-      } else {
-        // We get the type of the Writable class by looking at the apply method which converts
-        // from T to Writable. Since we have two apply methods we filter out the one which
-        // is not of the form "java.lang.Object apply(java.lang.Object)"
-        implicitly[T => Writable].getClass.getDeclaredMethods().filter(
-          m => m.getReturnType().toString != "class java.lang.Object" &&
-            m.getName() == "apply")(0).getReturnType
-
-      }
-      // TODO: use something like WritableConverter to avoid reflection
-    }
-    c.asInstanceOf[Class[_ <: Writable]]
-  }
-
+  // TODO the context bound (<%) above should be replaced with simple type bound and implicit
+  // conversion but is a breaking change. This should be fixed in Spark 3.x.

   /**
    * Output the RDD as a Hadoop SequenceFile using the Writable types we infer from the RDD's key
@@ -90,24 +58,24 @@ class SequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable : ClassTag
     // valueWritableClass at the compile time. To implement that, we need to add type parameters to
     // SequenceFileRDDFunctions. however, SequenceFileRDDFunctions is a public class so it will be a
     // breaking change.
-    val convertKey = self.keyClass != keyWritableClass
-    val convertValue = self.valueClass != valueWritableClass
+    val convertKey = self.keyClass != _keyWritableClass
+    val convertValue = self.valueClass != _valueWritableClass

-    logInfo("Saving as sequence file of type (" + keyWritableClass.getSimpleName + "," +
-      valueWritableClass.getSimpleName + ")" )
+    logInfo("Saving as sequence file of type " +
+      s"(${_keyWritableClass.getSimpleName},${_valueWritableClass.getSimpleName})" )
     val format = classOf[SequenceFileOutputFormat[Writable, Writable]]
     val jobConf = new JobConf(self.context.hadoopConfiguration)
     if (!convertKey && !convertValue) {
-      self.saveAsHadoopFile(path, keyWritableClass, valueWritableClass, format, jobConf, codec)
+      self.saveAsHadoopFile(path, _keyWritableClass, _valueWritableClass, format, jobConf, codec)
     } else if (!convertKey && convertValue) {
       self.map(x => (x._1, anyToWritable(x._2))).saveAsHadoopFile(
-        path, keyWritableClass, valueWritableClass, format, jobConf, codec)
+        path, _keyWritableClass, _valueWritableClass, format, jobConf, codec)
     } else if (convertKey && !convertValue) {
       self.map(x => (anyToWritable(x._1), x._2)).saveAsHadoopFile(
-        path, keyWritableClass, valueWritableClass, format, jobConf, codec)
+        path, _keyWritableClass, _valueWritableClass, format, jobConf, codec)
     } else if (convertKey && convertValue) {
       self.map(x => (anyToWritable(x._1), anyToWritable(x._2))).saveAsHadoopFile(
-        path, keyWritableClass, valueWritableClass, format, jobConf, codec)
+        path, _keyWritableClass, _valueWritableClass, format, jobConf, codec)
     }
   }
 }

core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala

Lines changed: 2 additions & 2 deletions
@@ -125,9 +125,9 @@ private[spark] object RpcTimeout {
     var foundProp: Option[(String, String)] = None
     while (itr.hasNext && foundProp.isEmpty) {
       val propKey = itr.next()
-      conf.getOption(propKey).foreach { prop => foundProp = Some(propKey, prop) }
+      conf.getOption(propKey).foreach { prop => foundProp = Some((propKey, prop)) }
     }
-    val finalProp = foundProp.getOrElse(timeoutPropList.head, defaultValue)
+    val finalProp = foundProp.getOrElse((timeoutPropList.head, defaultValue))
     val timeout = { Utils.timeStringAsSeconds(finalProp._2).seconds }
     new RpcTimeout(timeout, finalProp._1)
   }
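
The RpcTimeout hunk above is one of the "minor Scala warning fixes": Some(propKey, prop) only compiles because the compiler adapts the two arguments into a tuple, and newer compilers can warn about that adaptation (for example under -Xlint). A tiny sketch of the difference, using placeholder values rather than real Spark configuration keys:

object TuplingSketch {
  def main(args: Array[String]): Unit = {
    val key = "some.timeout.property" // placeholder name, not a real Spark conf key
    val value = "120s"

    // Auto-tupled form the old code relied on; the compiler rewrites it to
    // Some((key, value)) and may warn about adapting the argument list:
    // val adapted: Option[(String, String)] = Some(key, value)

    // Explicit tuple, as the commit writes it: no adaptation needed.
    val explicit: Option[(String, String)] = Some((key, value))
    println(explicit)
  }
}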

core/src/main/scala/org/apache/spark/ui/JettyUtils.scala

Lines changed: 3 additions & 3 deletions
@@ -54,7 +54,7 @@ private[spark] object JettyUtils extends Logging {
   // implicit conversion from many types of functions to jetty Handlers.
   type Responder[T] = HttpServletRequest => T

-  class ServletParams[T <% AnyRef](val responder: Responder[T],
+  class ServletParams[T <: AnyRef](val responder: Responder[T],
     val contentType: String,
     val extractFn: T => String = (in: Any) => in.toString) {}

@@ -68,7 +68,7 @@ private[spark] object JettyUtils extends Logging {
   implicit def textResponderToServlet(responder: Responder[String]): ServletParams[String] =
     new ServletParams(responder, "text/plain")

-  def createServlet[T <% AnyRef](
+  def createServlet[T <: AnyRef](
       servletParams: ServletParams[T],
       securityMgr: SecurityManager,
       conf: SparkConf): HttpServlet = {
@@ -113,7 +113,7 @@ private[spark] object JettyUtils extends Logging {
   }

   /** Create a context handler that responds to a request with the given path prefix */
-  def createServletHandler[T <% AnyRef](
+  def createServletHandler[T <: AnyRef](
       path: String,
       servletParams: ServletParams[T],
       securityMgr: SecurityManager,
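
The JettyUtils changes are the simpler flavor of the view bound cleanup: these methods do not rely on a conversion of T, they only need T to be a reference type, so the deprecated <% AnyRef becomes the plain upper bound <: AnyRef. A small illustrative sketch with a hypothetical method, not code from Spark:

object UpperBoundSketch {
  // Old style:  def render[T <% AnyRef](value: T): String = ...
  // New style: no conversion of T is applied, so a subtype constraint is enough.
  def render[T <: AnyRef](value: T): String = s"<pre>${value.toString}</pre>"

  def main(args: Array[String]): Unit = {
    println(render("jobs page")) // <pre>jobs page</pre>
    // render(42) would not compile: Int is not a subtype of AnyRef.
  }
}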
