
Commit 2ddcdab

Author: Robert Kruszewski
Commit message: resolve conflicts
1 parent 6e4c8f5 commit 2ddcdab

File tree

22 files changed: +62 −833 lines


build/sbt-launch-lib.bash

Lines changed: 1 addition & 1 deletion

@@ -111,7 +111,7 @@ addDebugger () {
 # a ham-fisted attempt to move some memory settings in concert
 # so they need not be dicked around with individually.
 get_mem_opts () {
-  local mem=${1:-2048}
+  local mem=${1:-4096}
   local perm=$(( $mem / 4 ))
   (( $perm > 256 )) || perm=256
  (( $perm < 4096 )) || perm=4096
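
The resolved hunk only raises the default heap from 2048 to 4096 MB; the PermGen sizing below it is untouched. For context, a minimal Scala sketch of that sizing arithmetic (the name permSizeMb is illustrative, not from the script):

    // Mirrors get_mem_opts: perm is a quarter of the heap,
    // clamped to the [256, 4096] MB range.
    def permSizeMb(memMb: Int = 4096): Int =
      math.min(math.max(memMb / 4, 256), 4096)

    permSizeMb(4096)  // new default -> 1024
    permSizeMb(2048)  // old default -> 512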

core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala

Lines changed: 2 additions & 6 deletions

@@ -103,15 +103,11 @@ private[spark] abstract class BasePythonRunner[IN, OUT](
     if (reuseWorker) {
       envVars.put("SPARK_REUSE_WORKER", "1")
     }
-<<<<<<< HEAD
-    val worker: Socket = env.createPythonWorker(
-      pythonExec, envVars.asScala.toMap, condaInstructions)
-=======
     if (memoryMb.isDefined) {
       envVars.put("PYSPARK_EXECUTOR_MEMORY_MB", memoryMb.get.toString)
     }
-    val worker: Socket = env.createPythonWorker(pythonExec, envVars.asScala.toMap)
->>>>>>> master
+    val worker: Socket = env.createPythonWorker(
+      pythonExec, envVars.asScala.toMap, condaInstructions)
     // Whether is the worker released into idle pool
     val released = new AtomicBoolean(false)
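
The conflict here pitted the fork's three-argument env.createPythonWorker (which threads condaInstructions through) against upstream master's two-argument call plus the new PYSPARK_EXECUTOR_MEMORY_MB handling. The resolution keeps both behaviors, so the merged block reads:

    // Upstream change retained: forward the executor memory cap.
    if (memoryMb.isDefined) {
      envVars.put("PYSPARK_EXECUTOR_MEMORY_MB", memoryMb.get.toString)
    }
    // Fork change retained: pass conda setup instructions to the worker.
    val worker: Socket = env.createPythonWorker(
      pythonExec, envVars.asScala.toMap, condaInstructions)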

core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala

Lines changed: 1 addition & 19 deletions

@@ -50,13 +50,7 @@ import org.apache.spark.util.Utils
 private[spark] abstract class RestSubmissionServer(
     val host: String,
     val requestedPort: Int,
-<<<<<<< HEAD
-    val masterConf: SparkConf,
-    val sslOptions: SSLOptions = SSLOptions()) extends Logging {
-=======
     val masterConf: SparkConf) extends Logging {
-
->>>>>>> master
   protected val submitRequestServlet: SubmitRequestServlet
   protected val killRequestServlet: KillRequestServlet
   protected val statusRequestServlet: StatusRequestServlet
@@ -90,18 +84,6 @@ private[spark] abstract class RestSubmissionServer(
     threadPool.setDaemon(true)
     val server = new Server(threadPool)

-    val resolvedConnectionFactories = sslOptions
-      .createJettySslContextFactory()
-      .map(sslFactory => {
-        val sslConnectionFactory = new SslConnectionFactory(
-          sslFactory, HttpVersion.HTTP_1_1.asString())
-        val rawHttpConfiguration = new HttpConfiguration()
-        rawHttpConfiguration.setSecureScheme("https")
-        rawHttpConfiguration.setSecurePort(startPort)
-        val rawHttpConnectionFactory = new HttpConnectionFactory(rawHttpConfiguration)
-        Array(sslConnectionFactory, rawHttpConnectionFactory)
-      }).getOrElse(Array(new HttpConnectionFactory()))
-
     val connector = new ServerConnector(
       server,
       null,
@@ -110,7 +92,7 @@ private[spark] abstract class RestSubmissionServer(
       null,
       -1,
       -1,
-      resolvedConnectionFactories: _*)
+      new HttpConnectionFactory())
     connector.setHost(host)
     connector.setPort(startPort)
     connector.setReuseAddress(!Utils.isWindows)
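
This resolution drops the fork's SSL support outright: the sslOptions constructor parameter and the resolvedConnectionFactories wiring are gone, and the connector falls back to master's plain HttpConnectionFactory. For reference, a condensed Scala sketch of the dropped wiring (names taken from the removed hunk; Jetty imports assumed):

    // When sslOptions yields a Jetty SslContextFactory, front the HTTP
    // factory with TLS; otherwise serve plain HTTP.
    val factories: Array[ConnectionFactory] = sslOptions
      .createJettySslContextFactory()
      .map { sslFactory =>
        val httpConf = new HttpConfiguration()
        httpConf.setSecureScheme("https")
        httpConf.setSecurePort(startPort)
        Array[ConnectionFactory](
          new SslConnectionFactory(sslFactory, HttpVersion.HTTP_1_1.asString()),
          new HttpConnectionFactory(httpConf))
      }
      .getOrElse(Array(new HttpConnectionFactory()))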

dev/deps/spark-deps-hadoop-2.6

Lines changed: 0 additions & 198 deletions
This file was deleted.
