
Commit ae935d8

Author: Robert Kruszewski
Commit message: resolve conflicts
1 parent 69b9e3d commit ae935d8

File tree

27 files changed: +71 additions, -1045 deletions

core/src/main/scala/org/apache/spark/api/r/RRunner.scala

Lines changed: 2 additions & 10 deletions
@@ -401,21 +401,13 @@ private[r] object RRunner {
       // the socket used to send out the input of task
       serverSocket.setSoTimeout(10000)
       val sock = serverSocket.accept()
-<<<<<<< HEAD
-      env.setRDaemonChannel(
-        new DataOutputStream(new BufferedOutputStream(sock.getOutputStream)))
-      serverSocket.close()
-||||||| merged common ancestors
-      daemonChannel = new DataOutputStream(new BufferedOutputStream(sock.getOutputStream))
-      serverSocket.close()
-=======
       try {
         authHelper.authClient(sock)
-        daemonChannel = new DataOutputStream(new BufferedOutputStream(sock.getOutputStream))
+        env.setRDaemonChannel(
+          new DataOutputStream(new BufferedOutputStream(sock.getOutputStream)))
       } finally {
         serverSocket.close()
       }
->>>>>>> apache/master
     }
     try {
       env.createRWorkerFromDaemon(port)
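
For readability, this is roughly how the resolved block in RRunner.scala reads after the merge, assembled only from the context and added lines of the hunk above (indentation and surrounding code approximated, not taken from the full file):

// the socket used to send out the input of task
serverSocket.setSoTimeout(10000)
val sock = serverSocket.accept()
try {
  // apache/master's client authentication is kept, and HEAD's setRDaemonChannel call
  // is moved inside the try/finally so the daemon channel is only set up after auth
  authHelper.authClient(sock)
  env.setRDaemonChannel(
    new DataOutputStream(new BufferedOutputStream(sock.getOutputStream)))
} finally {
  serverSocket.close()
}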

core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala

Lines changed: 1 addition & 15 deletions
@@ -44,7 +44,7 @@ object PythonRunner extends CondaRunner with Logging {
     val pyFiles = args(1)
     val otherArgs = args.slice(2, args.length)
     val sparkConf = new SparkConf()
-<<<<<<< HEAD
+    val secret = Utils.createSecret(sparkConf)
     val presetPythonExec = Provenance.fromConf(sparkConf, PYSPARK_DRIVER_PYTHON)
       .orElse(Provenance.fromConf(sparkConf, PYSPARK_PYTHON))
       .orElse(Provenance.fromEnv("PYSPARK_DRIVER_PYTHON"))
@@ -60,20 +60,6 @@ object PythonRunner extends CondaRunner with Logging {
       .getOrElse("python")
 
     logInfo(s"Python binary that will be called: $pythonExec")
-||||||| merged common ancestors
-    val pythonExec = sparkConf.get(PYSPARK_DRIVER_PYTHON)
-      .orElse(sparkConf.get(PYSPARK_PYTHON))
-      .orElse(sys.env.get("PYSPARK_DRIVER_PYTHON"))
-      .orElse(sys.env.get("PYSPARK_PYTHON"))
-      .getOrElse("python")
-=======
-    val secret = Utils.createSecret(sparkConf)
-    val pythonExec = sparkConf.get(PYSPARK_DRIVER_PYTHON)
-      .orElse(sparkConf.get(PYSPARK_PYTHON))
-      .orElse(sys.env.get("PYSPARK_DRIVER_PYTHON"))
-      .orElse(sys.env.get("PYSPARK_PYTHON"))
-      .getOrElse("python")
->>>>>>> apache/master
 
     // Format python file paths before adding them to the PYTHONPATH
     val formattedPythonFile = formatPath(pythonFile)
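
Similarly, a rough sketch of the resolved flow in PythonRunner.scala after this commit, reconstructed only from the two hunks above; the lines between the hunks (the remaining fallbacks and the derivation of pythonExec itself) are outside the diff context and are elided:

val sparkConf = new SparkConf()
// apache/master's secret creation is retained alongside the fork's Provenance-based lookup
val secret = Utils.createSecret(sparkConf)
val presetPythonExec = Provenance.fromConf(sparkConf, PYSPARK_DRIVER_PYTHON)
  .orElse(Provenance.fromConf(sparkConf, PYSPARK_PYTHON))
  .orElse(Provenance.fromEnv("PYSPARK_DRIVER_PYTHON"))
// ... (further fallbacks and the value of pythonExec come from lines not shown in the diff) ...
logInfo(s"Python binary that will be called: $pythonExec")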

dev/deps/spark-deps-hadoop-2.6

Lines changed: 0 additions & 201 deletions
This file was deleted.
