Skip to content

Commit 19f7172

Browse files
committed
Merge pull request spark-jobserver#427 from Atigeo/xPatterns_6.0
Support user impersonation for an already Kerberos authenticated user
2 parents cd27bf7 + 1de98a0 commit 19f7172

File tree

3 files changed

+23
-3
lines changed

3 files changed

+23
-3
lines changed

README.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -592,6 +592,10 @@ or in the job config when using POST /jobs,
592592
spark.cores.max = 10
593593
}
594594
595+
User impersonation for an already Kerberos-authenticated user is supported via the `spark.proxy.user` query param:
596+
597+
POST /contexts/my-new-context?spark.proxy.user=<user-to-impersonate>
598+
595599
To pass settings directly to the sparkConf that do not use the "spark." prefix "as-is", use the "passthrough" section.
596600
597601
spark.context-settings {

bin/manager_start.sh

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#!/bin/bash
22
# Script to start the job manager
3-
# args: <work dir for context> <cluster address>
3+
# args: <work dir for context> <cluster address> [proxy_user]
44
set -e
55

66
get_abs_script_path() {
@@ -27,10 +27,18 @@ JAVA_OPTS="-XX:MaxDirectMemorySize=$MAX_DIRECT_MEMORY
2727

2828
MAIN="spark.jobserver.JobManager"
2929

30-
cmd='$SPARK_HOME/bin/spark-submit --class $MAIN --driver-memory $JOBSERVER_MEMORY
30+
if [ ! -z $3 ]; then
31+
cmd='$SPARK_HOME/bin/spark-submit --class $MAIN --driver-memory $JOBSERVER_MEMORY
3132
--conf "spark.executor.extraJavaOptions=$LOGGING_OPTS"
33+
--proxy-user $3
3234
--driver-java-options "$GC_OPTS $JAVA_OPTS $LOGGING_OPTS $CONFIG_OVERRIDES"
3335
$appdir/spark-job-server.jar $1 $2 $conffile'
36+
else
37+
cmd='$SPARK_HOME/bin/spark-submit --class $MAIN --driver-memory $JOBSERVER_MEMORY
38+
--conf "spark.executor.extraJavaOptions=$LOGGING_OPTS"
39+
--driver-java-options "$GC_OPTS $JAVA_OPTS $LOGGING_OPTS $CONFIG_OVERRIDES"
40+
$appdir/spark-job-server.jar $1 $2 $conffile'
41+
fi
3442

3543
eval $cmd > /dev/null 2>&1 &
3644
# exec java -cp $CLASSPATH $GC_OPTS $JAVA_OPTS $LOGGING_OPTS $CONFIG_OVERRIDES $MAIN $1 $2 $conffile 2>&1 &

job-server/src/spark.jobserver/AkkaClusterSupervisorActor.scala

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -204,7 +204,15 @@ class AkkaClusterSupervisorActor(daoActor: ActorRef) extends InstrumentedActor {
204204
failureFunc(e)
205205
return
206206
}
207-
val pb = Process(s"$managerStartCommand $contextDir ${selfAddress.toString}")
207+
208+
//extract spark.proxy.user from contextConfig, if available and pass it to $managerStartCommand
209+
var cmdString = s"$managerStartCommand $contextDir ${selfAddress.toString}"
210+
211+
if (contextConfig.hasPath("spark.proxy.user")) {
212+
cmdString = cmdString + s" ${contextConfig.getString("spark.proxy.user")}"
213+
}
214+
215+
val pb = Process(cmdString)
208216
val pio = new ProcessIO(_ => (),
209217
stdout => scala.io.Source.fromInputStream(stdout)
210218
.getLines.foreach(println),

0 commit comments

Comments (0)