Skip to content

Commit 3c00dd3

Browse files
committed
Resolve environment vars in Typesafe configs everywhere
1 parent 286ec60 commit 3c00dd3

File tree

3 files changed

+5
-4
lines changed

3 files changed

+5
-4
lines changed

job-server/src/spark.jobserver/JobServer.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ object JobServer {
3636
println("Could not find configuration file " + configFile)
3737
sys.exit(1)
3838
}
39-
ConfigFactory.parseFile(configFile).withFallback(defaultConfig)
39+
ConfigFactory.parseFile(configFile).withFallback(defaultConfig).resolve()
4040
} else {
4141
defaultConfig
4242
}

job-server/src/spark.jobserver/WebApi.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ class WebApi(system: ActorSystem,
117117
complete(StatusCodes.BadRequest, errMap("context name must start with letters"))
118118
} else {
119119
parameterMap { (params) =>
120-
val config = ConfigFactory.parseMap(params.asJava)
120+
val config = ConfigFactory.parseMap(params.asJava).resolve()
121121
val future = (supervisor ? AddContext(contextName, config))(contextTimeout.seconds)
122122
respondWithMediaType(MediaTypes.`application/json`) { ctx =>
123123
future.map {
@@ -290,7 +290,7 @@ class WebApi(system: ActorSystem,
290290
try {
291291
val async = !syncOpt.getOrElse(false)
292292
val postedJobConfig = ConfigFactory.parseString(configString)
293-
val jobConfig = postedJobConfig.withFallback(config)
293+
val jobConfig = postedJobConfig.withFallback(config).resolve()
294294
val contextConfig = Try(jobConfig.getConfig("spark.context-settings")).
295295
getOrElse(ConfigFactory.empty)
296296
val jobManager = getJobManagerForContext(contextOpt, contextConfig, classPath)

notes/0.5.2.markdown

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
#Scala #akka @ApacheSpark
22

33
* Spark streaming context support !! (@zeitos)
4-
* Change `server_start.sh` to use `spark-submit`
4+
* Change `server_start.sh` to use `spark-submit`. This should fix some edge case bugs.
55
* Configurable driver memory (@acidghost)
6+
* Be able to accept environment vars in job server config files, e.g. `master = ${?MY_SPARK_HOST}`

0 commit comments

Comments (0)