Skip to content

Commit 4b72274

Browse files
committed
Move default JDBC config to application.conf; get rid of getOrElse
1 parent f3935aa commit 4b72274

File tree

5 files changed

+28
-21
lines changed

5 files changed

+28
-21
lines changed

job-server/src/main/resources/application.conf

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,21 @@ spark {
1717
rootdir = /tmp/spark-jobserver/filedao/data
1818
}
1919

20+
sqldao {
21+
# Slick database driver, full classpath
22+
slick-driver = scala.slick.driver.H2Driver
23+
24+
# JDBC driver, full classpath
25+
jdbc-driver = org.h2.Driver
26+
27+
# Directory where default H2 driver stores its data. Only needed for H2.
28+
rootdir = /tmp/spark-jobserver/sqldao/data
29+
30+
# Full JDBC URL / init string. Note: must be consistent with the driver settings above.
31+
# Substitutions may be used when launching job-server, but leave them out of this default or tests won't pass
32+
jdbc.url = "jdbc:h2:file:/tmp/spark-jobserver/sqldao/data/h2-db"
33+
}
34+
2035
# Time out for job server to wait while creating contexts
2136
context-creation-timeout = 15 s
2237

@@ -53,7 +68,7 @@ spark {
5368
# A zero-arg class implementing spark.jobserver.context.SparkContextFactory
5469
# Determines the type of jobs that can run in a SparkContext
5570
context-factory = spark.jobserver.context.DefaultSparkContextFactory
56-
71+
5772
# URIs of jars to be loaded into the classpath for this context, given either as a string list or as a single comma-separated string
5873
# dependent-jar-uris = ["file:///some/path/present/in/each/mesos/slave/somepackage.jar"]
5974

job-server/src/spark.jobserver/io/JobDAO.scala

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -82,14 +82,4 @@ trait JobDAO {
8282
*/
8383
def getLastUploadTime(appName: String): Option[DateTime] =
8484
getApps.get(appName)
85-
86-
/**
87-
* A safe API for getting values from Typesafe Config, will return a default if the
88-
* value is missing. If the value is badly formatted, error still goes through.
89-
*/
90-
def getOrElse[T](getter: => T, default: T): T = {
91-
try getter catch {
92-
case e: ConfigException.Missing => default
93-
}
94-
}
9585
}

job-server/src/spark.jobserver/io/JobFileDAO.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,7 @@ class JobFileDAO(config: Config) extends JobDAO {
1616
// jobId to its Config
1717
private val configs = mutable.HashMap.empty[String, Config]
1818

19-
private val rootDir = getOrElse(config.getString("spark.jobserver.filedao.rootdir"),
20-
"/tmp/spark-jobserver/filedao/data")
19+
private val rootDir = config.getString("spark.jobserver.filedao.rootdir")
2120
private val rootDirFile = new File(rootDir)
2221
logger.info("rootDir is " + rootDirFile.getAbsolutePath)
2322

job-server/src/spark.jobserver/io/JobSqlDAO.scala

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,8 @@ import scala.slick.jdbc.meta.MTable
1010
import scala.reflect.runtime.universe
1111

1212
class JobSqlDAO(config: Config) extends JobDAO {
13-
val slickDriverClass = getOrElse(config.getString("spark.jobserver.sqldao.slick-driver"),
14-
"scala.slick.driver.H2Driver")
15-
val jdbcDriverClass = getOrElse(config.getString("spark.jobserver.sqldao.jdbc-driver"),
16-
"org.h2.Driver")
13+
val slickDriverClass = config.getString("spark.jobserver.sqldao.slick-driver")
14+
val jdbcDriverClass = config.getString("spark.jobserver.sqldao.jdbc-driver")
1715

1816
val runtimeMirror = universe.runtimeMirror(getClass.getClassLoader)
1917
val profileModule = runtimeMirror.staticModule(slickDriverClass)
@@ -22,8 +20,8 @@ class JobSqlDAO(config: Config) extends JobDAO {
2220

2321
private val logger = LoggerFactory.getLogger(getClass)
2422

25-
private val rootDir = getOrElse(config.getString("spark.jobserver.sqldao.rootdir"),
26-
"/tmp/spark-jobserver/sqldao/data")
23+
// NOTE: below is only needed for H2 drivers
24+
private val rootDir = config.getString("spark.jobserver.sqldao.rootdir")
2725
private val rootDirFile = new File(rootDir)
2826
logger.info("rootDir is " + rootDirFile.getAbsolutePath)
2927

@@ -63,8 +61,7 @@ class JobSqlDAO(config: Config) extends JobDAO {
6361
val configs = TableQuery[Configs]
6462

6563
// DB initialization
66-
val defaultJdbcUrl = "jdbc:h2:file:" + rootDir + "/h2-db"
67-
val jdbcUrl = getOrElse(config.getString("spark.jobserver.sqldao.jdbc.url"), defaultJdbcUrl)
64+
val jdbcUrl = config.getString("spark.jobserver.sqldao.jdbc.url")
6865
val db = Database.forURL(jdbcUrl, driver = jdbcDriverClass)
6966

7067
// Server initialization

job-server/src/test/resources/local.test.jobsqldao.conf

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,5 +7,11 @@ spark.jobserver {
77
rootdir = /tmp/spark-job-server-test/sqldao/data
88
# https://coderwall.com/p/a2vnxg
99
jdbc.url = "jdbc:h2:mem:jobserver-test;DATABASE_TO_UPPER=false;DB_CLOSE_DELAY=-1"
10+
11+
# Slick database driver, full classpath
12+
slick-driver = scala.slick.driver.H2Driver
13+
14+
# JDBC driver, full classpath
15+
jdbc-driver = org.h2.Driver
1016
}
1117
}

0 commit comments

Comments
 (0)