@@ -3,6 +3,8 @@ import Dependencies._
 import JobServerRelease._
 import sbtassembly.AssemblyPlugin.autoImport.assemblyMergeStrategy
 import sbtassembly.MergeStrategy
+import scala.xml.{Node => XmlNode, NodeSeq => XmlNodeSeq, _}
+import scala.xml.transform.{RewriteRule, RuleTransformer}

 transitiveClassifiers in Global := Seq()
 lazy val dirSettings = Seq()
@@ -11,7 +13,7 @@ lazy val akkaApp = Project(id = "akka-app", base = file("akka-app"))
   .settings(description := "Common Akka application stack: metrics, tracing, logging, and more.")
   .settings(commonSettings)
   .settings(libraryDependencies ++= coreTestDeps ++ akkaDeps)
-  .settings(publishSettings)
+  .settings(noPublishSettings)
   .disablePlugins(SbtScalariform)

 lazy val jobServer = Project(id = "job-server", base = file("job-server"))
@@ -32,7 +34,7 @@ lazy val jobServer = Project(id = "job-server", base = file("job-server"))
     test in assembly := {},
     fork in Test := true
   )
-  .settings(publishSettings)
+  .settings(noPublishSettings)
   .dependsOn(akkaApp, jobServerApi)
   .disablePlugins(SbtScalariform)

@@ -109,6 +111,12 @@ lazy val jobServerTestJarSettings = Seq(
   exportJars := true  // use the jar instead of target/classes
 )

+lazy val noPublishSettings = Seq(
+  publishTo := Some(Resolver.file("Unused repo", file("target/unusedrepo"))),
+  publishArtifact := false,
+  publish := {}
+)
+
 lazy val dockerSettings = Seq(
   // Make the docker task depend on the assembly task, which generates a fat JAR file
   docker <<= (docker dependsOn (assembly in jobServerExtras)),
@@ -251,9 +259,36 @@ lazy val scoverageSettings = {
     coverageExcludedPackages := ".+Benchmark.*"
 }

+/** Used for publishing `extras`, `api` and `python` jars. Main Spark Job Server assembly is published
+  * as always. */
 lazy val publishSettings = Seq(
+  autoScalaLibrary := false,
+  credentials += Credentials(Path.userHome / ".sbt" / ".credentials"),
+  publishMavenStyle := true,
+  publishTo := Some(sys.env("MVN_PUBLISH_REPO") at sys.env("MVN_PUBLISH_URL")),
   licenses += ("Apache-2.0", url("http://choosealicense.com/licenses/apache/")),
-  bintrayOrganization := Some("spark-jobserver")
+  pomIncludeRepository := { _ => false },
+  /** Since users are encouraged to use dse-spark-dependencies, which provides most of the needed
+    * dependencies, we remove most of the Spark Job Server deps here. Provided, test and blacklisted
+    * deps are removed from resulting poms. */
+  pomPostProcess := { (node: XmlNode) =>
+    new RuleTransformer(new RewriteRule {
+      val pomDependencyBlacklist = Seq("job-server_", "joda-convert", "joda-time")
+      val emptyElement = Text("")
+
+      def hasTestOrProvidedScope(e: Elem): Boolean = e.child.exists(child =>
+        child.label == "scope" && (child.text == "provided" || child.text == "test"))
+
+      def isBlacklisted(e: Elem): Boolean = e.child.exists(child =>
+        child.label == "artifactId" && pomDependencyBlacklist.exists(child.text.startsWith))
+
+      override def transform(node: XmlNode): XmlNodeSeq = node match {
+        case e: Elem if e.label == "dependency" && (hasTestOrProvidedScope(e) || isBlacklisted(e)) =>
+          emptyElement
+        case _ => node
+      }
+    }).transform(node).head
+  }
 )

 // This is here so we can easily switch back to Logback when Spark fixes its log4j dependency.
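
To make the effect of the new pomPostProcess rule easier to see outside of sbt, here is a minimal, standalone sketch of the same RewriteRule / RuleTransformer idea applied to a made-up <dependencies> fragment. The fragment, the PomFilterDemo object name, and the artifact ids in it are illustrative assumptions rather than part of the build; the sketch only assumes scala-xml is on the classpath.

import scala.xml.{Elem, Node => XmlNode, Text, XML}
import scala.xml.transform.{RewriteRule, RuleTransformer}

object PomFilterDemo extends App {
  // Hypothetical POM fragment; only the spray-json dependency should survive.
  val pom: Elem = XML.loadString(
    """<dependencies>
      |  <dependency><artifactId>joda-time</artifactId><scope>compile</scope></dependency>
      |  <dependency><artifactId>spark-core_2.11</artifactId><scope>provided</scope></dependency>
      |  <dependency><artifactId>spray-json_2.11</artifactId><scope>compile</scope></dependency>
      |</dependencies>""".stripMargin)

  val blacklist = Seq("job-server_", "joda-convert", "joda-time")

  val rule = new RewriteRule {
    def hasTestOrProvidedScope(e: Elem): Boolean = e.child.exists(c =>
      c.label == "scope" && (c.text == "provided" || c.text == "test"))

    def isBlacklisted(e: Elem): Boolean = e.child.exists(c =>
      c.label == "artifactId" && blacklist.exists(c.text.startsWith))

    override def transform(node: XmlNode): Seq[XmlNode] = node match {
      // Replace unwanted <dependency> elements with an empty text node, i.e. drop them.
      case e: Elem if e.label == "dependency" && (hasTestOrProvidedScope(e) || isBlacklisted(e)) =>
        Text("")
      case other => other
    }
  }

  // Prints a <dependencies> block containing only the spray-json entry.
  println(new RuleTransformer(rule).transform(pom).head)
}

Dependencies whose scope is test or provided, or whose artifactId starts with a blacklisted prefix, are rewritten to empty text, so the POM that gets published keeps only the remaining entries.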