@@ -3,6 +3,8 @@ import Dependencies._
 import JobServerRelease._
 import sbtassembly.AssemblyPlugin.autoImport.assemblyMergeStrategy
 import sbtassembly.MergeStrategy
+import scala.xml.{Node => XmlNode, NodeSeq => XmlNodeSeq, _}
+import scala.xml.transform.{RewriteRule, RuleTransformer}
 
 transitiveClassifiers in Global := Seq(Artifact.SourceClassifier)
 lazy val dirSettings = Seq()
@@ -11,7 +13,7 @@ lazy val akkaApp = Project(id = "akka-app", base = file("akka-app"))
   .settings(description := "Common Akka application stack: metrics, tracing, logging, and more.")
   .settings(commonSettings)
   .settings(libraryDependencies ++= coreTestDeps ++ akkaDeps)
-  .settings(publishSettings)
+  .settings(noPublishSettings)
   .disablePlugins(SbtScalariform)
 
 lazy val jobServer = Project(id = "job-server", base = file("job-server"))
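`noPublishSettings` replaces `publishSettings` on the modules in this hunk and the next, but its definition is not part of this commit. A minimal sketch of what such a helper typically looks like in sbt (an assumption for illustration, not the project's actual definition):

    // Hypothetical helper: turns publishing into a no-op for internal-only modules.
    lazy val noPublishSettings = Seq(
      publish := {},             // skip remote publish
      publishLocal := {},        // skip local publish
      publishArtifact := false   // produce no publishable artifacts
    )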
@@ -36,7 +38,7 @@ lazy val jobServer = Project(id = "job-server", base = file("job-server"))
     test in assembly := {},
     fork in Test := true
   )
-  .settings(publishSettings)
+  .settings(noPublishSettings)
   .dependsOn(akkaApp, jobServerApi)
   .disablePlugins(SbtScalariform)
 
@@ -283,9 +285,36 @@ lazy val scoverageSettings = {
   coverageExcludedPackages := ".+Benchmark.*"
 }
 
+/** Used for publishing the `extras`, `api` and `python` jars. The main Spark Job Server assembly
+  * is published as before. */
 lazy val publishSettings = Seq(
+  autoScalaLibrary := false,
+  credentials += Credentials(Path.userHome / ".sbt" / ".credentials"),
+  publishMavenStyle := true,
+  publishTo := Some(sys.env("MVN_PUBLISH_REPO") at sys.env("MVN_PUBLISH_URL")),
   licenses += ("Apache-2.0", url("http://choosealicense.com/licenses/apache/")),
-  bintrayOrganization := Some("spark-jobserver")
+  pomIncludeRepository := { _ => false },
+  /** Since users are encouraged to use dse-spark-dependencies, which provides most of the needed
+    * dependencies, we remove most of the Spark Job Server deps here: provided, test, and
+    * blacklisted deps are stripped from the resulting POMs. */
+  pomPostProcess := { (node: XmlNode) =>
+    new RuleTransformer(new RewriteRule {
+      val pomDependencyBlacklist = Seq("job-server_", "joda-convert", "joda-time")
+      val emptyElement = Text("")
+
+      def hasTestOrProvidedScope(e: Elem): Boolean = e.child.exists(child =>
+        child.label == "scope" && (child.text == "provided" || child.text == "test"))
+
+      def isBlacklisted(e: Elem): Boolean = e.child.exists(child =>
+        child.label == "artifactId" && pomDependencyBlacklist.exists(child.text.startsWith))
+
+      override def transform(node: XmlNode): XmlNodeSeq = node match {
+        case e: Elem if e.label == "dependency" && (hasTestOrProvidedScope(e) || isBlacklisted(e)) =>
+          emptyElement
+        case _ => node
+      }
+    }).transform(node).head
+  }
 )
 
 // This is here so we can easily switch back to Logback when Spark fixes its log4j dependency.
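The `pomPostProcess` rule above can be exercised outside sbt. Below is a minimal, self-contained sketch of the same transform (the `PomFilterDemo` object and the sample dependency entries are invented for illustration); it needs only scala-xml on the classpath. Note also that `publishTo` reads `MVN_PUBLISH_REPO` and `MVN_PUBLISH_URL` from the environment, so both must be set in any shell that runs a publish task.

    import scala.xml.{Elem, Node, Text}
    import scala.xml.transform.{RewriteRule, RuleTransformer}

    object PomFilterDemo extends App {
      val blacklist = Seq("job-server_", "joda-convert", "joda-time")

      // Sample <dependencies> block standing in for a generated POM fragment.
      val deps =
        <dependencies>
          <dependency><artifactId>spark-core_2.11</artifactId><scope>provided</scope></dependency>
          <dependency><artifactId>joda-time</artifactId></dependency>
          <dependency><artifactId>config</artifactId></dependency>
        </dependencies>

      // Same predicate as the build: drop provided/test-scoped and blacklisted dependencies.
      val stripDeps = new RewriteRule {
        override def transform(node: Node): Seq[Node] = node match {
          case e: Elem if e.label == "dependency" &&
            (e.child.exists(c => c.label == "scope" && (c.text == "provided" || c.text == "test")) ||
             e.child.exists(c => c.label == "artifactId" && blacklist.exists(c.text.startsWith))) =>
            Text("")
          case other => other
        }
      }

      // Prints a <dependencies> element in which only the `config` entry survives.
      println(new RuleTransformer(stripDeps).transform(deps).head)
    }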