Skip to content

Commit bc0dd4d

Browse files
committed
remove maven
1 parent 2b22c43 commit bc0dd4d

File tree

6 files changed

+110
-330
lines changed

README.md

Lines changed: 7 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -20,21 +20,11 @@ The LDBC SNB Data Generator (Datagen) produces the datasets for the [LDBC Social
2020

2121
### Build the JAR
2222

23-
You can build the JAR with both Maven and SBT.
23+
To assemble the JAR file with SBT, run:
2424

25-
* To assemble the JAR file with Maven, run:
26-
27-
```bash
28-
./tools/build.sh
29-
```
30-
31-
* For faster builds during development, consider using SBT. To assemble the JAR file with SBT, run:
32-
33-
```bash
34-
sbt assembly
35-
```
36-
37-
:warning: When using SBT, change the path of the JAR file in the instructions provided in the README (`target/ldbc_snb_datagen_${PLATFORM_VERSION}-${DATAGEN_VERSION}.jar` -> `./target/scala-2.12/ldbc_snb_datagen-assembly-${DATAGEN_VERSION}.jar`).
25+
```bash
26+
sbt assembly
27+
```
3828

3929
### Install Python tools
4030

@@ -78,9 +68,9 @@ Both Java 8 and Java 11 are supported.
7868
Once you have Spark in place and built the JAR file, run the generator as follows:
7969

8070
```bash
81-
export PLATFORM_VERSION=2.12_spark3.2
82-
export DATAGEN_VERSION=0.5.0-SNAPSHOT
83-
export LDBC_SNB_DATAGEN_JAR=./target/ldbc_snb_datagen_${PLATFORM_VERSION}-${DATAGEN_VERSION}.jar
71+
export PLATFORM_VERSION=spark3.2_2.12
72+
export DATAGEN_VERSION=$(sbt -batch -error 'print version')
73+
export LDBC_SNB_DATAGEN_JAR=$(sbt -batch -error 'print assembly / assemblyOutputPath')
8474
./tools/run.py <runtime configuration arguments> -- <generator configuration arguments>
8575
```
8676

build.sbt

Lines changed: 99 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,105 @@
1+
name := "ldbc_snb_datagen"
2+
organization := "ldbc.snb.datagen"
3+
4+
scmInfo := {
5+
val org = "ldbc"
6+
val project = "ldbc_snb_datagen_spark"
7+
Some(ScmInfo(url(s"https://github.com/$org/$project"), s"[email protected]:$org/$project.git"))
8+
}
9+
10+
Global / onChangedBuildSource := ReloadOnSourceChanges
11+
Global / cancelable := true
112
ThisBuild / Test / parallelExecution := false
213
ThisBuild / Test / fork := true
3-
ThisBuild / cancelable := true
414

5-
javacOptions ++= Seq("-source", "1.8", "-target", "1.8", "-Xlint")
15+
val sparkVersion = settingKey[String]("The version of Spark used for building.")
16+
val sparkCompatVersion = taskKey[String]("The compatibility version of Spark")
17+
18+
sparkVersion := "3.2.1"
19+
sparkCompatVersion := { sparkVersion.value.split("\\.", 3).take(2).mkString(".") }
20+
21+
resolvers += "TUDelft Repository" at "https://simulation.tudelft.nl/maven/"
22+
23+
libraryDependencies ++= Seq(
24+
"org.apache.spark" %% "spark-sql" % sparkVersion.value % "provided",
25+
"org.apache.spark" %% "spark-core" % sparkVersion.value % "provided",
26+
"com.chuusai" %% "shapeless" % "2.3.3",
27+
"com.github.scopt" %% "scopt" % "3.7.1",
28+
"org.javatuples" % "javatuples" % "1.2",
29+
"ca.umontreal.iro" % "ssj" % "2.5",
30+
"xerces" % "xercesImpl" % "2.12.0" % Runtime,
31+
"org.specs2" %% "specs2-core" % "4.2.0" % Test,
32+
"org.specs2" %% "specs2-junit" % "4.2.0" % Test,
33+
"org.mockito" % "mockito-core" % "3.3.3" % Test,
34+
"org.scalatest" %% "scalatest" % "3.1.0" % Test withSources(),
35+
"junit" % "junit" % "4.13.1" % Test
36+
)
37+
38+
scalaVersion := "2.12.16"
639

7-
assemblyMergeStrategy in assembly := {
40+
scalacOptions ++= Seq(
41+
"-deprecation", // Emit warning and location for usages of deprecated APIs.
42+
"-encoding", "utf-8", // Specify character encoding used by source files.
43+
"-explaintypes", // Explain type errors in more detail.
44+
"-feature", // Emit warning and location for usages of features that should be imported explicitly.
45+
"-language:existentials", // Existential types (besides wildcard types) can be written and inferred
46+
"-language:experimental.macros", // Allow macro definition (besides implementation and application)
47+
"-language:higherKinds", // Allow higher-kinded types
48+
"-language:implicitConversions", // Allow definition of implicit functions called views
49+
"-unchecked", // Enable additional warnings where generated code depends on assumptions.
50+
"-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access.
51+
"-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
52+
"-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error.
53+
"-Xlint:delayedinit-select", // Selecting member of DelayedInit.
54+
"-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
55+
"-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
56+
"-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
57+
"-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
58+
"-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
59+
"-Xlint:nullary-unit", // Warn when nullary methods return Unit.
60+
"-Xlint:option-implicit", // Option.apply used implicit view.
61+
"-Xlint:package-object-classes", // Class or object defined in package object.
62+
"-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
63+
"-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
64+
"-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
65+
"-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
66+
)
67+
68+
scalacOptions ++=
69+
scalaVersion {
70+
case sv if sv.startsWith("2.13") => List(
71+
)
72+
73+
case sv if sv.startsWith("2.12") => List(
74+
"-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.
75+
"-Ypartial-unification", // Enable partial unification in type constructor inference
76+
"-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined.
77+
"-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
78+
"-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`.
79+
"-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
80+
"-Ywarn-nullary-unit", // Warn when nullary methods return Unit.
81+
"-Ywarn-numeric-widen" // Warn when numerics are widened.
82+
)
83+
84+
case _ => Nil
85+
}.value
86+
87+
// The REPL can’t cope with -Ywarn-unused:imports or -Xfatal-warnings so turn them off for the console
88+
Compile / console / scalacOptions --= Seq("-Ywarn-unused:imports", "-Xfatal-warnings")
89+
90+
javacOptions ++= Seq(
91+
"-Xlint",
92+
"-source", "1.8",
93+
"-target", "1.8",
94+
"-g:vars"
95+
)
96+
97+
assembly / assemblyMergeStrategy := {
898
case PathList("META-INF", "MANIFEST.MF") => MergeStrategy.discard
9-
case x => MergeStrategy.first
99+
case _ => MergeStrategy.first
10100
}
101+
102+
assembly / assemblyJarName := {
103+
moduleName.value + "-spark" + sparkCompatVersion.value + "_" +
104+
scalaBinaryVersion.value + "-" + version.value + ".assembly.jar"
105+
}

0 commit comments

Comments (0)