// Applies the SonarQube Gradle plugin so code-quality analysis can be run against this build.
plugins {
    id "org.sonarqube" version "6.1.0.5360"
}
4+
// Configures SonarQube analysis: the project key, the (local) Sonar server URL, and the
// JaCoCo XML coverage report found under the code-coverage-report project.
sonar {
    properties {
        property "sonar.projectKey", "flux"
        property "sonar.host.url", "http://localhost:9000"
        property "sonar.coverage.jacoco.xmlReportPaths", "code-coverage-report/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml"
        // Avoids a warning from Gradle.
        property "sonar.gradle.skipCompile", "true"
    }
}
14+
// NOTE(review): this region is a rendered git diff, not valid Gradle. Each line still carries the
// diff viewer's old/new line numbers and +/- markers (e.g. "115subprojects {", "7- sourceCompatibility"),
// string literals have injected spaces, and "@@ ... @@" hunk headers stand in for omitted lines.
// Recover the real build.gradle from version control before editing; only comments are added here.
115subprojects {
216 apply plugin : " java-library"
317
418 group = " com.marklogic"
519
620 java {
7- sourceCompatibility = 11
8- targetCompatibility = 11
21+ // Flux requires Java 11 for all operations besides splitting and embedding, which require Java 17 due to
22+ // the requirements of the langchain4j dependency.
23+ toolchain {
24+ languageVersion = JavaLanguageVersion . of(11 )
25+ }
926 }
1027
28+ // Allows for quickly identifying compiler warnings.
29+ tasks. withType(JavaCompile ) {
30+ options. compilerArgs << ' -Xlint:unchecked'
31+ options. deprecation = true
32+ }
33+
34+ javadoc. failOnError = false
35+ // Ignores warnings on params that don't have descriptions, which is a little too noisy
36+ javadoc. options. addStringOption(' Xdoclint:none' , ' -quiet' )
37+
1138 repositories {
1239 mavenCentral()
1340 mavenLocal()
// NOTE(review): hunk header below — several lines are omitted from this view, presumably the
// closing of repositories and the configurations/eachDependency opening plus a Jackson group
// check (the "details" receiver and the 2.15.2/"version used by Spark" lines imply it). Confirm against VCS.
@@ -22,7 +49,42 @@ subprojects {
2249 details. useVersion ' 2.15.2'
2350 details. because ' Need to match the version used by Spark.'
2451 }
52+ if (details. requested. group. equals(" org.slf4j" )) {
53+ details. useVersion " 2.0.16"
54+ details. because " Ensures that slf4j-api 1.x does not appear on the Flux classpath in particular, which can " +
55+ " lead to this issue - https://www.slf4j.org/codes.html#StaticLoggerBinder."
56+ }
57+ if (details. requested. group. equals(" org.apache.logging.log4j" )) {
58+ details. useVersion " 2.24.3"
59+ details. because " Need to match the version used by Apache Tika. Spark uses 2.20.0 but automated tests confirm " +
60+ " that Spark seems fine with 2.24.3."
61+ }
2562 }
63+
64+ resolutionStrategy {
65+ // By default, Spark 3.5.x does not include the log4j 1.x dependency via its zookeeper dependency. But somehow, by
66+ // adding hadoop-client 3.3.4 to the mix, the log4j 1.x dependency comes via the zookeeper 3.6.3 dependency. Per
67+ // the release notes at https://zookeeper.apache.org/doc/r3.6.4/releasenotes.html, using zookeeper 3.6.4 - which
68+ // removes log4j 1.x, thus avoiding the major CVE associated with log4j 1.x - appears safe, which is confirmed by
69+ // tests as well.
70+ force " org.apache.zookeeper:zookeeper:3.6.4"
71+
72+ // Avoids a classpath conflict between Spark and tika-parser-microsoft-module. Forces Spark to use the
73+ // version that tika-parser-microsoft-module wants.
74+ // Avoids another classpath conflict between Spark and tika-parser-microsoft-module.
75+ force " org.apache.commons:commons-compress:1.27.1"
76+ }
77+
78+ // Without this exclusion, we have multiple slf4j providers, leading to an ugly warning at the start
79+ // of each Flux execution.
80+ exclude group : " org.slf4j" , module : " slf4j-reload4j"
81+
82+ // The rocksdbjni dependency weighs in at 50mb and so far does not appear necessary for our use of Spark.
83+ exclude module : " rocksdbjni"
84+ }
85+
86+ task allDeps(type : DependencyReportTask ) {
87+ description = " Allows for generating dependency reports for every subproject in a single task."
2688 }
2789
2890 test {
// NOTE(review): hunk header below — lines omitted from this view, presumably the useJUnitPlatform/
// testLogging opening (the "events"/"exceptionFormat" settings that follow belong to testLogging). Confirm against VCS.
@@ -31,6 +93,20 @@ subprojects {
3193 events ' started' , ' passed' , ' skipped' , ' failed'
3294 exceptionFormat ' full'
3395 }
96+ jvmArgs = [
97+ // Needed for all Java 17 testing.
98+ " --add-opens" , " java.base/sun.nio.ch=ALL-UNNAMED" ,
99+
100+ // For Spark's SerializationDebugger when using Java 17. See ReprocessTest for one example of why this is needed.
101+ " --add-opens" , " java.base/sun.security.action=ALL-UNNAMED" ,
102+
103+ // Needed by the JDBC tests.
104+ " --add-opens" , " java.base/sun.util.calendar=ALL-UNNAMED" ,
105+
106+ // Needed by CustomImportTest
107+ " --add-opens" , " java.base/java.io=ALL-UNNAMED" ,
108+ " --add-opens" , " java.base/sun.nio.cs=ALL-UNNAMED"
109+ ]
34110 }
35111}
36112
@@ -39,6 +115,7 @@ task gettingStartedZip(type: Zip) {
39115 " on the GitHub release page."
40116 from " examples/getting-started"
41117 exclude " build" , " .gradle" , " gradle-*.properties" , " flux" , " .gitignore" , " marklogic-flux"
118+ exclude " src/main/ml-schemas/tde/chunks.json"
42119 into " marklogic-flux-getting-started-${ version} "
43120 archiveFileName = " marklogic-flux-getting-started-${ version} .zip"
44121 destinationDirectory = file(" build" )
0 commit comments