Commit 4b9c5a1 (parent: 046407c)

Bumping dependencies and using shadowJar

Also modified ConvertSinkRecordTest to use Jackson instead of gson so we can drop the gson dependency.

5 files changed: +56 −58 lines
README.md

Lines changed: 8 additions & 1 deletion

@@ -17,7 +17,14 @@ This is a connector for subscribing to Kafka queues and pushing messages to MarkLogic
 1. Copy the jar to the <kafkaHome>/libs on the remote server.
 1. Copy the two properties (config/marklogic-connect-distributed.properties config/marklogic-sink.properties) to <kafkaHome>/config on the remote server.
 
-See https://kafka.apache.org/quickstart for instructions on starting up Zookeeper and Kafka.
+See https://kafka.apache.org/quickstart for instructions on starting up Zookeeper and Kafka, which as of August 2022
+will instruct you to run the following commands (in separate terminal windows, both from the Kafka home directory):
+
+    bin/zookeeper-server-start.sh config/zookeeper.properties
+
+and:
+
+    bin/kafka-server-start.sh config/server.properties
 
 To start the Kafka connector in standalone mode (from the Kafka home directory):
 
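The context line closing this hunk leads into the README's standalone-mode instructions, which this diff does not show. For local testing, Kafka's stock script for that step is bin/connect-standalone.sh; a plausible invocation, assuming the repo ships a standalone worker config named marklogic-connect-standalone.properties (a hypothetical name, not visible in this diff) alongside the sink properties file mentioned above, would be:

    bin/connect-standalone.sh config/marklogic-connect-standalone.properties config/marklogic-sink.properties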

build.gradle

Lines changed: 31 additions & 30 deletions

@@ -1,29 +1,35 @@
 plugins {
     id 'java'
-    id 'net.saliman.properties' version '1.4.6'
+    id 'net.saliman.properties' version '1.5.2'
+    id 'com.github.johnrengelman.shadow' version '7.1.2'
     id "com.github.jk1.dependency-license-report" version "1.3"
 }
 
-sourceCompatibility = 1.8
+java {
+    sourceCompatibility = 1.8
+    targetCompatibility = 1.8
+}
 
 repositories {
     mavenCentral()
-    jcenter()
 }
 
 configurations {
     documentation
     assets
 }
 
+ext {
+    kafkaVersion = "2.8.1"
+}
+
 dependencies {
-    compileOnly "org.apache.kafka:connect-api:2.5.0"
-    compileOnly "org.apache.kafka:connect-json:2.5.0"
+    compileOnly "org.apache.kafka:connect-api:${kafkaVersion}"
+    compileOnly "org.apache.kafka:connect-json:${kafkaVersion}"
 
-    compile "com.marklogic:marklogic-client-api:5.3.0"
-    compile("com.marklogic:marklogic-data-hub:5.2.4") {
-        // Prefer the version above
-        exclude module: "marklogic-client-api"
+    implementation "com.marklogic:marklogic-client-api:5.5.3"
+
+    implementation("com.marklogic:marklogic-data-hub:5.7.2") {
         // Excluding these because there's no need for them
         exclude module: "spring-boot-autoconfigure"
         exclude module: "spring-integration-http"

@@ -34,18 +40,14 @@ dependencies {
         exclude module: "logback-classic"
     }
 
-    testCompile "org.junit.jupiter:junit-jupiter-api:5.3.0"
-    testCompile "org.apache.kafka:connect-api:2.5.0"
-    testCompile "org.apache.kafka:connect-json:2.5.0"
-    testCompile "com.google.code.gson:gson:2.8.6"
-
-    // Needed by Gradle 4.6+ - see https://www.petrikainulainen.net/programming/testing/junit-5-tutorial-running-unit-tests-with-gradle/
-    testRuntime "org.junit.jupiter:junit-jupiter-engine:5.3.0"
+    testImplementation 'org.junit.jupiter:junit-jupiter:5.9.0'
+    testImplementation "org.apache.kafka:connect-api:${kafkaVersion}"
+    testImplementation "org.apache.kafka:connect-json:${kafkaVersion}"
 
     // Forcing logback to be used for test logging
-    testRuntime "ch.qos.logback:logback-classic:1.1.8"
-    testRuntime group: "org.slf4j", name: "jcl-over-slf4j", version: "1.7.22"
-    testRuntime group: "org.slf4j", name: "slf4j-api", version: "1.7.22"
+    testImplementation "ch.qos.logback:logback-classic:1.2.11"
+    testImplementation "org.slf4j:jcl-over-slf4j:1.7.36"
+    testImplementation "org.slf4j:slf4j-api:1.7.36"
 
     documentation files('LICENSE.txt')
     documentation files('NOTICE.txt')
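A note on the dependency changes above: the compile and testCompile configurations were removed in Gradle 7, so the switch to implementation and testImplementation (together with the Gradle 7.5.1 wrapper bump below) is required rather than cosmetic, and the kafkaVersion ext property simply keeps the connect-api and connect-json versions in sync.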
@@ -60,21 +62,20 @@ test {
     useJUnitPlatform()
 }
 
-// Customize the Java plugin's jar task to produce a "fat" jar with all dependencies included
-jar {
-    manifest {
-        attributes 'Implementation-Title': 'Kafka-Connect-MarkLogic',
-            'Implementation-Version': version
-    }
-    from { configurations.compile.collect { it.isDirectory() ? it : zipTree(it) } }
+shadowJar {
+    // Exclude DHF source files
+    exclude "hub-internal-artifacts/**"
+    exclude "hub-internal-config/**"
+    exclude "ml-config/**"
+    exclude "ml-modules*/**"
+    exclude "scaffolding/**"
 }
 
-task copyJarToKafka(type: Copy) {
+task copyJarToKafka(type: Copy, dependsOn: shadowJar) {
     description = "Used for local development and testing; copies the jar to your local Kafka install"
     from "build/libs"
     into "${kafkaHome}/libs"
 }
-copyJarToKafka.mustRunAfter(jar)
 
 task copyPropertyFilesToKafka(type: Copy) {
     description = "Used for local development and testing; copies the properties files to your local Kafka install"
@@ -84,7 +85,7 @@ task copyPropertyFilesToKafka(type: Copy) {
 
 task deploy {
     description = "Used for local development and testing; builds the jar and copies it and the properties files to your local Kafka install"
-    dependsOn = ["jar", "copyJarToKafka", "copyPropertyFilesToKafka"]
+    dependsOn = ["copyJarToKafka", "copyPropertyFilesToKafka"]
 }
 
 // Tasks for building the archive required for submitting to the Confluence Connector Hub
@@ -134,7 +135,7 @@ task connectorArchive_CopyDependenciesToBuildDirectory(type: Copy) {
     dependsOn = [jar]
 
     from jar
-    from configurations.compile
+    from configurations.runtimeClasspath.findAll { it.name.endsWith('jar') }
     into "${baseArchiveBuildDir}/${baseArchiveName}/lib"
 }
 
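The compile configuration this archive task used to copy from is not resolvable under Gradle 7, so the connector archive now takes its dependencies from runtimeClasspath, filtered to entries whose names end in "jar" so that class directories on the classpath are not copied into lib.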

gradle.properties

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
 group=com.marklogic
-version=1.6.1
+version=1.7.0-SNAPSHOT
 
 # For the Confluent Connector Archive
 componentOwner=marklogic
gradle/wrapper/gradle-wrapper.properties

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-4.10-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
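If the wrapper needs to be regenerated locally rather than edited by hand, Gradle's built-in wrapper task updates this file:

    ./gradlew wrapper --gradle-version 7.5.1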

src/test/java/com/marklogic/kafka/connect/sink/ConvertSinkRecordTest.java

Lines changed: 15 additions & 25 deletions

@@ -1,7 +1,7 @@
 package com.marklogic.kafka.connect.sink;
 
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.marklogic.client.document.DocumentWriteOperation;
 import com.marklogic.client.io.BytesHandle;
 import com.marklogic.client.io.DocumentMetadataHandle;

@@ -20,12 +20,11 @@
  */
 public class ConvertSinkRecordTest {
 
-    DefaultSinkRecordConverter converter;
-    MarkLogicSinkTask markLogicSinkTask = new MarkLogicSinkTask();
-    private JsonObject doc1, doc2, doc3;
+    private DefaultSinkRecordConverter converter;
+    private final MarkLogicSinkTask markLogicSinkTask = new MarkLogicSinkTask();
 
     @Test
-    public void allPropertiesSet() throws IOException {
+    public void allPropertiesSet() {
         Map<String, Object> config = new HashMap<>();
         config.put("ml.document.collections", "one,two");
         config.put("ml.document.format", "json");

@@ -57,10 +56,9 @@ public void allPropertiesSet() {
     }
 
     @Test
-    public void noPropertiesSet() throws IOException {
+    public void noPropertiesSet() {
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord("doesn't matter"));
 

@@ -73,11 +71,10 @@ public void noPropertiesSet() {
     }
 
     @Test
-    public void UriWithUUIDStrategy() throws IOException {
+    public void uriWithUUIDStrategy() {
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         kafkaConfig.put("ml.id.strategy", "UUID");
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord("doesn't matter"));
 

@@ -90,10 +87,9 @@ public void UriWithUUIDStrategy() throws IOException {
     }
 
     @Test
-    public void UriWithDefaultStrategy() throws IOException {
+    public void uriWithDefaultStrategy() {
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord("doesn't matter"));
 

@@ -106,11 +102,10 @@ public void UriWithDefaultStrategy() throws IOException {
     }
 
     @Test
-    public void UriWithKafkaMetaData() throws IOException {
+    public void uriWithKafkaMetaData() {
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         kafkaConfig.put("ml.id.strategy", "KAFKA_META_WITH_SLASH");
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord("doesn't matter"));
 

@@ -122,14 +117,12 @@ public void UriWithKafkaMetaData() throws IOException {
     }
 
     @Test
-    public void UriWithJsonPath() throws IOException {
-        JsonParser parser = new JsonParser();
-        doc1 = parser.parse("{\"f1\":\"100\"}").getAsJsonObject();
+    public void uriWithJsonPath() throws IOException {
+        JsonNode doc1 = new ObjectMapper().readTree("{\"f1\":\"100\"}");
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         kafkaConfig.put("ml.id.strategy", "JSONPATH");
         kafkaConfig.put("ml.id.strategy.paths", "/f1");
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord(doc1));
 

@@ -142,14 +135,12 @@ public void UriWithJsonPath() throws IOException {
     }
 
     @Test
-    public void UriWithHashedJsonPaths() throws IOException {
-        JsonParser parser = new JsonParser();
-        doc1 = parser.parse("{\"f1\":\"100\",\"f2\":\"200\"}").getAsJsonObject();
+    public void uriWithHashedJsonPaths() throws IOException {
+        JsonNode doc1 = new ObjectMapper().readTree("{\"f1\":\"100\",\"f2\":\"200\"}");
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         kafkaConfig.put("ml.id.strategy", "HASH");
         kafkaConfig.put("ml.id.strategy.paths", "/f1,/f2");
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord(doc1));
 

@@ -162,11 +153,10 @@ public void UriWithHashedJsonPaths() throws IOException {
     }
 
     @Test
-    public void UriWithHashedKafkaMeta() throws IOException {
+    public void uriWithHashedKafkaMeta() {
         Map<String, Object> kafkaConfig = new HashMap<String, Object>();
         kafkaConfig.put("ml.id.strategy", "KAFKA_META_HASHED");
         converter = new DefaultSinkRecordConverter(kafkaConfig);
-        converter.getDocumentWriteOperationBuilder();
 
         DocumentWriteOperation op = converter.convert(newSinkRecord("doesn't matter"));
 

@@ -179,7 +169,7 @@ public void UriWithHashedKafkaMeta() throws IOException {
     }
 
     @Test
-    public void binaryContent() throws IOException {
+    public void binaryContent() {
         converter = new DefaultSinkRecordConverter(new HashMap<>());
 
         DocumentWriteOperation op = converter.convert(newSinkRecord("hello world".getBytes()));
