@@ -50,7 +50,7 @@ dependencies {
 
     // Note that in general, the version of the DHF jar must match that of the deployed DHF instance. Different versions
     // may work together, but that behavior is not guaranteed.
-    implementation("com.marklogic:marklogic-data-hub:5.8.0") {
+    implementation("com.marklogic:marklogic-data-hub:6.0.0") {
         exclude module: "marklogic-client-api"
         exclude module: "ml-javaclient-util"
         exclude module: "ml-app-deployer"
@@ -117,29 +117,6 @@ shadowJar {
     exclude "scaffolding/**"
 }
 
-task copyJarToKafka(type: Copy, dependsOn: shadowJar) {
-    description = "Used for local development and testing; copies the jar to your local Kafka install"
-    from "build/libs"
-    into "${kafkaHome}/libs"
-}
-
-task copyPropertyFilesToKafka(type: Copy) {
-    description = "Used for local development and testing; copies the properties files to your local Kafka install"
-    from "config"
-    into "${kafkaHome}/config"
-    filter { String line ->
-        line.startsWith('ml.connection.username=') ? 'ml.connection.username=' + kafkaMlUsername : line
-    }
-    filter { String line ->
-        line.startsWith('ml.connection.password=') ? 'ml.connection.password=' + kafkaMlPassword : line
-    }
-}
-
-task deploy {
-    description = "Used for local development and testing; builds the jar and copies it and the properties files to your local Kafka install"
-    dependsOn = ["copyJarToKafka", "copyPropertyFilesToKafka"]
-}
-
 ext {
     confluentArchiveGroup = "Confluent Connector Archive"
     confluentTestingGroup = "Confluent Platform Local Testing"
@@ -214,52 +191,10 @@ task connectorArchive(type: Zip, dependsOn: connectorArchive_BuildDirectory, gro
     destinationDirectory = file('build/distro')
 }
 
-// Tasks for working with Confluent Platform running locally.
-// See "Testing with Confluent Platform" in CONTRIBUTING.md
+// Tasks for using the connector with Confluent Platform on Docker
 
-task installConnectorInConfluent(type: Copy, dependsOn: connectorArchive, group: confluentTestingGroup) {
+task copyConnectorToDockerVolume(type: Copy, dependsOn: connectorArchive, group: confluentTestingGroup) {
     description = "Copies the connector's archive directory to the Docker volume shared with the Connect server"
     from "build/connectorArchive"
-    into "src/test/confluent-platform-example/docker/confluent-marklogic-components"
-}
-
-task loadDatagenPurchasesConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Load an instance of the Datagen connector into Confluent Platform for sending JSON documents to " +
-        "the 'purchases' topic"
-    commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json",
-        "--data", "@src/test/resources/confluent/datagen-purchases-source.json", "http://localhost:8083/connectors"
-}
-
-task loadMarkLogicPurchasesSinkConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for writing data to " +
-        "MarkLogic from the 'purchases' topic"
-    commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json",
-        "--data", "@src/test/resources/confluent/marklogic-purchases-sink.json", "http://localhost:8083/connectors"
-}
-
-task loadMarkLogicPurchasesSourceConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for reading rows from " +
-        "the demo/purchases view"
-    commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json",
-        "--data", "@src/test/resources/confluent/marklogic-purchases-source.json", "http://localhost:8083/connectors"
-}
-
-task loadMarkLogicAuthorsSourceConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Loads a source connector that retrieves authors from the citations.xml file, which is also used for " +
-        "all the automated tests"
-    commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json",
-        "--data", "@src/test/resources/confluent/marklogic-authors-source.json", "http://localhost:8083/connectors"
-}
-
-task loadMarkLogicEmployeesSourceConnector(type: Exec, group: confluentTestingGroup) {
-    commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json",
-        "--data", "@src/test/resources/confluent/marklogic-employees-source.json", "http://localhost:8083/connectors"
-}
-
-task insertAuthors(type: Test) {
-    useJUnitPlatform()
-    systemProperty "AUTHOR_IDS", authorIds
-    description = "Insert a new author into the kafka-test-content database via a new citations XML document; " +
-        "use e.g. -PauthorIds=7,8,9 to insert 3 new authors with IDs of 7, 8, and 9"
-    include "com/marklogic/kafka/connect/source/debug/InsertAuthorsTest.class"
+    into "test-app/docker/confluent-marklogic-components"
 }