@@ -5,7 +5,7 @@ plugins {
     id "com.github.jk1.dependency-license-report" version "1.19"

     // Only used for testing
-    id 'com.marklogic.ml-gradle' version '4.6.0'
+    id 'com.marklogic.ml-gradle' version '4.8.0'
     id 'jacoco'
     id "org.sonarqube" version "4.4.1.3373"

@@ -31,24 +31,26 @@ configurations {
 }

 ext {
-    kafkaVersion = "3.5.1"
+    // Even though Kafka Connect 3.7.0 is out, we're staying with 3.6.1 in order to continue
+    // using the third-party Kafka JUnit tool. See https://github.com/mguenther/kafka-junit?tab=readme-ov-file
+    kafkaVersion = "3.6.1"
 }

 dependencies {
     compileOnly "org.apache.kafka:connect-api:${kafkaVersion}"
     compileOnly "org.apache.kafka:connect-json:${kafkaVersion}"
     compileOnly "org.apache.kafka:connect-runtime:${kafkaVersion}"
-    compileOnly "org.slf4j:slf4j-api:1.7.36"
+    compileOnly "org.slf4j:slf4j-api:2.0.13"

-    implementation 'com.marklogic:ml-javaclient-util:4.6.0'
+    implementation 'com.marklogic:ml-javaclient-util:4.8.0'
     // Force DHF to use the latest version of ml-app-deployer, which minimizes security vulnerabilities
-    implementation "com.marklogic:ml-app-deployer:4.6.0"
+    implementation "com.marklogic:ml-app-deployer:4.8.0"

-    implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.15.2"
+    implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.15.3"

     // Note that in general, the version of the DHF jar must match that of the deployed DHF instance. Different versions
     // may work together, but that behavior is not guaranteed.
-    implementation("com.marklogic:marklogic-data-hub:5.8.0") {
+    implementation("com.marklogic:marklogic-data-hub:6.0.0") {
         exclude module: "marklogic-client-api"
         exclude module: "ml-javaclient-util"
         exclude module: "ml-app-deployer"
@@ -63,13 +65,13 @@ dependencies {

     testImplementation "org.apache.kafka:connect-api:${kafkaVersion}"
     testImplementation "org.apache.kafka:connect-json:${kafkaVersion}"
-    testImplementation 'net.mguenther.kafka:kafka-junit:3.5.1'
+    testImplementation 'net.mguenther.kafka:kafka-junit:3.6.0'

-    testImplementation "org.apache.avro:avro-compiler:1.11.1"
+    testImplementation "org.apache.avro:avro-compiler:1.11.3"

     // Forcing logback to be used for test logging
-    testImplementation "ch.qos.logback:logback-classic:1.3.5"
-    testImplementation "org.slf4j:jcl-over-slf4j:1.7.36"
+    testImplementation "ch.qos.logback:logback-classic:1.3.14"
+    testImplementation "org.slf4j:jcl-over-slf4j:2.0.13"

     documentation files('LICENSE.txt')
     documentation files('NOTICE.txt')
@@ -115,29 +117,6 @@ shadowJar {
     exclude "scaffolding/**"
 }

-task copyJarToKafka(type: Copy, dependsOn: shadowJar) {
-    description = "Used for local development and testing; copies the jar to your local Kafka install"
-    from "build/libs"
-    into "${kafkaHome}/libs"
-}
-
-task copyPropertyFilesToKafka(type: Copy) {
-    description = "Used for local development and testing; copies the properties files to your local Kafka install"
-    from "config"
-    into "${kafkaHome}/config"
-    filter { String line ->
-        line.startsWith('ml.connection.username=') ? 'ml.connection.username=' + kafkaMlUsername : line
-    }
-    filter { String line ->
-        line.startsWith('ml.connection.password=') ? 'ml.connection.password=' + kafkaMlPassword : line
-    }
-}
-
-task deploy {
-    description = "Used for local development and testing; builds the jar and copies it and the properties files to your local Kafka install"
-    dependsOn = ["copyJarToKafka", "copyPropertyFilesToKafka"]
-}
-
 ext {
     confluentArchiveGroup = "Confluent Connector Archive"
     confluentTestingGroup = "Confluent Platform Local Testing"
@@ -212,77 +191,10 @@ task connectorArchive(type: Zip, dependsOn: connectorArchive_BuildDirectory, gro
     destinationDirectory = file('build/distro')
 }

-task installConnectorInConfluent(type: Exec, group: confluentTestingGroup, dependsOn: [connectorArchive]) {
-    description = "Uses 'Confluent-hub' to install the connector in your local Confluent Platform"
-    commandLine "confluent-hub", "install", "--no-prompt", "build/distro/${baseArchiveName}.zip"
-    ignoreExitValue = true
-}
-
-// See https://docs.confluent.io/confluent-cli/current/command-reference/local/confluent_local_destroy.html
-task destroyLocalConfluent(type: Exec, group: confluentTestingGroup) {
-    description = "Destroy the local Confluent Platform instance"
-    commandLine "confluent", "local", "destroy"
-    // Main reason this will fail is because Confluent is not running, which shouldn't cause a failure
-    ignoreExitValue = true
-}
-
-// See https://docs.confluent.io/confluent-cli/current/command-reference/local/services/confluent_local_services_start.html
-task startLocalConfluent(type: Exec, group: confluentTestingGroup) {
-    description = "Convenience task for starting a local instance of Confluent Platform"
-    commandLine "confluent", "local", "services", "start"
-}
-
-task loadDatagenPurchasesConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Load an instance of the Datagen connector into Confluent Platform for sending JSON documents to " +
-        "the 'purchases' topic"
-    commandLine "confluent", "local", "services", "connect", "connector", "load", "datagen-purchases-source", "-c",
-        "src/test/resources/confluent/datagen-purchases-source.json"
-}
-
-task loadMarkLogicPurchasesSinkConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for writing data to " +
-        "MarkLogic from the 'purchases' topic"
-    commandLine "confluent", "local", "services", "connect", "connector", "load", "marklogic-purchases-sink", "-c",
-        "src/test/resources/confluent/marklogic-purchases-sink.json"
-}
-
-task loadMarkLogicPurchasesSourceConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for reading rows from " +
-        "the demo/purchases view"
-    commandLine "confluent", "local", "services", "connect", "connector", "load", "marklogic-purchases-source", "-c",
-        "src/test/resources/confluent/marklogic-purchases-source.json"
-}
-
-task loadMarkLogicAuthorsSourceConnector(type: Exec, group: confluentTestingGroup) {
-    description = "Loads a source connector that retrieves authors from the citations.xml file, which is also used for " +
-        "all the automated tests"
-    commandLine "confluent", "local", "services", "connect", "connector", "load", "marklogic-authors-source", "-c",
-        "src/test/resources/confluent/marklogic-authors-source.json"
-}
+// Tasks for using the connector with Confluent Platform on Docker

-task loadMarkLogicEmployeesSourceConnector(type: Exec, group: confluentTestingGroup) {
-    commandLine "confluent", "local", "services", "connect", "connector", "load", "marklogic-employees-source", "-c",
-        "src/test/resources/confluent/marklogic-employees-source.json"
-}
-
-task setupLocalConfluent(group: confluentTestingGroup) {
-    description = "Start a local Confluent Platform instance and load the Datagen and MarkLogic connectors"
-}
-
-// Temporarily only loading the source connector to make manual testing easier, will re-enable all of these before 1.8.0
-// setupLocalConfluent.dependsOn startLocalConfluent, loadDatagenPurchasesConnector, loadMarkLogicPurchasesSinkConnector, loadMarkLogicPurchasesSourceConnector
-setupLocalConfluent.dependsOn startLocalConfluent, loadMarkLogicEmployeesSourceConnector
-
-loadDatagenPurchasesConnector.mustRunAfter startLocalConfluent
-loadMarkLogicPurchasesSinkConnector.mustRunAfter startLocalConfluent
-loadMarkLogicPurchasesSourceConnector.mustRunAfter startLocalConfluent
-loadMarkLogicAuthorsSourceConnector.mustRunAfter startLocalConfluent
-loadMarkLogicEmployeesSourceConnector.mustRunAfter startLocalConfluent
-
-task insertAuthors(type: Test) {
-    useJUnitPlatform()
-    systemProperty "AUTHOR_IDS", authorIds
-    description = "Insert a new author into the kafka-test-content database via a new citations XML document; " +
-        "use e.g. -PauthorIds=7,8,9 to insert 3 new authors with IDs of 7, 8, and 9"
-    include "com/marklogic/kafka/connect/source/debug/InsertAuthorsTest.class"
+task copyConnectorToDockerVolume(type: Copy, dependsOn: connectorArchive, group: confluentTestingGroup) {
+    description = "Copies the connector's archive directory to the Docker volume shared with the Connect server"
+    from "build/connectorArchive"
+    into "test-app/docker/confluent-marklogic-components"
 }
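With this commit, local testing against Confluent Platform on Docker amounts to building the connector archive and copying it into the directory shared with the Connect container. A minimal sketch of a convenience wrapper is below; the deployToDockerConnect task name is illustrative and not part of this commit, and it assumes the Connect container's plugin path includes the mounted test-app/docker/confluent-marklogic-components directory.

task deployToDockerConnect(group: confluentTestingGroup, dependsOn: copyConnectorToDockerVolume) {
    // Hypothetical convenience task: connectorArchive runs first (via the
    // copyConnectorToDockerVolume dependency), then the unzipped archive contents
    // land in the directory shared with the Connect container; restart the
    // Connect container so it picks up the refreshed connector.
    description = "Build the connector archive and copy it to the Docker volume shared with the Connect server"
}

Running ./gradlew deployToDockerConnect (or copyConnectorToDockerVolume directly) would then refresh the connector for the Dockerized Connect server.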