Skip to content

Commit 91fa572

Browse files
authored
Develop (#49)
* Ensuring optional parameters are actually optional and do not cause a crash when they are not included. * Fixing some Boolean and Integer casts now that the initial configuration object is a HashMap of <String, Object> instead of <String, String>. Also fixed some tests that used that configuration object. * Updating the build to use the MarkLogic Client API v5.3.0 and the MarkLogic Data Hub v5.2.4. This should solve problems with running the connector with Java 11.
1 parent d47d98f commit 91fa572

13 files changed

+144
-132
lines changed

build.gradle

+8-2
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,10 @@ dependencies {
2020
compileOnly "org.apache.kafka:connect-api:2.5.0"
2121
compileOnly "org.apache.kafka:connect-json:2.5.0"
2222

23-
24-
compile ("com.marklogic:marklogic-data-hub:5.2.2") {
23+
compile "com.marklogic:marklogic-client-api:5.3.0"
24+
compile ("com.marklogic:marklogic-data-hub:5.2.4") {
25+
// Prefer the version above
26+
exclude module: "marklogic-client-api"
2527
// Excluding these because there's no need for them
2628
exclude module: "spring-boot-autoconfigure"
2729
exclude module: "spring-integration-http"
@@ -60,6 +62,10 @@ test {
6062

6163
// Customize the Java plugin's jar task to produce a "fat" jar with all dependencies included
6264
jar {
65+
manifest {
66+
attributes 'Implementation-Title': 'Kafka-Connect-MarkLogic',
67+
'Implementation-Version': version
68+
}
6369
from { configurations.compile.collect { it.isDirectory() ? it : zipTree(it) } }
6470
}
6571

gradle.properties

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
group=com.marklogic
2-
version=1.5.0
2+
version=1.5.2
33

44
# For the Confluent Connector Archive
55
componentOwner=marklogic

src/main/java/com/marklogic/client/id/strategy/IdStrategyFactory.java

+5-6
Original file line numberDiff line numberDiff line change
@@ -6,22 +6,21 @@
66

77
public class IdStrategyFactory {
88

9-
public static IdStrategy getIdStrategy(Map<String, String> kafkaConfig) {
10-
String strategyType = kafkaConfig.get(MarkLogicSinkConfig.ID_STRATEGY);
11-
String strategyPaths= kafkaConfig.get(MarkLogicSinkConfig.ID_STRATEGY_PATH);
9+
public static IdStrategy getIdStrategy(Map<String, Object> parsedConfig) {
10+
String strategyType = (String) parsedConfig.get(MarkLogicSinkConfig.ID_STRATEGY);
11+
String strategyPaths= (String) parsedConfig.get(MarkLogicSinkConfig.ID_STRATEGY_PATH);
1212

1313
switch((strategyType != null) ? strategyType : "UUID") {
1414
case "JSONPATH":
1515
return (new JSONPathStrategy(strategyPaths.trim().split(",")[0]));
1616
case "HASH":
1717
return (new HashedJSONPathsStrategy(strategyPaths.trim().split(",")));
18-
case "UUID":
19-
return (new DefaultStrategy());
2018
case "KAFKA_META_WITH_SLASH":
2119
return (new KafkaMetaStrategy());
2220
case "KAFKA_META_HASHED":
2321
return (new HashedKafkaMetaStrategy());
24-
default:
22+
case "UUID":
23+
default:
2524
return (new DefaultStrategy());
2625
}
2726
}

src/main/java/com/marklogic/kafka/connect/DatabaseClientConfigBuilder.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@
99
*/
1010
public interface DatabaseClientConfigBuilder {
1111

12-
DatabaseClientConfig buildDatabaseClientConfig(Map<String, String> kafkaConfig);
12+
DatabaseClientConfig buildDatabaseClientConfig(Map<String, Object> kafkaConfig);
1313

1414
}

src/main/java/com/marklogic/kafka/connect/DefaultDatabaseClientConfigBuilder.java

+26-32
Original file line numberDiff line numberDiff line change
@@ -5,19 +5,13 @@
55
import java.util.Map;
66

77
import java.io.FileInputStream;
8-
import java.io.FileNotFoundException;
9-
import java.io.IOException;
108
import java.io.InputStream;
119
import java.security.KeyManagementException;
1210
import java.security.KeyStore;
1311
import java.security.KeyStoreException;
14-
import java.security.NoSuchAlgorithmException;
15-
import java.security.UnrecoverableKeyException;
16-
import java.security.cert.CertificateException;
1712

1813
import javax.net.ssl.KeyManager;
1914
import javax.net.ssl.KeyManagerFactory;
20-
import javax.net.ssl.SSLContext;
2115
import javax.net.ssl.TrustManager;
2216
import javax.net.ssl.TrustManagerFactory;
2317

@@ -31,35 +25,36 @@
3125
public class DefaultDatabaseClientConfigBuilder implements DatabaseClientConfigBuilder {
3226

3327
@Override
34-
public DatabaseClientConfig buildDatabaseClientConfig(Map<String, String> kafkaConfig) {
28+
public DatabaseClientConfig buildDatabaseClientConfig(Map<String, Object> parsedConfig) {
29+
3530
DatabaseClientConfig clientConfig = new DatabaseClientConfig();
36-
clientConfig.setCertFile(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_CERT_FILE));
37-
clientConfig.setCertPassword(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_CERT_PASSWORD));
31+
clientConfig.setCertFile((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_CERT_FILE));
32+
clientConfig.setCertPassword((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_CERT_PASSWORD));
3833
clientConfig.setTrustManager(new SimpleX509TrustManager());
39-
clientConfig = configureHostNameVerifier(clientConfig,kafkaConfig);
40-
String securityContextType = kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_SECURITY_CONTEXT_TYPE).toUpperCase();
34+
clientConfig = configureHostNameVerifier(clientConfig,parsedConfig);
35+
String securityContextType = ((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_SECURITY_CONTEXT_TYPE)).toUpperCase();
4136
clientConfig.setSecurityContextType(SecurityContextType.valueOf(securityContextType));
42-
String database = kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_DATABASE);
37+
String database = (String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_DATABASE);
4338
if (database != null && database.trim().length() > 0) {
4439
clientConfig.setDatabase(database);
4540
}
46-
String connType = kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_TYPE);
41+
String connType = (String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_TYPE);
4742
if (connType != null && connType.trim().length() > 0) {
4843
clientConfig.setConnectionType(DatabaseClient.ConnectionType.valueOf(connType.toUpperCase()));
4944
}
50-
clientConfig.setExternalName(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_EXTERNAL_NAME));
51-
clientConfig.setHost(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_HOST));
52-
clientConfig.setPassword(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_PASSWORD));
53-
clientConfig.setPort(Integer.parseInt(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_PORT)));
54-
String customSsl = kafkaConfig.get(MarkLogicSinkConfig.SSL);
55-
if (customSsl != null && Boolean.parseBoolean(customSsl)) {
56-
clientConfig = configureCustomSslConnection(clientConfig, kafkaConfig);
45+
clientConfig.setExternalName((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_EXTERNAL_NAME));
46+
clientConfig.setHost((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_HOST));
47+
clientConfig.setPassword((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_PASSWORD));
48+
clientConfig.setPort((Integer) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_PORT));
49+
Boolean customSsl = (Boolean) parsedConfig.get(MarkLogicSinkConfig.SSL);
50+
if (customSsl != null && customSsl) {
51+
clientConfig = configureCustomSslConnection(clientConfig, parsedConfig, customSsl);
5752
}
58-
String simpleSsl = kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_SIMPLE_SSL);
59-
if (simpleSsl != null && Boolean.parseBoolean(simpleSsl)) {
53+
Boolean simpleSsl = (Boolean) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_SIMPLE_SSL);
54+
if (simpleSsl != null && simpleSsl) {
6055
clientConfig = configureSimpleSsl(clientConfig);
6156
}
62-
clientConfig.setUsername(kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_USERNAME));
57+
clientConfig.setUsername((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_USERNAME));
6358
return clientConfig;
6459
}
6560

@@ -82,8 +77,8 @@ protected DatabaseClientConfig configureSimpleSsl(DatabaseClientConfig clientCon
8277
*
8378
* @param clientConfig
8479
*/
85-
protected DatabaseClientConfig configureHostNameVerifier(DatabaseClientConfig clientConfig, Map<String, String> kafkaConfig) {
86-
String sslHostNameVerifier = kafkaConfig.get(MarkLogicSinkConfig.SSL_HOST_VERIFIER);
80+
protected DatabaseClientConfig configureHostNameVerifier(DatabaseClientConfig clientConfig, Map<String, Object> parsedConfig) {
81+
String sslHostNameVerifier = (String) parsedConfig.get(MarkLogicSinkConfig.SSL_HOST_VERIFIER);
8782
if ("ANY".equals(sslHostNameVerifier))
8883
clientConfig.setSslHostnameVerifier(DatabaseClientFactory.SSLHostnameVerifier.ANY);
8984
else if ("COMMON".equals(sslHostNameVerifier))
@@ -95,18 +90,17 @@ else if ("STRICT".equals(sslHostNameVerifier))
9590
return clientConfig;
9691
}
9792

98-
protected DatabaseClientConfig configureCustomSslConnection(DatabaseClientConfig clientConfig, Map<String, String> kafkaConfig) {
99-
String ssl = kafkaConfig.get(MarkLogicSinkConfig.SSL);
100-
String tlsVersion = kafkaConfig.get(MarkLogicSinkConfig.TLS_VERSION);
101-
String sslMutualAuth = kafkaConfig.get(MarkLogicSinkConfig.SSL_MUTUAL_AUTH);
93+
protected DatabaseClientConfig configureCustomSslConnection(DatabaseClientConfig clientConfig, Map<String, Object> parsedConfig, Boolean ssl) {
94+
String tlsVersion = (String) parsedConfig.get(MarkLogicSinkConfig.TLS_VERSION);
95+
String sslMutualAuth = (String) parsedConfig.get(MarkLogicSinkConfig.SSL_MUTUAL_AUTH);
10296
SSLContext sslContext = null;
103-
String securityContextType = kafkaConfig.get(MarkLogicSinkConfig.CONNECTION_SECURITY_CONTEXT_TYPE).toUpperCase();
97+
String securityContextType = ((String) parsedConfig.get(MarkLogicSinkConfig.CONNECTION_SECURITY_CONTEXT_TYPE)).toUpperCase();
10498
clientConfig.setSecurityContextType(SecurityContextType.valueOf(securityContextType));
10599

106100
if ("BASIC".equals(securityContextType) ||
107101
"DIGEST".equals(securityContextType)
108102
) {
109-
if (ssl != null && Boolean.parseBoolean(ssl)) {
103+
if (ssl != null && ssl) {
110104
if (sslMutualAuth != null && Boolean.parseBoolean(sslMutualAuth)) {
111105
/*2 way ssl changes*/
112106
KeyStore clientKeyStore = null;
@@ -155,7 +149,7 @@ protected DatabaseClientConfig configureCustomSslConnection(DatabaseClientConfig
155149
}
156150
clientConfig.setSslContext(sslContext);
157151
}
158-
else {/*1wayssl*/
152+
else {/* 1-way ssl */
159153
TrustManager[] trust = new TrustManager[] { new SimpleX509TrustManager()};
160154
try {
161155
if (tlsVersion != null && tlsVersion.trim().length() > 0 ) {

src/main/java/com/marklogic/kafka/connect/sink/DefaultSinkRecordConverter.java

+12-21
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,13 @@
33
import java.io.IOException;
44
import java.nio.charset.StandardCharsets;
55
import java.util.Collections;
6-
import java.util.Date;
76
import java.util.Map;
87
import org.apache.kafka.connect.data.Schema;
98
import org.apache.kafka.connect.data.Struct;
109

1110
import org.apache.kafka.connect.json.JsonConverter;
1211
import org.apache.kafka.connect.sink.SinkRecord;
1312
import org.apache.kafka.connect.storage.Converter;
14-
import org.slf4j.Logger;
15-
import org.slf4j.LoggerFactory;
1613

1714
import com.marklogic.client.document.DocumentWriteOperation;
1815
import com.marklogic.client.id.strategy.IdStrategyFactory;
@@ -24,19 +21,13 @@
2421
import com.marklogic.client.io.Format;
2522
import com.marklogic.client.io.StringHandle;
2623
import com.marklogic.client.io.marker.AbstractWriteHandle;
27-
import org.apache.kafka.connect.json.JsonConverter;
28-
import org.apache.kafka.connect.sink.SinkRecord;
29-
import org.apache.kafka.connect.storage.Converter;
30-
import java.util.Collections;
31-
import org.apache.kafka.connect.json.JsonConverter;
3224

3325
/**
3426
* Handles converting a SinkRecord into a DocumentWriteOperation via the properties in the given config map.
3527
*/
3628
public class DefaultSinkRecordConverter implements SinkRecordConverter {
3729

3830
private static final Converter JSON_CONVERTER;
39-
private static final Logger logger = LoggerFactory.getLogger(DefaultSinkRecordConverter.class);
4031
static {
4132
JSON_CONVERTER = new JsonConverter();
4233
JSON_CONVERTER.configure(Collections.singletonMap("schemas.enable", "false"), false);
@@ -48,30 +39,30 @@ public class DefaultSinkRecordConverter implements SinkRecordConverter {
4839
private Boolean addTopicToCollections = false;
4940
private IdStrategy idStrategy = null;
5041

51-
public DefaultSinkRecordConverter(Map<String, String> kafkaConfig) {
52-
53-
String val = kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_COLLECTIONS_ADD_TOPIC);
54-
if (val != null && val.trim().length() > 0) {
55-
addTopicToCollections = Boolean.parseBoolean(val.trim());
42+
public DefaultSinkRecordConverter(Map<String, Object> parsedConfig) {
43+
44+
Boolean booleanVal = (Boolean) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_COLLECTIONS_ADD_TOPIC);
45+
if (booleanVal != null) {
46+
addTopicToCollections = booleanVal;
5647
}
5748

5849
documentWriteOperationBuilder = new DocumentWriteOperationBuilder()
59-
.withCollections(kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_COLLECTIONS))
60-
.withPermissions(kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_PERMISSIONS))
61-
.withUriPrefix(kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_URI_PREFIX))
62-
.withUriSuffix(kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_URI_SUFFIX))
50+
.withCollections((String) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_COLLECTIONS))
51+
.withPermissions((String) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_PERMISSIONS))
52+
.withUriPrefix((String) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_URI_PREFIX))
53+
.withUriSuffix((String) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_URI_SUFFIX))
6354
;
6455

65-
val = kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_FORMAT);
56+
String val = (String) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_FORMAT);
6657
if (val != null && val.trim().length() > 0) {
6758
format = Format.valueOf(val.toUpperCase());
6859
}
69-
val = kafkaConfig.get(MarkLogicSinkConfig.DOCUMENT_MIMETYPE);
60+
val = (String) parsedConfig.get(MarkLogicSinkConfig.DOCUMENT_MIMETYPE);
7061
if (val != null && val.trim().length() > 0) {
7162
mimeType = val;
7263
}
7364
//Get the correct ID or URI generation strategy based on the configuration
74-
idStrategy = IdStrategyFactory.getIdStrategy(kafkaConfig);
65+
idStrategy = IdStrategyFactory.getIdStrategy(parsedConfig);
7566
}
7667

7768
@Override

src/main/java/com/marklogic/kafka/connect/sink/MarkLogicSinkConfig.java

+24-24
Original file line numberDiff line numberDiff line change
@@ -50,46 +50,46 @@ public class MarkLogicSinkConfig extends AbstractConfig {
5050
public static final String LOGGING_RECORD_KEY = "ml.log.record.key";
5151
public static final String LOGGING_RECORD_HEADERS = "ml.log.record.headers";
5252

53-
public static final String ID_STRATEGY = "ml.id.strategy";
53+
public static final String ID_STRATEGY = "ml.id.strategy";
5454
public static final String ID_STRATEGY_PATH = "ml.id.strategy.paths";
5555

5656
public static ConfigDef CONFIG_DEF = new ConfigDef()
5757
.define(CONNECTION_HOST, Type.STRING, Importance.HIGH, "MarkLogic server hostname")
5858
.define(CONNECTION_PORT, Type.INT, Importance.HIGH, "The REST app server port to connect to")
59-
.define(CONNECTION_DATABASE, Type.STRING, Importance.LOW, "Database to connect, if different from the one associated with the port")
60-
.define(CONNECTION_SECURITY_CONTEXT_TYPE, Type.STRING, Importance.HIGH, "Type of MarkLogic security context to create - either digest, basic, kerberos, certificate, or none")
59+
.define(CONNECTION_DATABASE, Type.STRING, "", Importance.LOW, "Database to connect, if different from the one associated with the port")
60+
.define(CONNECTION_SECURITY_CONTEXT_TYPE, Type.STRING, "NONE", Importance.HIGH, "Type of MarkLogic security context to create - either digest, basic, kerberos, certificate, or none")
6161
.define(CONNECTION_USERNAME, Type.STRING, Importance.HIGH, "Name of MarkLogic user to authenticate as")
6262
.define(CONNECTION_PASSWORD, Type.STRING, Importance.HIGH, "Password for the MarkLogic user")
63-
.define(CONNECTION_TYPE, Type.STRING, Importance.LOW, "Connection type; DIRECT or GATEWAY")
64-
.define(CONNECTION_SIMPLE_SSL, Type.BOOLEAN, Importance.LOW, "Set to true to use a trust-everything SSL connection")
65-
.define(CONNECTION_CERT_FILE, Type.STRING, Importance.LOW, "Path to a certificate file")
66-
.define(CONNECTION_CERT_PASSWORD, Type.STRING, Importance.LOW, "Password for the certificate file")
67-
.define(CONNECTION_EXTERNAL_NAME, Type.STRING, Importance.LOW, "External name for Kerberos authentication")
63+
.define(CONNECTION_TYPE, Type.STRING, "DIRECT", Importance.LOW, "Connection type; DIRECT or GATEWAY")
64+
.define(CONNECTION_SIMPLE_SSL, Type.BOOLEAN, false, Importance.LOW, "Set to true to use a trust-everything SSL connection")
65+
.define(CONNECTION_CERT_FILE, Type.STRING, "", Importance.LOW, "Path to a certificate file")
66+
.define(CONNECTION_CERT_PASSWORD, Type.STRING, "", Importance.LOW, "Password for the certificate file")
67+
.define(CONNECTION_EXTERNAL_NAME, Type.STRING, "", Importance.LOW, "External name for Kerberos authentication")
6868
.define(DATAHUB_FLOW_NAME, Type.STRING, null, Importance.MEDIUM, "Name of a Data Hub flow to run")
6969
.define(DATAHUB_FLOW_STEPS, Type.STRING, null, Importance.MEDIUM, "Comma-delimited names of steps to run")
7070
.define(DATAHUB_FLOW_LOG_RESPONSE, Type.BOOLEAN, false, Importance.LOW, "If set to true, the response from running a flow on each ingested batch will be logged at the info level")
7171
.define(DMSDK_BATCH_SIZE, Type.INT, 100, Importance.HIGH, "Number of documents to write in each batch")
7272
.define(DMSDK_THREAD_COUNT, Type.INT, 8, Importance.HIGH, "Number of threads for DMSDK to use")
73-
.define(DMSDK_TRANSFORM, Type.STRING, Importance.MEDIUM, "Name of a REST transform to use when writing documents")
74-
.define(DMSDK_TRANSFORM_PARAMS, Type.STRING, Importance.MEDIUM, "Delimited set of transform names and values")
73+
.define(DMSDK_TRANSFORM, Type.STRING, "", Importance.MEDIUM, "Name of a REST transform to use when writing documents")
74+
.define(DMSDK_TRANSFORM_PARAMS, Type.STRING, "", Importance.MEDIUM, "Delimited set of transform names and values")
7575
.define(DMSDK_TRANSFORM_PARAMS_DELIMITER, Type.STRING, ",", Importance.LOW, "Delimiter for transform parameter names and values; defaults to a comma")
76-
.define(DOCUMENT_COLLECTIONS_ADD_TOPIC, Type.BOOLEAN, false,Importance.LOW, "Indicates if the topic name should be added to the set of collections for a document")
77-
.define(DOCUMENT_COLLECTIONS, Type.STRING, Importance.MEDIUM, "String-delimited collections to add each document to")
78-
.define(DOCUMENT_FORMAT, Type.STRING, Importance.LOW, "Defines format of each document; can be one of json, xml, text, binary, or unknown")
79-
.define(DOCUMENT_MIMETYPE, Type.STRING, Importance.LOW, "Defines the mime type of each document; optional, and typically the format is set instead of the mime type")
80-
.define(DOCUMENT_PERMISSIONS, Type.STRING, Importance.MEDIUM, "String-delimited permissions to add to each document; role1,capability1,role2,capability2,etc")
81-
.define(DOCUMENT_URI_PREFIX, Type.STRING, Importance.MEDIUM, "Prefix to prepend to each generated URI")
82-
.define(DOCUMENT_URI_SUFFIX, Type.STRING, Importance.MEDIUM, "Suffix to append to each generated URI")
83-
.define(SSL, Type.BOOLEAN, Importance.LOW, "Whether SSL connection to the App server - true or false.")
84-
.define(TLS_VERSION, Type.STRING, Importance.LOW, "Version of TLS to connect to MarkLogic SSL enabled App server. Ex. TLSv1.2")
85-
.define(SSL_HOST_VERIFIER, Type.STRING, Importance.LOW, "The strictness of Host Verifier - ANY, COMMON, STRICT")
86-
.define(SSL_MUTUAL_AUTH, Type.BOOLEAN, Importance.LOW, "Mutual Authentication for Basic or Digest : true or false")
76+
.define(DOCUMENT_COLLECTIONS_ADD_TOPIC, Type.BOOLEAN, false, Importance.LOW, "Indicates if the topic name should be added to the set of collections for a document")
77+
.define(DOCUMENT_COLLECTIONS, Type.STRING, "", Importance.MEDIUM, "String-delimited collections to add each document to")
78+
.define(DOCUMENT_FORMAT, Type.STRING, "", Importance.LOW, "Defines format of each document; can be one of json, xml, text, binary, or unknown")
79+
.define(DOCUMENT_MIMETYPE, Type.STRING, "", Importance.LOW, "Defines the mime type of each document; optional, and typically the format is set instead of the mime type")
80+
.define(DOCUMENT_PERMISSIONS, Type.STRING, "", Importance.MEDIUM, "String-delimited permissions to add to each document; role1,capability1,role2,capability2,etc")
81+
.define(DOCUMENT_URI_PREFIX, Type.STRING, "", Importance.MEDIUM, "Prefix to prepend to each generated URI")
82+
.define(DOCUMENT_URI_SUFFIX, Type.STRING, "", Importance.MEDIUM, "Suffix to append to each generated URI")
83+
.define(SSL, Type.BOOLEAN, false, Importance.LOW, "Whether SSL connection to the App server - true or false.")
84+
.define(TLS_VERSION, Type.STRING, "", Importance.LOW, "Version of TLS to connect to MarkLogic SSL enabled App server. Ex. TLSv1.2")
85+
.define(SSL_HOST_VERIFIER, Type.STRING, "", Importance.LOW, "The strictness of Host Verifier - ANY, COMMON, STRICT")
86+
.define(SSL_MUTUAL_AUTH, Type.BOOLEAN, false, Importance.LOW, "Mutual Authentication for Basic or Digest : true or false")
8787

88-
.define(LOGGING_RECORD_KEY, Type.BOOLEAN, false, Importance.LOW, "Log incoming record keys")
88+
.define(LOGGING_RECORD_KEY, Type.BOOLEAN, false, Importance.LOW, "Log incoming record keys")
8989
.define(LOGGING_RECORD_HEADERS, Type.BOOLEAN, false, Importance.LOW, "Log incoming record headers")
9090

91-
.define(ID_STRATEGY, Type.STRING, Importance.LOW, "The ID Strategy for URI.")
92-
.define(ID_STRATEGY_PATH, Type.STRING, Importance.LOW, "The JSON path for ID Strategy")
91+
.define(ID_STRATEGY, Type.STRING, "", Importance.LOW, "The ID Strategy for URI.")
92+
.define(ID_STRATEGY_PATH, Type.STRING, "", Importance.LOW, "The JSON path for ID Strategy")
9393
;
9494

9595
public MarkLogicSinkConfig(final Map<?, ?> originals) {

0 commit comments

Comments (0)