@@ -27,6 +27,8 @@ import scala.util.Try
  * example: --conf spark.sparkmeasure.kafkaBroker=kafka.your-site.com:9092
  * spark.sparkmeasure.kafkaTopic = Kafka topic
  * example: --conf spark.sparkmeasure.kafkaTopic=sparkmeasure-stageinfo
+ * spark.sparkmeasure.kafka.* = Other kafka properties
+ * example: --conf spark.sparkmeasure.kafka.ssl.keystore.location=/var/private/ssl/kafka.server.keystore.jks
  *
  * This code depends on "kafka clients", you may need to add the dependency:
  * --packages org.apache.kafka:kafka-clients:3.2.1
@@ -39,7 +41,7 @@ class KafkaSink(conf: SparkConf) extends SparkListener {
   logger.warn("Custom monitoring listener with Kafka sink initializing. Now attempting to connect to Kafka topic")

   // Initialize Kafka connection
-  val (broker, topic) = Utils.parseKafkaConfig(conf, logger)
+  val (broker, topic, properties) = Utils.parseKafkaConfig(conf, logger)
   private var producer: Producer[String, Array[Byte]] = _

   var appId: String = SparkSession.getActiveSession match {
@@ -248,6 +250,7 @@ class KafkaSink(conf: SparkConf) extends SparkListener {
       props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
       props.put("value.serializer", classOf[ByteArraySerializer].getName)
       props.put("client.id", "spark-measure")
+      properties.foreach { case (k, v) => props.put(k, v) }
       producer = new KafkaProducer(props)
     }
   )
@@ -343,4 +346,4 @@ class KafkaSinkExtended(conf: SparkConf) extends KafkaSink(conf) {
     )
     report(point2)
   }
-}
+}
0 commit comments