@@ -2,7 +2,7 @@ use async_trait::async_trait;
 use derive_more::{Display, Error, From};
 use futures::future::try_join_all;
 use rand::{seq::SliceRandom, thread_rng};
-use rdkafka::client::ClientContext;
+use rdkafka::client::{ClientContext, OAuthToken};
 use rdkafka::config::ClientConfig;
 use rdkafka::consumer::{
     stream_consumer::StreamConsumer, CommitMode, Consumer, ConsumerContext, Rebalance,
@@ -14,20 +14,23 @@ use rdkafka::types::RDKafkaErrorCode;
 use rdkafka::TopicPartitionList;
 use std::collections::HashMap;
 use std::env;
-use std::sync::Arc;
+use std::error::Error as StdError;
+use std::sync::{Arc, Mutex as StdMutex};
 use std::time::Duration;
 use tokio::sync::mpsc::{error::SendError, unbounded_channel, UnboundedSender};
 use tokio::sync::{Mutex, RwLock};
 use tokio::task::{JoinError, JoinHandle};
 use tokio::time::sleep;
 
 use crate::channel::{get_data_channel_map_from_env, get_data_channel_value_from_env};
+use crate::msk_iam::MSKIAMAuthManager;
 use crate::util::parse_env_var;
 
 const KAFKA_ENC_TOPICS_ENV_KEY: &str = "KAFKA_ENCRYPTED_TOPICS";
 const KAFKA_OUT_TOPICS_ENV_KEY: &str = "KAFKA_OUTPUT_TOPICS";
 const DEFAULT_ENC_KAFKA_TOPICS: &str = "typical=p3a-star-enc";
 const DEFAULT_OUT_KAFKA_TOPICS: &str = "typical=p3a-star-out";
+const KAFKA_IAM_BROKERS_ENV_KEY: &str = "KAFKA_IAM_BROKERS";
 const KAFKA_BROKERS_ENV_KEY: &str = "KAFKA_BROKERS";
 const KAFKA_ENABLE_PLAINTEXT_ENV_KEY: &str = "KAFKA_ENABLE_PLAINTEXT";
 const KAFKA_PRODUCER_QUEUE_TASK_COUNT_ENV_KEY: &str = "KAFKA_PRODUCE_QUEUE_TASK_COUNT";
@@ -54,9 +57,24 @@ pub enum RecordStreamError {
     Join(JoinError),
 }
 
-struct KafkaContext;
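+// Shared client context for all producers and consumers. The
+// MSKIAMAuthManager (assumed to cache and refresh credentials internally)
+// sits behind a std Mutex so every Kafka client draws tokens from one place.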
+#[derive(Clone)]
+struct KafkaContext {
+    msk_iam_auth_manager: Arc<StdMutex<MSKIAMAuthManager>>,
+}
+
 
-impl ClientContext for KafkaContext {}
+impl ClientContext for KafkaContext {
+    const ENABLE_REFRESH_OAUTH_TOKEN: bool = true;
+
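+    // librdkafka invokes this when the client starts and again shortly
+    // before the current token expires (refresh is enabled by the
+    // ENABLE_REFRESH_OAUTH_TOKEN constant above).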
+    fn generate_oauth_token(
+        &self,
+        _oauthbearer_config: Option<&str>,
+    ) -> Result<OAuthToken, Box<dyn StdError>> {
+        let token_info = self.msk_iam_auth_manager.lock().unwrap().get_auth_token()?;
+        Ok(OAuthToken {
+            token: token_info.token,
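+            // rdkafka expects lifetime_ms to be the token's absolute expiry
+            // time in milliseconds since the Unix epoch, hence the
+            // nanosecond conversion below.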
+            lifetime_ms: (token_info.expiration_time.unix_timestamp_nanos() / 1_000_000) as i64,
+            principal_name: String::new(),
+        })
+    }
+}
 
 impl ConsumerContext for KafkaContext {
     fn pre_rebalance(&self, rebalance: &Rebalance) {
@@ -115,6 +133,25 @@ pub struct KafkaRecordStreamConfig {
     pub use_output_group_id: bool,
 }
 
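+// Hands out record streams that all share one MSKIAMAuthManager, so every
+// stream created by this factory reuses the same IAM token source rather
+// than authenticating independently. A sketch of the intended call pattern:
+//     let factory = KafkaRecordStreamFactory::new();
+//     let stream = factory.create_record_stream(stream_config);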
+pub struct KafkaRecordStreamFactory {
+    msk_iam_auth_manager: Arc<StdMutex<MSKIAMAuthManager>>,
+}
+
+impl KafkaRecordStreamFactory {
+    pub fn new() -> Self {
+        Self {
+            msk_iam_auth_manager: Arc::new(StdMutex::new(MSKIAMAuthManager::new())),
+        }
+    }
+
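+    // Each stream receives a context holding a clone of the shared Arc;
+    // cloning is cheap and keeps a single token cache process-wide.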
+    pub fn create_record_stream(&self, stream_config: KafkaRecordStreamConfig) -> KafkaRecordStream {
+        let context = KafkaContext {
+            msk_iam_auth_manager: self.msk_iam_auth_manager.clone(),
+        };
+        KafkaRecordStream::new(stream_config, context)
+    }
+}
+
 pub struct KafkaRecordStream {
     producer: Option<Arc<FutureProducer<KafkaContext>>>,
     consumer: Option<StreamConsumer<KafkaContext>>,
@@ -150,7 +187,7 @@ pub fn get_data_channel_topic_from_env(use_output_topic: bool, channel_name: &st
 }
 
 impl KafkaRecordStream {
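+    // No longer public: streams are constructed through
+    // KafkaRecordStreamFactory so the context carries the factory's shared
+    // auth manager.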
-    pub fn new(stream_config: KafkaRecordStreamConfig) -> Self {
+    fn new(stream_config: KafkaRecordStreamConfig, context: KafkaContext) -> Self {
         let group_id = match stream_config.use_output_group_id {
             true => "star-agg-dec",
             false => "star-agg-enc",
@@ -163,7 +200,6 @@ impl KafkaRecordStream {
             producer_queues: RwLock::new(Vec::new()),
         };
         if stream_config.enable_producer {
-            let context = KafkaContext;
             let mut config = Self::new_client_config();
             let mut config_ref = &mut config;
             if stream_config.use_output_group_id {
@@ -175,13 +211,12 @@ impl KafkaRecordStream {
                 .set("transaction.timeout.ms", "3600000")
                 .set("request.timeout.ms", "900000")
                 .set("socket.timeout.ms", "300000")
-                .create_with_context(context)
+                .create_with_context(context.clone())
                 .unwrap(),
             ));
             info!("Producing to topic: {}", stream_config.topic);
         }
         if stream_config.enable_consumer {
-            let context = KafkaContext;
             let mut config = Self::new_client_config();
             result.consumer = Some(
                 config
@@ -210,6 +245,13 @@ impl KafkaRecordStream {
     }
 
     fn new_client_config() -> ClientConfig {
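+        // If KAFKA_IAM_BROKERS is set, prefer MSK IAM authentication:
+        // SASL_SSL with OAUTHBEARER routes token acquisition through
+        // generate_oauth_token on KafkaContext above.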
+        if let Ok(brokers) = env::var(KAFKA_IAM_BROKERS_ENV_KEY) {
+            let mut result = ClientConfig::new();
+            result.set("bootstrap.servers", brokers);
+            result.set("security.protocol", "SASL_SSL");
+            result.set("sasl.mechanism", "OAUTHBEARER");
+            return result;
+        }
         let brokers = env::var(KAFKA_BROKERS_ENV_KEY)
             .unwrap_or_else(|_| panic!("{} env var must be defined", KAFKA_BROKERS_ENV_KEY));
         let mut result = ClientConfig::new();