@@ -19,7 +19,9 @@
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
+import java.time.Duration;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
@@ -30,9 +32,12 @@
 import org.apache.kafka.clients.admin.AdminClientConfig;
 import org.apache.kafka.clients.admin.NewTopic;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.TopicPartition;
 
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
@@ -130,6 +135,9 @@ void setUp() throws ExecutionException, InterruptedException {
                 "org.apache.kafka.common.serialization.ByteArrayDeserializer");
         consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                 "org.apache.kafka.common.serialization.ByteArrayDeserializer");
+        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
+        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer");
+        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
         consumer = new KafkaConsumer<>(consumerProps);
 
         final NewTopic originalTopic = new NewTopic(TestSourceConnector.ORIGINAL_TOPIC, 1, (short) 1);
@@ -156,7 +164,7 @@ final void tearDown() {
 
     @Test
     @Timeout(10)
-    final void testExtractTopic() throws ExecutionException, InterruptedException, IOException {
+    final void testExtractTopic() throws ExecutionException, InterruptedException {
         final Map<String, String> connectorConfig = new HashMap<>();
         connectorConfig.put("name", "test-source-connector");
         connectorConfig.put("connector.class", TestSourceConnector.class.getName());
@@ -194,6 +202,55 @@ final void testExtractTopicFromValueSchemaName() throws ExecutionException, Inte
 
     }
 
+    @Test
+    @Timeout(10)
+    final void testCaseTransform() throws ExecutionException, InterruptedException, IOException {
+        adminClient.createTopics(Arrays.asList(new NewTopic(TestCaseTransformConnector.SOURCE_TOPIC, 1, (short) 1)))
+                .all().get();
+        adminClient.createTopics(Arrays.asList(new NewTopic(TestCaseTransformConnector.TARGET_TOPIC, 1, (short) 1)))
+                .all().get();
+
+        final Map<String, String> connectorConfig = new HashMap<>();
+        connectorConfig.put("name", "test-source-connector");
+        connectorConfig.put("connector.class", TestCaseTransformConnector.class.getName());
+        connectorConfig.put("key.converter", "org.apache.kafka.connect.json.JsonConverter");
+        connectorConfig.put("value.converter", "org.apache.kafka.connect.json.JsonConverter");
+        connectorConfig.put("value.converter.value.subject.name.strategy",
+                "io.confluent.kafka.serializers.subject.RecordNameStrategy");
+        connectorConfig.put("tasks.max", "1");
+        connectorConfig.put("transforms", "regexRouteToTargetTopic, caseTransform");
+        connectorConfig.put("transforms.caseTransform.case", "upper");
+        connectorConfig.put("transforms.caseTransform.field.names", TestCaseTransformConnector.TRANSFORM_FIELD);
+        connectorConfig.put("transforms.caseTransform.type", "io.aiven.kafka.connect.transforms.CaseTransform$Value");
+        connectorConfig.put("transforms.regexRouteToTargetTopic.type",
+                "org.apache.kafka.connect.transforms.RegexRouter");
+        connectorConfig.put("transforms.regexRouteToTargetTopic.regex", "(.*)-source-(.*)");
+        connectorConfig.put("transforms.regexRouteToTargetTopic.replacement", String.format("$1-target-$2"));
+
+        connectRunner.createConnector(connectorConfig);
+        checkMessageTransformInTopic(
+                new TopicPartition(TestCaseTransformConnector.TARGET_TOPIC, 0),
+                TestCaseTransformConnector.MESSAGES_TO_PRODUCE
+        );
+    }
+
+    final void checkMessageTransformInTopic(final TopicPartition topicPartition, final long expectedNumberOfMessages)
+            throws InterruptedException, IOException {
+        waitForCondition(
+                () -> consumer.endOffsets(Arrays.asList(topicPartition))
+                        .values().stream().reduce(Long::sum).map(s -> s == expectedNumberOfMessages)
+                        .orElse(false), 5000, "Messages appear in target topic"
+        );
+        consumer.subscribe(Collections.singletonList(topicPartition.topic()));
+        final ObjectMapper objectMapper = new ObjectMapper();
+        final TypeReference<Map<String, Object>> tr = new TypeReference<>() {};
+        for (final ConsumerRecord<byte[], byte[]> consumerRecord : consumer.poll(Duration.ofSeconds(1))) {
+            final Map<String, Object> value = objectMapper.readValue(consumerRecord.value(), tr);
+            final Map<String, String> payload = (Map<String, String>) value.get("payload");
+            assertThat(payload.get("transform")).isEqualTo("LOWER-CASE-DATA-TRANSFORMS-TO-UPPERCASE");
+        }
+    }
+
     final void checkMessageTopics(final TopicPartition originalTopicPartition, final TopicPartition newTopicPartition)
             throws InterruptedException {
         waitForCondition(