Skip to content

Commit fe628e6

Browse files
authored
bump guava version (#2)
* Bump Kafka to 2.5.1
* Revert "Bump Kafka to 2.5.1" (reverts commit 01ab670)
* Bump to Kafka 2.5.1
* Update version number
* Update distribution version
* Remove dependency from parent pom
* Update parent version in children pom
* Fix wrong Kafka version number
* Bump distribution xml version
* Bump guava to 24.1.1; change error handling
* Use getCause instead of hardcoded message
* Unwrap execution exception
* Centralise exception handling
* Refactor exception catching
* Fix a wrong if condition
* Make exception test more valuable
* Address PR comment
1 parent 4085a41 commit fe628e6

File tree

5 files changed

+35
-20
lines changed

5 files changed

+35
-20
lines changed

s3/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
<dependency>
3333
<groupId>com.google.guava</groupId>
3434
<artifactId>guava</artifactId>
35-
<version>20.0</version>
35+
<version>24.1.1-jre</version>
3636
</dependency>
3737
</dependencies>
3838

s3/src/main/java/com/instaclustr/kafka/connect/s3/source/AwsStorageSourceTask.java

Lines changed: 17 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import com.amazonaws.AmazonClientException;
44
import com.google.common.util.concurrent.RateLimiter;
5+
import com.google.common.util.concurrent.UncheckedExecutionException;
56
import com.google.common.util.concurrent.UncheckedTimeoutException;
67
import com.instaclustr.kafka.connect.s3.AwsConnectorStringFormats;
78
import com.instaclustr.kafka.connect.s3.AwsStorageConnectorCommonConfig;
@@ -16,7 +17,9 @@
1617

1718
import java.io.IOException;
1819
import java.util.*;
20+
import java.util.concurrent.ExecutionException;
1921
import java.util.concurrent.TimeUnit;
22+
import java.util.concurrent.TimeoutException;
2023
import java.util.stream.Collectors;
2124

2225
public class AwsStorageSourceTask extends SourceTask {
@@ -139,20 +142,24 @@ public List<SourceRecord> poll() {
139142
} catch (InterruptedException e) {
140143
log.info("Thread interrupted in poll. Shutting down", e);
141144
Thread.currentThread().interrupt();
142-
} catch (AmazonClientException exception) {
143-
if (!exception.isRetryable()) {
144-
throw exception;
145-
} else {
146-
log.warn("Retryable S3 service exception while reading from s3", exception);
145+
} catch (UncheckedExecutionException | ExecutionException | TimeoutException e) {
146+
Throwable exceptionCause = e.getCause();
147+
if (exceptionCause instanceof AmazonClientException) {
148+
AmazonClientException amazonClientException = (AmazonClientException) exceptionCause;
149+
if (!amazonClientException.isRetryable()) {
150+
throw amazonClientException;
151+
} else {
152+
log.warn("Retryable S3 service exception while reading from s3", e);
153+
if (topicPartition != null) {
154+
awsSourceReader.revertAwsReadPositionMarker(topicPartition);
155+
}
156+
}
157+
} else if (exceptionCause instanceof IOException || e instanceof TimeoutException) {
158+
log.warn("Retryable exception while reading from s3", e);
147159
if (topicPartition != null) {
148160
awsSourceReader.revertAwsReadPositionMarker(topicPartition);
149161
}
150162
}
151-
} catch (IOException | UncheckedTimeoutException exception) {
152-
log.warn("Retryable exception while reading from s3", exception);
153-
if (topicPartition != null) {
154-
awsSourceReader.revertAwsReadPositionMarker(topicPartition);
155-
}
156163
} catch (RuntimeException e){
157164
throw e;
158165
} catch (Exception e) {

s3/src/main/java/com/instaclustr/kafka/connect/s3/source/TopicPartitionSegmentParser.java

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
package com.instaclustr.kafka.connect.s3.source;
22

3+
import com.amazonaws.AmazonClientException;
34
import com.google.common.util.concurrent.SimpleTimeLimiter;
45
import com.google.common.util.concurrent.TimeLimiter;
56
import com.instaclustr.kafka.connect.s3.AwsConnectorStringFormats;
@@ -12,9 +13,7 @@
1213
import java.io.IOException;
1314
import java.io.InputStream;
1415
import java.util.HashMap;
15-
import java.util.concurrent.ExecutorService;
16-
import java.util.concurrent.Executors;
17-
import java.util.concurrent.TimeUnit;
16+
import java.util.concurrent.*;
1817
import java.util.regex.Matcher;
1918

2019
/**
@@ -62,7 +61,7 @@ public TopicPartitionSegmentParser(final InputStream s3ObjectInputStream, final
6261
this.topicPrefix = topicPrefix;
6362
this.targetTopic = AwsConnectorStringFormats.generateTargetTopic(topicPrefix, topic);
6463
this.singleThreadExecutor = Executors.newSingleThreadExecutor();
65-
this.timeLimiter = new SimpleTimeLimiter(this.singleThreadExecutor);
64+
this.timeLimiter = SimpleTimeLimiter.create(this.singleThreadExecutor);
6665
}
6766

6867
public void closeResources() throws IOException, InterruptedException {
@@ -99,7 +98,7 @@ private SourceRecord getNextRecord() throws IOException { //blocking call
9998

10099
public SourceRecord getNextRecord(Long time, TimeUnit units) throws Exception {
101100
try {
102-
return this.timeLimiter.callWithTimeout(this::getNextRecord, time, units, true);
101+
return this.timeLimiter.callWithTimeout(this::getNextRecord, time, units);
103102
} catch (Exception e) {
104103
this.closeResources(); //not possible to read from this stream after a timeout as read positions gets messed up
105104
throw e;

s3/src/test/java/com/instaclustr/kafka/connect/s3/source/AwsStorageSourceTaskTest.java

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
import java.time.temporal.ChronoUnit;
1717
import java.util.Collections;
1818
import java.util.HashMap;
19+
import java.util.concurrent.ExecutionException;
20+
import java.util.concurrent.TimeoutException;
1921

2022
import static org.mockito.Mockito.*;
2123

@@ -54,7 +56,7 @@ public void givenNonResponsiveObjectStreamResetReadPosition() throws Exception {
5456
doReturn("test").when(mockTopicPartitionSegmentParser).getTopic();
5557
doReturn(0).when(mockTopicPartitionSegmentParser).getPartition();
5658

57-
doThrow(new UncheckedTimeoutException()).when(mockTopicPartitionSegmentParser).getNextRecord(any(), any());
59+
doThrow(new TimeoutException()).when(mockTopicPartitionSegmentParser).getNextRecord(any(), any());
5860

5961
AwsStorageSourceTask awsStorageSourceTask = new AwsStorageSourceTask(mockTransferManagerProvider, mockAwsSourceReader);
6062
awsStorageSourceTask.poll();
@@ -71,7 +73,7 @@ public void givenObjectStreamThatThrowsIOExceptionResetReadPosition() throws Exc
7173
doReturn(mockTopicPartitionSegmentParser).when(mockAwsSourceReader).getNextTopicPartitionSegmentParser();
7274
doReturn("test").when(mockTopicPartitionSegmentParser).getTopic();
7375
doReturn(0).when(mockTopicPartitionSegmentParser).getPartition();
74-
doThrow(new IOException()).when(mockTopicPartitionSegmentParser).getNextRecord(any(), any());
76+
doThrow(new ExecutionException(new IOException())).when(mockTopicPartitionSegmentParser).getNextRecord(any(), any());
7577

7678
AwsStorageSourceTask awsStorageSourceTask = new AwsStorageSourceTask(mockTransferManagerProvider, mockAwsSourceReader);
7779
awsStorageSourceTask.poll();

s3/src/test/java/com/instaclustr/kafka/connect/s3/source/TopicPartitionSegmentParserTest.java

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,9 @@
1313
import org.testng.annotations.Test;
1414

1515
import java.io.*;
16+
import java.util.concurrent.ExecutionException;
1617
import java.util.concurrent.TimeUnit;
18+
import java.util.concurrent.TimeoutException;
1719

1820

1921
public class TopicPartitionSegmentParserTest {
@@ -72,7 +74,7 @@ public void givenNonResponsiveStreamTriggerTimeoutOnDefinedTimePeriod() throws I
7274
try (PipedInputStream empty = new PipedInputStream(1)) {
7375
pipedOutputStream = new PipedOutputStream(empty); //making sure we just have an unresponsive stream and not throwing an ioexception
7476
TopicPartitionSegmentParser topicPartitionSegmentParser = new TopicPartitionSegmentParser(empty, s3ObjectKey, "");
75-
Assert.expectThrows(UncheckedTimeoutException.class, () -> topicPartitionSegmentParser.getNextRecord(100L, TimeUnit.MILLISECONDS));
77+
Assert.expectThrows(TimeoutException.class, () -> topicPartitionSegmentParser.getNextRecord(5L, TimeUnit.MILLISECONDS));
7678
} finally {
7779
if (pipedOutputStream != null) {
7880
pipedOutputStream.close();
@@ -103,7 +105,12 @@ public void givenClosedStreamThrowIoException() throws IOException {
103105
InputStream nullInputStream = InputStream.nullInputStream();
104106
TopicPartitionSegmentParser topicPartitionSegmentParser = new TopicPartitionSegmentParser(nullInputStream, s3ObjectKey, "");
105107
nullInputStream.close();
106-
Assert.expectThrows(IOException.class, () -> topicPartitionSegmentParser.getNextRecord(5L, TimeUnit.SECONDS));
108+
try {
109+
topicPartitionSegmentParser.getNextRecord(5L, TimeUnit.SECONDS);
110+
} catch (Exception e) {
111+
Assert.assertTrue(e instanceof ExecutionException);
112+
Assert.assertTrue(e.getCause() instanceof IOException);
113+
}
107114
}
108115

109116
@Test

0 commit comments

Comments (0)