
Commit c22ef8d

Update Parquet
1 parent 48c5eb5

4 files changed (+20, -124 lines)


exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ColumnDataReader.java

Lines changed: 0 additions & 103 deletions
This file was deleted.
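With ColumnDataReader gone, Drill's remaining Parquet readers presumably go through parquet-mr's own stream abstraction (HadoopStreams, already imported in ColumnChunkIncReadStore below) rather than a hand-rolled positioned reader over FSDataInputStream. A minimal sketch of that read path, under that assumption; the helper name and offset/length parameters are illustrative, not from this commit:

// Hypothetical helper, not part of this commit: reads one column chunk's
// bytes through parquet-mr's stream API instead of a Drill-private reader.
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.parquet.hadoop.util.HadoopStreams;
import org.apache.parquet.io.SeekableInputStream;

public class ChunkReadSketch {
  static ByteBuffer readChunk(FSDataInputStream in, long offset, int length) throws IOException {
    SeekableInputStream stream = HadoopStreams.wrap(in); // parquet-mr's wrapper
    stream.seek(offset);                                 // position at the chunk start
    ByteBuffer buf = ByteBuffer.allocate(length);
    stream.readFully(buf);                               // fill the whole buffer or throw
    buf.flip();                                          // make the bytes readable
    return buf;
  }
}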

exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java

Lines changed: 4 additions & 8 deletions
@@ -55,7 +55,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -417,13 +416,10 @@ public void close() {
     recordReader = null;
     recordMaterializer = null;
     nullFilledVectors = null;
-    try {
-      if (pageReadStore != null) {
-        pageReadStore.close();
-        pageReadStore = null;
-      }
-    } catch (IOException e) {
-      logger.warn("Failure while closing PageReadStore", e);
+
+    if (pageReadStore != null) {
+      pageReadStore.close();
+      pageReadStore = null;
     }
   }
 

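The dropped try/catch tracks the signature change in ColumnChunkIncReadStore below: its close() no longer declares IOException, so DrillParquetReader can close the store unconditionally. Reassembled from the hunk above (indentation and surrounding members assumed), the method now reads:

public void close() {
  recordReader = null;
  recordMaterializer = null;
  nullFilledVectors = null;

  if (pageReadStore != null) {
    pageReadStore.close();   // no longer throws IOException (see ColumnChunkIncReadStore below)
    pageReadStore = null;
  }
}
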
exec/java-exec/src/main/java/org/apache/parquet/hadoop/ColumnChunkIncReadStore.java

Lines changed: 14 additions & 11 deletions
@@ -17,13 +17,7 @@
  */
 package org.apache.parquet.hadoop;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
+import io.netty.buffer.ByteBuf;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.memory.BufferAllocator;
@@ -47,11 +41,16 @@
 import org.apache.parquet.format.converter.ParquetMetadataConverter;
 import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
 import org.apache.parquet.hadoop.util.HadoopStreams;
-
-import io.netty.buffer.ByteBuf;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 
 public class ColumnChunkIncReadStore implements PageReadStore {
   private static final Logger logger = LoggerFactory.getLogger(ColumnChunkIncReadStore.class);
@@ -295,9 +294,13 @@ public void addColumn(ColumnDescriptor descriptor, ColumnChunkMetaData metaData)
     columns.put(descriptor, reader);
   }
 
-  public void close() throws IOException {
+  public void close() {
     for (FSDataInputStream stream : streams) {
-      stream.close();
+      try {
+        stream.close();
+      } catch (IOException e) {
+        logger.warn("Error closing stream: {}", e.getMessage(), e);
+      }
     }
     for (ColumnChunkIncPageReader reader : columns.values()) {
       reader.close();

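The reworked close() is best-effort: each stream gets its own try/catch, so one failing close is logged instead of aborting the loop and leaking the remaining streams and page readers. The same pattern in isolation, as a self-contained sketch (class and method names are illustrative):

import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class BestEffortClose {
  private static final Logger logger = LoggerFactory.getLogger(BestEffortClose.class);

  // Close every resource even if some of them fail; log and keep going.
  static void closeAll(List<? extends Closeable> resources) {
    for (Closeable resource : resources) {
      try {
        resource.close();
      } catch (IOException e) {
        logger.warn("Error closing resource: {}", e.getMessage(), e);
      }
    }
  }
}
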
pom.xml

Lines changed: 2 additions & 2 deletions
@@ -125,8 +125,8 @@
     <netty.tcnative.classifier />
     <netty.tcnative.version>2.0.65.Final</netty.tcnative.version>
     <netty.version>4.1.115.Final</netty.version>
-    <parquet.format.version>2.9.0</parquet.format.version>
-    <parquet.version>1.12.3</parquet.version>
+    <parquet.format.version>2.11.0</parquet.format.version>
+    <parquet.version>1.15.1</parquet.version>
     <project.build.outputTimestamp>1676438963</project.build.outputTimestamp>
     <protobuf.version>3.25.5</protobuf.version>
     <proto.cas.path>${project.basedir}/src/main/protobuf/</proto.cas.path>
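With the managed versions bumped, a quick sanity check is to ask Maven which Parquet artifacts actually resolve on the classpath; the dependency tree should now report org.apache.parquet artifacts at 1.15.1. For example, run from the repo root (module selection is illustrative):

mvn -pl exec/java-exec dependency:tree -Dincludes=org.apache.parquet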
