-
Notifications
You must be signed in to change notification settings - Fork 15.1k
MINOR: perf optimization for header serialization and type conversion #21762
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 5 commits
aa477af
4f1e183
0d2b7e5
44fdee1
0d1952e
fa2f7c5
f2883bf
a4a5f52
c7f7458
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -16,14 +16,11 @@ | |
| */ | ||
| package org.apache.kafka.streams.state.internals; | ||
|
|
||
| import org.apache.kafka.common.errors.SerializationException; | ||
| import org.apache.kafka.common.header.Header; | ||
| import org.apache.kafka.common.header.Headers; | ||
| import org.apache.kafka.common.utils.ByteUtils; | ||
|
|
||
| import java.io.ByteArrayOutputStream; | ||
| import java.io.DataOutputStream; | ||
| import java.io.IOException; | ||
| import java.nio.ByteBuffer; | ||
| import java.nio.charset.StandardCharsets; | ||
|
|
||
| /** | ||
|
|
@@ -48,50 +45,83 @@ | |
| */ | ||
| class HeadersSerializer { | ||
|
|
||
| final static class PreSerializedHeaders { | ||
| final int requiredBufferSizeForHeaders; | ||
| final byte[][][] serializedHeaders; | ||
|
|
||
| PreSerializedHeaders(final int requiredBufferSizeForHeaders, final byte[][][] serializedHeaders) { | ||
| this.requiredBufferSizeForHeaders = requiredBufferSizeForHeaders; | ||
| this.serializedHeaders = serializedHeaders; | ||
| } | ||
| } | ||
|
|
||
| public static PreSerializedHeaders prepareSerialization(final Headers headers) { | ||
| final Header[] headersArray = (headers == null) ? new Header[0] : headers.toArray(); | ||
|
|
||
| if (headersArray.length == 0) { | ||
| return new PreSerializedHeaders(0, null); | ||
| } | ||
|
|
||
| // we first compute the size for the buffer we need, | ||
| // so we can allocate the whole buffer at once later | ||
|
|
||
| // cache to avoid translating String header-keys to byte[] twice | ||
| final byte[][][] serializedHeaders = new byte[headersArray.length][2][]; | ||
|
|
||
| // start with varint encoding of header count | ||
| int requiredBufferSizeForHeaders = ByteUtils.sizeOfVarint(headersArray.length); | ||
| int i = 0; | ||
|
|
||
| for (final Header header : headersArray) { | ||
| serializedHeaders[i][0] = header.key().getBytes(StandardCharsets.UTF_8); | ||
| requiredBufferSizeForHeaders += ByteUtils.sizeOfVarint(serializedHeaders[i][0].length) + serializedHeaders[i][0].length; | ||
|
|
||
| serializedHeaders[i][1] = header.value(); | ||
| if (serializedHeaders[i][1] == null) { | ||
| ++requiredBufferSizeForHeaders; | ||
| } else { | ||
| requiredBufferSizeForHeaders += ByteUtils.sizeOfVarint(serializedHeaders[i][1].length) + serializedHeaders[i][1].length; | ||
| } | ||
|
|
||
| ++i; | ||
| } | ||
|
|
||
| return new PreSerializedHeaders(requiredBufferSizeForHeaders, serializedHeaders); | ||
| } | ||
|
|
||
| /** | ||
| * Serializes headers into a byte array using varint encoding per KIP-1271. | ||
| * Serializes headers into a ByteBuffer using varint encoding per KIP-1271. | ||
| * <p> | ||
| * The output format is [count][header1][header2]... without a size prefix. | ||
| * The size prefix is added by the outer serializer that uses this. | ||
| * <p> | ||
| * For null or empty headers, returns an empty byte array (0 bytes) | ||
| * instead of encoding headerCount=0 (1 byte). | ||
| * | ||
| * @param headers the headers to serialize (can be null) | ||
| * @return the serialized byte array (empty array if headers are null or empty) | ||
| * @param preSerializedHeaders the preSerializedHeaders | ||
| * @param buffer the buffer to write the serialized header into (it's expected that the buffer position is set correctly) | ||
| * @return the modified {@code buffer} containing the serialized headers (empty array if headers are null or empty),\ | ||
|
||
| * with corresponding advanced position | ||
| */ | ||
| public static byte[] serialize(final Headers headers) { | ||
| final Header[] headersArray = (headers == null) ? new Header[0] : headers.toArray(); | ||
|
|
||
| if (headersArray.length == 0) { | ||
| return new byte[0]; | ||
| public static ByteBuffer serialize(final PreSerializedHeaders preSerializedHeaders, final ByteBuffer buffer) { | ||
| if (preSerializedHeaders.requiredBufferSizeForHeaders == 0) { | ||
| return buffer; | ||
| } | ||
|
|
||
| try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(); | ||
| final DataOutputStream out = new DataOutputStream(baos)) { | ||
|
|
||
| ByteUtils.writeVarint(headersArray.length, out); | ||
| ByteUtils.writeVarint(preSerializedHeaders.serializedHeaders.length, buffer); | ||
|
|
||
| for (final Header header : headersArray) { | ||
| final byte[] keyBytes = header.key().getBytes(StandardCharsets.UTF_8); | ||
| final byte[] valueBytes = header.value(); | ||
| for (final byte[][] serializedHeader : preSerializedHeaders.serializedHeaders) { | ||
| ByteUtils.writeVarint(serializedHeader[0].length, buffer); | ||
| buffer.put(serializedHeader[0]); | ||
|
|
||
| ByteUtils.writeVarint(keyBytes.length, out); | ||
| out.write(keyBytes); | ||
|
|
||
| // Write value length and value bytes (varint + raw bytes) | ||
| // null is represented as -1, encoded as varint | ||
| if (valueBytes == null) { | ||
| ByteUtils.writeVarint(-1, out); | ||
| } else { | ||
| ByteUtils.writeVarint(valueBytes.length, out); | ||
| out.write(valueBytes); | ||
| } | ||
| if (serializedHeader[1] != null) { | ||
| ByteUtils.writeVarint(serializedHeader[1].length, buffer); | ||
| buffer.put(serializedHeader[1]); | ||
| } else { | ||
| buffer.put((byte) 0x01); // hardcoded varint encoding for `-1` | ||
| } | ||
|
|
||
| return baos.toByteArray(); | ||
| } catch (final IOException e) { | ||
| throw new SerializationException("Failed to serialize headers", e); | ||
| } | ||
|
|
||
| return buffer; | ||
| } | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -17,14 +17,9 @@ | |
| package org.apache.kafka.streams.state.internals; | ||
|
|
||
| import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
| import org.apache.kafka.common.errors.SerializationException; | ||
| import org.apache.kafka.common.header.Headers; | ||
| import org.apache.kafka.common.serialization.LongSerializer; | ||
| import org.apache.kafka.common.utils.ByteUtils; | ||
|
|
||
| import java.io.ByteArrayOutputStream; | ||
| import java.io.DataOutputStream; | ||
| import java.io.IOException; | ||
| import java.nio.ByteBuffer; | ||
|
|
||
| public final class RecordConverters { | ||
|
|
@@ -33,7 +28,7 @@ public final class RecordConverters { | |
| private static final RecordConverter RAW_TO_TIMESTAMED_INSTANCE = record -> { | ||
| final byte[] rawValue = record.value(); | ||
| final long timestamp = record.timestamp(); | ||
| final byte[] recordValue = rawValue == null ? null : | ||
| final byte[] recordValueWithTimestamp = rawValue == null ? null : | ||
| ByteBuffer.allocate(8 + rawValue.length) | ||
| .putLong(timestamp) | ||
| .put(rawValue) | ||
|
|
@@ -45,9 +40,9 @@ public final class RecordConverters { | |
| timestamp, | ||
| record.timestampType(), | ||
| record.serializedKeySize(), | ||
| record.serializedValueSize(), | ||
| recordValueWithTimestamp != null ? recordValueWithTimestamp.length : 0, | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Stumbled over this by chance -- it's a long-standing minor bug... Fixing on the side. |
||
| record.key(), | ||
| recordValue, | ||
| recordValueWithTimestamp, | ||
| record.headers(), | ||
| record.leaderEpoch() | ||
| ); | ||
|
|
@@ -57,7 +52,7 @@ public final class RecordConverters { | |
| final byte[] rawValue = record.value(); | ||
|
|
||
| // Format: [headersSize(varint)][headersBytes][timestamp(8)][value] | ||
| final byte[] recordValue = reconstructFromRaw( | ||
| final byte[] recordValueWithTimestampAndHeaders = reconstructFromRaw( | ||
| rawValue, | ||
| record.timestamp(), | ||
| record.headers() | ||
|
|
@@ -70,9 +65,9 @@ public final class RecordConverters { | |
| record.timestamp(), | ||
| record.timestampType(), | ||
| record.serializedKeySize(), | ||
| record.serializedValueSize(), | ||
| recordValueWithTimestampAndHeaders != null ? recordValueWithTimestampAndHeaders.length : 0, | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Same... |
||
| record.key(), | ||
| recordValue, | ||
| recordValueWithTimestampAndHeaders, | ||
| record.headers(), | ||
| record.leaderEpoch() | ||
| ); | ||
|
|
@@ -86,7 +81,7 @@ public static RecordConverter rawValueToHeadersValue() { | |
| final byte[] rawValue = record.value(); | ||
|
|
||
| // Format: [headersSize(varint)][headersBytes][aggregation] (no timestamp) | ||
| final byte[] recordValue = reconstructSessionFromRaw( | ||
| final byte[] recordValueWithHeaders = reconstructSessionFromRaw( | ||
| rawValue, | ||
| record.headers() | ||
| ); | ||
|
|
@@ -98,9 +93,9 @@ public static RecordConverter rawValueToHeadersValue() { | |
| record.timestamp(), | ||
| record.timestampType(), | ||
| record.serializedKeySize(), | ||
| record.serializedValueSize(), | ||
| recordValueWithHeaders != null ? recordValueWithHeaders.length : 0, | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. And one more |
||
| record.key(), | ||
| recordValue, | ||
| recordValueWithHeaders, | ||
| record.headers(), | ||
| record.leaderEpoch() | ||
| ); | ||
|
|
@@ -133,19 +128,18 @@ static byte[] reconstructSessionFromRaw(final byte[] rawValue, final Headers hea | |
| if (rawValue == null) { | ||
| return null; | ||
| } | ||
| final byte[] rawHeaders = HeadersSerializer.serialize(headers); | ||
|
|
||
| try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(); | ||
| final DataOutputStream out = new DataOutputStream(baos)) { | ||
| final HeadersSerializer.PreSerializedHeaders preSerializedHeaders = HeadersSerializer.prepareSerialization(headers); | ||
|
|
||
| ByteUtils.writeVarint(rawHeaders.length, out); | ||
| out.write(rawHeaders); | ||
| out.write(rawValue); | ||
| final int payloadSize = preSerializedHeaders.requiredBufferSizeForHeaders + rawValue.length; | ||
|
|
||
| return baos.toByteArray(); | ||
| } catch (final IOException e) { | ||
| throw new SerializationException("Failed to reconstruct AggregationWithHeaders", e); | ||
| } | ||
| // Format: [headersSize(varint)][headersBytes][value] | ||
| final ByteBuffer buffer = ByteBuffer.allocate(ByteUtils.sizeOfVarint(preSerializedHeaders.requiredBufferSizeForHeaders) + payloadSize); | ||
| ByteUtils.writeVarint(preSerializedHeaders.requiredBufferSizeForHeaders, buffer); | ||
|
|
||
| return HeadersSerializer.serialize(preSerializedHeaders, buffer) | ||
| .put(rawValue) | ||
| .array(); | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -161,23 +155,18 @@ static byte[] reconstructFromRaw(final byte[] rawValue, final long timestamp, fi | |
| if (rawValue == null) { | ||
| return null; | ||
| } | ||
| final byte[] rawTimestamp; | ||
| try (LongSerializer timestampSerializer = new LongSerializer()) { | ||
| rawTimestamp = timestampSerializer.serialize("", timestamp); | ||
| } | ||
| final byte[] rawHeaders = HeadersSerializer.serialize(headers); | ||
|
|
||
| try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(); | ||
| final DataOutputStream out = new DataOutputStream(baos)) { | ||
| final HeadersSerializer.PreSerializedHeaders preSerializedHeaders = HeadersSerializer.prepareSerialization(headers); | ||
|
|
||
| ByteUtils.writeVarint(rawHeaders.length, out); | ||
| out.write(rawHeaders); | ||
| out.write(rawTimestamp); | ||
| out.write(rawValue); | ||
| final int payloadSize = preSerializedHeaders.requiredBufferSizeForHeaders + 8 + rawValue.length; | ||
|
|
||
| return baos.toByteArray(); | ||
| } catch (final IOException e) { | ||
| throw new SerializationException("Failed to reconstruct ValueTimestampHeaders", e); | ||
| } | ||
| // Format: [headersSize(varint)][headersBytes][timestamp(8)][value] | ||
| final ByteBuffer buffer = ByteBuffer.allocate(ByteUtils.sizeOfVarint(preSerializedHeaders.requiredBufferSizeForHeaders) + payloadSize); | ||
| ByteUtils.writeVarint(preSerializedHeaders.requiredBufferSizeForHeaders, buffer); | ||
|
|
||
| return HeadersSerializer.serialize(preSerializedHeaders, buffer) | ||
| .putLong(timestamp) | ||
| .put(rawValue) | ||
| .array(); | ||
| } | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This 3-dimensional array is a very bad idea... totally kills performance...