Skip to content

Commit 489a41f

Browse files
kafka: upgrade to kafka 3.3.1 (#23354)
Signed-off-by: Adam Kotwasinski <[email protected]>
1 parent 3c645fa commit 489a41f

File tree

4 files changed

+25
-13
lines changed

4 files changed

+25
-13
lines changed

bazel/repository_locations.bzl

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1205,13 +1205,13 @@ REPOSITORY_LOCATIONS_SPEC = dict(
12051205
project_name = "Kafka (source)",
12061206
project_desc = "Open-source distributed event streaming platform",
12071207
project_url = "https://kafka.apache.org",
1208-
version = "3.2.3",
1209-
sha256 = "a5b45221e215696769f6ccb741c1e91fa1e18194d7ce02fade797f09efaae03a",
1208+
version = "3.3.1",
1209+
sha256 = "aab244e6ad1d63a830af1776e1810d355e36900be1e8e4e66b7555af8639e2d2",
12101210
strip_prefix = "kafka-{version}/clients/src/main/resources/common/message",
12111211
urls = ["https://github.com/apache/kafka/archive/{version}.zip"],
12121212
use_category = ["dataplane_ext"],
12131213
extensions = ["envoy.filters.network.kafka_broker", "envoy.filters.network.kafka_mesh"],
1214-
release_date = "2022-09-13",
1214+
release_date = "2022-09-29",
12151215
cpe = "cpe:2.3:a:apache:kafka:*",
12161216
license = "Apache-2.0",
12171217
license_url = "https://github.com/apache/kafka/blob/{version}/LICENSE",
@@ -1235,11 +1235,11 @@ REPOSITORY_LOCATIONS_SPEC = dict(
12351235
project_name = "Kafka (server binary)",
12361236
project_desc = "Open-source distributed event streaming platform",
12371237
project_url = "https://kafka.apache.org",
1238-
version = "3.2.3",
1239-
sha256 = "b6f91bc013fcdccd73977d49e20eaebb8fcb121a89a0803d11a9b8f1fc93db80",
1238+
version = "3.3.1",
1239+
sha256 = "18ad8a365fb111de249d3bb8bf3c96cd1af060ec8fb3e3d1fc4a7ae10d9042de",
12401240
strip_prefix = "kafka_2.13-{version}",
12411241
urls = ["https://archive.apache.org/dist/kafka/{version}/kafka_2.13-{version}.tgz"],
1242-
release_date = "2022-09-09",
1242+
release_date = "2022-10-02",
12431243
use_category = ["test_only"],
12441244
),
12451245
kafka_python_client = dict(

contrib/kafka/filters/network/source/protocol/generator.py

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -238,8 +238,8 @@ def parse_field(self, field_spec, highest_possible_version):
238238

239239
def parse_type(self, type_name, field_spec, highest_possible_version):
240240
"""
241-
Parse a given type element - returns an array type, primitive (e.g. uint32_t) or complex one.
242-
"""
241+
Parse a given type element - returns an array type, primitive (e.g. uint32_t) or complex one.
242+
"""
243243
if (type_name.startswith('[]')):
244244
# In spec files, array types are defined as `[]underlying_type` instead of having its own
245245
# element with type inside.
@@ -474,8 +474,8 @@ def is_printable(self):
474474

475475
class Primitive(TypeSpecification):
476476
"""
477-
Represents a Kafka primitive value.
478-
"""
477+
Represents a Kafka primitive value.
478+
"""
479479

480480
USABLE_PRIMITIVE_TYPE_NAMES = [
481481
'bool', 'int8', 'int16', 'int32', 'int64', 'uint16', 'float64', 'string', 'bytes',
@@ -562,7 +562,7 @@ class Primitive(TypeSpecification):
562562
def __init__(self, name, custom_default_value):
563563
self.original_name = name
564564
self.name = Primitive.compute(name, Primitive.KAFKA_TYPE_TO_ENVOY_TYPE)
565-
self.custom_default_value = custom_default_value
565+
self.custom_default_value = Primitive.sanitize_value(self.name, custom_default_value)
566566

567567
@staticmethod
568568
def compute(name, map):
@@ -571,6 +571,18 @@ def compute(name, map):
571571
else:
572572
raise ValueError(name)
573573

574+
@staticmethod
575+
def sanitize_value(type, arg):
576+
"""
577+
Unfortunately we cannot print Python True/False straight into C++ code, so we lowercase.
578+
"""
579+
if arg is None:
580+
return None
581+
if 'bool' == type:
582+
return str(arg).lower()
583+
else:
584+
return arg
585+
574586
def compute_declaration_chain(self):
575587
# Primitives need no declarations.
576588
return []

docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ Kafka Broker filter
55

66
The Apache Kafka broker filter decodes the client protocol for
77
`Apache Kafka <https://kafka.apache.org/>`_, both the requests and responses in the payload.
8-
The message versions in `Kafka 3.2.3 <http://kafka.apache.org/32/protocol.html#protocol_api_keys>`_
8+
The message versions in `Kafka 3.3.1 <http://kafka.apache.org/33/protocol.html#protocol_api_keys>`_
99
are supported.
1010
The filter attempts not to influence the communication between client and brokers, so the messages
1111
that could not be decoded (due to Kafka client or broker running a newer version than supported by

docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ Kafka Mesh filter
66
The Apache Kafka mesh filter provides a facade for `Apache Kafka <https://kafka.apache.org/>`_
77
producers. Produce requests sent to this filter instance can be forwarded to one of multiple
88
clusters, depending on configured forwarding rules. Corresponding message versions from
9-
Kafka 3.2.3 are supported.
9+
Kafka 3.3.1 are supported.
1010

1111
* This filter should be configured with the type URL ``type.googleapis.com/envoy.extensions.filters.network.kafka_mesh.v3alpha.KafkaMesh``.
1212
* :ref:`v3 API reference <envoy_v3_api_msg_extensions.filters.network.kafka_mesh.v3alpha.KafkaMesh>`

0 commit comments

Comments
 (0)