1- from aiokafka import AIOKafkaProducer
2- from aiokafka.errors import KafkaConnectionError
31from binascii import Error as B64DecodeError
42from collections import namedtuple
53from confluent_kafka.error import KafkaException
1311    TopicAuthorizationFailedError,
1412    UnknownTopicOrPartitionError,
1513)
16- from karapace.config import Config, create_client_ssl_context
14+ from karapace.config import Config
1715from karapace.errors import InvalidSchema
1816from karapace.kafka.admin import KafkaAdminClient
17+ from karapace.kafka.producer import AsyncKafkaProducer
1918from karapace.kafka_rest_apis.authentication import (
2019    get_auth_config_from_header,
2120    get_expiration_time_from_header,
3635    SchemaRetrievalError,
3736)
3837from karapace.typing import NameStrategy, SchemaId, Subject, SubjectType
39- from karapace.utils import convert_to_int, json_encode, KarapaceKafkaClient
38+ from karapace.utils import convert_to_int, json_encode
4039from typing import Callable, Dict, List, Optional, Tuple, Union
4140
4241import asyncio
@@ -73,6 +72,7 @@ def __init__(self, config: Config) -> None:
7372        self._idle_proxy_janitor_task: Optional[asyncio.Task] = None
7473
7574    async def close(self) -> None:
75+        log.info("Closing REST proxy application")
7676        if self._idle_proxy_janitor_task is not None:
7777            self._idle_proxy_janitor_task.cancel()
7878            self._idle_proxy_janitor_task = None
@@ -441,7 +441,7 @@ def __init__(
441441        self._auth_expiry = auth_expiry
442442
443443        self._async_producer_lock = asyncio.Lock()
444-        self._async_producer: Optional[AIOKafkaProducer] = None
444+        self._async_producer: Optional[AsyncKafkaProducer] = None
445445        self.naming_strategy = NameStrategy(self.config["name_strategy"])
446446
447447    def __str__(self) -> str:
@@ -461,12 +461,12 @@ def auth_expiry(self) -> datetime.datetime:
461461    def num_consumers(self) -> int:
462462        return len(self.consumer_manager.consumers)
463463
464-    async def _maybe_create_async_producer(self) -> AIOKafkaProducer:
464+    async def _maybe_create_async_producer(self) -> AsyncKafkaProducer:
465465        if self._async_producer is not None:
466466            return self._async_producer
467467
468468        if self.config["producer_acks"] == "all":
469-            acks = "all"
469+            acks = -1
470470        else:
471471            acks = int(self.config["producer_acks"])
472472
@@ -477,33 +477,34 @@ async def _maybe_create_async_producer(self) -> AIOKafkaProducer:
477477
478478            log.info("Creating async producer")
479479
480-            # Don't retry if creating the SSL context fails, likely a configuration issue with
481-            # ciphers or certificate chains
482-            ssl_context = create_client_ssl_context(self.config)
483-
484-            # Don't retry if instantiating the producer fails, likely a configuration error.
485-            producer = AIOKafkaProducer(
480+            producer = AsyncKafkaProducer(
486481                acks=acks,
487482                bootstrap_servers=self.config["bootstrap_uri"],
488483                compression_type=self.config["producer_compression_type"],
489484                connections_max_idle_ms=self.config["connections_max_idle_ms"],
490485                linger_ms=self.config["producer_linger_ms"],
491-                max_request_size=self.config["producer_max_request_size"],
486+                message_max_bytes=self.config["producer_max_request_size"],
492487                metadata_max_age_ms=self.config["metadata_max_age_ms"],
493488                security_protocol=self.config["security_protocol"],
494-                ssl_context=ssl_context,
489+                ssl_cafile=self.config["ssl_cafile"],
490+                ssl_certfile=self.config["ssl_certfile"],
491+                ssl_keyfile=self.config["ssl_keyfile"],
492+                ssl_crlfile=self.config["ssl_crlfile"],
495493                **get_kafka_client_auth_parameters_from_config(self.config),
496494            )
497-
498495            try:
499496                await producer.start()
500-            except KafkaConnectionError:
497+            except (NoBrokersAvailable, AuthenticationFailedError):
498+                await producer.stop()
501499                if retry:
502500                    log.exception("Unable to connect to the bootstrap servers, retrying")
503501                else:
504502                    log.exception("Giving up after trying to connect to the bootstrap servers")
505503                    raise
506504                await asyncio.sleep(1)
505+            except Exception:
506+                await producer.stop()
507+                raise
507508            else:
508509                self._async_producer = producer
@@ -645,10 +646,8 @@ def init_admin_client(self):
645646                ssl_cafile=self.config["ssl_cafile"],
646647                ssl_certfile=self.config["ssl_certfile"],
647648                ssl_keyfile=self.config["ssl_keyfile"],
648-                api_version=(1, 0, 0),
649649                metadata_max_age_ms=self.config["metadata_max_age_ms"],
650650                connections_max_idle_ms=self.config["connections_max_idle_ms"],
651-                kafka_client=KarapaceKafkaClient,
652651                **get_kafka_client_auth_parameters_from_config(self.config, async_client=False),
653652            )
654653            break
@@ -1069,8 +1068,11 @@ async def produce_messages(self, *, topic: str, prepared_records: List) -> List:
10691068            if not isinstance(result, Exception):
10701069                produce_results.append(
10711070                    {
1072-                        "offset": result.offset if result else -1,
1073-                        "partition": result.topic_partition.partition if result else 0,
1071+                        # In case the offset is not available, `confluent_kafka.Message.offset()` is
1072+                        # `None`. To preserve backwards compatibility, we replace this with -1.
1073+                        # -1 was the default `aiokafka` behaviour.
1074+                        "offset": result.offset() if result and result.offset() is not None else -1,
1075+                        "partition": result.partition() if result else 0,
10741076                    }
10751077                )
10761078