@@ -2458,7 +2458,7 @@ class KafkaApisTest extends Logging {
   @Test
   def shouldReplaceProducerFencedWithInvalidProducerEpochInProduceResponse(): Unit = {
     val topic = "topic"
-    val topicId = Uuid.randomUuid()
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
     val tp = new TopicIdPartition(topicId, 0, "topic")
     addTopicToMetadataCache(topic, numPartitions = 2, topicId = topicId)
 
@@ -2519,7 +2519,7 @@ class KafkaApisTest extends Logging {
   @Test
   def testProduceResponseContainsNewLeaderOnNotLeaderOrFollower(): Unit = {
     val topic = "topic"
-    val topicId = Uuid.randomUuid()
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
     addTopicToMetadataCache(topic, numPartitions = 2, numBrokers = 3, topicId = topicId)
 
     for (version <- 10 to ApiKeys.PRODUCE.latestVersion) {
@@ -2590,7 +2590,7 @@ class KafkaApisTest extends Logging {
   @Test
   def testProduceResponseReplicaManagerLookupErrorOnNotLeaderOrFollower(): Unit = {
     val topic = "topic"
-    val topicId = Uuid.randomUuid()
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
     addTopicToMetadataCache(topic, numPartitions = 2, numBrokers = 3, topicId = topicId)
 
     for (version <- 10 to ApiKeys.PRODUCE.latestVersion) {
@@ -2657,7 +2657,7 @@ class KafkaApisTest extends Logging {
   @Test
   def testProduceResponseMetadataLookupErrorOnNotLeaderOrFollower(): Unit = {
     val topic = "topic"
-    val topicId = Uuid.randomUuid()
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
     metadataCache = mock(classOf[ZkMetadataCache])
 
     for (version <- 10 to ApiKeys.PRODUCE.latestVersion) {
@@ -2669,10 +2669,11 @@ class KafkaApisTest extends Logging {
       val tp = new TopicIdPartition(topicId, 0, topic)
 
       val topicProduceData = new ProduceRequestData.TopicProduceData()
-        .setName(tp.topic)
 
       if (version >= 12) {
         topicProduceData.setTopicId(topicId)
+      } else {
+        topicProduceData.setName(tp.topic)
       }
 
       val produceRequest = ProduceRequest.forCurrentMagic(new ProduceRequestData()
@@ -2707,8 +2708,8 @@ class KafkaApisTest extends Logging {
       when(clientQuotaManager.maybeRecordAndGetThrottleTimeMs(
         any[RequestChannel.Request](), anyDouble, anyLong)).thenReturn(0)
       when(metadataCache.contains(tp.topicPartition())).thenAnswer(_ => true)
-      when(metadataCache.getTopicName(any())).thenReturn(Some(topicProduceData.name()))
-      when(metadataCache.getTopicId(any())).thenReturn(topicProduceData.topicId())
+      when(metadataCache.getTopicName(tp.topicId())).thenReturn(Some(tp.topic()))
+      when(metadataCache.getTopicId(tp.topic())).thenReturn(tp.topicId())
       when(metadataCache.getPartitionInfo(tp.topic(), tp.partition())).thenAnswer(_ => Option.empty)
       when(metadataCache.getAliveBrokerNode(any(), any())).thenReturn(Option.empty)
       kafkaApis = createKafkaApis()
@@ -2732,7 +2733,8 @@ class KafkaApisTest extends Logging {
     val topic = "topic"
     val transactionalId = "txn1"
 
-    val tp = new TopicIdPartition(Uuid.randomUuid(), 0, "topic")
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
+    val tp = new TopicIdPartition(topicId, 0, "topic")
     addTopicToMetadataCache(topic, numPartitions = 2, topicId = tp.topicId())
 
     for (version <- 3 to ApiKeys.PRODUCE.latestVersion) {
@@ -2913,7 +2915,7 @@ class KafkaApisTest extends Logging {
   def shouldRespondWithUnsupportedMessageFormatForBadPartitionAndNoErrorsForGoodPartition(): Unit = {
     val tp1 = new TopicPartition("t", 0)
     val tp2 = new TopicPartition("t1", 0)
-    val topicId = Uuid.randomUuid()
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
     val (_, request) = createWriteTxnMarkersRequest(asList(tp1, tp2))
     val expectedErrors = Map(tp1 -> Errors.UNSUPPORTED_FOR_MESSAGE_FORMAT, tp2 -> Errors.NONE).asJava
 
@@ -3046,7 +3048,7 @@ class KafkaApisTest extends Logging {
   def shouldRespondWithUnknownTopicOrPartitionForBadPartitionAndNoErrorsForGoodPartition(): Unit = {
     val tp1 = new TopicPartition("t", 0)
     val tp2 = new TopicPartition("t1", 0)
-    val topicId = Uuid.randomUuid()
+    val topicId = Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg")
     val (_, request) = createWriteTxnMarkersRequest(asList(tp1, tp2))
     val expectedErrors = Map(tp1 -> Errors.UNKNOWN_TOPIC_OR_PARTITION, tp2 -> Errors.NONE).asJava
 
@@ -3131,8 +3133,8 @@ class KafkaApisTest extends Logging {
     val foo1 = new TopicPartition("foo", 1)
 
     val topicIds = Map(
-      Topic.GROUP_METADATA_TOPIC_NAME -> Uuid.randomUuid(),
-      "foo" -> Uuid.randomUuid())
+      Topic.GROUP_METADATA_TOPIC_NAME -> Uuid.fromString("JaTH2JYK2ed2GzUapg8tgg"),
+      "foo" -> Uuid.fromString("d2Gg8tgzJa2JYK2eTHUapg"))
     val allPartitions = List(
       offset0,
       offset1,