@@ -246,7 +246,8 @@ private void unsafeFlush() {
         final Map<String, List<Record<GenericRecord>>> recordsToInsertByTopic =
                 recordsToInsert.stream().collect(Collectors.groupingBy(record -> record.getTopicName().get()));

-        for (List<Record<GenericRecord>> singleTopicRecordsToInsert : recordsToInsertByTopic.values()) {
+        for (Map.Entry<String, List<Record<GenericRecord>>> entry : recordsToInsertByTopic.entrySet()) {
+            List<Record<GenericRecord>> singleTopicRecordsToInsert = entry.getValue();
             Record<GenericRecord> firstRecord = singleTopicRecordsToInsert.get(0);
             Schema<GenericRecord> schema;
             try {
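
Aside: the grouping above relies on Collectors.groupingBy, which keys the resulting map by topic name; switching the loop from values() to entrySet() is what brings that key into scope for the per-topic log lines added in the next hunk. Below is a minimal, self-contained sketch of the pattern (TopicRecord and the sample data are hypothetical stand-ins for Pulsar's Record<GenericRecord>, for illustration only):

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class GroupByTopicSketch {
        // Hypothetical stand-in for Record<GenericRecord>; requires Java 16+.
        record TopicRecord(String topicName, String payload) {}

        public static void main(String[] args) {
            List<TopicRecord> batch = List.of(
                    new TopicRecord("topic-a", "r1"),
                    new TopicRecord("topic-b", "r2"),
                    new TopicRecord("topic-a", "r3"));

            // Same shape as recordsToInsertByTopic in the patch: topic name -> records.
            Map<String, List<TopicRecord>> byTopic = batch.stream()
                    .collect(Collectors.groupingBy(TopicRecord::topicName));

            // Iterating entrySet() keeps the topic name available for logging;
            // iterating values() alone would not.
            for (Map.Entry<String, List<TopicRecord>> entry : byTopic.entrySet()) {
                System.out.printf("topic=%s size=%d%n", entry.getKey(), entry.getValue().size());
            }
        }
    }
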
@@ -269,21 +270,27 @@ private void unsafeFlush() {
                 final Iterator<Record<GenericRecord>> iter = singleTopicRecordsToInsert.iterator();
                 filepath = buildPartitionPath(firstRecord, partitioner, format, timeStampForPartitioning);
                 ByteBuffer payload = bindValue(iter, format);
-                log.info("Uploading blob {} currentBatchSize {} currentBatchBytes {}", filepath, currentBatchSize.get(),
-                        currentBatchBytes.get());
+                int uploadSize = singleTopicRecordsToInsert.size();
+                long uploadBytes = getBytesSum(singleTopicRecordsToInsert);
+                log.info("Uploading blob {} from topic {} uploadSize {} out of currentBatchSize {} "
+                                + "uploadBytes {} out of currentBatchBytes {}",
+                        filepath, entry.getKey(),
+                        uploadSize, currentBatchSize.get(),
+                        uploadBytes, currentBatchBytes.get());
                 long elapsedMs = System.currentTimeMillis();
                 uploadPayload(payload, filepath);
                 elapsedMs = System.currentTimeMillis() - elapsedMs;
                 log.debug("Uploading blob {} elapsed time in ms: {}", filepath, elapsedMs);
                 singleTopicRecordsToInsert.forEach(Record::ack);
-                currentBatchBytes.addAndGet(-1 * getBytesSum(singleTopicRecordsToInsert));
-                currentBatchSize.addAndGet(-1 * singleTopicRecordsToInsert.size());
+                currentBatchBytes.addAndGet(-1 * uploadBytes);
+                currentBatchSize.addAndGet(-1 * uploadSize);
                 if (sinkContext != null) {
                     sinkContext.recordMetric(METRICS_TOTAL_SUCCESS, singleTopicRecordsToInsert.size());
                     sinkContext.recordMetric(METRICS_LATEST_UPLOAD_ELAPSED_TIME, elapsedMs);
                 }
-                log.info("Successfully uploaded blob {} currentBatchSize {} currentBatchBytes {}", filepath,
-                        currentBatchSize.get(), currentBatchBytes.get());
+                log.info("Successfully uploaded blob {} from topic {} uploadSize {} uploadBytes {}",
+                        filepath, entry.getKey(),
+                        uploadSize, uploadBytes);
             } catch (Exception e) {
                 if (e instanceof ContainerNotFoundException) {
                     log.error("Blob {} is not found", filepath, e);
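
Aside: the second hunk captures uploadSize and uploadBytes once, before the upload, so the same figures feed both log lines and the counter decrements instead of recomputing getBytesSum(...) and size() afterwards. A minimal sketch of that counter pattern, assuming AtomicInteger/AtomicLong fields like currentBatchSize and currentBatchBytes (the method names here are illustrative, not the sink's API):

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.atomic.AtomicLong;

    public class BatchCounterSketch {
        private final AtomicInteger currentBatchSize = new AtomicInteger();
        private final AtomicLong currentBatchBytes = new AtomicLong();

        // Called as records are buffered into the pending batch.
        void onRecordBuffered(long recordBytes) {
            currentBatchSize.incrementAndGet();
            currentBatchBytes.addAndGet(recordBytes);
        }

        // Called after a per-topic upload succeeds: subtract exactly what was
        // uploaded, using figures captured before the upload, as in the patch.
        void onUploadSucceeded(int uploadSize, long uploadBytes) {
            currentBatchSize.addAndGet(-uploadSize);
            currentBatchBytes.addAndGet(-uploadBytes);
        }
    }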