Der Consumer erkennt die Änderung der Partitionierung schneller
[demos/kafka/training] / src / main / java / de / juplo / kafka / EndlessConsumer.java
index da2f8f0..357a0b4 100644 (file)
@@ -10,6 +10,8 @@ import org.apache.kafka.common.serialization.StringDeserializer;
 import javax.annotation.PreDestroy;
 import java.time.Duration;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -25,24 +27,30 @@ public class EndlessConsumer implements Runnable
   private final String groupId;
   private final String id;
   private final String topic;
+  private final String autoOffsetReset;
 
   private AtomicBoolean running = new AtomicBoolean();
   private long consumed = 0;
   private KafkaConsumer<String, String> consumer = null;
   private Future<?> future = null;
 
+  private Map<Integer, Map<String, Integer>> seen;
+
+
   public EndlessConsumer(
       ExecutorService executor,
       String bootstrapServer,
       String groupId,
       String clientId,
-      String topic)
+      String topic,
+      String autoOffsetReset)
   {
     this.executor = executor;
     this.bootstrapServer = bootstrapServer;
     this.groupId = groupId;
     this.id = clientId;
     this.topic = topic;
+    this.autoOffsetReset = autoOffsetReset;
   }
 
   @Override
@@ -54,7 +62,8 @@ public class EndlessConsumer implements Runnable
       props.put("bootstrap.servers", bootstrapServer);
       props.put("group.id", groupId);
       props.put("client.id", id);
-      props.put("auto.offset.reset", "earliest");
+      props.put("auto.offset.reset", autoOffsetReset);
+      props.put("metadata.max.age.ms", "1000");
       props.put("key.deserializer", StringDeserializer.class.getName());
       props.put("value.deserializer", StringDeserializer.class.getName());
 
@@ -63,6 +72,8 @@ public class EndlessConsumer implements Runnable
       log.info("{} - Subscribing to topic {}", id, topic);
       consumer.subscribe(Arrays.asList(topic));
 
+      seen = new HashMap<>();
+
       while (true)
       {
         ConsumerRecords<String, String> records =
@@ -82,6 +93,21 @@ public class EndlessConsumer implements Runnable
               record.key(),
               record.value()
           );
+
+          Integer partition = record.partition();
+          String key = record.key() == null ? "NULL" : record.key();
+
+          if (!seen.containsKey(partition))
+            seen.put(partition, new HashMap<>());
+
+          Map<String, Integer> byKey = seen.get(partition);
+
+          if (!byKey.containsKey(key))
+            byKey.put(key, 0);
+
+          int seenByKey = byKey.get(key);
+          seenByKey++;
+          byKey.put(key, seenByKey);
         }
       }
     }
@@ -98,10 +124,30 @@ public class EndlessConsumer implements Runnable
     {
       log.info("{} - Closing the KafkaConsumer", id);
       consumer.close();
+
+      for (Integer partition : seen.keySet())
+      {
+        Map<String, Integer> byKey = seen.get(partition);
+        for (String key : byKey.keySet())
+        {
+          log.info(
+              "{} - Seen {} messages for partition={}|key={}",
+              id,
+              byKey.get(key),
+              partition,
+              key);
+        }
+      }
+      seen = null;
+
       log.info("{} - Consumer-Thread exiting", id);
     }
   }
 
+  public Map<Integer, Map<String, Integer>> getSeen()
+  {
+    return seen;
+  }
 
   public synchronized void start()
   {