Merged fixes for Setup/README.sh from 'deserialization' into 'stored-offsets'
[demos/kafka/training] src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 0d17823..3925fcb 100644
@@ -2,6 +2,7 @@ package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
@@ -18,39 +19,34 @@ import java.util.concurrent.Executors;
 public class ApplicationConfiguration
 {
   @Bean
-  public WordcountRecordHandler wordcountRecordHandler(
-      PartitionStatisticsRepository repository,
-      Consumer<String, String> consumer,
-      ApplicationProperties properties)
+  public KeyCountingRecordHandler keyCountingRecordHandler()
   {
-    return new WordcountRecordHandler(
-        repository,
-        properties.getTopic(),
-        Clock.systemDefaultZone(),
-        properties.getCommitInterval(),
-        consumer);
+    return new KeyCountingRecordHandler();
   }
 
   @Bean
-  public WordcountRebalanceListener wordcountRebalanceListener(
-      WordcountRecordHandler wordcountRecordHandler,
+  public KeyCountingRebalanceListener keyCountingRebalanceListener(
+      KeyCountingRecordHandler keyCountingRecordHandler,
       PartitionStatisticsRepository repository,
-      Consumer<String, String> consumer,
+      Consumer<String, Long> consumer,
       ApplicationProperties properties)
   {
-    return new WordcountRebalanceListener(
-        wordcountRecordHandler,
+    return new KeyCountingRebalanceListener(
+        keyCountingRecordHandler,
         repository,
         properties.getClientId(),
+        properties.getTopic(),
+        Clock.systemDefaultZone(),
+        properties.getCommitInterval(),
         consumer);
   }
 
   @Bean
-  public EndlessConsumer<String, String> endlessConsumer(
-      KafkaConsumer<String, String> kafkaConsumer,
+  public EndlessConsumer<String, Long> endlessConsumer(
+      KafkaConsumer<String, Long> kafkaConsumer,
       ExecutorService executor,
-      WordcountRebalanceListener wordcountRebalanceListener,
-      WordcountRecordHandler wordcountRecordHandler,
+      KeyCountingRebalanceListener keyCountingRebalanceListener,
+      KeyCountingRecordHandler keyCountingRecordHandler,
       ApplicationProperties properties)
   {
     return
@@ -59,8 +55,8 @@ public class ApplicationConfiguration
             properties.getClientId(),
             properties.getTopic(),
             kafkaConsumer,
-            wordcountRebalanceListener,
-            wordcountRecordHandler);
+            keyCountingRebalanceListener,
+            keyCountingRecordHandler);
   }
 
   @Bean
@@ -70,7 +66,7 @@ public class ApplicationConfiguration
   }
 
   @Bean(destroyMethod = "close")
-  public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+  public KafkaConsumer<String, Long> kafkaConsumer(ApplicationProperties properties)
   {
     Properties props = new Properties();
 
@@ -80,9 +76,10 @@ public class ApplicationConfiguration
     props.put("client.id", properties.getClientId());
     props.put("enable.auto.commit", false);
     props.put("auto.offset.reset", properties.getAutoOffsetReset());
+    props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
     props.put("metadata.max.age.ms", "1000");
     props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", StringDeserializer.class.getName());
+    props.put("value.deserializer", LongDeserializer.class.getName());
 
     return new KafkaConsumer<>(props);
   }
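
Note: this diff only rewires the beans; the implementation of KeyCountingRecordHandler is not part of the commit. Below is a minimal sketch of what a per-key counting handler could look like, assuming a no-arg constructor (as wired above) and a single per-record callback; the method name accept() and the accessor getCounts() are hypothetical, not taken from the repository.

    import org.apache.kafka.clients.consumer.ConsumerRecord;

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch -- the actual class is not shown in this commit
    public class KeyCountingRecordHandler
    {
      // Records seen so far, counted per key
      private final Map<String, Long> counts = new HashMap<>();

      // Assumed per-record callback, invoked for every polled record
      public void accept(ConsumerRecord<String, Long> record)
      {
        counts.merge(record.key(), 1L, Long::sum);
      }

      public Map<String, Long> getCounts()
      {
        return counts;
      }
    }

Because the value deserializer changes from StringDeserializer to LongDeserializer, records now arrive as ConsumerRecord<String, Long>; a handler like the one sketched above only inspects the key, so the Long value is ignored here.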