Wordcount-Implementierung mit Kafka-Bordmitteln und MongoDB als Storage
[demos/kafka/training] / src / main / java / de / juplo / kafka / ApplicationConfiguration.java
index f228d85..2cf263e 100644 (file)
@@ -2,12 +2,12 @@ package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
+import java.time.Clock;
 import java.util.Properties;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -18,29 +18,22 @@ import java.util.function.Consumer;
 @EnableConfigurationProperties(ApplicationProperties.class)
 public class ApplicationConfiguration
 {
-  @Bean
-  public Consumer<ConsumerRecord<String, String>> consumer()
-  {
-    return (record) ->
-    {
-      // Handle record
-    };
-  }
-
   @Bean
   public EndlessConsumer endlessConsumer(
       KafkaConsumer<String, String> kafkaConsumer,
       ExecutorService executor,
-      Consumer<ConsumerRecord<String, String>> handler,
+      PartitionStatisticsRepository repository,
       ApplicationProperties properties)
   {
     return
         new EndlessConsumer(
             executor,
+            repository,
             properties.getClientId(),
             properties.getTopic(),
-            kafkaConsumer,
-            handler);
+            Clock.systemDefaultZone(),
+            properties.getCommitInterval(),
+            kafkaConsumer);
   }
 
   @Bean
@@ -55,12 +48,14 @@ public class ApplicationConfiguration
     Properties props = new Properties();
 
     props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
     props.put("group.id", properties.getGroupId());
     props.put("client.id", properties.getClientId());
+    props.put("enable.auto.commit", false);
     props.put("auto.offset.reset", properties.getAutoOffsetReset());
     props.put("metadata.max.age.ms", "1000");
     props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", LongDeserializer.class.getName());
+    props.put("value.deserializer", StringDeserializer.class.getName());
 
     return new KafkaConsumer<>(props);
   }