Springify: configuration is now handled via `KafkaProperties`
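Spring Boot binds `KafkaProperties` from the standard `spring.kafka.*` configuration keys, so the hand-rolled connection settings can move out of `ApplicationProperties`. A minimal sketch of the matching `application.properties` entries for the code below — the keys are the standard Spring Boot ones, the values are placeholders, not taken from this commit:

    spring.kafka.consumer.bootstrap-servers=localhost:9092
    spring.kafka.consumer.group-id=my-group
    spring.kafka.consumer.client-id=my-client
    spring.kafka.consumer.auto-offset-reset=earliest

Note that the `kafkaConsumer` bean below reads the consumer-level `bootstrap-servers`; the global `spring.kafka.bootstrap-servers` key is not consulted by `getConsumer().getBootstrapServers()`.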
[demos/kafka/training] src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 6e04453..3c526df 100644
@@ -1,46 +1,65 @@
 package de.juplo.kafka;
 
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
 import java.util.Properties;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.function.Consumer;
 
 
 @Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
 public class ApplicationConfiguration
 {
   @Bean
-  public EndlessConsumer endlessConsumer(
-      KafkaConsumer<String, String> kafkaConsumer,
-      ApplicationProperties properties)
+  public Consumer<ConsumerRecord<String, Long>> consumer()
   {
-    EndlessConsumer consumer =
-        new EndlessConsumer(
-            Executors.newFixedThreadPool(1),
-            properties.getClientId(),
-            properties.getTopic(),
-            kafkaConsumer);
+    return (record) ->
+    {
+      // Handle record
+    };
+  }
 
-    consumer.start();
+  @Bean
+  public EndlessConsumer<String, Long> endlessConsumer(
+      KafkaConsumer<String, Long> kafkaConsumer,
+      ExecutorService executor,
+      Consumer<ConsumerRecord<String, Long>> handler,
+      KafkaProperties kafkaProperties,
+      ApplicationProperties applicationProperties)
+  {
+    return
+        new EndlessConsumer<>(
+            executor,
+            kafkaProperties.getConsumer().getClientId(),
+            applicationProperties.getTopic(),
+            kafkaConsumer,
+            handler);
+  }
 
-    return consumer;
+  @Bean
+  public ExecutorService executor()
+  {
+    return Executors.newSingleThreadExecutor();
   }
 
   @Bean(destroyMethod = "close")
-  public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+  public KafkaConsumer<String, Long> kafkaConsumer(KafkaProperties properties)
   {
     Properties props = new Properties();
 
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("group.id", properties.getGroupId());
-    props.put("client.id", properties.getClientId());
-    props.put("auto.offset.reset", properties.getAutoOffsetReset());
+    props.put("bootstrap.servers", properties.getConsumer().getBootstrapServers());
+    props.put("group.id", properties.getConsumer().getGroupId());
+    props.put("client.id", properties.getConsumer().getClientId());
+    props.put("auto.offset.reset", properties.getConsumer().getAutoOffsetReset());
     props.put("metadata.max.age.ms", "1000");
     props.put("key.deserializer", StringDeserializer.class.getName());
     props.put("value.deserializer", LongDeserializer.class.getName());