Umbau des `spring-consumer` auf den `@KafkaHandler` spring/spring-consumer--kafkahandler--2025-03-18--19-42
author: Kai Moritz <kai@juplo.de>
Thu, 14 Nov 2024 20:33:44 +0000 (21:33 +0100)
committer: Kai Moritz <kai@juplo.de>
Sat, 15 Mar 2025 18:31:32 +0000 (19:31 +0100)
README.sh
build.gradle
docker/docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/ApplicationConfiguration.java [deleted file]
src/main/java/de/juplo/kafka/ApplicationProperties.java [deleted file]
src/main/java/de/juplo/kafka/ExampleConsumer.java
src/main/resources/application.yml

index 7152ec9..ca47773 100755 (executable)
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/spring-consumer:1.1-json-SNAPSHOT
+IMAGE=juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
index 3ddca4b..c771b9c 100644 (file)
@@ -8,7 +8,7 @@ plugins {
 }
 
 group = 'de.juplo.kafka'
-version = '1.1-json-SNAPSHOT'
+version = '1.1-kafkahandler-SNAPSHOT'
 
 java {
        toolchain {
index 9fb105f..736a704 100644 (file)
@@ -92,7 +92,7 @@ services:
         cub kafka-ready -b kafka-1:9092,kafka-2:9092,kafka-3:9092 3 60 > /dev/null 2>&1 || exit 1
         if [ -e INITIALIZED ]
         then
-          echo -n Bereits konfiguriert: 
+          echo -n Bereits konfiguriert:
           cat INITIALIZED
           kafka-topics --bootstrap-server kafka:9092 --describe --topic test
         else
@@ -145,24 +145,30 @@ services:
       juplo.producer.throttle-ms: 100
 
   consumer:
-    image: juplo/spring-consumer:1.1-json-SNAPSHOT
+    image: juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: consumer
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: consumer
+      spring.kafka.consumer.auto-offset-reset: earliest
+      logging.level.org.apache.kafka.clients.consumer: INFO
       juplo.consumer.topic: test
 
   peter:
-    image: juplo/spring-consumer:1.1-json-SNAPSHOT
+    image: juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: peter
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: peter
+      spring.kafka.consumer.auto-offset-reset: earliest
+      logging.level.org.apache.kafka.clients.consumer: INFO
       juplo.consumer.topic: test
 
   ute:
-    image: juplo/spring-consumer:1.1-json-SNAPSHOT
+    image: juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: ute
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: ute
+      spring.kafka.consumer.auto-offset-reset: earliest
+      logging.level.org.apache.kafka.clients.consumer: INFO
       juplo.consumer.topic: test
 
 volumes:
diff --git a/pom.xml b/pom.xml
index 8dd99a5..284bb4a 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -15,7 +15,7 @@
   <artifactId>spring-consumer</artifactId>
   <name>Spring Consumer</name>
   <description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
-  <version>1.1-json-SNAPSHOT</version>
+  <version>1.1-kafkahandler-SNAPSHOT</version>
 
   <properties>
     <java.version>21</java.version>
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
deleted file mode 100644 (file)
index 33022bf..0000000
+++ /dev/null
@@ -1,57 +0,0 @@
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.consumer.StickyAssignor;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
-  @Bean
-  public ExampleConsumer exampleConsumer(
-    Consumer<String, Message> kafkaConsumer,
-    ApplicationProperties properties,
-    ConfigurableApplicationContext applicationContext)
-  {
-    return
-      new ExampleConsumer(
-        properties.getClientId(),
-        properties.getConsumerProperties().getTopic(),
-        kafkaConsumer,
-        () -> applicationContext.close());
-  }
-
-  @Bean(destroyMethod = "")
-  public KafkaConsumer<String, Message> kafkaConsumer(ApplicationProperties properties)
-  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("group.id", properties.getConsumerProperties().getGroupId());
-    if (properties.getConsumerProperties().getAutoOffsetReset() != null)
-    {
-      props.put("auto.offset.reset", properties.getConsumerProperties().getAutoOffsetReset().name());
-    }
-    if (properties.getConsumerProperties().getAutoCommitInterval() != null)
-    {
-      props.put("auto.commit.interval", properties.getConsumerProperties().getAutoCommitInterval());
-    }
-    props.put("metadata.maxage.ms", 5000); //  5 Sekunden
-    props.put("partition.assignment.strategy", StickyAssignor.class.getName());
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", JsonDeserializer.class.getName());
-    props.put("spring.json.type.mapping", "ADD:de.juplo.kafka.MessageAddNumber,CALC:de.juplo.kafka.MessageCalculateSum");
-
-    return new KafkaConsumer<>(props);
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
deleted file mode 100644 (file)
index c8193c9..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
-  @NotNull
-  @NotEmpty
-  private String bootstrapServer;
-  @NotNull
-  @NotEmpty
-  private String clientId;
-
-  @NotNull
-  private ConsumerProperties consumer;
-
-
-  public ConsumerProperties getConsumerProperties()
-  {
-    return consumer;
-  }
-
-
-  @Validated
-  @Getter
-  @Setter
-  static class ConsumerProperties
-  {
-    @NotNull
-    @NotEmpty
-    private String groupId;
-    @NotNull
-    @NotEmpty
-    private String topic;
-    private OffsetReset autoOffsetReset;
-    private Duration autoCommitInterval;
-
-    enum OffsetReset { latest, earliest, none }
-  }
-}
index a03fece..639d82f 100644 (file)
 package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaHandler;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.stereotype.Component;
 
 
 @Slf4j
-public class ExampleConsumer implements Runnable
+@Component
+@KafkaListener(topics = "${juplo.consumer.topic}")
+public class ExampleConsumer
 {
-  private final String id;
-  private final String topic;
-  private final Consumer<String, Message> consumer;
-  private final Thread workerThread;
-  private final Runnable closeCallback;
-
+  @Value("${spring.kafka.client-id}")
+  private String id;
   private long consumed = 0;
 
 
-  public ExampleConsumer(
-    String clientId,
-    String topic,
-    Consumer<String, Message> consumer,
-    Runnable closeCallback)
-  {
-    this.id = clientId;
-    this.topic = topic;
-    this.consumer = consumer;
-
-    workerThread = new Thread(this, "ExampleConsumer Worker-Thread");
-    workerThread.start();
-
-    this.closeCallback = closeCallback;
-  }
-
-
-  @Override
-  public void run()
-  {
-    try
-    {
-      log.info("{} - Subscribing to topic {}", id, topic);
-      consumer.subscribe(Arrays.asList(topic));
-
-      while (true)
-      {
-        ConsumerRecords<String, Message> records = consumer.poll(Duration.ofSeconds(1));
-
-        log.info("{} - Received {} messages", id, records.count());
-        for (ConsumerRecord<String, Message> record : records)
-        {
-          handleRecord(
-            record.topic(),
-            record.partition(),
-            record.offset(),
-            record.key(),
-            record.value());
-        }
-      }
-    }
-    catch(WakeupException e)
-    {
-      log.info("{} - Consumer was signaled to finish its work", id);
-    }
-    catch(Exception e)
-    {
-      log.error("{} - Unexpected error, unsubscribing!", id, e);
-      consumer.unsubscribe();
-      log.info("{} - Triggering exit of application!", id);
-      new Thread(closeCallback).start();
-    }
-    finally
-    {
-      log.info("{} - Closing the KafkaConsumer", id);
-      consumer.close();
-      log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
-    }
-  }
-
-  private void handleRecord(
-    String topic,
-    Integer partition,
-    Long offset,
-    String key,
-    Message value)
-  {
-    consumed++;
-    log.info("{} - partition={}-{}, offset={}: {}={}", id, topic, partition, offset, key, value);
-    switch (value.getType())
-    {
-      case ADD  -> addNumber((MessageAddNumber)value);
-      case CALC -> calcSum((MessageCalculateSum)value);
-      default   -> log.error("{} - Ignoring message of unknown typ {}", id, value.getType());
-    }
-  }
-
+  @KafkaHandler
   private void addNumber(MessageAddNumber addNumber)
   {
     log.info("{} - Adding number {}", id, addNumber.getNext());
   }
 
+  @KafkaHandler
   private void calcSum(MessageCalculateSum calculateSum)
   {
     log.info("{} - Calculating sum", id);
   }
-
-  public void shutdown() throws InterruptedException
-  {
-    log.info("{} - Waking up the consumer", id);
-    consumer.wakeup();
-    log.info("{} - Joining the worker thread", id);
-    workerThread.join();
-  }
 }
index 7a06731..b3e3358 100644 (file)
@@ -1,11 +1,6 @@
 juplo:
-  bootstrap-server: :9092
-  client-id: DEV
   consumer:
-    group-id: my-group
     topic: test
-    auto-offset-reset: earliest
-    auto-commit-interval: 5s
 management:
   endpoint:
     shutdown:
@@ -21,14 +16,20 @@ management:
       enabled: true
 info:
   kafka:
-    bootstrap-server: ${juplo.bootstrap-server}
-    client-id: ${juplo.client-id}
+    bootstrap-server: ${spring.kafka.bootstrap-servers}
+    client-id: ${spring.kafka.client-id}
+    group-id: ${spring.kafka.consumer.group-id}
+    topic: ${juplo.consumer.topic}
+    auto-offset-reset: ${spring.kafka.consumer.auto-offset-reset}
+spring:
+  kafka:
+    bootstrap-servers: :9092
+    client-id: DEV
     consumer:
-      group-id: ${juplo.consumer.group-id}
-      topic: ${juplo.consumer.topic}
-      auto-offset-reset: ${juplo.consumer.auto-offset-reset}
-      auto-commit-interval: ${juplo.consumer.auto-commit-interval}
-logging:
+      group-id: my-group
+      value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
+      properties:
+        "[spring.json.type.mapping]": ADD:de.juplo.kafka.MessageAddNumber,CALC:de.juplo.kafka.MessageCalculateSum
   level:
     root: INFO
     de.juplo: DEBUG