Umbau des `spring-consumer` auf den `@KafkaListener` spring/spring-consumer--kafkalistener--generics4all
author Kai Moritz <kai@juplo.de>
Thu, 14 Nov 2024 20:33:44 +0000 (21:33 +0100)
committer Kai Moritz <kai@juplo.de>
Wed, 26 Feb 2025 21:50:09 +0000 (22:50 +0100)
README.sh
build.gradle
docker/docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/ApplicationConfiguration.java [deleted file]
src/main/java/de/juplo/kafka/ApplicationProperties.java [deleted file]
src/main/java/de/juplo/kafka/ExampleConsumer.java
src/main/resources/application.yml

index b46e235..6b1d575 100755 (executable)
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/spring-consumer:1.1-SNAPSHOT
+IMAGE=juplo/spring-consumer:1.1-kafkalistener-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
index a8614fd..380fcb9 100644 (file)
@@ -8,7 +8,7 @@ plugins {
 }
 
 group = 'de.juplo.kafka'
-version = '1.1-SNAPSHOT'
+version = '1.1-kafkalistener-SNAPSHOT'
 
 java {
        toolchain {
@@ -27,7 +27,7 @@ repositories {
 }
 
 dependencies {
-       implementation 'org.apache.kafka:kafka-clients'
+       implementation 'org.springframework.kafka:spring-kafka'
        implementation 'org.springframework.boot:spring-boot-starter-actuator'
        implementation 'org.springframework.boot:spring-boot-starter-validation'
        implementation 'org.springframework.boot:spring-boot-starter-web'
@@ -36,7 +36,6 @@ dependencies {
        annotationProcessor 'org.springframework.boot:spring-boot-configuration-processor'
        annotationProcessor 'org.projectlombok:lombok'
        testImplementation 'org.springframework.boot:spring-boot-starter-test'
-       testImplementation 'org.springframework.kafka:spring-kafka'
        testImplementation 'org.springframework.kafka:spring-kafka-test'
        testCompileOnly 'org.projectlombok:lombok'
        testAnnotationProcessor 'org.projectlombok:lombok'
index 4fa2ead..e2db867 100644 (file)
@@ -145,24 +145,30 @@ services:
       juplo.producer.throttle-ms: 100
 
   consumer:
-    image: juplo/spring-consumer:1.1-SNAPSHOT
+    image: juplo/spring-consumer:1.1-kafkalistener-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: consumer
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: consumer
+      spring.kafka.consumer.auto-offset-reset: earliest
+      logging.level.org.apache.kafka.clients.consumer: INFO
       juplo.consumer.topic: test
 
   peter:
-    image: juplo/spring-consumer:1.1-SNAPSHOT
+    image: juplo/spring-consumer:1.1-kafkalistener-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: peter
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: peter
+      spring.kafka.consumer.auto-offset-reset: earliest
+      logging.level.org.apache.kafka.clients.consumer: INFO
       juplo.consumer.topic: test
 
   ute:
-    image: juplo/spring-consumer:1.1-SNAPSHOT
+    image: juplo/spring-consumer:1.1-kafkalistener-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: ute
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: ute
+      spring.kafka.consumer.auto-offset-reset: earliest
+      logging.level.org.apache.kafka.clients.consumer: INFO
       juplo.consumer.topic: test
 
 volumes:
diff --git a/pom.xml b/pom.xml
index dd96d00..c5a13fb 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -15,7 +15,7 @@
   <artifactId>spring-consumer</artifactId>
   <name>Spring Consumer</name>
   <description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
-  <version>1.1-SNAPSHOT</version>
+  <version>1.1-kafkalistener-SNAPSHOT</version>
 
   <properties>
     <java.version>21</java.version>
@@ -40,8 +40,8 @@
       <artifactId>spring-boot-starter-validation</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka-clients</artifactId>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka</artifactId>
     </dependency>
     <dependency>
       <groupId>org.projectlombok</groupId>
       <artifactId>spring-boot-starter-test</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.springframework.kafka</groupId>
-      <artifactId>spring-kafka</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.springframework.kafka</groupId>
       <artifactId>spring-kafka-test</artifactId>
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
deleted file mode 100644 (file)
index d2b8e05..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.consumer.StickyAssignor;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
-  @Bean
-  public ExampleConsumer<String, String> exampleConsumer(
-    Consumer<String, String> kafkaConsumer,
-    ApplicationProperties properties,
-    ConfigurableApplicationContext applicationContext)
-  {
-    return
-      new ExampleConsumer<>(
-        properties.getClientId(),
-        properties.getConsumerProperties().getTopic(),
-        kafkaConsumer,
-        () -> applicationContext.close());
-  }
-
-  @Bean(destroyMethod = "")
-  public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
-  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("group.id", properties.getConsumerProperties().getGroupId());
-    if (properties.getConsumerProperties().getAutoOffsetReset() != null)
-    {
-      props.put("auto.offset.reset", properties.getConsumerProperties().getAutoOffsetReset().name());
-    }
-    if (properties.getConsumerProperties().getAutoCommitInterval() != null)
-    {
-      props.put("auto.commit.interval", properties.getConsumerProperties().getAutoCommitInterval());
-    }
-    props.put("metadata.maxage.ms", 5000); //  5 Sekunden
-    props.put("partition.assignment.strategy", StickyAssignor.class.getName());
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", StringDeserializer.class.getName());
-
-    return new KafkaConsumer<>(props);
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
deleted file mode 100644 (file)
index c8193c9..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
-  @NotNull
-  @NotEmpty
-  private String bootstrapServer;
-  @NotNull
-  @NotEmpty
-  private String clientId;
-
-  @NotNull
-  private ConsumerProperties consumer;
-
-
-  public ConsumerProperties getConsumerProperties()
-  {
-    return consumer;
-  }
-
-
-  @Validated
-  @Getter
-  @Setter
-  static class ConsumerProperties
-  {
-    @NotNull
-    @NotEmpty
-    private String groupId;
-    @NotNull
-    @NotEmpty
-    private String topic;
-    private OffsetReset autoOffsetReset;
-    private Duration autoCommitInterval;
-
-    enum OffsetReset { latest, earliest, none }
-  }
-}
index a6691c3..6618004 100644 (file)
@@ -1,87 +1,37 @@
 package de.juplo.kafka;
 
+import jakarta.annotation.PreDestroy;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.stereotype.Component;
 
 
 @Slf4j
-public class ExampleConsumer<K, V> implements Runnable
+@Component
+public class ExampleConsumer<K, V>
 {
-  private final String id;
-  private final String topic;
-  private final Consumer<K, V> consumer;
-  private final Thread workerThread;
-  private final Runnable closeCallback;
-
-  private volatile boolean running = false;
+  @Value("${spring.kafka.client-id}")
+  private String id;
   private long consumed = 0;
 
-
-  public ExampleConsumer(
-    String clientId,
+  @KafkaListener(topics = "${juplo.consumer.topic}")
+  private void receive(
+    @Header(KafkaHeaders.RECEIVED_TOPIC)
     String topic,
-    Consumer<K, V> consumer,
-    Runnable closeCallback)
-  {
-    this.id = clientId;
-    this.topic = topic;
-    this.consumer = consumer;
-
-    workerThread = new Thread(this, "ExampleConsumer Worker-Thread");
-    workerThread.start();
-
-    this.closeCallback = closeCallback;
-  }
-
-
-  @Override
-  public void run()
+    @Header(KafkaHeaders.RECEIVED_PARTITION)
+    Integer partition,
+    @Header(KafkaHeaders.OFFSET)
+    Long offset,
+    @Header(KafkaHeaders.RECEIVED_KEY)
+    K key,
+    @Payload
+    V value)
   {
-    try
-    {
-      log.info("{} - Subscribing to topic {}", id, topic);
-      consumer.subscribe(Arrays.asList(topic));
-      running = true;
-
-      while (running)
-      {
-        ConsumerRecords<K, V> records = consumer.poll(Duration.ofSeconds(1));
-
-        log.info("{} - Received {} messages", id, records.count());
-        for (ConsumerRecord<K, V> record : records)
-        {
-          handleRecord(
-            record.topic(),
-            record.partition(),
-            record.offset(),
-            record.key(),
-            record.value());
-        }
-      }
-    }
-    catch(WakeupException e)
-    {
-      log.info("{} - Consumer was signaled to finish its work", id);
-    }
-    catch(Exception e)
-    {
-      log.error("{} - Unexpected error, unsubscribing!", id, e);
-      consumer.unsubscribe();
-      log.info("{} - Triggering exit of application!", id);
-      new Thread(closeCallback).start();
-    }
-    finally
-    {
-      log.info("{} - Closing the KafkaConsumer", id);
-      consumer.close();
-      log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
-    }
+    handleRecord(topic, partition, offset, key, value);
   }
 
   private void handleRecord(
@@ -95,12 +45,9 @@ public class ExampleConsumer<K, V> implements Runnable
     log.info("{} - partition={}-{}, offset={}: {}={}", id, topic, partition, offset, key, value);
   }
 
-
-  public void shutdown() throws InterruptedException
+  @PreDestroy
+  public void close()
   {
-    log.info("{} joining the worker-thread...", id);
-    running = false;
-    consumer.wakeup();
-    workerThread.join();
+    log.info("{}: End of Life! Consumed {} messages in total.", id, consumed);
   }
 }
index 7a06731..71dddda 100644 (file)
@@ -1,11 +1,6 @@
 juplo:
-  bootstrap-server: :9092
-  client-id: DEV
   consumer:
-    group-id: my-group
     topic: test
-    auto-offset-reset: earliest
-    auto-commit-interval: 5s
 management:
   endpoint:
     shutdown:
@@ -21,13 +16,17 @@ management:
       enabled: true
 info:
   kafka:
-    bootstrap-server: ${juplo.bootstrap-server}
-    client-id: ${juplo.client-id}
+    bootstrap-server: ${spring.kafka.bootstrap-servers}
+    client-id: ${spring.kafka.client-id}
+    group-id: ${spring.kafka.consumer.group-id}
+    topic: ${juplo.consumer.topic}
+    auto-offset-reset: ${spring.kafka.consumer.auto-offset-reset}
+spring:
+  kafka:
+    bootstrap-servers: :9092
+    client-id: DEV
     consumer:
-      group-id: ${juplo.consumer.group-id}
-      topic: ${juplo.consumer.topic}
-      auto-offset-reset: ${juplo.consumer.auto-offset-reset}
-      auto-commit-interval: ${juplo.consumer.auto-commit-interval}
+      group-id: my-group
 logging:
   level:
     root: INFO