#!/bin/bash
-IMAGE=juplo/spring-consumer:1.1-SNAPSHOT
+IMAGE=juplo/spring-consumer:1.1-kafkalistener-SNAPSHOT
if [ "$1" = "cleanup" ]
then
juplo.producer.throttle-ms: 100
consumer:
- image: juplo/spring-consumer:1.1-SNAPSHOT
+ image: juplo/spring-consumer:1.1-kafkalistener-SNAPSHOT
environment:
- juplo.bootstrap-server: kafka:9092
- juplo.client-id: consumer
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: consumer
+ spring.kafka.consumer.auto-offset-reset: earliest
+ logging.level.org.apache.kafka.clients.consumer: INFO
juplo.consumer.topic: test
volumes:
<artifactId>spring-consumer</artifactId>
<name>Spring Consumer</name>
<description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
- <version>1.1-SNAPSHOT</version>
+ <version>1.1-kafkalistener-SNAPSHOT</version>
<properties>
<java.version>21</java.version>
<artifactId>spring-boot-starter-validation</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka-clients</artifactId>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka</artifactId>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
+++ /dev/null
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.consumer.StickyAssignor;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
- @Bean
- public ExampleConsumer exampleConsumer(
- Consumer<String, String> kafkaConsumer,
- ApplicationProperties properties,
- ConfigurableApplicationContext applicationContext)
- {
- return
- new ExampleConsumer(
- properties.getClientId(),
- properties.getConsumerProperties().getTopic(),
- kafkaConsumer,
- () -> applicationContext.close());
- }
-
- @Bean(destroyMethod = "")
- public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
- {
- Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("client.id", properties.getClientId());
- props.put("group.id", properties.getConsumerProperties().getGroupId());
- if (properties.getConsumerProperties().getAutoOffsetReset() != null)
- {
- props.put("auto.offset.reset", properties.getConsumerProperties().getAutoOffsetReset().name());
- }
- if (properties.getConsumerProperties().getAutoCommitInterval() != null)
- {
- props.put("auto.commit.interval", properties.getConsumerProperties().getAutoCommitInterval());
- }
- props.put("metadata.maxage.ms", 5000); // 5 Sekunden
- props.put("partition.assignment.strategy", StickyAssignor.class.getName());
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", StringDeserializer.class.getName());
-
- return new KafkaConsumer<>(props);
- }
-}
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.validation.annotation.Validated;
-import java.time.Duration;
-
@ConfigurationProperties(prefix = "juplo")
@Validated
@Setter
public class ApplicationProperties
{
- @NotNull
- @NotEmpty
- private String bootstrapServer;
- @NotNull
- @NotEmpty
- private String clientId;
-
@NotNull
private ConsumerProperties consumer;
@Setter
static class ConsumerProperties
{
- @NotNull
- @NotEmpty
- private String groupId;
@NotNull
@NotEmpty
private String topic;
- private OffsetReset autoOffsetReset;
- private Duration autoCommitInterval;
-
- enum OffsetReset { latest, earliest, none }
}
}
package de.juplo.kafka;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.stereotype.Component;
@Slf4j
-public class ExampleConsumer implements Runnable
+@Component
+public class ExampleConsumer
{
- private final String id;
- private final String topic;
- private final Consumer<String, String> consumer;
- private final Thread workerThread;
- private final Runnable closeCallback;
-
- private volatile boolean running = false;
+ @Value("${spring.kafka.client-id}")
+ private String id;
private long consumed = 0;
-
- public ExampleConsumer(
- String clientId,
- String topic,
- Consumer<String, String> consumer,
- Runnable closeCallback)
- {
- this.id = clientId;
- this.topic = topic;
- this.consumer = consumer;
-
- workerThread = new Thread(this, "ExampleConsumer Worker-Thread");
- workerThread.start();
-
- this.closeCallback = closeCallback;
- }
-
-
- @Override
- public void run()
- {
- try
- {
- log.info("{} - Subscribing to topic {}", id, topic);
- consumer.subscribe(Arrays.asList(topic));
- running = true;
-
- while (running)
- {
- ConsumerRecords<String, String> records =
- consumer.poll(Duration.ofSeconds(1));
-
- log.info("{} - Received {} messages", id, records.count());
- for (ConsumerRecord<String, String> record : records)
- {
- handleRecord(
- record.topic(),
- record.partition(),
- record.offset(),
- record.key(),
- record.value());
- }
- }
- }
- catch(WakeupException e)
- {
- log.info("{} - Consumer was signaled to finish its work", id);
- }
- catch(Exception e)
- {
- log.error("{} - Unexpected error, unsubscribing!", id, e);
- consumer.unsubscribe();
- log.info("{} - Triggering exit of application!", id);
- new Thread(closeCallback).start();
- }
- finally
- {
- log.info("{} - Closing the KafkaConsumer", id);
- consumer.close();
- log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
- }
- }
-
+ @KafkaListener(topics = "${juplo.consumer.topic}")
private void handleRecord(
+ @Header(KafkaHeaders.RECEIVED_TOPIC)
String topic,
+ @Header(KafkaHeaders.RECEIVED_PARTITION)
Integer partition,
+ @Header(KafkaHeaders.OFFSET)
Long offset,
+ @Header(KafkaHeaders.RECEIVED_KEY)
String key,
+ @Payload
String value)
{
consumed++;
log.info("{} - {}: {}/{} - {}={}", id, offset, topic, partition, key, value);
}
-
-
- public void shutdown() throws InterruptedException
- {
- log.info("{} joining the worker-thread...", id);
- running = false;
- consumer.wakeup();
- workerThread.join();
- }
}
juplo:
- bootstrap-server: :9092
- client-id: DEV
consumer:
- group-id: my-group
topic: test
- auto-offset-reset: earliest
- auto-commit-interval: 5s
management:
endpoint:
shutdown:
enabled: true
info:
kafka:
- bootstrap-server: ${juplo.bootstrap-server}
- client-id: ${juplo.client-id}
+ bootstrap-server: ${spring.kafka.bootstrap-servers}
+ client-id: ${spring.kafka.client-id}
+ group-id: ${spring.kafka.consumer.group-id}
+ topic: ${juplo.consumer.topic}
+ auto-offset-reset: ${spring.kafka.consumer.auto-offset-reset}
+spring:
+ kafka:
+ bootstrap-servers: :9092
+ client-id: DEV
consumer:
- group-id: ${juplo.consumer.group-id}
- topic: ${juplo.consumer.topic}
- auto-offset-reset: ${juplo.consumer.auto-offset-reset}
- auto-commit-interval: ${juplo.consumer.auto-commit-interval}
+ group-id: my-group
logging:
level:
root: INFO