#!/bin/bash
-IMAGE=juplo/spring-consumer:1.1-json-SNAPSHOT
+IMAGE=juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
if [ "$1" = "cleanup" ]
then
}
group = 'de.juplo.kafka'
-version = '1.1-json-SNAPSHOT'
+version = '1.1-kafkahandler-SNAPSHOT'
java {
toolchain {
cub kafka-ready -b kafka-1:9092,kafka-2:9092,kafka-3:9092 3 60 > /dev/null 2>&1 || exit 1
if [ -e INITIALIZED ]
then
- echo -n Bereits konfiguriert:
+ echo -n Bereits konfiguriert:
cat INITIALIZED
kafka-topics --bootstrap-server kafka:9092 --describe --topic test
else
juplo.producer.throttle-ms: 100
consumer:
- image: juplo/spring-consumer:1.1-json-SNAPSHOT
+ image: juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
environment:
- juplo.bootstrap-server: kafka:9092
- juplo.client-id: consumer
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: consumer
+ spring.kafka.consumer.auto-offset-reset: earliest
+ logging.level.org.apache.kafka.clients.consumer: INFO
juplo.consumer.topic: test
peter:
- image: juplo/spring-consumer:1.1-json-SNAPSHOT
+ image: juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
environment:
- juplo.bootstrap-server: kafka:9092
- juplo.client-id: peter
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: peter
+ spring.kafka.consumer.auto-offset-reset: earliest
+ logging.level.org.apache.kafka.clients.consumer: INFO
juplo.consumer.topic: test
ute:
- image: juplo/spring-consumer:1.1-json-SNAPSHOT
+ image: juplo/spring-consumer:1.1-kafkahandler-SNAPSHOT
environment:
- juplo.bootstrap-server: kafka:9092
- juplo.client-id: ute
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: ute
+ spring.kafka.consumer.auto-offset-reset: earliest
+ logging.level.org.apache.kafka.clients.consumer: INFO
juplo.consumer.topic: test
volumes:
<artifactId>spring-consumer</artifactId>
<name>Spring Consumer</name>
<description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
- <version>1.1-json-SNAPSHOT</version>
+ <version>1.1-kafkahandler-SNAPSHOT</version>
<properties>
<java.version>21</java.version>
+++ /dev/null
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.consumer.StickyAssignor;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
- @Bean
- public ExampleConsumer exampleConsumer(
- Consumer<String, Message> kafkaConsumer,
- ApplicationProperties properties,
- ConfigurableApplicationContext applicationContext)
- {
- return
- new ExampleConsumer(
- properties.getClientId(),
- properties.getConsumerProperties().getTopic(),
- kafkaConsumer,
- () -> applicationContext.close());
- }
-
- @Bean(destroyMethod = "")
- public KafkaConsumer<String, Message> kafkaConsumer(ApplicationProperties properties)
- {
- Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("client.id", properties.getClientId());
- props.put("group.id", properties.getConsumerProperties().getGroupId());
- if (properties.getConsumerProperties().getAutoOffsetReset() != null)
- {
- props.put("auto.offset.reset", properties.getConsumerProperties().getAutoOffsetReset().name());
- }
- if (properties.getConsumerProperties().getAutoCommitInterval() != null)
- {
- props.put("auto.commit.interval", properties.getConsumerProperties().getAutoCommitInterval());
- }
- props.put("metadata.maxage.ms", 5000); // 5 Sekunden
- props.put("partition.assignment.strategy", StickyAssignor.class.getName());
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", JsonDeserializer.class.getName());
- props.put("spring.json.type.mapping", "ADD:de.juplo.kafka.MessageAddNumber,CALC:de.juplo.kafka.MessageCalculateSum");
-
- return new KafkaConsumer<>(props);
- }
-}
+++ /dev/null
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
- @NotNull
- @NotEmpty
- private String bootstrapServer;
- @NotNull
- @NotEmpty
- private String clientId;
-
- @NotNull
- private ConsumerProperties consumer;
-
-
- public ConsumerProperties getConsumerProperties()
- {
- return consumer;
- }
-
-
- @Validated
- @Getter
- @Setter
- static class ConsumerProperties
- {
- @NotNull
- @NotEmpty
- private String groupId;
- @NotNull
- @NotEmpty
- private String topic;
- private OffsetReset autoOffsetReset;
- private Duration autoCommitInterval;
-
- enum OffsetReset { latest, earliest, none }
- }
-}
package de.juplo.kafka;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaHandler;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.stereotype.Component;
@Slf4j
-public class ExampleConsumer implements Runnable
+@Component
+@KafkaListener(topics = "${juplo.consumer.topic}")
+public class ExampleConsumer
{
- private final String id;
- private final String topic;
- private final Consumer<String, Message> consumer;
- private final Thread workerThread;
- private final Runnable closeCallback;
-
+ @Value("${spring.kafka.client-id}")
+ private String id;
private long consumed = 0;
- public ExampleConsumer(
- String clientId,
- String topic,
- Consumer<String, Message> consumer,
- Runnable closeCallback)
- {
- this.id = clientId;
- this.topic = topic;
- this.consumer = consumer;
-
- workerThread = new Thread(this, "ExampleConsumer Worker-Thread");
- workerThread.start();
-
- this.closeCallback = closeCallback;
- }
-
-
- @Override
- public void run()
- {
- try
- {
- log.info("{} - Subscribing to topic {}", id, topic);
- consumer.subscribe(Arrays.asList(topic));
-
- while (true)
- {
- ConsumerRecords<String, Message> records = consumer.poll(Duration.ofSeconds(1));
-
- log.info("{} - Received {} messages", id, records.count());
- for (ConsumerRecord<String, Message> record : records)
- {
- handleRecord(
- record.topic(),
- record.partition(),
- record.offset(),
- record.key(),
- record.value());
- }
- }
- }
- catch(WakeupException e)
- {
- log.info("{} - Consumer was signaled to finish its work", id);
- }
- catch(Exception e)
- {
- log.error("{} - Unexpected error, unsubscribing!", id, e);
- consumer.unsubscribe();
- log.info("{} - Triggering exit of application!", id);
- new Thread(closeCallback).start();
- }
- finally
- {
- log.info("{} - Closing the KafkaConsumer", id);
- consumer.close();
- log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
- }
- }
-
- private void handleRecord(
- String topic,
- Integer partition,
- Long offset,
- String key,
- Message value)
- {
- consumed++;
- log.info("{} - partition={}-{}, offset={}: {}={}", id, topic, partition, offset, key, value);
- switch (value.getType())
- {
- case ADD -> addNumber((MessageAddNumber)value);
- case CALC -> calcSum((MessageCalculateSum)value);
- default -> log.error("{} - Ignoring message of unknown typ {}", id, value.getType());
- }
- }
-
+ @KafkaHandler
private void addNumber(MessageAddNumber addNumber)
{
log.info("{} - Adding number {}", id, addNumber.getNext());
}
+ @KafkaHandler
private void calcSum(MessageCalculateSum calculateSum)
{
log.info("{} - Calculating sum", id);
}
-
- public void shutdown() throws InterruptedException
- {
- log.info("{} - Waking up the consumer", id);
- consumer.wakeup();
- log.info("{} - Joining the worker thread", id);
- workerThread.join();
- }
}
juplo:
- bootstrap-server: :9092
- client-id: DEV
consumer:
- group-id: my-group
topic: test
- auto-offset-reset: earliest
- auto-commit-interval: 5s
management:
endpoint:
shutdown:
enabled: true
info:
kafka:
- bootstrap-server: ${juplo.bootstrap-server}
- client-id: ${juplo.client-id}
+ bootstrap-server: ${spring.kafka.bootstrap-servers}
+ client-id: ${spring.kafka.client-id}
+ group-id: ${spring.kafka.consumer.group-id}
+      topic: ${juplo.consumer.topic}
+ auto-offset-reset: ${spring.kafka.consumer.auto-offset-reset}
+spring:
+ kafka:
+ bootstrap-servers: :9092
+ client-id: DEV
consumer:
- group-id: ${juplo.consumer.group-id}
- topic: ${juplo.consumer.topic}
- auto-offset-reset: ${juplo.consumer.auto-offset-reset}
- auto-commit-interval: ${juplo.consumer.auto-commit-interval}
-logging:
+ group-id: my-group
+ value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
+ properties:
+ "[spring.json.type.mapping]": ADD:de.juplo.kafka.MessageAddNumber,CALC:de.juplo.kafka.MessageCalculateSum
level:
root: INFO
de.juplo: DEBUG