#!/bin/bash
-IMAGE=juplo/spring-consumer:1.0-SNAPSHOT
+IMAGE=juplo/spring-consumer-kafkalistener:1.0-SNAPSHOT
if [ "$1" = "cleanup" ]
then
  docker-compose down -v  # assumed cleanup step: remove containers and volumes
  mvn clean               # assumed cleanup step: remove local build artifacts
  exit
fi
docker-compose up setup
-docker-compose up -d producer
-
-mvn spring-boot:run &
-sleep 10
-kill $(jobs -p)
-mvn spring-boot:run &
-sleep 10
-docker-compose stop producer
-kill $(jobs -p)
+docker-compose up -d consumer
+docker-compose up producer
+docker-compose logs consumer
command: sleep infinity
producer:
- image: juplo/simple-producer:1.0-SNAPSHOT
- command: producer
+ image: juplo/supersimple-producer:1.0-SNAPSHOT
+ environment:
+ spring.kafka.bootstrap-servers: kafka:9092
+
+ consumer:
+ image: juplo/spring-consumer-kafkalistener:1.0-SNAPSHOT
+ environment:
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: consumer
+
</parent>
<groupId>de.juplo.kafka</groupId>
- <artifactId>spring-consumer</artifactId>
+ <artifactId>spring-consumer-kafkalistener</artifactId>
<version>1.0-SNAPSHOT</version>
<name>Spring Consumer</name>
<description>Super Simple Consumer-Group that is implemented as a Spring-Boot application and configured by Spring Kafka</description>
package de.juplo.kafka;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.ApplicationArguments;
-import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
-
-import javax.annotation.PreDestroy;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication
-@Slf4j
-public class Application implements ApplicationRunner
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class Application
{
- @Autowired
- ThreadPoolTaskExecutor taskExecutor;
- @Autowired
- Consumer<?, ?> kafkaConsumer;
- @Autowired
- SimpleConsumer simpleConsumer;
-
- Future<?> consumerJob;
-
- @Override
- public void run(ApplicationArguments args) throws Exception
- {
- log.info("Starting SimpleConsumer");
- consumerJob = taskExecutor.submit(simpleConsumer);
- }
-
- @PreDestroy
- public void shutdown() throws ExecutionException, InterruptedException
- {
- log.info("Signaling SimpleConsumer to quit its work");
- kafkaConsumer.wakeup();
- log.info("Waiting for SimpleConsumer to finish its work");
- consumerJob.get();
- log.info("SimpleConsumer finished its work");
- }
-
-
public static void main(String[] args)
{
SpringApplication.run(Application.class, args);
--- src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ /dev/null
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import org.springframework.kafka.core.ConsumerFactory;
-
-
-@Configuration
-@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
-public class ApplicationConfiguration
-{
- @Bean
- public SimpleConsumer simpleConsumer(
- Consumer<String, String> kafkaConsumer,
- KafkaProperties kafkaProperties,
- ApplicationProperties applicationProperties)
- {
- return
- new SimpleConsumer(
- kafkaProperties.getClientId(),
- applicationProperties.getTopic(),
- kafkaConsumer);
- }
-
- @Bean
- public Consumer<?, ?> kafkaConsumer(ConsumerFactory<?, ?> factory)
- {
- return factory.createConsumer();
- }
-}
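The rewritten SimpleConsumer below reads its topic from the ${simple.consumer.topic} placeholder, and Application now enables ApplicationProperties directly, but the properties class itself is not part of this diff. A minimal sketch of what it could look like, assuming the simple.consumer prefix implied by that placeholder and the Lombok accessors used elsewhere in the project:

package de.juplo.kafka;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;


@ConfigurationProperties(prefix = "simple.consumer")
@Getter
@Setter
public class ApplicationProperties
{
  // Bound from simple.consumer.topic -- the topic the @KafkaListener subscribes to
  private String topic;
}

With @EnableConfigurationProperties(ApplicationProperties.class) on Application, Spring Boot binds simple.consumer.topic from application.yml or the environment into this bean.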
package de.juplo.kafka;
-import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.stereotype.Component;
@Slf4j
-@RequiredArgsConstructor
-public class SimpleConsumer implements Runnable
+@Component
+public class SimpleConsumer
{
- private final String id;
- private final String topic;
- private final Consumer<String, String> consumer;
-
+ @Value("${spring.kafka.client-id}")
+ private String id;
private long consumed = 0;
-
- @Override
- public void run()
- {
- try
- {
- log.info("{} - Subscribing to topic {}", id, topic);
- consumer.subscribe(Arrays.asList(topic));
-
- while (true)
- {
- ConsumerRecords<String, String> records =
- consumer.poll(Duration.ofSeconds(1));
-
- log.info("{} - Received {} messages", id, records.count());
- for (ConsumerRecord<String, String> record : records)
- {
- handleRecord(
- record.topic(),
- record.partition(),
- record.offset(),
- record.key(),
- record.value());
- }
- }
- }
- catch(WakeupException e)
- {
- log.info("{} - Consumer was signaled to finish its work", id);
- }
- catch(Exception e)
- {
- log.error("{} - Unexpected error: {}, unsubscribing!", id, e.toString());
- consumer.unsubscribe();
- }
- finally
- {
- log.info("{} - Closing the KafkaConsumer", id);
- consumer.close();
- log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
- }
- }
-
+ @KafkaListener(
+ id = "${spring.kafka.client-id}",
+ groupId = "${spring.kafka.consumer.group-id}",
+ topics = "${simple.consumer.topic}")
private void handleRecord(
+ @Header(KafkaHeaders.RECEIVED_TOPIC)
String topic,
+ @Header(KafkaHeaders.RECEIVED_PARTITION_ID)
Integer partition,
+ @Header(KafkaHeaders.OFFSET)
Long offset,
+ @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY)
String key,
+ @Payload
String value)
{
consumed++;