* Das README.sh führt beide Fehler vor.
* Der Logik-Fehler wird zuerst vorgeführt, da die Poison-Pill dazu führt,
  dass sich der betroffene Consumer in einer Endlosschleife verfängt.
* Das README.sh enthält alle Tags für die kompletten Übungen zur
  Fehlerbehandlung in Spring Kafka.
 #!/bin/bash
 
-IMAGE=juplo/spring-consumer-json:1.0-SNAPSHOT
+IMAGE=juplo/spring-consumer-json-kafkalistener:1.0-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
 # tag::logicerror[]
 echo 66 | http -v :8080/peter?error=1
 # end::logicerror[]
+
+echo 66 | http -v :8080/peter
 echo 7  | http -v :8080/klaus
 
-docker-compose logs -f consumer-1 consumer-2
+sleep 5
 
-echo "Restarting consumer-1..."
-# tag::restart[]
-docker-compose up consumer-1
-# end::restart[]
+echo "Writing poison pill..."
+# tag::poisonpill[]
+echo 'BOOM!' | kafkacat -P -b :9092 -t test
+# end::poisonpill[]
+
+sleep 5
+docker-compose stop consumer-1 consumer-2
+docker-compose logs -f consumer-1 consumer-2
 
       producer.topic: test
 
   consumer-1:
-    image: juplo/spring-consumer-json:1.0-SNAPSHOT
+    image: juplo/spring-consumer-json-kafkalistener:1.0-SNAPSHOT
     ports:
       - 8081:8080
     environment:
       simple.consumer.topic: test
 
   consumer-2:
-    image: juplo/spring-consumer-json:1.0-SNAPSHOT
+    image: juplo/spring-consumer-json-kafkalistener:1.0-SNAPSHOT
     ports:
       - 8082:8080
     environment:
 
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>spring-consumer-json</artifactId>
+  <artifactId>spring-consumer-json-kafkalistener</artifactId>
   <version>1.0-SNAPSHOT</version>
   <name>Spring Consumer</name>
   <description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
 
 package de.juplo.kafka;
 
 
+import org.springframework.stereotype.Component;
+
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
 
 
+@Component
 public class AdderBusinessLogic
 {
   private final Map<String, AdderResult> state;
 
 package de.juplo.kafka;
 
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.ApplicationArguments;
-import org.springframework.boot.ApplicationRunner;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
-import org.springframework.util.concurrent.ListenableFuture;
-
-import javax.annotation.PreDestroy;
-import java.util.concurrent.ExecutionException;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
 
 
 @SpringBootApplication
-@Slf4j
-public class Application implements ApplicationRunner
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class Application
 {
-  @Autowired
-  ThreadPoolTaskExecutor taskExecutor;
-  @Autowired
-  Consumer<?, ?> kafkaConsumer;
-  @Autowired
-  SimpleConsumer simpleConsumer;
-  @Autowired
-  ConfigurableApplicationContext context;
-
-  ListenableFuture<Integer> consumerJob;
-
-  @Override
-  public void run(ApplicationArguments args) throws Exception
-  {
-    log.info("Starting SimpleConsumer");
-    consumerJob = taskExecutor.submitListenable(simpleConsumer);
-    consumerJob.addCallback(
-      exitStatus ->
-      {
-        log.info("SimpleConsumer exited normally, exit-status: {}", exitStatus);
-        SpringApplication.exit(context, () -> exitStatus);
-        },
-      t ->
-      {
-        log.error("SimpleConsumer exited abnormally!", t);
-        SpringApplication.exit(context, () -> 2);
-      });
-  }
-
-  @PreDestroy
-  public void shutdown() throws ExecutionException, InterruptedException
-  {
-    log.info("Signaling SimpleConsumer to quit its work");
-    kafkaConsumer.wakeup();
-    log.info("Waiting for SimpleConsumer to finish its work");
-    consumerJob.get();
-    log.info("SimpleConsumer finished its work");
-  }
-
-
   public static void main(String[] args)
   {
     SpringApplication.run(Application.class, args);
 
+++ /dev/null
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import org.springframework.kafka.core.ConsumerFactory;
-
-
-@Configuration
-@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
-public class ApplicationConfiguration
-{
-  @Bean
-  public AdderBusinessLogic adder()
-  {
-    return new AdderBusinessLogic();
-  }
-
-  @Bean
-  public MessageHandler messageHandler(
-    KafkaProperties properties,
-    AdderBusinessLogic adder)
-  {
-    return new MessageHandler(
-      properties.getClientId(),
-      adder);
-  }
-
-  @Bean
-  public SimpleConsumer simpleConsumer(
-      Consumer<String, Message> kafkaConsumer,
-      MessageHandler messageHandler,
-      KafkaProperties kafkaProperties,
-      ApplicationProperties applicationProperties)
-  {
-    return
-        new SimpleConsumer(
-            kafkaProperties.getClientId(),
-            applicationProperties.getTopic(),
-            kafkaConsumer,
-            messageHandler);
-  }
-
-  @Bean
-  public Consumer<?, ?> kafkaConsumer(ConsumerFactory<?, ?> factory)
-  {
-    return factory.createConsumer();
-  }
-}
 
 package de.juplo.kafka;
 
-import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
 
 
-@RequiredArgsConstructor
 @Slf4j
+@Component
 public class MessageHandler
 {
-  private final String id;
-
-  private final AdderBusinessLogic adder;
+  @Value("${spring.kafka.client-id}")
+  private String id;
+  @Autowired
+  private AdderBusinessLogic adder;
 
 
   public void addNumber(
 
 package de.juplo.kafka;
 
-import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
-import java.util.concurrent.Callable;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.stereotype.Component;
 
 
 @Slf4j
-@RequiredArgsConstructor
-public class SimpleConsumer implements Callable<Integer>
+@Component
+public class SimpleConsumer
 {
-  private final String id;
-  private final String topic;
-  private final Consumer<String, Message> consumer;
-  private final MessageHandler messageHandler;
+  @Value("${spring.kafka.client-id}")
+  private String id;
+  @Autowired
+  private MessageHandler messageHandler;
 
   private long consumed = 0;
 
 
-  @Override
-  public Integer call()
-  {
-    try
-    {
-      log.info("{} - Subscribing to topic {}", id, topic);
-      consumer.subscribe(Arrays.asList(topic));
-
-      while (true)
-      {
-        ConsumerRecords<String, Message> records =
-            consumer.poll(Duration.ofSeconds(1));
-
-        log.info("{} - Received {} messages", id, records.count());
-        for (ConsumerRecord<String, Message> record : records)
-        {
-          handleRecord(
-            record.topic(),
-            record.partition(),
-            record.offset(),
-            record.key(),
-            record.value());
-        }
-      }
-    }
-    catch(WakeupException e)
-    {
-      log.info("{} - Consumer was signaled to finish its work", id);
-      return 0;
-    }
-    catch(Exception e)
-    {
-      log.error("{} - Unexpected error: {}, unsubscribing!", id, e.toString());
-      consumer.unsubscribe();
-      return 1;
-    }
-    finally
-    {
-      log.info("{} - Closing the KafkaConsumer", id);
-      consumer.close();
-      log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
-    }
-  }
-
+  @KafkaListener(topics = "${simple.consumer.topic}")
   private void handleRecord(
+    @Header(KafkaHeaders.RECEIVED_TOPIC)
     String topic,
+    @Header(KafkaHeaders.RECEIVED_PARTITION_ID)
     Integer partition,
+    @Header(KafkaHeaders.OFFSET)
     Long offset,
+    @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY)
     String key,
+    @Payload
     Message value)
   {
     consumed++;
 
   level:
     root: INFO
     de.juplo: DEBUG
+    org.springframework.kafka: DEBUG
 server:
   port: 8881