X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;h=6ab716e884c706b70efa566297fad39fb5d2a933;hb=3cf17ad1f308a2cd618c554d1142830469d74978;hp=ce2d450e075435397a379b5de266d6f8af833201;hpb=b69dc2b8e3f3dd656577be868ae3d6d7b647c498;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index ce2d450..6ab716e 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,14 +1,17 @@
 package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.TopicPartition;
 import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.kafka.core.ConsumerFactory;
+import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
+import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.util.backoff.FixedBackOff;
 
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.function.Consumer;
 
 
@@ -17,7 +20,7 @@ import java.util.function.Consumer;
 public class ApplicationConfiguration
 {
   @Bean
-  public Consumer> consumer()
+  public Consumer> consumer()
   {
     return (record) ->
     {
@@ -26,30 +29,23 @@ public class ApplicationConfiguration
   }
 
   @Bean
-  public EndlessConsumer endlessConsumer(
-      org.apache.kafka.clients.consumer.Consumer kafkaConsumer,
-      ExecutorService executor,
-      Consumer> handler,
-      KafkaProperties kafkaProperties,
-      ApplicationProperties applicationProperties)
+  public DeadLetterPublishingRecoverer recoverer(
+      ApplicationProperties properties,
+      KafkaOperations template)
   {
-    return
-        new EndlessConsumer<>(
-            executor,
-            kafkaProperties.getConsumer().getClientId(),
-            applicationProperties.getTopic(),
-            kafkaConsumer,
-            handler);
+    return new DeadLetterPublishingRecoverer(
+        template,
+        (record, exception) -> new TopicPartition(properties.getDlqTopic(), record.partition()));
   }
 
   @Bean
-  public ExecutorService executor()
+  public DefaultErrorHandler errorHandler(DeadLetterPublishingRecoverer recoverer)
   {
-    return Executors.newSingleThreadExecutor();
+    return new DefaultErrorHandler(recoverer, new FixedBackOff(0l, 0l));
   }
 
   @Bean(destroyMethod = "close")
-  public org.apache.kafka.clients.consumer.Consumer kafkaConsumer(ConsumerFactory factory)
+  public org.apache.kafka.clients.consumer.Consumer kafkaConsumer(ConsumerFactory factory)
   {
     return factory.createConsumer();
   }
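
The commit shown above replaces the hand-rolled EndlessConsumer/ExecutorService wiring with Spring Kafka's built-in error handling: a DeadLetterPublishingRecoverer republishes a failed record to the topic returned by properties.getDlqTopic(), keeping the record's original partition number, and a DefaultErrorHandler with FixedBackOff(0l, 0l) (no delay, zero retry attempts) hands every failing record straight to that recoverer. The following sketch is not part of the commit; it only illustrates how such a DefaultErrorHandler bean can be attached to a listener container factory. It assumes Spring for Apache Kafka 2.8+ (where DefaultErrorHandler implements CommonErrorHandler) and String keys and values; the actual type parameters are not visible in this diff, and the bean name kafkaListenerContainerFactory is chosen for the example.

  // Illustrative sketch only, not from the commit.
  // Needs: import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
  @Bean
  public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
      ConsumerFactory<String, String> consumerFactory,
      DefaultErrorHandler errorHandler)
  {
    ConcurrentKafkaListenerContainerFactory<String, String> factory =
        new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    // The FixedBackOff(0l, 0l) configured above means no delay and no retries: on the
    // first failure the error handler invokes the DeadLetterPublishingRecoverer, which
    // writes the record to the DLQ topic on the record's original partition.
    factory.setCommonErrorHandler(errorHandler);
    return factory;
  }

When Spring Boot's Kafka auto-configuration is in use (Spring Boot 2.6+), a single CommonErrorHandler bean such as the errorHandler() defined in the diff is usually applied to the auto-configured listener container factory automatically, so an explicit factory bean like the sketch above is often unnecessary.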