X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;h=6ab716e884c706b70efa566297fad39fb5d2a933;hb=3cf17ad1f308a2cd618c554d1142830469d74978;hp=6e0445337aea5c641bc46495f4ccaaa66bd2bbf7;hpb=f9890a95d6672e1847e1d9f53a76c95ade877a9b;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 6e04453..6ab716e 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,50 +1,52 @@
 package de.juplo.kafka;
 
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.TopicPartition;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.core.ConsumerFactory;
+import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
+import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.util.backoff.FixedBackOff;
 
-import java.util.Properties;
-import java.util.concurrent.Executors;
+import java.util.function.Consumer;
 
 
 @Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
 public class ApplicationConfiguration
 {
   @Bean
-  public EndlessConsumer endlessConsumer(
-      KafkaConsumer<String, Long> kafkaConsumer,
-      ApplicationProperties properties)
+  public Consumer<ConsumerRecord<String, Long>> consumer()
   {
-    EndlessConsumer consumer =
-        new EndlessConsumer(
-            Executors.newFixedThreadPool(1),
-            properties.getClientId(),
-            properties.getTopic(),
-            kafkaConsumer);
+    return (record) ->
+    {
+      // Handle record
+    };
+  }
 
-    consumer.start();
+  @Bean
+  public DeadLetterPublishingRecoverer recoverer(
+      ApplicationProperties properties,
+      KafkaOperations<String, Long> template)
+  {
+    return new DeadLetterPublishingRecoverer(
+        template,
+        (record, exception) -> new TopicPartition(properties.getDlqTopic(), record.partition()));
+  }
 
-    return consumer;
+  @Bean
+  public DefaultErrorHandler errorHandler(DeadLetterPublishingRecoverer recoverer)
+  {
+    return new DefaultErrorHandler(recoverer, new FixedBackOff(0l, 0l));
   }
 
   @Bean(destroyMethod = "close")
-  public KafkaConsumer<String, Long> kafkaConsumer(ApplicationProperties properties)
+  public org.apache.kafka.clients.consumer.Consumer<String, Long> kafkaConsumer(ConsumerFactory<String, Long> factory)
   {
-    Properties props = new Properties();
-
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("group.id", properties.getGroupId());
-    props.put("client.id", properties.getClientId());
-    props.put("auto.offset.reset", properties.getAutoOffsetReset());
-    props.put("metadata.max.age.ms", "1000");
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", LongDeserializer.class.getName());
-
-    return new KafkaConsumer<>(props);
+    return factory.createConsumer();
   }
 }
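
Note: the destination resolver in recoverer() calls properties.getDlqTopic(), so ApplicationProperties must carry a matching dlqTopic field, which this diff does not show. A minimal sketch of what that class presumably looks like, assuming a "consumer" prefix and plain setter-based binding (both are guesses for illustration, not taken from the repository):

package de.juplo.kafka;

import org.springframework.boot.context.properties.ConfigurationProperties;

// Hypothetical sketch: only getDlqTopic() is visible in the diff above.
// The property prefix and the exact field set are assumed, not confirmed.
@ConfigurationProperties(prefix = "consumer")
public class ApplicationProperties
{
  private String dlqTopic;

  public String getDlqTopic() { return dlqTopic; }
  public void setDlqTopic(String dlqTopic) { this.dlqTopic = dlqTopic; }
}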
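
The FixedBackOff(0l, 0l) passed to the DefaultErrorHandler means zero back-off and zero retries: the first processing failure hands the record to the DeadLetterPublishingRecoverer, and the resolver keeps the record's original partition number, so the DLQ topic needs at least as many partitions as the source topic. Spring Boot's Kafka auto-configuration wires a unique DefaultErrorHandler bean into the default listener container factory as its CommonErrorHandler. A sketch of a listener that would exercise that path, with a hypothetical topic name and listener class that are not part of this commit:

package de.juplo.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

// Illustrative only: the topic name and this class are not from the commit.
@Component
public class FailingListener
{
  @KafkaListener(topics = "test")
  public void listen(ConsumerRecord<String, Long> record)
  {
    // Any exception thrown here reaches the DefaultErrorHandler; with
    // FixedBackOff(0, 0) there is no retry, so the record is published
    // to the configured DLQ topic on its original partition.
    throw new RuntimeException("simulated processing failure");
  }
}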