package de.juplo.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
import org.springframework.kafka.listener.DefaultErrorHandler;
import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.util.backoff.FixedBackOff;

import java.util.Map;
import java.util.function.Consumer;
public class ApplicationConfiguration
{
@Bean
- public Consumer<ConsumerRecord<String, Long>> consumer()
+ public Consumer<ConsumerRecord<String, ClientMessage>> consumer()
{
return (record) ->
{
}
@Bean
- public EndlessConsumer<String, Long> endlessConsumer(
- KafkaConsumer<String, Long> kafkaConsumer,
- ExecutorService executor,
- Consumer<ConsumerRecord<String, Long>> handler,
- KafkaProperties kafkaProperties,
- ApplicationProperties applicationProperties)
+ public ProducerFactory<String, Object> producerFactory(KafkaProperties properties) {
+ return new DefaultKafkaProducerFactory<>(
+ properties.getProducer().buildProperties(),
+ new StringSerializer(),
+ new DelegatingByTypeSerializer(Map.of(
+ byte[].class, new ByteArraySerializer(),
+ ClientMessage.class, new JsonSerializer<>())));
+ }
+
+ @Bean
+ public KafkaTemplate<String, Object> kafkaTemplate(
+ ProducerFactory<String, Object> producerFactory) {
+
+ return new KafkaTemplate<>(producerFactory);
+ }
+
+ @Bean
+ public DeadLetterPublishingRecoverer recoverer(
+ ApplicationProperties properties,
+ KafkaOperations<?, ?> template)
{
- return
- new EndlessConsumer<>(
- executor,
- kafkaProperties.getConsumer().getClientId(),
- applicationProperties.getTopic(),
- kafkaConsumer,
- handler);
+ return new DeadLetterPublishingRecoverer(
+ template,
+ (record, exception) -> new TopicPartition(properties.getDlqTopic(), record.partition()));
}
@Bean
- public ExecutorService executor()
+ public DefaultErrorHandler errorHandler(DeadLetterPublishingRecoverer recoverer)
{
- return Executors.newSingleThreadExecutor();
+ return new DefaultErrorHandler(recoverer, new FixedBackOff(0l, 0l));
}
@Bean(destroyMethod = "close")
- public KafkaConsumer<String, Long> kafkaConsumer(KafkaProperties properties)
+ public org.apache.kafka.clients.consumer.Consumer<String, ClientMessage> kafkaConsumer(ConsumerFactory<String, ClientMessage> factory)
{
- Properties props = new Properties();
-
- props.put("bootstrap.servers", properties.getConsumer().getBootstrapServers());
- props.put("group.id", properties.getConsumer().getGroupId());
- props.put("client.id", properties.getConsumer().getClientId());
- props.put("auto.offset.reset", properties.getConsumer().getAutoOffsetReset());
- props.put("metadata.max.age.ms", "1000");
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", LongDeserializer.class.getName());
-
- return new KafkaConsumer<>(props);
+ return factory.createConsumer();
}
}