package de.juplo.kafka;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.function.Consumer;
+import java.util.Map;
+import java.util.Optional;
+
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
+import org.springframework.kafka.core.DefaultKafkaProducerFactory;
+import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
+import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
+import org.springframework.util.backoff.FixedBackOff;
@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
public class ApplicationConfiguration
{
@Bean
- public Consumer<ConsumerRecord<String, String>> consumer()
+ public ApplicationRecordHandler applicationRecordHandler(
+ AdderResults adderResults,
+ KafkaProperties kafkaProperties,
+ ApplicationProperties applicationProperties)
+ {
+ return new ApplicationRecordHandler(
+ adderResults,
+ Optional.ofNullable(applicationProperties.getThrottle()),
+ kafkaProperties.getClientId());
+ }
+
+ @Bean
+ public AdderResults adderResults()
+ {
+ return new AdderResults();
+ }
+
+ @Bean
+ public ApplicationRebalanceListener rebalanceListener(
+ ApplicationRecordHandler recordHandler,
+ AdderResults adderResults,
+ StateRepository stateRepository,
+ KafkaProperties kafkaProperties)
{
- return (record) ->
- {
- // Handle record
- };
+ return new ApplicationRebalanceListener(
+ recordHandler,
+ adderResults,
+ stateRepository,
+ kafkaProperties.getClientId());
}
@Bean
public EndlessConsumer endlessConsumer(
- KafkaConsumer<String, String> kafkaConsumer,
- ExecutorService executor,
- Consumer<ConsumerRecord<String, String>> handler,
- ApplicationProperties properties)
+ RecordHandler recordHandler,
+ KafkaProperties kafkaProperties,
+ KafkaListenerEndpointRegistry endpointRegistry)
{
return
new EndlessConsumer(
- executor,
- properties.getClientId(),
- properties.getTopic(),
- kafkaConsumer,
- handler);
+ kafkaProperties.getClientId(),
+ endpointRegistry,
+ recordHandler);
}
@Bean
- public ExecutorService executor()
+ public ProducerFactory<String, Object> producerFactory(
+ KafkaProperties properties)
{
- return Executors.newSingleThreadExecutor();
+ return new DefaultKafkaProducerFactory<>(
+ properties.getProducer().buildProperties(),
+ new StringSerializer(),
+ new DelegatingByTypeSerializer(
+ Map.of(
+ byte[].class, new ByteArraySerializer(),
+ MessageAddNumber.class, new JsonSerializer<>(),
+ MessageCalculateSum.class, new JsonSerializer<>())));
}
- @Bean(destroyMethod = "close")
- public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+ @Bean
+ public KafkaTemplate<String, Object> kafkaTemplate(
+ ProducerFactory<String, Object> producerFactory)
{
- Properties props = new Properties();
+ return new KafkaTemplate<>(producerFactory);
+ }
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("group.id", properties.getGroupId());
- props.put("client.id", properties.getClientId());
- props.put("auto.offset.reset", properties.getAutoOffsetReset());
- props.put("metadata.max.age.ms", "1000");
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", LongDeserializer.class.getName());
+ @Bean
+ public DeadLetterPublishingRecoverer deadLetterPublishingRecoverer(
+ KafkaOperations<?, ?> kafkaTemplate)
+ {
+ return new DeadLetterPublishingRecoverer(kafkaTemplate);
+ }
- return new KafkaConsumer<>(props);
+ @Bean
+ public DefaultErrorHandler errorHandler(
+ DeadLetterPublishingRecoverer recoverer)
+ {
+ return new DefaultErrorHandler(
+ recoverer,
+      new FixedBackOff(0L, 0L));
}
}