X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplication.java;h=69a97125726ef3a84ac885d226bc69db2051cc4f;hb=refs%2Fheads%2Fsumup-adder--springified;hp=5226d6bd062bee4003ef4ca00f8f1c37597af3d8;hpb=5a2c467b5b299f975f22d6c0e761686067634adc;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/Application.java b/src/main/java/de/juplo/kafka/Application.java
index 5226d6b..69a9712 100644
--- a/src/main/java/de/juplo/kafka/Application.java
+++ b/src/main/java/de/juplo/kafka/Application.java
@@ -1,52 +1,112 @@
 package de.juplo.kafka;
 
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
+import org.springframework.kafka.core.DefaultKafkaProducerFactory;
+import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
+import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
+import org.springframework.util.backoff.FixedBackOff;
 
-import java.util.Properties;
-import java.util.concurrent.Executors;
+import java.util.Map;
+import java.util.Optional;
 
 
 @SpringBootApplication
-@EnableConfigurationProperties(ApplicationProperties.class)
+@Slf4j
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
+@EnableKafka
 public class Application
 {
   @Bean
-  public EndlessConsumer endlessConsumer(
-      KafkaConsumer kafkaConsumer,
-      ApplicationProperties properties)
+  public ApplicationRecordHandler applicationRecordHandler(
+      AdderResults adderResults,
+      KafkaProperties kafkaProperties,
+      ApplicationProperties applicationProperties)
   {
-    EndlessConsumer consumer =
-        new EndlessConsumer(
-            Executors.newFixedThreadPool(1),
-            properties.getClientId(),
-            properties.getTopic(),
-            kafkaConsumer);
+    return new ApplicationRecordHandler(
+        adderResults,
+        Optional.ofNullable(applicationProperties.getThrottle()),
+        kafkaProperties.getConsumer().getGroupId());
+  }
 
-    consumer.start();
+  @Bean
+  public AdderResults adderResults()
+  {
+    return new AdderResults();
+  }
 
-    return consumer;
+  @Bean
+  public ApplicationRebalanceListener rebalanceListener(
+      ApplicationRecordHandler recordHandler,
+      AdderResults adderResults,
+      StateRepository stateRepository,
+      KafkaProperties kafkaProperties)
+  {
+    return new ApplicationRebalanceListener(
+        recordHandler,
+        adderResults,
+        stateRepository,
+        kafkaProperties.getConsumer().getGroupId());
   }
 
-  @Bean(destroyMethod = "close")
-  public KafkaConsumer kafkaConsumer(ApplicationProperties properties)
+  @Bean
+  ApplicationHealthIndicator applicationHealthIndicator(
+      KafkaListenerEndpointRegistry registry,
+      KafkaProperties properties)
   {
-    Properties props = new Properties();
+    return new ApplicationHealthIndicator(
+        properties.getConsumer().getGroupId(),
+        registry);
+  }
 
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("group.id", properties.getGroupId());
-    props.put("client.id", properties.getClientId());
-    props.put("auto.offset.reset", properties.getAutoOffsetReset());
-    props.put("metadata.max.age.ms", "1000");
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", LongDeserializer.class.getName());
+  @Bean
+  public ProducerFactory producerFactory(
+      KafkaProperties properties)
+  {
+    return new DefaultKafkaProducerFactory<>(
+        properties.getProducer().buildProperties(),
+        new StringSerializer(),
+        new DelegatingByTypeSerializer(
+            Map.of(
+                byte[].class, new ByteArraySerializer(),
+                MessageAddNumber.class, new JsonSerializer<>(),
+                MessageCalculateSum.class, new JsonSerializer<>())));
+  }
 
-    return new KafkaConsumer<>(props);
+  @Bean
+  public KafkaTemplate kafkaTemplate(
+      ProducerFactory producerFactory)
+  {
+    return new KafkaTemplate<>(producerFactory);
+  }
+
+  @Bean
+  public DeadLetterPublishingRecoverer deadLetterPublishingRecoverer(
+      KafkaOperations kafkaTemplate)
+  {
+    return new DeadLetterPublishingRecoverer(kafkaTemplate);
+  }
+
+  @Bean
+  public DefaultErrorHandler errorHandler(
+      DeadLetterPublishingRecoverer recoverer)
+  {
+    return new DefaultErrorHandler(
+        recoverer,
+        new FixedBackOff(0l, 0l));
   }
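
Note (not part of the diff): after this change the consumer side is driven by Spring Kafka (@EnableKafka plus the KafkaListenerEndpointRegistry consulted by the health indicator), and error handling is delegated to the DefaultErrorHandler bean. Because it is built with FixedBackOff(0l, 0l), a record whose processing throws is not retried; the DeadLetterPublishingRecoverer republishes it via the kafkaTemplate bean, by default to a topic named "<original topic>.DLT". Spring Boot's Kafka auto-configuration should pick up the error-handler bean for any @KafkaListener container. A minimal, hypothetical listener sketching that behaviour follows; the class name, listener id and topic are assumptions for illustration, not code from this repository:

package de.juplo.kafka;

import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

// Hypothetical sketch only -- not part of the commit shown above.
// It illustrates the effect of the errorHandler bean: an exception thrown while
// handling a record is not retried (FixedBackOff(0l, 0l)); instead the record is
// republished by the DeadLetterPublishingRecoverer, by default to "<topic>.DLT".
@Component
@Slf4j
public class ExampleListener
{
  @KafkaListener(id = "example", topics = "example") // id and topic are made-up values
  public void receive(String message)
  {
    log.info("received: {}", message);

    if (message.isBlank())
      throw new IllegalArgumentException("blank messages cannot be processed");
  }
}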