X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;h=b5f6187c82f70f0c5dfecb40845fa60fc6846ffe;hb=66ff7d205e66616de8aaca94503dbbcd7d281f6d;hp=523707f42ea2f5f4c474c19ed7ddfc89f3f71c46;hpb=f18a765cc650b81788f356a80f975926930600c5;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 523707f..b5f6187 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,17 +1,25 @@
 package de.juplo.kafka;
 
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
 
+import java.util.Map;
 import java.util.Optional;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
+import org.springframework.kafka.core.DefaultKafkaProducerFactory;
+import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
+import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
+import org.springframework.util.backoff.FixedBackOff;
 
 
 @Configuration
@@ -19,7 +27,7 @@ import java.util.concurrent.Executors;
 public class ApplicationConfiguration
 {
   @Bean
-  public ApplicationRecordHandler recordHandler(
+  public ApplicationRecordHandler applicationRecordHandler(
       AdderResults adderResults,
       KafkaProperties kafkaProperties,
       ApplicationProperties applicationProperties)
@@ -41,8 +49,7 @@ public class ApplicationConfiguration
       ApplicationRecordHandler recordHandler,
       AdderResults adderResults,
       StateRepository stateRepository,
-      KafkaProperties kafkaProperties,
-      ApplicationProperties applicationProperties)
+      KafkaProperties kafkaProperties)
   {
     return new ApplicationRebalanceListener(
         recordHandler,
@@ -52,49 +59,52 @@ public class ApplicationConfiguration
   }
 
   @Bean
-  public EndlessConsumer endlessConsumer(
-      KafkaConsumer kafkaConsumer,
-      ExecutorService executor,
-      ApplicationRebalanceListener rebalanceListener,
-      ApplicationRecordHandler recordHandler,
+  public EndlessConsumer endlessConsumer(
+      RecordHandler recordHandler,
       KafkaProperties kafkaProperties,
-      ApplicationProperties applicationProperties)
+      KafkaListenerEndpointRegistry endpointRegistry)
   {
     return
-        new EndlessConsumer<>(
-            executor,
+        new EndlessConsumer(
             kafkaProperties.getClientId(),
-            applicationProperties.getTopic(),
-            kafkaConsumer,
-            rebalanceListener,
+            endpointRegistry,
             recordHandler);
   }
 
   @Bean
-  public ExecutorService executor()
+  public ProducerFactory producerFactory(
+      KafkaProperties properties)
   {
-    return Executors.newSingleThreadExecutor();
+    return new DefaultKafkaProducerFactory<>(
+        properties.getProducer().buildProperties(),
+        new StringSerializer(),
+        new DelegatingByTypeSerializer(
+            Map.of(
+                byte[].class, new ByteArraySerializer(),
+                MessageAddNumber.class, new JsonSerializer<>(),
+                MessageCalculateSum.class, new JsonSerializer<>())));
   }
 
-  @Bean(destroyMethod = "close")
-  public KafkaConsumer kafkaConsumer(KafkaProperties kafkaProperties)
+  @Bean
+  public KafkaTemplate kafkaTemplate(
+      ProducerFactory producerFactory)
   {
-    Properties props = new Properties();
+    return new KafkaTemplate<>(producerFactory);
+  }
 
-    props.put("bootstrap.servers", kafkaProperties.getBootstrapServers());
-    props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.StickyAssignor");
-    props.put("group.id", kafkaProperties.getConsumer().getGroupId());
-    props.put("client.id", kafkaProperties.getClientId());
-    props.put("auto.offset.reset", kafkaProperties.getConsumer().getAutoOffsetReset());
-    props.put("auto.commit.interval.ms", (int)kafkaProperties.getConsumer().getAutoCommitInterval().toMillis());
-    props.put("metadata.max.age.ms", kafkaProperties.getConsumer().getProperties().get("metadata.max.age.ms"));
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", JsonDeserializer.class.getName());
-    props.put(JsonDeserializer.TRUSTED_PACKAGES, "de.juplo.kafka");
-    props.put(JsonDeserializer.TYPE_MAPPINGS,
-        Message.Type.ADD + ":" + MessageAddNumber.class.getName() + "," +
-        Message.Type.CALC + ":" + MessageCalculateSum.class.getName());
+  @Bean
+  public DeadLetterPublishingRecoverer deadLetterPublishingRecoverer(
+      KafkaOperations kafkaTemplate)
+  {
+    return new DeadLetterPublishingRecoverer(kafkaTemplate);
+  }
 
-    return new KafkaConsumer<>(props);
+  @Bean
+  public DefaultErrorHandler errorHandler(
+      DeadLetterPublishingRecoverer recoverer)
+  {
+    return new DefaultErrorHandler(
+        recoverer,
+        new FixedBackOff(0l, 0l));
   }
 }
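
Note on the new error-handling beans (not part of the diff itself): the DefaultErrorHandler is built with FixedBackOff(0l, 0l), so a record whose processing throws is not retried but handed straight to the DeadLetterPublishingRecoverer, which by default publishes it via the KafkaTemplate to the dead-letter topic "<original topic>.DLT". With Spring Boot's Kafka auto-configuration such a DefaultErrorHandler bean is expected to be applied to the auto-configured listener container factory, so no further wiring should be needed in this project. The sketch below only makes that wiring explicit under those assumptions; the class name ErrorHandlingWiringSketch and the explicit factory bean are illustrative and do not appear in the commit.

// Illustrative sketch only -- assumes the Message type from this project and
// spring-kafka 2.8+; it reproduces by hand what Spring Boot's auto-configured
// listener container factory is assumed to do with the DefaultErrorHandler bean.
package de.juplo.kafka;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.DefaultErrorHandler;

@Configuration
public class ErrorHandlingWiringSketch // hypothetical class, for illustration
{
  @Bean
  public ConcurrentKafkaListenerContainerFactory<String, Message> kafkaListenerContainerFactory(
      ConsumerFactory<String, Message> consumerFactory,
      DefaultErrorHandler errorHandler)
  {
    ConcurrentKafkaListenerContainerFactory<String, Message> factory =
        new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    // FixedBackOff(0, 0) means no retries: the failing record is passed to the
    // DeadLetterPublishingRecoverer, which publishes it to "<topic>.DLT".
    factory.setCommonErrorHandler(errorHandler);
    return factory;
  }
}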