X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;h=de77c60b422ace95604e9ca07e70207614e1a9f3;hb=b059b0e509cca9a16ae209ade49c967a66201de9;hp=b077a90efc137edc525dc5d57ccc7174b00c5091;hpb=2d84eda74475aaffff11ddfebe56d309b9aff2e9;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index b077a90..de77c60 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,74 +1,34 @@
 package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import java.time.Clock;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import org.springframework.kafka.core.ConsumerFactory;
 
 
 @Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
+@EnableConfigurationProperties({ KafkaProperties.class, ApplicationProperties.class })
 public class ApplicationConfiguration
 {
   @Bean
-  public WordcountRecordHandler wordcountRecordHandler(
-      PartitionStatisticsRepository repository,
-      Consumer<String, String> consumer,
-      ApplicationProperties properties)
-  {
-    return new WordcountRecordHandler(
-        repository,
-        properties.getClientId(),
-        properties.getTopic(),
-        Clock.systemDefaultZone(),
-        properties.getCommitInterval(),
-        consumer);
-  }
-
-  @Bean
-  public EndlessConsumer endlessConsumer(
-      KafkaConsumer<String, String> kafkaConsumer,
-      ExecutorService executor,
-      WordcountRecordHandler wordcountRecordHandler,
-      ApplicationProperties properties)
+  public SimpleConsumer endlessConsumer(
+      Consumer<String, String> kafkaConsumer,
+      KafkaProperties kafkaProperties,
+      ApplicationProperties applicationProperties)
   {
     return
-        new EndlessConsumer<>(
-            executor,
-            properties.getClientId(),
-            properties.getTopic(),
-            kafkaConsumer,
-            wordcountRecordHandler);
-  }
-
-  @Bean
-  public ExecutorService executor()
-  {
-    return Executors.newSingleThreadExecutor();
+        new SimpleConsumer(
+            kafkaProperties.getClientId(),
+            applicationProperties.getTopic(),
+            kafkaConsumer);
   }
 
   @Bean(destroyMethod = "close")
-  public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+  public Consumer<String, String> kafkaConsumer(ConsumerFactory<String, String> factory)
   {
-    Properties props = new Properties();
-
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
-    props.put("group.id", properties.getGroupId());
-    props.put("client.id", properties.getClientId());
-    props.put("enable.auto.commit", false);
-    props.put("auto.offset.reset", properties.getAutoOffsetReset());
-    props.put("metadata.max.age.ms", "1000");
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", StringDeserializer.class.getName());
-
-    return new KafkaConsumer<>(props);
+    return factory.createConsumer();
   }
 }
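
Note on the change: the hand-rolled Properties block is dropped in favour of Spring Boot's auto-configured ConsumerFactory, so the settings that used to be set in code (bootstrap servers, group id, client id, enable.auto.commit=false, auto.offset.reset, metadata.max.age.ms, the StringDeserializers, the assignment strategy) now have to come from spring.kafka.* configuration that Spring Boot binds into KafkaProperties. As a rough illustration only, the following sketch shows how a ConsumerFactory<String, String> can be built from KafkaProperties, roughly the way Spring Boot 2.x auto-configuration does it; ConsumerFactorySketch is a hypothetical name and the snippet is not code from this repository.

import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

// Hypothetical sketch: roughly what the auto-configuration supplies as the
// ConsumerFactory<String, String> injected into the kafkaConsumer() bean above.
class ConsumerFactorySketch
{
  static ConsumerFactory<String, String> consumerFactory(KafkaProperties kafkaProperties)
  {
    // buildConsumerProperties() merges the common spring.kafka.* settings
    // (bootstrap-servers, client-id, ...) with spring.kafka.consumer.*
    // (group-id, enable-auto-commit, auto-offset-reset, deserializers, ...)
    // into the config map the Kafka client expects.
    return new DefaultKafkaConsumerFactory<>(kafkaProperties.buildConsumerProperties());
  }
}

With such a factory in place the bean above only calls factory.createConsumer(), and all consumer tuning moves into application.yml / application.properties under spring.kafka.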