X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;h=a580eb0e8f8363961d46fd6dd59376cae038bc0b;hb=48271d5fd1dbab2dc71d1490e6826048eb0716e2;hp=0d178230f1cca94301016b96c881ec20f02b6d44;hpb=fc682d9890787ef363b3e189f6f880a043f3c541;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 0d17823..a580eb0 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,13 +1,14 @@
 package de.juplo.kafka;
 
-import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.common.serialization.IntegerDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-
-import java.time.Clock;
+import org.springframework.kafka.support.serializer.JsonSerializer;
 import java.util.Properties;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -18,49 +19,30 @@ import java.util.concurrent.Executors;
 public class ApplicationConfiguration
 {
   @Bean
-  public WordcountRecordHandler wordcountRecordHandler(
-      PartitionStatisticsRepository repository,
-      Consumer<String, String> consumer,
+  public ApplicationRecordHandler recordHandler(
+      KafkaProducer kafkaProducer,
       ApplicationProperties properties)
   {
-    return new WordcountRecordHandler(
-        repository,
-        properties.getTopic(),
-        Clock.systemDefaultZone(),
-        properties.getCommitInterval(),
-        consumer);
-  }
-
-  @Bean
-  public WordcountRebalanceListener wordcountRebalanceListener(
-      WordcountRecordHandler wordcountRecordHandler,
-      PartitionStatisticsRepository repository,
-      Consumer<String, String> consumer,
-      ApplicationProperties properties)
-  {
-    return new WordcountRebalanceListener(
-        wordcountRecordHandler,
-        repository,
+    return new ApplicationRecordHandler(
+        kafkaProducer,
         properties.getClientId(),
-        consumer);
+        properties.getTopicOut());
   }
 
   @Bean
-  public EndlessConsumer<String, String> endlessConsumer(
-      KafkaConsumer<String, String> kafkaConsumer,
+  public EndlessConsumer<String, Integer> endlessConsumer(
+      KafkaConsumer<String, Integer> kafkaConsumer,
       ExecutorService executor,
-      WordcountRebalanceListener wordcountRebalanceListener,
-      WordcountRecordHandler wordcountRecordHandler,
+      ApplicationRecordHandler recordHandler,
       ApplicationProperties properties)
   {
     return new EndlessConsumer<>(
         executor,
         properties.getClientId(),
-        properties.getTopic(),
+        properties.getTopicIn(),
         kafkaConsumer,
-        wordcountRebalanceListener,
-        wordcountRecordHandler);
+        recordHandler);
   }
 
   @Bean
@@ -70,7 +52,7 @@ public class ApplicationConfiguration
   }
 
   @Bean(destroyMethod = "close")
-  public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+  public KafkaConsumer<String, Integer> kafkaConsumer(ApplicationProperties properties)
   {
     Properties props = new Properties();
 
@@ -78,12 +60,30 @@ public class ApplicationConfiguration
     props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
     props.put("group.id", properties.getGroupId());
     props.put("client.id", properties.getClientId());
-    props.put("enable.auto.commit", false);
     props.put("auto.offset.reset", properties.getAutoOffsetReset());
+    props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
     props.put("metadata.max.age.ms", "1000");
     props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", StringDeserializer.class.getName());
+    props.put("value.deserializer", IntegerDeserializer.class.getName());
 
     return new KafkaConsumer<>(props);
   }
+
+  @Bean(destroyMethod = "close")
+  public KafkaProducer kafkaProducer(ApplicationProperties properties)
+  {
+    Properties props = new Properties();
+    props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("client.id", properties.getClientId());
+    props.put("acks", properties.getAcks());
+    props.put("batch.size", properties.getBatchSize());
+    props.put("delivery.timeout.ms", 20000); // 20 seconds
+    props.put("request.timeout.ms", 10000); // 10 seconds
+    props.put("linger.ms", properties.getLingerMs());
+    props.put("compression.type", properties.getCompressionType());
+    props.put("key.serializer", StringSerializer.class.getName());
+    props.put("value.serializer", "TODO: configure the JsonSerializer");
+
+    return new KafkaProducer<>(props);
+  }
 }
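Note on the TODO in the new kafkaProducer bean: the commit already adds the import for org.springframework.kafka.support.serializer.JsonSerializer, but the value.serializer property is still a placeholder. Below is only a sketch of how that property could be completed; the helper class and method names are made up for illustration, and disabling the type-info headers is an assumption about the consuming side, not something taken from the diff.

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.support.serializer.JsonSerializer;

// Hypothetical helper, not part of the commit: shows one way to resolve the
// TODO for the value serializer using the JsonSerializer imported above.
public class JsonProducerSketch
{
  public static KafkaProducer<String, Object> buildProducer(String bootstrapServer, String clientId)
  {
    Properties props = new Properties();
    props.put("bootstrap.servers", bootstrapServer);
    props.put("client.id", clientId);
    props.put("key.serializer", StringSerializer.class.getName());
    // Serialize values as JSON via spring-kafka's JsonSerializer
    props.put("value.serializer", JsonSerializer.class.getName());
    // JsonSerializer adds type-information headers by default; this switches them
    // off (assumption: the consumer does not use spring-kafka's JsonDeserializer)
    props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
    return new KafkaProducer<>(props);
  }
}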
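For orientation, here is a sketch of the ApplicationProperties class that the configuration above reads from. Only the getter names are taken from the diff; the @ConfigurationProperties prefix, the use of Lombok, and all field types except the Duration behind getCommitInterval() (implied by the call to toMillis()) are assumptions.

import java.time.Duration;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;

// Sketch only: getter names match the calls in the diff, everything else
// (prefix, Lombok, non-Duration field types) is assumed.
@ConfigurationProperties(prefix = "sketch") // hypothetical prefix
@Getter
@Setter
public class ApplicationProperties
{
  private String bootstrapServer;
  private String groupId;
  private String clientId;
  private String topicIn;
  private String topicOut;
  private String autoOffsetReset;
  private Duration commitInterval; // getCommitInterval().toMillis() in the diff implies Duration
  private String acks;
  private Integer batchSize;
  private Integer lingerMs;
  private String compressionType;
}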