X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;fp=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationConfiguration.java;h=753422e66925cbf57ff0cce3ce9dbf52ebd4bc6e;hb=41e5f74b40e4a434483dcc4142aaf8224ea5a478;hp=d0334a2317ab267509c16ae757a4871149fff74e;hpb=81c08612c6636a04864699233e7806d72e2ecf3a;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index d0334a2..753422e 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -1,8 +1,10 @@
 package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.common.serialization.IntegerDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -17,25 +19,30 @@ import java.util.concurrent.Executors;
 public class ApplicationConfiguration
 {
   @Bean
-  public RecordHandler<String, Long> noopRecordHandler()
+  public SumUpRecordHandler sumUpRecordHandler(
+      KafkaProducer<String, String> kafkaProducer,
+      ApplicationProperties properties)
   {
-    return record -> {};
+    return new SumUpRecordHandler(
+        kafkaProducer,
+        properties.getClientId(),
+        properties.getTopicOut());
   }
 
   @Bean
-  public EndlessConsumer<String, Long> endlessConsumer(
-      KafkaConsumer<String, Long> kafkaConsumer,
+  public EndlessConsumer<String, Integer> endlessConsumer(
+      KafkaConsumer<String, Integer> kafkaConsumer,
       ExecutorService executor,
-      RecordHandler<String, Long> noopRecordHandler,
+      SumUpRecordHandler sumUpRecordHandler,
       ApplicationProperties properties)
   {
     return
         new EndlessConsumer<>(
             executor,
             properties.getClientId(),
-            properties.getTopic(),
+            properties.getTopicIn(),
             kafkaConsumer,
-            noopRecordHandler);
+            sumUpRecordHandler);
   }
 
   @Bean
@@ -45,7 +52,7 @@ public class ApplicationConfiguration
   }
 
   @Bean(destroyMethod = "close")
-  public KafkaConsumer<String, Long> kafkaConsumer(ApplicationProperties properties)
+  public KafkaConsumer<String, Integer> kafkaConsumer(ApplicationProperties properties)
   {
     Properties props = new Properties();
 
@@ -57,8 +64,26 @@ public class ApplicationConfiguration
     props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
     props.put("metadata.max.age.ms", "1000");
     props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", LongDeserializer.class.getName());
+    props.put("value.deserializer", IntegerDeserializer.class.getName());
 
     return new KafkaConsumer<>(props);
   }
+
+  @Bean(destroyMethod = "close")
+  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
+  {
+    Properties props = new Properties();
+    props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("client.id", properties.getClientId());
+    props.put("acks", properties.getAcks());
+    props.put("batch.size", properties.getBatchSize());
+    props.put("delivery.timeout.ms", 20000); // 20 seconds
+    props.put("request.timeout.ms", 10000); // 10 seconds
+    props.put("linger.ms", properties.getLingerMs());
+    props.put("compression.type", properties.getCompressionType());
+    props.put("key.serializer", StringSerializer.class.getName());
+    props.put("value.serializer", StringSerializer.class.getName());
+
+    return new KafkaProducer<>(props);
+  }
 }