Merged improvements from 'deserialization' into 'stored-offsets'
[demos/kafka/training] / src / main / java / de / juplo / kafka / ApplicationConfiguration.java
1 package de.juplo.kafka;
2
3 import org.apache.kafka.clients.consumer.Consumer;
4 import org.apache.kafka.clients.consumer.KafkaConsumer;
5 import org.apache.kafka.common.serialization.LongDeserializer;
6 import org.apache.kafka.common.serialization.StringDeserializer;
7 import org.springframework.boot.context.properties.EnableConfigurationProperties;
8 import org.springframework.context.annotation.Bean;
9 import org.springframework.context.annotation.Configuration;
10
11 import java.time.Clock;
12 import java.util.Properties;
13 import java.util.concurrent.ExecutorService;
14 import java.util.concurrent.Executors;
15
16
17 @Configuration
18 @EnableConfigurationProperties(ApplicationProperties.class)
19 public class ApplicationConfiguration
20 {
21   @Bean
22   public KeyCountingRecordHandler messageCountingRecordHandler()
23   {
24     return new KeyCountingRecordHandler();
25   }
26
27   @Bean
28   public KeyCountingRebalanceListener wordcountRebalanceListener(
29       KeyCountingRecordHandler keyCountingRecordHandler,
30       PartitionStatisticsRepository repository,
31       Consumer<String, Long> consumer,
32       ApplicationProperties properties)
33   {
34     return new KeyCountingRebalanceListener(
35         keyCountingRecordHandler,
36         repository,
37         properties.getClientId(),
38         properties.getTopic(),
39         Clock.systemDefaultZone(),
40         properties.getCommitInterval(),
41         consumer);
42   }
43
44   @Bean
45   public EndlessConsumer<String, Long> endlessConsumer(
46       KafkaConsumer<String, Long> kafkaConsumer,
47       ExecutorService executor,
48       KeyCountingRebalanceListener keyCountingRebalanceListener,
49       KeyCountingRecordHandler keyCountingRecordHandler,
50       ApplicationProperties properties)
51   {
52     return
53         new EndlessConsumer<>(
54             executor,
55             properties.getClientId(),
56             properties.getTopic(),
57             kafkaConsumer,
58             keyCountingRebalanceListener,
59             keyCountingRecordHandler);
60   }
61
62   @Bean
63   public ExecutorService executor()
64   {
65     return Executors.newSingleThreadExecutor();
66   }
67
68   @Bean(destroyMethod = "close")
69   public KafkaConsumer<String, Long> kafkaConsumer(ApplicationProperties properties)
70   {
71     Properties props = new Properties();
72
73     props.put("bootstrap.servers", properties.getBootstrapServer());
74     props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
75     props.put("group.id", properties.getGroupId());
76     props.put("client.id", properties.getClientId());
77     props.put("enable.auto.commit", false);
78     props.put("auto.offset.reset", properties.getAutoOffsetReset());
79     props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
80     props.put("metadata.max.age.ms", "1000");
81     props.put("key.deserializer", StringDeserializer.class.getName());
82     props.put("value.deserializer", LongDeserializer.class.getName());
83
84     return new KafkaConsumer<>(props);
85   }
86 }