The adder now processes two types of JSON messages instead of plain String values
[demos/kafka/training] src/main/java/de/juplo/kafka/ApplicationConfiguration.java
package de.juplo.kafka;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.serializer.JsonDeserializer;

import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;


@Configuration
@EnableConfigurationProperties(ApplicationProperties.class)
public class ApplicationConfiguration
{
  @Bean
  public ApplicationRecordHandler recordHandler(
      AdderResults adderResults,
      ApplicationProperties properties)
  {
    return new ApplicationRecordHandler(
        adderResults,
        Optional.ofNullable(properties.getThrottle()),
        properties.getClientId());
  }

  @Bean
  public AdderResults adderResults()
  {
    return new AdderResults();
  }

  @Bean
  public ApplicationRebalanceListener rebalanceListener(
      ApplicationRecordHandler recordHandler,
      AdderResults adderResults,
      StateRepository stateRepository,
      ApplicationProperties properties)
  {
    return new ApplicationRebalanceListener(
        recordHandler,
        adderResults,
        stateRepository,
        properties.getClientId());
  }

  @Bean
  public EndlessConsumer<String, Message> endlessConsumer(
      KafkaConsumer<String, Message> kafkaConsumer,
      ExecutorService executor,
      ApplicationRebalanceListener rebalanceListener,
      ApplicationRecordHandler recordHandler,
      ApplicationProperties properties)
  {
    return
        new EndlessConsumer<>(
            executor,
            properties.getClientId(),
            properties.getTopic(),
            kafkaConsumer,
            rebalanceListener,
            recordHandler);
  }

  @Bean
  public ExecutorService executor()
  {
    return Executors.newSingleThreadExecutor();
  }

  @Bean(destroyMethod = "close")
  public KafkaConsumer<String, Message> kafkaConsumer(ApplicationProperties properties)
  {
    Properties props = new Properties();

    props.put("bootstrap.servers", properties.getBootstrapServer());
    props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.StickyAssignor");
    props.put("group.id", properties.getGroupId());
    props.put("client.id", properties.getClientId());
    props.put("auto.offset.reset", properties.getAutoOffsetReset());
    props.put("auto.commit.interval.ms", (int)properties.getCommitInterval().toMillis());
    props.put("metadata.max.age.ms", "1000");
    props.put("key.deserializer", StringDeserializer.class.getName());
    // Record values are now deserialized from JSON instead of plain Strings.
    props.put("value.deserializer", JsonDeserializer.class.getName());
    // Only classes from this package may be instantiated during deserialization.
    props.put(JsonDeserializer.TRUSTED_PACKAGES, "de.juplo.kafka");
    // Map the type tokens ADD and CALC to the two concrete Message implementations.
    props.put(JsonDeserializer.TYPE_MAPPINGS,
      Message.Type.ADD + ":" + MessageAddNumber.class.getName() + "," +
      Message.Type.CALC + ":" + MessageCalculateSum.class.getName());

    return new KafkaConsumer<>(props);
  }
}
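With the TYPE_MAPPINGS configured above, Spring Kafka's JsonDeserializer resolves each incoming record by the type token carried in its __TypeId__ header: a record tagged ADD is turned into a MessageAddNumber, a record tagged CALC into a MessageCalculateSum. For orientation, here is a minimal sketch of what such a message hierarchy could look like; the abstract base class with its Type enum follows from the configuration, but the payload field next in MessageAddNumber is an illustrative assumption, not taken from the listing.

package de.juplo.kafka;

// Hypothetical sketch of the message types referenced by the TYPE_MAPPINGS;
// in a real project each top-level class would live in its own file.
public abstract class Message
{
  public enum Type { ADD, CALC }

  public abstract Type getType();
}

class MessageAddNumber extends Message
{
  private Integer next; // assumed field name: the number to add

  @Override
  public Type getType() { return Type.ADD; }

  public Integer getNext() { return next; }
  public void setNext(Integer next) { this.next = next; }
}

class MessageCalculateSum extends Message
{
  @Override
  public Type getType() { return Type.CALC; }
}

Note that the same token-to-class mapping has to be configured on the producer side (for example via JsonSerializer and its TYPE_MAPPINGS property), so that the serializer writes the short tokens ADD and CALC into the type header instead of fully qualified class names.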