splitter: 1.1.1 - The configured default-serde is used for serialization
[demos/kafka/wordcount] / src / main / java / de / juplo / kafka / wordcount / splitter / SplitterStreamProcessor.java
1 package de.juplo.kafka.wordcount.splitter;
2
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.springframework.boot.SpringApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.stereotype.Component;

import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Pattern;

import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
25
26
27 @Slf4j
28 @Component
29 public class SplitterStreamProcessor
30 {
31         final static Pattern PATTERN = Pattern.compile("\\W+");
32
33         public final KafkaStreams streams;
34
35
36         public SplitterStreamProcessor(
37                         SplitterApplicationProperties properties,
38                         ConfigurableApplicationContext context)
39         {
40                 StreamsBuilder builder = new StreamsBuilder();
41
42                 JsonSerde<Recording> recordSerde =
43                                 new JsonSerde<>(Recording.class).ignoreTypeHeaders();
44
45                 KStream<String, Recording> source = builder.stream(
46                                 properties.getInputTopic(),
47                                 Consumed.with(Serdes.String(), recordSerde));
48
49                 source
50                                 .flatMapValues(recording -> Arrays
51                                                         .stream(PATTERN.split(recording.getSentence()))
52                                                         .map(word -> Word.of(recording.getUser(), word))
53                                                         .toList())
54                                 .to(properties.getOutputTopic());
55
56                 Properties props = new Properties();
57                 props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
58                 props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
59                 props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
60                 props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
61                 props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
62
63                 streams = new KafkaStreams(builder.build(), props);
64                 streams.setUncaughtExceptionHandler((Throwable e) ->
65                 {
66                         log.error("Unexpected error!", e);
67                         CompletableFuture.runAsync(() ->
68                         {
69                                 log.info("Stopping application...");
70                                 SpringApplication.exit(context, () -> 1);
71                         });
72                         return SHUTDOWN_CLIENT;
73                 });
74         }
75
76         @PostConstruct
77         public void start()
78         {
79                 log.info("Starting Stream-Processor");
80                 streams.start();
81         }
82
83         @PreDestroy
84         public void stop()
85         {
86                 log.info("Stopping Stream-Processor");
87                 streams.close();
88         }
89 }