--- /dev/null
+package de.juplo.kafka.wordcount.top10;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.streams.KafkaStreams;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.StreamsBuilder;
+import org.apache.kafka.streams.StreamsConfig;
+import org.springframework.boot.SpringApplication;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.stereotype.Component;
+
+import java.util.Properties;
+import java.util.concurrent.CompletableFuture;
+
+import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
+
+
+@Slf4j
+@Component
+public class Top10ApplicationConfiguration
+{
+ public final KafkaStreams streams;
+
+
+ public Top10ApplicationConfiguration(
+ Top10ApplicationProperties properties,
+ ObjectMapper mapper,
+ ConfigurableApplicationContext context)
+ {
+ StreamsBuilder builder = new StreamsBuilder();
+
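+ // Topology: re-key the incoming word counts by user and aggregate all
+ // (word, count) entries of a user into a single JSON-encoded ranking.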
+ builder
+ .<String, String>stream(properties.getInputTopic())
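+ // The key arrives as a JSON-encoded Key (word + username) and the value
+ // as the count in plain text: re-key the record by username and wrap
+ // word and count into a JSON-encoded Entry.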
+ .map((keyJson, countStr) ->
+ {
+ try
+ {
+ Key key = mapper.readValue(keyJson, Key.class);
+ Long count = Long.parseLong(countStr);
+ Entry entry = Entry.of(key.getWord(), count);
+ String entryJson = mapper.writeValueAsString(entry);
+ return new KeyValue<>(key.getUsername(), entryJson);
+ }
+ catch (JsonProcessingException e)
+ {
+ throw new RuntimeException(e);
+ }
+ })
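+ // The key was changed by map(), so groupByKey() triggers a re-partitioning.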
+ .groupByKey()
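+ // Fold all entries of a user into a JSON-encoded Ranking, starting from
+ // an empty one.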
+ .aggregate(
+ () -> "{\"entries\" : []}",
+ (username, entryJson, rankingJson) ->
+ {
+ try
+ {
+ Ranking ranking = mapper.readValue(rankingJson, Ranking.class);
+ ranking.add(mapper.readValue(entryJson, Entry.class));
+ return mapper.writeValueAsString(ranking);
+ }
+ catch (JsonProcessingException e)
+ {
+ throw new RuntimeException(e);
+ }
+ }
+ )
+ .toStream()
+ .to(properties.getOutputTopic());
+
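+ // Keys and values travel as plain strings; the JSON (de-)serialization is
+ // done explicitly in the topology above. The consumer setting
+ // auto.offset.reset is passed through to the internally created consumers.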
+ Properties props = new Properties();
+ props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
+ props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
+ props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+ props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+ props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+
+ streams = new KafkaStreams(builder.build(), props);
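+ // On an unhandled error: log it, trigger the Spring shutdown from a
+ // separate thread (so that the handler returns immediately) and shut
+ // down the Streams client.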
+ streams.setUncaughtExceptionHandler((Throwable e) ->
+ {
+ log.error("Unexpected error!", e);
+ CompletableFuture.runAsync(() ->
+ {
+ log.info("Stopping application...");
+ SpringApplication.exit(context, () -> 1);
+ });
+ return SHUTDOWN_CLIENT;
+ });
+ }
+
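+ // Tie the stream processor to the Spring lifecycle: start it once the
+ // bean is constructed and close it before the context shuts down.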
+ @PostConstruct
+ public void start()
+ {
+ log.info("Starting Stream-Processor");
+ streams.start();
+ }
+
+ @PreDestroy
+ public void stop()
+ {
+ log.info("Stopping Stream-Processor");
+ streams.close();
+ }
+}