From 68216b7c8922266da1739b7bac85257f067535ce Mon Sep 17 00:00:00 2001
From: Kai Moritz
Date: Sun, 5 Feb 2023 14:27:19 +0100
Subject: [PATCH] splitter: 1.1.4 - Moved code for config/init into
 `SplitterApplication`

---
 pom.xml                                        |  2 +-
 .../splitter/SplitterApplication.java          | 51 +++++++++++++++++++
 .../splitter/SplitterStreamProcessor.java      | 48 +++--------------
 3 files changed, 58 insertions(+), 43 deletions(-)

diff --git a/pom.xml b/pom.xml
index 3abaee4..2f3cb70 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,7 +10,7 @@
 
   <groupId>de.juplo.kafka.wordcount</groupId>
   <artifactId>splitter</artifactId>
-  <version>1.1.3</version>
+  <version>1.1.4</version>
   <name>Wordcount-Splitter</name>
   <description>Stream-processor of the multi-user wordcount-example, that splits the sentences up into single words</description>
diff --git a/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterApplication.java b/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterApplication.java
index 491c549..f01460f 100644
--- a/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterApplication.java
+++ b/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterApplication.java
@@ -1,14 +1,65 @@
 package de.juplo.kafka.wordcount.splitter;
 
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.streams.StreamsConfig;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+import org.springframework.kafka.support.serializer.JsonSerde;
+import org.springframework.kafka.support.serializer.JsonSerializer;
+
+import java.util.Properties;
+import java.util.concurrent.CompletableFuture;
+
+import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
 
 
 @SpringBootApplication
 @EnableConfigurationProperties(SplitterApplicationProperties.class)
+@Slf4j
 public class SplitterApplication
 {
+  @Bean(initMethod = "start", destroyMethod = "stop")
+  public SplitterStreamProcessor streamProcessor(
+      SplitterApplicationProperties properties,
+      ConfigurableApplicationContext context)
+  {
+    Properties propertyMap = new Properties();
+
+    propertyMap.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
+    propertyMap.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
+    propertyMap.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
+    propertyMap.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
+    propertyMap.put(JsonDeserializer.TRUSTED_PACKAGES, Recording.class.getName());
+    propertyMap.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Recording.class.getName());
+    propertyMap.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
+    propertyMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+
+    SplitterStreamProcessor streamProcessor = new SplitterStreamProcessor(
+        properties.getInputTopic(),
+        properties.getOutputTopic(),
+        propertyMap);
+
+    streamProcessor.streams.setUncaughtExceptionHandler((Throwable e) ->
+    {
+      log.error("Unexpected error!", e);
+      CompletableFuture.runAsync(() ->
+      {
+        log.info("Stopping application...");
+        SpringApplication.exit(context, () -> 1);
+      });
+      return SHUTDOWN_CLIENT;
+    });
+
+
+    return streamProcessor;
+  }
+
   public static void main(String[] args)
   {
     SpringApplication.run(SplitterApplication.class, args);
diff --git a/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java b/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java
index 66188a7..60c569b 100644
--- a/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java
+++ b/src/main/java/de/juplo/kafka/wordcount/splitter/SplitterStreamProcessor.java
@@ -1,31 +1,16 @@
 package de.juplo.kafka.wordcount.splitter;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.streams.KafkaStreams;
 import org.apache.kafka.streams.StreamsBuilder;
-import org.apache.kafka.streams.StreamsConfig;
 import org.apache.kafka.streams.kstream.KStream;
-import org.springframework.boot.SpringApplication;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
-import org.springframework.kafka.support.serializer.JsonSerde;
-import org.springframework.kafka.support.serializer.JsonSerializer;
-import org.springframework.stereotype.Component;
 
-import jakarta.annotation.PostConstruct;
-import jakarta.annotation.PreDestroy;
 import java.util.Arrays;
 import java.util.Properties;
-import java.util.concurrent.CompletableFuture;
 import java.util.regex.Pattern;
 
-import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
-
 
 @Slf4j
-@Component
 public class SplitterStreamProcessor
 {
   final static Pattern PATTERN = Pattern.compile("[^\\p{IsAlphabetic}]+");
@@ -34,51 +19,30 @@ public class SplitterStreamProcessor
 
 
   public SplitterStreamProcessor(
-      SplitterApplicationProperties properties,
-      ConfigurableApplicationContext context)
+      String inputTopic,
+      String outputTopic,
+      Properties properties)
   {
     StreamsBuilder builder = new StreamsBuilder();
 
-    KStream<String, Recording> source = builder.stream(properties.getInputTopic());
+    KStream<String, Recording> source = builder.stream(inputTopic);
     source
       .flatMapValues(recording -> Arrays
         .stream(PATTERN.split(recording.getSentence()))
         .map(word -> Word.of(recording.getUser(), word))
         .toList())
-      .to(properties.getOutputTopic());
-
-    Properties props = new Properties();
-    props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
-    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
-    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
-    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
-    props.put(JsonDeserializer.TRUSTED_PACKAGES, Recording.class.getName());
-    props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Recording.class.getName());
-    props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
-    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+      .to(outputTopic);
 
-    streams = new KafkaStreams(builder.build(), props);
-    streams.setUncaughtExceptionHandler((Throwable e) ->
-    {
-      log.error("Unexpected error!", e);
-      CompletableFuture.runAsync(() ->
-      {
-        log.info("Stopping application...");
-        SpringApplication.exit(context, () -> 1);
-      });
-      return SHUTDOWN_CLIENT;
-    });
+    streams = new KafkaStreams(builder.build(), properties);
   }
 
-  @PostConstruct
   public void start()
   {
     log.info("Starting Stream-Processor");
     streams.start();
   }
 
-  @PreDestroy
   public void stop()
   {
     log.info("Stopping Stream-Processor");
-- 
2.20.1