package de.juplo.kafka.wordcount.counter;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.common.serialization.Serdes;
-import org.apache.kafka.streams.KafkaStreams;
-import org.apache.kafka.streams.KeyValue;
-import org.apache.kafka.streams.StreamsBuilder;
-import org.apache.kafka.streams.StreamsConfig;
-import org.apache.kafka.streams.kstream.KStream;
-import org.springframework.boot.SpringApplication;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.stereotype.Component;
-
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import java.util.Arrays;
-import java.util.Properties;
-import java.util.concurrent.CompletableFuture;
-import java.util.regex.Pattern;
+import org.apache.kafka.streams.*;
+import org.apache.kafka.streams.kstream.Consumed;
+import org.apache.kafka.streams.kstream.Materialized;
+import org.apache.kafka.streams.kstream.Produced;
+import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
+import org.apache.kafka.streams.state.QueryableStoreTypes;
+import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
+import org.springframework.kafka.support.serializer.JsonSerde;
+import org.springframework.kafka.support.serializer.JsonSerializer;
-import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
+import java.util.Map;
+import java.util.Properties;
+import java.util.stream.Collectors;
@Slf4j
-@Component
public class CounterStreamProcessor
{
- final static Pattern PATTERN = Pattern.compile("\\W+");
+ public static final String STORE_NAME = "counter";
+
public final KafkaStreams streams;
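+  // Builds the KafkaStreams instance from the given topics, Streams configuration and state-store supplier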
public CounterStreamProcessor(
- CounterApplicationProperties properties,
- ObjectMapper mapper,
- ConfigurableApplicationContext context)
+ String inputTopic,
+ String outputTopic,
+ Properties properties,
+ KeyValueBytesStoreSupplier storeSupplier)
+ {
+ Topology topology = CounterStreamProcessor.buildTopology(
+ inputTopic,
+ outputTopic,
+ storeSupplier);
+
+ streams = new KafkaStreams(topology, properties);
+ }
+
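+  // Assembles the word-count topology: re-key incoming records by word, count per word, publish the counts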
+ static Topology buildTopology(
+ String inputTopic,
+ String outputTopic,
+ KeyValueBytesStoreSupplier storeSupplier)
{
StreamsBuilder builder = new StreamsBuilder();
- KStream<String, String> source = builder.stream(properties.getInputTopic());
- source
- .flatMapValues(sentence -> Arrays.asList(PATTERN.split(sentence)))
- .map((username, word) ->
- {
- try
- {
- String key = mapper.writeValueAsString(Key.of(username, word));
- return new KeyValue<>(key, word);
- }
- catch (JsonProcessingException e)
- {
- throw new RuntimeException(e);
- }
- })
+ builder
+ .stream(inputTopic, Consumed.with(inKeySerde(), inValueSerde()))
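+    // Re-key each record by the word itself so identical words are grouped together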
+ .map((key, word) -> new KeyValue<>(word, word))
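+    // groupByKey() repartitions by the new Word key using the default serdes from the Streams configuration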
.groupByKey()
- .count()
- .mapValues(value->Long.toString(value))
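+      // Count occurrences per word; the counts are materialized in the supplied state store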
+ .count(
+ Materialized
+ .<Word, Long>as(storeSupplier)
+          .withKeySerde(new JsonSerde<>(Word.class))) // Store/changelog records carry no type headers, so the serde needs a fixed target type
.toStream()
- .to(properties.getOutputTopic());
-
- Properties props = new Properties();
- props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
- props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
- props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
- props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
- props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
-
- streams = new KafkaStreams(builder.build(), props);
- streams.setUncaughtExceptionHandler((Throwable e) ->
- {
- log.error("Unexpected error!", e);
- CompletableFuture.runAsync(() ->
- {
- log.info("Stopping application...");
- SpringApplication.exit(context, () -> 1);
- });
- return SHUTDOWN_CLIENT;
- });
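+    // Forward every updated count downstream as a WordCounter value keyed by the word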
+ .map((word, counter) -> new KeyValue<>(word, WordCounter.of(word, counter)))
+ .to(outputTopic, Produced.with(outKeySerde(), outValueSerde()));
+
+ Topology topology = builder.build();
+ log.info("\n\n{}", topology.describe());
+
+ return topology;
+ }
+
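+  // Exposes the materialized counts for interactive queries; assumes the supplied store supplier uses STORE_NAME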
+ ReadOnlyKeyValueStore<Word, Long> getStore()
+ {
+ return streams.store(StoreQueryParameters.fromNameAndType(STORE_NAME, QueryableStoreTypes.keyValueStore()));
}
- @PostConstruct
public void start()
{
log.info("Starting Stream-Processor");
streams.start();
}
- @PreDestroy
public void stop()
{
log.info("Stopping Stream-Processor");
streams.close();
}
+
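+  // Serde factories for the input and output topics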
+ public static JsonSerde<User> inKeySerde()
+ {
+ return new JsonSerde<>(User.class);
+ }
+
+ public static JsonSerde<Word> inValueSerde()
+ {
+ return new JsonSerde<>(Word.class);
+ }
+
+ public static JsonSerde<Word> outKeySerde()
+ {
+ return serde(true);
+ }
+
+ public static JsonSerde<WordCounter> outValueSerde()
+ {
+ return serde(false);
+ }
+
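+  // JsonSerde configured with the type mappings below, usable for keys or values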
+ public static <T> JsonSerde<T> serde(boolean isKey)
+ {
+ JsonSerde<T> serde = new JsonSerde<>();
+ serde.configure(
+ Map.of(JsonSerializer.TYPE_MAPPINGS, typeMappingsConfig()),
+ isKey);
+ return serde;
+ }
+
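+  // Builds the comma-separated "token:class" mapping string expected by spring-kafka's TYPE_MAPPINGS property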
+ private static String typeMappingsConfig()
+ {
+ return typeMappingsConfig(Word.class, WordCounter.class);
+ }
+
+  public static String typeMappingsConfig(Class<?> wordClass, Class<?> wordCounterClass)
+ {
+ return Map.of(
+ "word", wordClass,
+ "counter", wordCounterClass)
+ .entrySet()
+ .stream()
+ .map(entry -> entry.getKey() + ":" + entry.getValue().getName())
+ .collect(Collectors.joining(","));
+ }
}