package de.juplo.kafka.wordcount.counter;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
-import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
+import org.springframework.kafka.support.serializer.JsonSerde;
import java.util.Properties;
{
// NOTE(review): this span is a unified-diff fragment embedded in the source —
// the leading '-' / '+' characters below are patch markers, not Java. The
// enclosing method's signature (above) and closing brace (below) are outside
// this view. The file will not compile until the patch residue is resolved
// (keep the '+' lines, drop the '-' lines and the marker characters).
// Comments below describe the post-patch ('+') topology.

// Build the word-count stream topology: inputTopic -> re-key by Word ->
// count per Word into a state store -> emit WordCounter records to outputTopic.
StreamsBuilder builder = new StreamsBuilder();
// Old code: explicit String key serde and a null value serde via Consumed.with.
- KStream<String, Word> source = builder.stream(
- inputTopic,
- Consumed.with(Serdes.String(), null));
// New code: rely on the application's configured default serdes; the key type
// changes from String to User. NOTE(review): User is not imported in the
// visible import block — presumably added elsewhere; verify.
+ KStream<User, Word> source = builder.stream(inputTopic);
source
// Re-key each record so the Word itself becomes the grouping key
// (the value is duplicated into the key position).
.map((key, word) -> new KeyValue<>(word, word))
.groupByKey()
// Old code: count into the store from storeSupplier with default serdes.
- .count(Materialized.as(storeSupplier))
// New code: same store, but with an explicit JSON key serde typed to Word
// (spring-kafka JsonSerde; copyWithType fixes the target type, forKeys()
// configures it as a key serde). NOTE(review): presumably added because the
// grouped key type (Word) differs from the topic's configured default key
// serde — confirm against the application config.
+ .count(
+ Materialized
+ .<Word, Long>as(storeSupplier)
+ .withKeySerde(new JsonSerde<>().copyWithType(Word.class).forKeys()))
// Convert the KTable of counts back to a stream and wrap each (word, count)
// pair in a WordCounter before writing to the output topic.
.toStream()
.map((word, counter) -> new KeyValue<>(word, WordCounter.of(word, counter)))
.to(outputTopic);