</parent>
<groupId>de.juplo.kafka.wordcount</groupId>
<artifactId>counter</artifactId>
- <version>1.2.2</version>
+ <version>1.2.3</version>
<name>Wordcount-Counter</name>
<description>Word-counting stream-processor of the multi-user wordcount-example</description>
<properties>
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.Stores;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+import org.springframework.kafka.support.serializer.JsonSerde;
+import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
// --- Kafka Streams configuration (diff fragment: "-" lines removed, "+" lines added) ---
// NOTE(review): this span is the interior of a configuration method whose
// signature lies outside the visible excerpt; code kept byte-identical.
propertyMap.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
propertyMap.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
// Change: replace the String default serdes with Spring Kafka's JsonSerde,
// so JSON (de)serialization is configured once here instead of per operation.
- propertyMap.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
- propertyMap.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
+ propertyMap.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
+ propertyMap.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
// Restrict JSON deserialization to this application's own package
// (deserialization-of-untrusted-classes guard required by JsonDeserializer).
+ propertyMap.put(JsonDeserializer.TRUSTED_PACKAGES, CounterApplication.class.getPackageName());
// Fallback target types when a record carries no type information.
// NOTE(review): VALUE_DEFAULT_TYPE is Word, yet the output values are
// WordCount (see the "C:" mapping below) — presumably only the *input*
// topic's values rely on this default; confirm against the topology.
+ propertyMap.put(JsonDeserializer.KEY_DEFAULT_TYPE, Word.class.getName());
+ propertyMap.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Word.class.getName());
// Compact type aliases written into / read from record headers:
// "W" <-> Word, "C" <-> WordCount.
+ propertyMap.put(
+ JsonDeserializer.TYPE_MAPPINGS,
+ "W:" + Word.class.getName() + "," +
+ "C:" + WordCount.class.getName());
// Keep RocksDB state under target/ so `mvn clean` wipes local state.
propertyMap.put(StreamsConfig.STATE_DIR_CONFIG, "target");
if (properties.getCommitInterval() != null)
propertyMap.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, properties.getCommitInterval());
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.kstream.*;
-import org.springframework.kafka.support.serializer.JsonSerde;
import java.util.Properties;
// --- Topology construction (diff fragment: "-" lines removed, "+" lines added) ---
// NOTE(review): interior of a method whose signature/return lie outside this
// excerpt; code kept byte-identical. The change strips the per-operation
// JsonSerde wiring in favor of the application-wide default serdes.
KStream<String, Word> source = builder.stream(
inputTopic,
- Consumed.with(
- Serdes.String(),
- new JsonSerde<>(Word.class)
- .ignoreTypeHeaders()));
// A null value-serde here falls back to the configured default serde
// (JsonSerde with Word as default type — see the Streams config).
+ Consumed.with(Serdes.String(), null));
source
// Re-key each record by its Word so counting groups per word.
.map((key, word) -> new KeyValue<>(word, word))
// Grouping now relies on the default JsonSerde instead of explicit serdes.
- .groupByKey(Grouped.with(
- new JsonSerde<>(Word.class)
- .forKeys()
- .noTypeInfo(),
- new JsonSerde<>(Word.class)
- .noTypeInfo()))
+ .groupByKey()
.count(Materialized.as(storeSupplier))
.toStream()
// Wrap (word, count) into the WordCount output value.
.map((word, count) -> new KeyValue<>(word, WordCount.of(word, count)))
// Output serialization also uses the defaults; the "C:" type mapping
// from the config identifies WordCount values in the record headers.
- .to(
- outputTopic,
- Produced.with(
- new JsonSerde<>(Word.class)
- .forKeys()
- .noTypeInfo(),
- new JsonSerde<>(WordCount.class)
- .noTypeInfo()));
+ .to(outputTopic);
Topology topology = builder.build();
// Log the wiring of the built topology for debugging/inspection.
log.info("\n\n{}", topology.describe());