</parent>
<groupId>de.juplo.kafka.wordcount</groupId>
<artifactId>splitter</artifactId>
- <version>1.0.1</version>
+ <version>1.1.0</version>
<name>Wordcount-Splitter</name>
<description>Stream-processor of the multi-user wordcount-example, that splits the sentences up into single words</description>
<properties>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka</artifactId>
+ </dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
+
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.awaitility</groupId>
+ <artifactId>awaitility</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
--- /dev/null
+package de.juplo.kafka.wordcount.splitter;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import lombok.Data;
+
+
+/**
+ * Input record of the splitter stream: a sentence entered by a user.
+ * Unknown JSON properties are ignored, so upstream producers can add
+ * fields without breaking this consumer.
+ */
+@Data
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class Recording
+{
+ private String user;     // id of the user the sentence belongs to (carried into each emitted Word)
+ private String sentence; // the raw sentence that gets split into single words
+}
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
+import org.apache.kafka.streams.kstream.Produced;
import org.springframework.boot.SpringApplication;
import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.stereotype.Component;
import jakarta.annotation.PostConstruct;
{
StreamsBuilder builder = new StreamsBuilder();
- KStream<String, String> source = builder.stream(properties.getInputTopic());
+ JsonSerde<Recording> recordSerde =
+ new JsonSerde<>(Recording.class).ignoreTypeHeaders();
+ JsonSerde<Word> wordSerde =
+ new JsonSerde<>(Word.class).noTypeInfo();
+
+ KStream<String, Recording> source = builder.stream(
+ properties.getInputTopic(),
+ Consumed.with(Serdes.String(), recordSerde));
+
source
- .flatMapValues(sentence -> Arrays.asList(PATTERN.split(sentence)))
- .to(properties.getOutputTopic());
+ .flatMapValues(recording -> Arrays
+ .stream(PATTERN.split(recording.getSentence()))
+ .map(word -> Word.of(recording.getUser(), word))
+ .toList())
+ .to(properties.getOutputTopic(), Produced.with(Serdes.String(), wordSerde));
Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, properties.getApplicationId());
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
- props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+ props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streams = new KafkaStreams(builder.build(), props);
--- /dev/null
+package de.juplo.kafka.wordcount.splitter;
+
+import lombok.Value;
+
+
+/**
+ * Output record of the splitter stream: one single word of a sentence,
+ * together with the user who entered it.
+ * {@code @Value} makes the class immutable; instances are created via
+ * the generated static factory {@code Word.of(user, word)}.
+ */
+@Value(staticConstructor = "of")
+public class Word
+{
+ String user; // id of the user, copied from the originating Recording
+ String word; // a single word extracted from the sentence
+}
--- /dev/null
+package de.juplo.kafka.wordcount.splitter;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+
+import java.time.Duration;
+import java.util.LinkedList;
+import java.util.List;
+
+import static de.juplo.kafka.wordcount.splitter.SplitterApplicationTests.*;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.awaitility.Awaitility.*;
+
+
+@SpringBootTest(
+ properties = {
+ "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+ "juplo.wordcount.splitter.bootstrap-server=${spring.embedded.kafka.brokers}",
+ "juplo.wordcount.splitter.input-topic=" + TOPIC_IN,
+ "juplo.wordcount.splitter.output-topic=" + TOPIC_OUT })
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT }, partitions = PARTITIONS)
+@Slf4j
+public class SplitterApplicationTests
+{
+ public final static String TOPIC_IN = "in";
+ public final static String TOPIC_OUT = "out";
+ static final int PARTITIONS = 2;
+
+ @Autowired
+ KafkaTemplate<String, String> kafkaTemplate;
+ @Autowired
+ ObjectMapper mapper;
+ @Autowired
+ Consumer consumer;
+
+
+ @BeforeEach
+ public void clear()
+ {
+ consumer.received.clear();
+ }
+
+
+ @Test
+ void testSendMessage() throws Exception
+ {
+ Recording recording = new Recording();
+ recording.setUser("peter");
+ recording.setSentence("Hallo Welt!");
+ kafkaTemplate.send(TOPIC_IN, mapper.writeValueAsString(recording));
+
+ String word1 = mapper.writeValueAsString(Word.of("peter", "Hallo"));
+ String word2 = mapper.writeValueAsString(Word.of("peter", "Welt"));
+
+ await("Expexted converted data")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() ->
+ {
+ assertThat(consumer.received).hasSize(2);
+ assertThat(consumer.received.get(0).value()).isEqualTo(word1);
+ assertThat(consumer.received.get(1).value()).isEqualTo(word2);
+ });
+ }
+
+
+ static class Consumer
+ {
+ final List<ConsumerRecord<String, String>> received = new LinkedList<>();
+
+ @KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
+ public void receive(ConsumerRecord<String, String> record)
+ {
+ log.debug("Received message: {}", record);
+ received.add(record);
+ }
+ }
+
+ @TestConfiguration
+ static class Configuration
+ {
+ @Bean
+ Consumer consumer()
+ {
+ return new Consumer();
+ }
+ }
+}