--- /dev/null
+package de.juplo.kafka.wordcount.splitter;
+
+import de.juplo.kafka.wordcount.counter.TestWord;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.streams.KeyValue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+
+import java.time.Duration;
+import java.util.List;
+import java.util.function.BiConsumer;
+import java.util.stream.Stream;
+
+import static de.juplo.kafka.wordcount.splitter.SplitterApplicationIT.*;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.awaitility.Awaitility.await;
+
+
+@SpringBootTest(
+ properties = {
+ "spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+ "spring.kafka.producer.properties.spring.json.add.type.headers=false",
+ "spring.kafka.consumer.auto-offset-reset=earliest",
+ "spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+ "spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.TestWord",
+ "spring.kafka.consumer.properties.spring.json.trusted.packages=de.juplo.kafka.wordcount.splitter",
+ "logging.level.root=WARN",
+ "logging.level.de.juplo=DEBUG",
+ "juplo.wordcount.splitter.bootstrap-server=${spring.embedded.kafka.brokers}",
+ "juplo.wordcount.splitter.input-topic=" + TOPIC_IN,
+ "juplo.wordcount.splitter.output-topic=" + TOPIC_OUT })
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
+@Slf4j
+public class SplitterApplicationIT
+{
+ public final static String TOPIC_IN = "in";
+ public final static String TOPIC_OUT = "out";
+
+ @Autowired
+ KafkaTemplate<String, Recording> kafkaTemplate;
+ @Autowired
+ Consumer consumer;
+
+
+ @BeforeEach
+ public void clear()
+ {
+ consumer.received.clear();
+ }
+
+ static void writeInputData(BiConsumer<String, Recording> consumer)
+ {
+ Recording recording;
+
+ recording = new Recording();
+ recording.setUser("peter");
+ recording.setSentence("Hallo Welt!");
+ consumer.accept(recording.getUser(), recording);
+
+ recording = new Recording();
+ recording.setUser("klaus");
+ recording.setSentence("Müsch gäb's auch!");
+ consumer.accept(recording.getUser(), recording);
+
+ recording = new Recording();
+ recording.setUser("peter");
+ recording.setSentence("Boäh, echt! ß mal nä Nümmäh!");
+ consumer.accept(recording.getUser(), recording);
+ }
+
+ @Test
+ void testSendMessage() throws Exception
+ {
+ writeInputData((user, recording) -> kafkaTemplate.send(TOPIC_IN, user, recording));
+
+ await("Expexted converted data")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() ->
+ assertExpectedMessages(consumer.getReceivedMessages()));
+ }
+
+
+ static void assertExpectedMessages(MultiValueMap<String, TestWord> receivedMessages)
+ {
+ MultiValueMap<String, TestWord> expected = new LinkedMultiValueMap<>();
+ expectedMessages.forEach(keyValue -> expected.add(keyValue.key, keyValue.value));
+ await("Received expected messages")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() -> expected.forEach((user, word) ->
+ assertThat(receivedMessages.get(user)).containsExactlyElementsOf(word)));
+ }
+
+ static Stream<KeyValue<String, TestWord>> expectedMessages = Stream.of(
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "Hallo")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "Welt")),
+ KeyValue.pair(
+ "klaus",
+ new TestWord("klaus", "Müsch")),
+ KeyValue.pair(
+ "klaus",
+ new TestWord("klaus", "gäb")),
+ KeyValue.pair(
+ "klaus",
+ new TestWord("klaus", "s")),
+ KeyValue.pair(
+ "klaus",
+ new TestWord("klaus", "auch")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "Boäh")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "echt")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "ß")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "mal")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "nä")),
+ KeyValue.pair(
+ "peter",
+ new TestWord("peter", "Nümmäh")));
+
+
+ static class Consumer
+ {
+ private final MultiValueMap<String, TestWord> received = new LinkedMultiValueMap<>();
+
+ @KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
+ public synchronized void receive(
+ @Header(KafkaHeaders.RECEIVED_KEY) String key,
+ @Payload TestWord value)
+ {
+ log.debug("Received message: {}={}", key, value);
+ received.add(key, value);
+ }
+
+ synchronized MultiValueMap<String, TestWord> getReceivedMessages()
+ {
+ return received;
+ }
+ }
+
+
+ @TestConfiguration
+ static class Configuration
+ {
+ @Bean
+ Consumer consumer()
+ {
+ return new Consumer();
+ }
+ }
+}