From: Kai Moritz
Date: Sun, 26 May 2024 21:10:08 +0000 (+0200)
Subject: splitter: 1.2.0 - Separated test-data in class `TestData` -- COPY
X-Git-Tag: splitter-1.2.0~12
X-Git-Url: http://juplo.de/gitweb/?a=commitdiff_plain;h=d7fa29b35fef537b65e9110f708e781925ebff9f;p=demos%2Fkafka%2Fwordcount

splitter: 1.2.0 - Separated test-data in class `TestData` -- COPY
---

diff --git a/src/test/java/de/juplo/kafka/wordcount/splitter/TestData.java b/src/test/java/de/juplo/kafka/wordcount/splitter/TestData.java
new file mode 100644
index 0000000..d1bbc0f
--- /dev/null
+++ b/src/test/java/de/juplo/kafka/wordcount/splitter/TestData.java
@@ -0,0 +1,172 @@
+package de.juplo.kafka.wordcount.splitter;
+
+import de.juplo.kafka.wordcount.counter.TestWord;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.streams.KeyValue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.KafkaHeaders;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+
+import java.time.Duration;
+import java.util.function.BiConsumer;
+import java.util.stream.Stream;
+
+import static de.juplo.kafka.wordcount.splitter.SplitterApplicationIT.*;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.awaitility.Awaitility.await;
+
+
+@SpringBootTest(
+		properties = {
+				"spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+				"spring.kafka.producer.properties.spring.json.add.type.headers=false",
+				"spring.kafka.consumer.auto-offset-reset=earliest",
+				"spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.TestWord",
+				"spring.kafka.consumer.properties.spring.json.trusted.packages=de.juplo.kafka.wordcount.splitter",
+				"logging.level.root=WARN",
+				"logging.level.de.juplo=DEBUG",
+				"juplo.wordcount.splitter.bootstrap-server=${spring.embedded.kafka.brokers}",
+				"juplo.wordcount.splitter.input-topic=" + TOPIC_IN,
+				"juplo.wordcount.splitter.output-topic=" + TOPIC_OUT })
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
+@Slf4j
+public class SplitterApplicationIT
+{
+	public final static String TOPIC_IN = "in";
+	public final static String TOPIC_OUT = "out";
+
+	@Autowired
+	KafkaTemplate<String, Recording> kafkaTemplate;
+	@Autowired
+	Consumer consumer;
+
+
+	@BeforeEach
+	public void clear()
+	{
+		consumer.received.clear();
+	}
+
+	static void writeInputData(BiConsumer<String, Recording> consumer)
+	{
+		Recording recording;
+
+		recording = new Recording();
+		recording.setUser("peter");
+		recording.setSentence("Hallo Welt!");
+		consumer.accept(recording.getUser(), recording);
+
+		recording = new Recording();
+		recording.setUser("klaus");
+		recording.setSentence("Müsch gäb's auch!");
+		consumer.accept(recording.getUser(), recording);
+
+		recording = new Recording();
+		recording.setUser("peter");
recording.setSentence("Boäh, echt! ß mal nä Nümmäh!"); + consumer.accept(recording.getUser(), recording); + } + + @Test + void testSendMessage() throws Exception + { + writeInputData((user, recording) -> kafkaTemplate.send(TOPIC_IN, user, recording)); + + await("Expexted converted data") + .atMost(Duration.ofSeconds(5)) + .untilAsserted(() -> + assertExpectedMessages(consumer.getReceivedMessages())); + } + + + static void assertExpectedMessages(MultiValueMap receivedMessages) + { + MultiValueMap expected = new LinkedMultiValueMap<>(); + expectedMessages.forEach(keyValue -> expected.add(keyValue.key, keyValue.value)); + await("Received expected messages") + .atMost(Duration.ofSeconds(5)) + .untilAsserted(() -> expected.forEach((user, word) -> + assertThat(receivedMessages.get(user)).containsExactlyElementsOf(word))); + } + + static Stream> expectedMessages = Stream.of( + KeyValue.pair( + "peter", + new TestWord("peter", "Hallo")), + KeyValue.pair( + "peter", + new TestWord("peter", "Welt")), + KeyValue.pair( + "klaus", + new TestWord("klaus", "Müsch")), + KeyValue.pair( + "klaus", + new TestWord("klaus", "gäb")), + KeyValue.pair( + "klaus", + new TestWord("klaus", "s")), + KeyValue.pair( + "klaus", + new TestWord("klaus", "auch")), + KeyValue.pair( + "peter", + new TestWord("peter", "Boäh")), + KeyValue.pair( + "peter", + new TestWord("peter", "echt")), + KeyValue.pair( + "peter", + new TestWord("peter", "ß")), + KeyValue.pair( + "peter", + new TestWord("peter", "mal")), + KeyValue.pair( + "peter", + new TestWord("peter", "nä")), + KeyValue.pair( + "peter", + new TestWord("peter", "Nümmäh"))); + + + static class Consumer + { + private final MultiValueMap received = new LinkedMultiValueMap<>(); + + @KafkaListener(groupId = "TEST", topics = TOPIC_OUT) + public synchronized void receive( + @Header(KafkaHeaders.RECEIVED_KEY) String key, + @Payload TestWord value) + { + log.debug("Received message: {}={}", key, value); + received.add(key, value); + } + + synchronized MultiValueMap getReceivedMessages() + { + return received; + } + } + + + @TestConfiguration + static class Configuration + { + @Bean + Consumer consumer() + { + return new Consumer(); + } + } +}