X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Ftest%2Fjava%2Fde%2Fjuplo%2Fkafka%2Fwordcount%2Fcounter%2FCounterApplicationIT.java;h=ad4faf2a120d90dd2b59c0d8a469376eab361ce2;hb=48dabf093db63d517252f47b15f597e80852e9d4;hp=5a3507ac0611cef4a87e34288b4a2cca7b9af675;hpb=35943a021ac4b25fad29861ecc083b6974ea2732;p=demos%2Fkafka%2Fwordcount

diff --git a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index 5a3507a..ad4faf2 100644
--- a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
+++ b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
@@ -1,11 +1,6 @@
 package de.juplo.kafka.wordcount.counter;
 
-import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.streams.KeyValue;
 import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
 import org.apache.kafka.streams.state.Stores;
 import org.junit.jupiter.api.BeforeEach;
@@ -16,38 +11,45 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Primary;
 import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
-import org.springframework.kafka.core.*;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
-import org.springframework.kafka.support.serializer.JsonSerializer;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.KafkaHeaders;
 import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.messaging.handler.annotation.Header;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
 
 import java.time.Duration;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
+import java.util.stream.Stream;
 
-import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static de.juplo.kafka.wordcount.counter.TestData.convertToMap;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_IN;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_OUT;
+import static org.awaitility.Awaitility.await;
 
 
 @SpringBootTest(
 		properties = {
-				"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+				"spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+				"spring.kafka.producer.properties.spring.json.add.type.headers=false",
+				"spring.kafka.consumer.auto-offset-reset=earliest",
+				"spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+				"spring.kafka.consumer.properties.spring.json.use.type.headers=false",
+				"spring.kafka.consumer.properties.spring.json.key.default.type=de.juplo.kafka.wordcount.counter.Word",
+				"spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.WordCounter",
+				"logging.level.root=WARN",
+				"logging.level.de.juplo=DEBUG",
 				"juplo.wordcount.counter.bootstrap-server=${spring.embedded.kafka.brokers}",
 				"juplo.wordcount.counter.commit-interval=0",
 				"juplo.wordcount.counter.cacheMaxBytes=0",
 				"juplo.wordcount.counter.input-topic=" + TOPIC_IN,
 				"juplo.wordcount.counter.output-topic=" + TOPIC_OUT })
-@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT }, partitions = PARTITIONS)
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
 @Slf4j
 public class CounterApplicationIT
 {
-	public final static String TOPIC_IN = "in";
-	public final static String TOPIC_OUT = "out";
-	static final int PARTITIONS = 2;
+	public static final String TOPIC_IN = "in";
+	public static final String TOPIC_OUT = "out";
 
 	@Autowired
 	KafkaTemplate kafkaTemplate;
@@ -65,27 +67,30 @@ public class CounterApplicationIT
 	@Test
 	void testSendMessage()
 	{
-		TestData.writeInputData((key, value) -> kafkaTemplate.send(TOPIC_IN, key, value));
+		Stream
+				.of(TestData.INPUT_MESSAGES)
+				.forEach(word -> kafkaTemplate.send(TOPIC_IN, word.getUser(), word));
 
-		await("Expexted converted data")
+		await("Expected messages")
 				.atMost(Duration.ofSeconds(10))
-				.untilAsserted(() -> TestData.assertExpectedResult(consumer.getReceivedMessages()));
+				.untilAsserted(() -> TestData.assertExpectedMessages(consumer.getReceivedMessages()));
 	}
 
 
-	@RequiredArgsConstructor
 	static class Consumer
 	{
-		private final List<KeyValue<Word, WordCounter>> received = new LinkedList<>();
+		private final MultiValueMap<Word, WordCounter> received = new LinkedMultiValueMap<>();
 
 		@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
-		public synchronized void receive(ConsumerRecord<Word, WordCounter> record)
+		public synchronized void receive(
+				@Header(KafkaHeaders.RECEIVED_KEY) Word word,
+				@Payload WordCounter counter)
 		{
-			log.debug("Received message: {}", record);
-			received.add(KeyValue.pair(record.key(),record.value()));
+			log.debug("Received message: {} -> {}", word, counter);
+			received.add(word, counter);
 		}
 
-		synchronized List<KeyValue<Word, WordCounter>> getReceivedMessages()
+		synchronized MultiValueMap<Word, WordCounter> getReceivedMessages()
 		{
 			return received;
 		}
@@ -94,45 +99,6 @@ public class CounterApplicationIT
 	@TestConfiguration
 	static class Configuration
 	{
-		@Bean
-		ProducerFactory producerFactory(Properties streamProcessorProperties)
-		{
-			Map propertyMap = convertToMap(streamProcessorProperties);
-
-			propertyMap.put(
-					ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
-					JsonSerializer.class.getName());
-			propertyMap.put(
-					ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
-					JsonSerializer.class.getName());
-
-			return new DefaultKafkaProducerFactory<>(propertyMap);
-		}
-
-		@Bean
-		ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory(
-				Properties streamProcessorProperties)
-		{
-			Map propertyMap = convertToMap(streamProcessorProperties);
-
-			propertyMap.put(
-					ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
-					JsonDeserializer.class.getName());
-			propertyMap.put(
-					ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
-					JsonDeserializer.class.getName());
-
-			ConsumerFactory consumerFactory =
-					new DefaultKafkaConsumerFactory<>(propertyMap);
-
-			ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory =
-					new ConcurrentKafkaListenerContainerFactory<>();
-
-			kafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
-
-			return kafkaListenerContainerFactory;
-		}
-
 		@Bean
 		Consumer consumer()
 		{