X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Ftest%2Fjava%2Fde%2Fjuplo%2Fkafka%2Fwordcount%2Fcounter%2FCounterApplicationIT.java;h=78d103c282fbe7c4857220d6ca581cdc89264189;hb=e94a327bebf468e2bcb5b686346a18a1409ec254;hp=c6eb0a65ccdff5f023dc048c16747bfdd5bee79f;hpb=4072a93fd94a847c86ea422d74b17b0913b7de2a;p=demos%2Fkafka%2Fwordcount

diff --git a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index c6eb0a6..78d103c 100644
--- a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
+++ b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
@@ -2,7 +2,10 @@ package de.juplo.kafka.wordcount.counter;
 
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.streams.KeyValue;
 import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
 import org.apache.kafka.streams.state.Stores;
 import org.junit.jupiter.api.BeforeEach;
@@ -13,15 +16,24 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Primary;
 import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.*;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.kafka.test.context.EmbeddedKafka;
 
 import java.time.Duration;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 
 import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.TestData.convertToMap;
+import static de.juplo.kafka.wordcount.counter.TestData.parseHeader;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.*;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME;
 
 
 @SpringBootTest(
@@ -41,7 +53,7 @@ public class CounterApplicationIT
 	static final int PARTITIONS = 2;
 
 	@Autowired
-	KafkaTemplate kafkaTemplate;
+	KafkaTemplate kafkaTemplate;
 	@Autowired
 	Consumer consumer;
 
@@ -60,26 +72,75 @@ public class CounterApplicationIT
 
 		await("Expexted converted data")
 				.atMost(Duration.ofSeconds(10))
-				.untilAsserted(() -> TestData.assertExpectedResult(consumer.received));
+				.untilAsserted(() -> TestData.assertExpectedResult(consumer.getReceivedMessages()));
 	}
 
 
 	@RequiredArgsConstructor
 	static class Consumer
 	{
-		private final List received = new LinkedList<>();
+		private final List> received = new LinkedList<>();
 
 		@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
-		public void receive(ConsumerRecord record)
+		public synchronized void receive(ConsumerRecord record)
 		{
-			log.debug("Received message: {}", record);
-			received.add(Message.of(record.key(),record.value()));
+			log.debug(
+					"Received message: {} -> {}, key: {}, value: {}",
+					record.key(),
+					record.value(),
+					parseHeader(record.headers(), KEY_DEFAULT_CLASSID_FIELD_NAME),
+					parseHeader(record.headers(), DEFAULT_CLASSID_FIELD_NAME));
+			received.add(KeyValue.pair(record.key(),record.value()));
+		}
+
+		synchronized List> getReceivedMessages()
+		{
+			return received;
 		}
 	}
 
 	@TestConfiguration
 	static class Configuration
 	{
+		@Bean
+		ProducerFactory producerFactory(Properties streamProcessorProperties)
+		{
+			Map propertyMap = convertToMap(streamProcessorProperties);
+
+			propertyMap.put(
+					ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+					JsonSerializer.class.getName());
+			propertyMap.put(
+					ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+					JsonSerializer.class.getName());
+
+			return new DefaultKafkaProducerFactory<>(propertyMap);
+		}
+
+		@Bean
+		ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory(
+				Properties streamProcessorProperties)
+		{
+			Map propertyMap = convertToMap(streamProcessorProperties);
+
+			propertyMap.put(
+					ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+					JsonDeserializer.class.getName());
+			propertyMap.put(
+					ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+					JsonDeserializer.class.getName());
+
+			ConsumerFactory consumerFactory =
+					new DefaultKafkaConsumerFactory<>(propertyMap);
+
+			ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory =
+					new ConcurrentKafkaListenerContainerFactory<>();
+
+			kafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
+
+			return kafkaListenerContainerFactory;
+		}
+
 		@Bean
 		Consumer consumer()
 		{