package de.juplo.kafka.wordcount.counter;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
-import lombok.Value;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.Stores;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.*;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.util.LinkedMultiValueMap;
-import org.springframework.util.MultiValueMap;
import java.time.Duration;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.stream.Collectors;
import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.*;
static final int PARTITIONS = 2;
@Autowired
- KafkaTemplate<String, String> kafkaTemplate;
- @Autowired
- ObjectMapper mapper;
+ KafkaTemplate<String, Word> kafkaTemplate;
@Autowired
Consumer consumer;
@Test
- void testSendMessage() throws Exception
+ void testSendMessage()
{
- kafkaTemplate.send(TOPIC_IN, "peter", "Hallo");
- kafkaTemplate.send(TOPIC_IN, "klaus", "Müsch");
- kafkaTemplate.send(TOPIC_IN, "peter", "Welt");
- kafkaTemplate.send(TOPIC_IN, "klaus", "Müsch");
- kafkaTemplate.send(TOPIC_IN, "klaus", "s");
- kafkaTemplate.send(TOPIC_IN, "peter", "Boäh");
- kafkaTemplate.send(TOPIC_IN, "peter", "Welt");
- kafkaTemplate.send(TOPIC_IN, "peter", "Boäh");
- kafkaTemplate.send(TOPIC_IN, "klaus", "s");
- kafkaTemplate.send(TOPIC_IN, "peter", "Boäh");
- kafkaTemplate.send(TOPIC_IN, "klaus", "s");
-
- Message peter1 = Message.of(
- "{\"username\":\"peter\",\"word\":\"Hallo\"}",
- "1");
- Message peter2 = Message.of(
- "{\"username\":\"peter\",\"word\":\"Welt\"}",
- "1");
- Message peter3 = Message.of(
- "{\"username\":\"peter\",\"word\":\"Boäh\"}",
- "1");
- Message peter4 = Message.of(
- "{\"username\":\"peter\",\"word\":\"Welt\"}",
- "2");
- Message peter5 = Message.of(
- "{\"username\":\"peter\",\"word\":\"Boäh\"}",
- "2");
- Message peter6 = Message.of(
- "{\"username\":\"peter\",\"word\":\"Boäh\"}",
- "3");
-
- Message klaus1 = Message.of(
- "{\"username\":\"klaus\",\"word\":\"Müsch\"}",
- "1");
- Message klaus2 = Message.of(
- "{\"username\":\"klaus\",\"word\":\"Müsch\"}",
- "2");
- Message klaus3 = Message.of(
- "{\"username\":\"klaus\",\"word\":\"s\"}",
- "1");
- Message klaus4 = Message.of(
- "{\"username\":\"klaus\",\"word\":\"s\"}",
- "2");
- Message klaus5 = Message.of(
- "{\"username\":\"klaus\",\"word\":\"s\"}",
- "3");
+ TestData.writeInputData((key, value) -> kafkaTemplate.send(TOPIC_IN, key, value));
    await("Expected converted data")
.atMost(Duration.ofSeconds(10))
- .untilAsserted(() ->
- {
- assertThat(consumer.received).hasSize(2);
- assertThat(consumer.received.get("klaus")).containsExactly(klaus1, klaus2, klaus3, klaus4, klaus5);
- assertThat(consumer.received.get("peter")).containsExactly(peter1, peter2, peter3, peter4, peter5, peter6);
- });
+ .untilAsserted(() -> TestData.assertExpectedResult(consumer.getReceivedMessages()));
}
@RequiredArgsConstructor
static class Consumer
{
- private final MultiValueMap<String, Message> received = new LinkedMultiValueMap<>();
- private final ObjectMapper mapper;
+ private final List<Message> received = new LinkedList<>();
@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
- public void receive(ConsumerRecord<String, String> record) throws JsonProcessingException
+ public synchronized void receive(ConsumerRecord<Word, WordCount> record)
{
log.debug("Received message: {}", record);
- Key key = mapper.readValue(record.key(), Key.class);
- received.add(key.getUsername(), Message.of(record.key(),record.value()));
+ received.add(Message.of(record.key(),record.value()));
}
- }
- @Value(staticConstructor = "of")
- static class Message
- {
- String key;
- String value;
+ synchronized List<Message> getReceivedMessages()
+ {
+ return received;
+ }
}
@TestConfiguration
static class Configuration
{
@Bean
- Consumer consumer(ObjectMapper mapper)
+ ProducerFactory<?, ?> producerFactory(Properties streamProcessorProperties)
+ {
+ Map<String, Object> propertyMap = streamProcessorProperties
+ .entrySet()
+ .stream()
+ .collect(
+ Collectors.toMap(
+ entry -> (String)entry.getKey(),
+ entry -> entry.getValue()
+ ));
+
+ propertyMap.put(
+ ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+ JsonSerializer.class.getName());
+ propertyMap.put(
+ ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+ JsonSerializer.class.getName());
+
+ return new DefaultKafkaProducerFactory<>(propertyMap);
+ }
+
+ @Bean
+ ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
+ Properties streamProcessorProperties)
+ {
+ Map<String, Object> propertyMap = streamProcessorProperties
+ .entrySet()
+ .stream()
+ .collect(
+ Collectors.toMap(
+ entry -> (String)entry.getKey(),
+ entry -> entry.getValue()
+ ));
+
+ propertyMap.put(
+ ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+ JsonDeserializer.class.getName());
+ propertyMap.put(
+ ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+ JsonDeserializer.class.getName());
+
+ ConsumerFactory<? super Object, ? super Object> consumerFactory =
+ new DefaultKafkaConsumerFactory<>(propertyMap);
+
+ ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory =
+ new ConcurrentKafkaListenerContainerFactory<>();
+
+ kafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
+
+ return kafkaListenerContainerFactory;
+ }
+
+ @Bean
+ Consumer consumer()
{
- return new Consumer(mapper);
+ return new Consumer();
}
@Primary