import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.Stores;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.core.DefaultKafkaProducerFactory;
-import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.*;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.context.EmbeddedKafka;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import java.util.stream.Collectors;
import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
+import static de.juplo.kafka.wordcount.counter.TestData.convertToMap;
import static org.awaitility.Awaitility.*;
@RequiredArgsConstructor
+// Test helper that collects every record published to TOPIC_OUT so the
+// test can assert on the received (Word, WordCount) pairs.
static class Consumer
{
- private final List<Message> received = new LinkedList<>();
+ // Typed KeyValue pairs replace the former String-based Message DTO.
+ private final List<KeyValue<Word, WordCount>> received = new LinkedList<>();
+ // Records arrive already JSON-deserialized as Word/WordCount — see the
+ // JsonDeserializer setup in kafkaListenerContainerFactory.
@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
- public synchronized void receive(ConsumerRecord<String, String> record)
+ public synchronized void receive(ConsumerRecord<Word, WordCount> record)
{
log.debug("Received message: {}", record);
- received.add(Message.of(record.key(),record.value()));
+ received.add(KeyValue.pair(record.key(),record.value()));
}
+ // synchronized: the listener thread appends while the awaitility-driven
+ // test thread polls this list.
- synchronized List<Message> getReceivedMessages()
+ synchronized List<KeyValue<Word, WordCount>> getReceivedMessages()
{
return received;
}
+// Producer for feeding test input into the topology. Property conversion now
+// uses the shared TestData.convertToMap helper instead of an inline
+// Collectors.toMap copy of the same logic.
@Bean
ProducerFactory<?, ?> producerFactory(Properties streamProcessorProperties)
{
- Map<String, Object> propertyMap = streamProcessorProperties
- .entrySet()
- .stream()
- .collect(
- Collectors.toMap(
- entry -> (String)entry.getKey(),
- entry -> entry.getValue()
- ));
+ Map<String, Object> propertyMap = convertToMap(streamProcessorProperties);
propertyMap.put(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
JsonSerializer.class.getName());
propertyMap.put(
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
JsonSerializer.class.getName());
return new DefaultKafkaProducerFactory<>(propertyMap);
}
+ // Listener-container factory wired with JSON deserializers so the test
+ // Consumer receives typed Word/WordCount objects instead of raw strings.
+ // Properties are converted via the shared TestData.convertToMap helper,
+ // mirroring producerFactory above.
+ @Bean
+ ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
+ Properties streamProcessorProperties)
+ {
+ Map<String, Object> propertyMap = convertToMap(streamProcessorProperties);
+
+ // NOTE(review): JsonDeserializer typically also needs trusted packages /
+ // default-type configuration to map JSON onto Word and WordCount — verify
+ // this is supplied elsewhere (e.g. via consumer properties).
+ propertyMap.put(
+ ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+ JsonDeserializer.class.getName());
+ propertyMap.put(
+ ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+ JsonDeserializer.class.getName());
+
+ ConsumerFactory<? super Object, ? super Object> consumerFactory =
+ new DefaultKafkaConsumerFactory<>(propertyMap);
+
+ ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory =
+ new ConcurrentKafkaListenerContainerFactory<>();
+
+ kafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
+
+ return kafkaListenerContainerFactory;
+ }
+
@Bean
Consumer consumer()
{