projects
/
demos
/
kafka
/
wordcount
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
counter: 1.2.14 - Set up type-mappings for JSON-Deserialization
[demos/kafka/wordcount]
/
src
/
test
/
java
/
de
/
juplo
/
kafka
/
wordcount
/
counter
/
CounterApplicationIT.java
diff --git a/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java b/src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
index fea89ab..78d103c 100644 (file)
--- a/
src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
+++ b/
src/test/java/de/juplo/kafka/wordcount/counter/CounterApplicationIT.java
@@ -27,10 +27,13 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import java.util.stream.Collectors;
import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.TestData.convertToMap;
+import static de.juplo.kafka.wordcount.counter.TestData.parseHeader;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.*;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME;
@SpringBootTest(
@SpringBootTest(
@@ -76,16 +79,21 @@ public class CounterApplicationIT
@RequiredArgsConstructor
static class Consumer
{
@RequiredArgsConstructor
static class Consumer
{
- private final List<KeyValue<Word, WordCount>> received = new LinkedList<>();
+ private final List<KeyValue<Word, WordCounter>> received = new LinkedList<>();
@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
- public synchronized void receive(ConsumerRecord<Word, WordCount> record)
+ public synchronized void receive(ConsumerRecord<Word, WordCounter> record)
{
{
- log.debug("Received message: {}", record);
+ log.debug(
+ "Received message: {} -> {}, key: {}, value: {}",
+ record.key(),
+ record.value(),
+ parseHeader(record.headers(), KEY_DEFAULT_CLASSID_FIELD_NAME),
+ parseHeader(record.headers(), DEFAULT_CLASSID_FIELD_NAME));
received.add(KeyValue.pair(record.key(),record.value()));
}
received.add(KeyValue.pair(record.key(),record.value()));
}
- synchronized List<KeyValue<Word, WordCount>> getReceivedMessages()
+ synchronized List<KeyValue<Word, WordCounter>> getReceivedMessages()
{
return received;
}
{
return received;
}
@@ -97,14 +105,7 @@ public class CounterApplicationIT
@Bean
ProducerFactory<?, ?> producerFactory(Properties streamProcessorProperties)
{
@Bean
ProducerFactory<?, ?> producerFactory(Properties streamProcessorProperties)
{
- Map<String, Object> propertyMap = streamProcessorProperties
- .entrySet()
- .stream()
- .collect(
- Collectors.toMap(
- entry -> (String)entry.getKey(),
- entry -> entry.getValue()
- ));
+ Map<String, Object> propertyMap = convertToMap(streamProcessorProperties);
propertyMap.put(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
propertyMap.put(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
@@ -120,14 +121,7 @@ public class CounterApplicationIT
ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
Properties streamProcessorProperties)
{
ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
Properties streamProcessorProperties)
{
- Map<String, Object> propertyMap = streamProcessorProperties
- .entrySet()
- .stream()
- .collect(
- Collectors.toMap(
- entry -> (String)entry.getKey(),
- entry -> entry.getValue()
- ));
+ Map<String, Object> propertyMap = convertToMap(streamProcessorProperties);
propertyMap.put(
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
propertyMap.put(
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,