package de.juplo.kafka.wordcount.counter;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.Stores;
import org.junit.jupiter.api.BeforeEach;
import java.util.LinkedList;
import java.util.List;
-import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.*;
-import static org.awaitility.Awaitility.*;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_IN;
+import static de.juplo.kafka.wordcount.counter.CounterApplicationIT.TOPIC_OUT;
+import static de.juplo.kafka.wordcount.counter.TestData.parseHeader;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME;
+import static org.springframework.kafka.support.mapping.AbstractJavaTypeMapper.KEY_DEFAULT_CLASSID_FIELD_NAME;
@SpringBootTest(
properties = {
- "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+ "spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
+ "spring.kafka.producer.properties.spring.json.add.type.headers=false",
+ "spring.kafka.consumer.auto-offset-reset=earliest",
+ "spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+ "spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer",
+ "spring.kafka.consumer.properties.spring.json.use.type.headers=false",
+ "spring.kafka.consumer.properties.spring.json.key.default.type=de.juplo.kafka.wordcount.counter.Word",
+ "spring.kafka.consumer.properties.spring.json.value.default.type=de.juplo.kafka.wordcount.counter.WordCounter",
+ "logging.level.root=WARN",
+ "logging.level.de.juplo=DEBUG",
"juplo.wordcount.counter.bootstrap-server=${spring.embedded.kafka.brokers}",
"juplo.wordcount.counter.commit-interval=0",
"juplo.wordcount.counter.cacheMaxBytes=0",
"juplo.wordcount.counter.input-topic=" + TOPIC_IN,
"juplo.wordcount.counter.output-topic=" + TOPIC_OUT })
-@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT }, partitions = PARTITIONS)
+@EmbeddedKafka(topics = { TOPIC_IN, TOPIC_OUT })
@Slf4j
public class CounterApplicationIT
{
- public final static String TOPIC_IN = "in";
- public final static String TOPIC_OUT = "out";
- static final int PARTITIONS = 2;
+ public static final String TOPIC_IN = "in";
+ public static final String TOPIC_OUT = "out";
@Autowired
- KafkaTemplate<String, String> kafkaTemplate;
- @Autowired
- ObjectMapper mapper;
+ KafkaTemplate<String, Word> kafkaTemplate;
@Autowired
Consumer consumer;
await("Expexted converted data")
.atMost(Duration.ofSeconds(10))
- .untilAsserted(() -> TestData.assertExpectedResult(consumer.received, mapper));
+ .untilAsserted(() -> TestData.assertExpectedResult(consumer.getReceivedMessages()));
}
- @RequiredArgsConstructor
static class Consumer
{
- private final List<Message> received = new LinkedList<>();
+ private final List<KeyValue<Word, WordCounter>> received = new LinkedList<>();
@KafkaListener(groupId = "TEST", topics = TOPIC_OUT)
- public void receive(ConsumerRecord<String, String> record)
+ public synchronized void receive(ConsumerRecord<Word, WordCounter> record)
+ {
+ log.debug(
+ "Received message: {} -> {}, key: {}, value: {}",
+ record.key(),
+ record.value(),
+ parseHeader(record.headers(), KEY_DEFAULT_CLASSID_FIELD_NAME),
+ parseHeader(record.headers(), DEFAULT_CLASSID_FIELD_NAME));
+		received.add(KeyValue.pair(record.key(), record.value()));
+ }
+
+ synchronized List<KeyValue<Word, WordCounter>> getReceivedMessages()
{
- log.debug("Received message: {}", record);
- received.add(Message.of(record.key(),record.value()));
+		return List.copyOf(received);
}
}