import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
+import org.springframework.context.annotation.Primary;
+import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
import java.util.function.Consumer;
-import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@SpringJUnitConfig(
initializers = ConfigDataApplicationContextInitializer.class,
classes = {
+ EndlessConsumer.class,
KafkaAutoConfiguration.class,
ApplicationTests.Configuration.class })
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Autowired
KafkaProducer<String, Bytes> kafkaProducer;
@Autowired
- org.apache.kafka.clients.consumer.Consumer<String, Long> kafkaConsumer;
+ org.apache.kafka.clients.consumer.Consumer<String, ClientMessage> kafkaConsumer;
@Autowired
KafkaConsumer<Bytes, Bytes> offsetConsumer;
@Autowired
ApplicationProperties applicationProperties;
@Autowired
KafkaProperties kafkaProperties;
@Autowired
- ExecutorService executor;
+ EndlessConsumer endlessConsumer;
+ @Autowired
+ RecordHandler recordHandler;
- Consumer<ConsumerRecord<String, Long>> testHandler;
- EndlessConsumer<String, Long> endlessConsumer;
Map<TopicPartition, Long> oldOffsets;
Map<TopicPartition, Long> newOffsets;
- Set<ConsumerRecord<String, Long>> receivedRecords;
+ Set<ConsumerRecord<String, ClientMessage>> receivedRecords;
/** Test methods */
@Order(1) // << The poison pill is not skipped. Hence, this test must run first
void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
{
- send100Messages(i -> new Bytes(valueSerializer.serialize(TOPIC, i)));
+ send100Messages((key, counter) -> serialize(key, counter));
await("100 records received")
.atMost(Duration.ofSeconds(30))
@Order(2)
void commitsOffsetOfErrorForReprocessingOnError()
{
- send100Messages(counter ->
+ send100Messages((key, counter) ->
counter == 77
? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
- : new Bytes(valueSerializer.serialize(TOPIC, counter)));
-
- await("Consumer failed")
- .atMost(Duration.ofSeconds(30))
- .until(() -> !endlessConsumer.running());
-
- checkSeenOffsetsForProgress();
- compareToCommitedOffsets(newOffsets);
+ : serialize(key, counter));
- endlessConsumer.start();
await("Consumer failed")
.atMost(Duration.ofSeconds(30))
- .until(() -> !endlessConsumer.running());
+ .untilAsserted(() -> checkSeenOffsetsForProgress());
- checkSeenOffsetsForProgress();
compareToCommitedOffsets(newOffsets);
assertThat(receivedRecords.size())
.describedAs("Received not all sent events")
.describedAs("Consumer should not be running")
.isThrownBy(() -> endlessConsumer.exitStatus());
assertThat(endlessConsumer.exitStatus())
.describedAs("Consumer should have exited abnormally")
.containsInstanceOf(RecordDeserializationException.class);
}
}
- void send100Messages(Function<Long, Bytes> messageGenerator)
+ void send100Messages(BiFunction<Integer, Long, Bytes> messageGenerator)
{
long i = 0;
{
for (int key = 0; key < 10; key++)
{
- Bytes value = messageGenerator.apply(++i);
+ Bytes value = messageGenerator.apply(key, ++i);
ProducerRecord<String, Bytes> record =
new ProducerRecord<>(
Integer.toString(key%2),
value);
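+ // "__TypeId__" is the default type-id header evaluated by Spring Kafka's JsonDeserializer;
+ // the token "message" is assumed to be mapped to ClientMessage in the consumer's type mappings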
+ record.headers().add("__TypeId__", "message".getBytes());
kafkaProducer.send(record, (metadata, e) ->
{
if (metadata != null)
}
}
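+ // Wraps key and counter in a ClientMessage and serializes it with the JSON value serializer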
+ Bytes serialize(Integer key, Long value)
+ {
+ ClientMessage message = new ClientMessage();
+ message.setClient(key.toString());
+ message.setMessage(value.toString());
+ return new Bytes(valueSerializer.serialize(TOPIC, message));
+ }
+
@BeforeEach
public void init()
{
- testHandler = record -> {} ;
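+ // Reset the per-test handler to a no-op before each test; individual tests can replace it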
+ recordHandler.testHandler = (record) -> {};
oldOffsets = new HashMap<>();
newOffsets = new HashMap<>();
newOffsets.put(tp, offset - 1);
});
- Consumer<ConsumerRecord<String, Long>> captureOffsetAndExecuteTestHandler =
+ recordHandler.captureOffsets =
record ->
{
+ receivedRecords.add(record);
newOffsets.put(
new TopicPartition(record.topic(), record.partition()),
record.offset());
- receivedRecords.add(record);
- testHandler.accept(record);
};
- endlessConsumer =
- new EndlessConsumer<>(
- executor,
- kafkaProperties.getConsumer().getClientId(),
- applicationProperties.getTopic(),
- kafkaConsumer,
- captureOffsetAndExecuteTestHandler);
-
endlessConsumer.start();
}
}
}
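+ // Record handler used during the tests: captures offsets first, then delegates to the per-test handler installed in init()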
+ public static class RecordHandler implements Consumer<ConsumerRecord<String, ClientMessage>>
+ {
+ Consumer<ConsumerRecord<String, ClientMessage>> captureOffsets;
+ Consumer<ConsumerRecord<String, ClientMessage>> testHandler;
+
+ @Override
+ public void accept(ConsumerRecord<String, ClientMessage> record)
+ {
+ captureOffsets
+ .andThen(testHandler)
+ .accept(record);
+ }
+ }
@TestConfiguration
@Import(ApplicationConfiguration.class)
public static class Configuration
{
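+ // @Primary lets this handler take precedence over any handler bean contributed by the imported ApplicationConfiguration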
+ @Primary
+ @Bean
+ public Consumer<ConsumerRecord<String, ClientMessage>> testHandler()
+ {
+ return new RecordHandler();
+ }
+
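+ // The tests now produce JSON-serialized ClientMessage values instead of Longs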
@Bean
- Serializer<Long> serializer()
+ Serializer<ClientMessage> serializer()
{
- return new LongSerializer();
+ return new JsonSerializer<>();
}
@Bean