X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Ftest%2Fjava%2Fde%2Fjuplo%2Fkafka%2FApplicationTests.java;h=caa25c54313ae2fd4b01e4820bcf135c62d57c5d;hb=83a4bf324f5a7ec6010a7921118ec7d6e8f997cf;hp=e35b223666e6b1519292f513f5cd9280b1f5278c;hpb=2939f4b7bae23df34968ce3d87be1b83cf0fba90;p=demos%2Fkafka%2Ftraining

diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
index e35b223..caa25c5 100644
--- a/src/test/java/de/juplo/kafka/ApplicationTests.java
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -6,13 +6,13 @@ import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.serialization.BytesDeserializer;
-import org.apache.kafka.common.serialization.BytesSerializer;
-import org.apache.kafka.common.serialization.LongSerializer;
-import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.common.errors.RecordDeserializationException;
+import org.apache.kafka.common.serialization.*;
 import org.apache.kafka.common.utils.Bytes;
 import org.junit.jupiter.api.*;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.test.autoconfigure.data.mongo.AutoConfigureDataMongo;
 import org.springframework.boot.test.context.ConfigDataApplicationContextInitializer;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
@@ -33,7 +33,7 @@ import java.util.stream.IntStream;
 
 import static de.juplo.kafka.ApplicationTests.PARTITIONS;
 import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.*;
 import static org.awaitility.Awaitility.*;
 
 
@@ -42,8 +42,11 @@ import static org.awaitility.Awaitility.*;
 @TestPropertySource(
     properties = {
         "consumer.bootstrap-server=${spring.embedded.kafka.brokers}",
-        "consumer.topic=" + TOPIC })
+        "consumer.topic=" + TOPIC,
+        "spring.mongodb.embedded.version=4.4.13" })
 @EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
+@EnableAutoConfiguration
+@AutoConfigureDataMongo
 @Slf4j
 class ApplicationTests
 {
@@ -52,8 +55,9 @@ class ApplicationTests
 
 
   StringSerializer stringSerializer = new StringSerializer();
-  LongSerializer longSerializer = new LongSerializer();
 
+  @Autowired
+  Serializer<Long> valueSerializer;
   @Autowired
   KafkaProducer<String, Bytes> kafkaProducer;
   @Autowired
@@ -64,11 +68,14 @@ class ApplicationTests
   ApplicationProperties properties;
   @Autowired
   ExecutorService executor;
+  @Autowired
+  PartitionStatisticsRepository repository;
 
   Consumer<ConsumerRecord<String, Long>> testHandler;
   EndlessConsumer<String, Long> endlessConsumer;
   Map<TopicPartition, Long> oldOffsets;
   Map<TopicPartition, Long> newOffsets;
+  Set<ConsumerRecord<String, Long>> receivedRecords;
 
 
   /** Tests methods */
@@ -77,14 +84,11 @@ class ApplicationTests
   @Order(1) // << The poison pill is not skipped. Hence, this test must run first
   void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
   {
-    send100Messages(i -> new Bytes(longSerializer.serialize(TOPIC, i)));
-
-    Set<ConsumerRecord<String, Long>> received = new HashSet<>();
-    testHandler = record -> received.add(record);
+    send100Messages(i -> new Bytes(valueSerializer.serialize(TOPIC, i)));
 
     await("100 records received")
         .atMost(Duration.ofSeconds(30))
-        .until(() -> received.size() >= 100);
+        .until(() -> receivedRecords.size() >= 100);
 
     await("Offsets committed")
         .atMost(Duration.ofSeconds(10))
@@ -93,23 +97,45 @@ class ApplicationTests
           checkSeenOffsetsForProgress();
           compareToCommitedOffsets(newOffsets);
         });
+
+    assertThatExceptionOfType(IllegalStateException.class)
+        .isThrownBy(() -> endlessConsumer.exitStatus())
+        .describedAs("Consumer should still be running");
   }
 
   @Test
   @Order(2)
-  void commitsNoOffsetsOnError()
+  void commitsOffsetOfErrorForReprocessingOnError()
   {
     send100Messages(counter ->
         counter == 77
             ? new Bytes(stringSerializer.serialize(TOPIC, "BOOM!"))
-            : new Bytes(longSerializer.serialize(TOPIC, counter)));
+            : new Bytes(valueSerializer.serialize(TOPIC, counter)));
 
     await("Consumer failed")
         .atMost(Duration.ofSeconds(30))
         .until(() -> !endlessConsumer.running());
 
     checkSeenOffsetsForProgress();
-    compareToCommitedOffsets(oldOffsets);
+    compareToCommitedOffsets(newOffsets);
+
+    endlessConsumer.start();
+    await("Consumer failed")
+        .atMost(Duration.ofSeconds(30))
+        .until(() -> !endlessConsumer.running());
+
+    checkSeenOffsetsForProgress();
+    compareToCommitedOffsets(newOffsets);
+    assertThat(receivedRecords.size())
+        .describedAs("Received not all sent events")
+        .isLessThan(100);
+
+    assertThatNoException()
+        .describedAs("Consumer should not be running")
+        .isThrownBy(() -> endlessConsumer.exitStatus());
+    assertThat(endlessConsumer.exitStatus())
+        .describedAs("Consumer should have exited abnormally")
+        .containsInstanceOf(RecordDeserializationException.class);
   }
 
 
@@ -121,7 +147,9 @@ class ApplicationTests
     {
       Long expected = offsetsToCheck.get(tp) + 1;
       log.debug("Checking, if the offset for {} is {}", tp, expected);
-      assertThat(offset).isEqualTo(expected);
+      assertThat(offset)
+          .describedAs("Committed offset corresponds to the offset of the consumer")
+          .isEqualTo(expected);
     });
   }
 
@@ -139,7 +167,9 @@ class ApplicationTests
         withProgress.add(tp);
       }
     });
-    assertThat(withProgress).isNotEmpty().describedAs("Found no partitions with any offset-progress");
+    assertThat(withProgress)
+        .describedAs("Some offsets must have changed, compared to the old offset-positions")
+        .isNotEmpty();
   }
 
 
@@ -211,6 +241,7 @@ class ApplicationTests
 
     oldOffsets = new HashMap<>();
     newOffsets = new HashMap<>();
+    receivedRecords = new HashSet<>();
 
     doForCurrentOffsets((tp, offset) ->
     {
@@ -224,12 +255,14 @@ class ApplicationTests
       newOffsets.put(
           new TopicPartition(record.topic(), record.partition()),
          record.offset());
+      receivedRecords.add(record);
      testHandler.accept(record);
     };
 
     endlessConsumer =
         new EndlessConsumer<>(
             executor,
+            repository,
             properties.getClientId(),
             properties.getTopic(),
             kafkaConsumer,
@@ -256,6 +289,12 @@ class ApplicationTests
   @Import(ApplicationConfiguration.class)
   public static class Configuration
   {
+    @Bean
+    Serializer<Long> serializer()
+    {
+      return new LongSerializer();
+    }
+
     @Bean
     KafkaProducer<String, Bytes> kafkaProducer(ApplicationProperties properties)
     {
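
The new assertions call endlessConsumer.exitStatus(), which is not shown in this diff. Judging from the AssertJ calls (isThrownBy while the consumer is still running, containsInstanceOf on the result after it has stopped), the accessor apparently throws an IllegalStateException while the consumer runs and otherwise returns an Optional holding the exception that terminated the consume loop. A minimal, hypothetical sketch of such state handling — the class name, fields, and methods below are assumptions for illustration, not the project's real EndlessConsumer — could look like this:

import java.util.Optional;

// Simplified sketch of the consumer state that the new test assertions rely on.
class ConsumerStateSketch
{
  private boolean running = false;
  private Exception exception = null;

  // (Re-)starts the consume loop and clears any previous exit status.
  synchronized void start()
  {
    running = true;
    exception = null;
  }

  // Called by the consume loop when an exception (e.g. a
  // RecordDeserializationException) terminates it.
  synchronized void stop(Exception e)
  {
    running = false;
    exception = e;
  }

  synchronized boolean running()
  {
    return running;
  }

  // No exit status exists while the consumer is still running: calling the
  // accessor then throws, which is what commitsCurrentOffsetsOnSuccess() checks.
  synchronized Optional<Exception> exitStatus()
  {
    if (running)
      throw new IllegalStateException("No exit-status available: consumer is still running");

    return Optional.ofNullable(exception);
  }
}

Note that containsInstanceOf is an AssertJ assertion on Optional values, which is why this sketch has exitStatus() return an Optional<Exception> rather than the exception itself.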