From: Kai Moritz
Date: Wed, 11 May 2022 17:23:40 +0000 (+0200)
Subject: Springify: Configured a shared DLQ for poison pills and business-logic errors
X-Git-Url: http://juplo.de/gitweb/?a=commitdiff_plain;h=1709f0e4f41be7e3b955d19769697a517633827d;p=demos%2Fkafka%2Ftraining

Springify: Configured a shared DLQ for poison pills and business-logic errors
---

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 6ab716e..4923b09 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -2,16 +2,20 @@ package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.core.*;
 import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
 import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.util.backoff.FixedBackOff;
 
+import java.util.Map;
 import java.util.function.Consumer;
 
 
@@ -28,6 +32,23 @@ public class ApplicationConfiguration
     };
   }
 
+  @Bean
+  public ProducerFactory producerFactory(KafkaProperties properties) {
+    return new DefaultKafkaProducerFactory<>(
+        properties.getProducer().buildProperties(),
+        new StringSerializer(),
+        new DelegatingByTypeSerializer(Map.of(
+            byte[].class, new ByteArraySerializer(),
+            ClientMessage.class, new JsonSerializer<>())));
+  }
+
+  @Bean
+  public KafkaTemplate kafkaTemplate(
+      ProducerFactory producerFactory) {
+
+    return new KafkaTemplate<>(producerFactory);
+  }
+
   @Bean
   public DeadLetterPublishingRecoverer recoverer(
       ApplicationProperties properties,
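The hunk above ends inside the signature of the existing recoverer bean, so the DeadLetterPublishingRecoverer, DefaultErrorHandler and FixedBackOff imports are visible, but the rest of the error-handling wiring is not. The following sketch is not taken from the commit; it only illustrates one common way to complete such a wiring in Spring Kafka so that poison pills and business-logic failures end up on the same DLQ topic. The class name and the accessor ApplicationProperties.getDlqTopic() are made up for the illustration.

// Sketch only, NOT the remainder of the hunk above. Shown as a separate class just to
// keep the example self-contained; in the real project these beans belong to
// ApplicationConfiguration. ApplicationProperties.getDlqTopic() is a hypothetical accessor.
package de.juplo.kafka;

import org.apache.kafka.common.TopicPartition;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaOperations;
import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
import org.springframework.kafka.listener.DefaultErrorHandler;
import org.springframework.util.backoff.FixedBackOff;

@Configuration
public class DlqErrorHandlingSketch
{
  @Bean
  public DeadLetterPublishingRecoverer recoverer(
      ApplicationProperties properties,
      KafkaOperations<?, ?> template)
  {
    // Publish every failed record to the one shared DLQ topic, keeping its partition.
    // The KafkaTemplate defined above is injected here as KafkaOperations, so the
    // republished value is serialized by the DelegatingByTypeSerializer: raw byte[]
    // for poison pills, JSON for ClientMessage values that failed in the business logic.
    return new DeadLetterPublishingRecoverer(
        template,
        (record, exception) ->
            new TopicPartition(properties.getDlqTopic(), record.partition()));
  }

  @Bean
  public DefaultErrorHandler errorHandler(DeadLetterPublishingRecoverer recoverer)
  {
    // FixedBackOff(0, 0): no delay and no retries; a failing record is handed to the
    // recoverer immediately and the consumer continues with the next record.
    return new DefaultErrorHandler(recoverer, new FixedBackOff(0L, 0L));
  }
}

With Spring Boot 2.6 or newer, a DefaultErrorHandler bean like this is picked up as the CommonErrorHandler of the auto-configured ConcurrentKafkaListenerContainerFactory, so no further glue code is needed for it to take effect.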
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
index 3a10cd1..43a4f61 100644
--- a/src/test/java/de/juplo/kafka/ApplicationTests.java
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -6,7 +6,6 @@ import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.errors.RecordDeserializationException;
 import org.apache.kafka.common.serialization.*;
 import org.apache.kafka.common.utils.Bytes;
 import org.junit.jupiter.api.*;
@@ -18,7 +17,6 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;
-import org.springframework.kafka.listener.MessageListenerContainer;
 import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.kafka.test.context.EmbeddedKafka;
 import org.springframework.test.context.TestPropertySource;
@@ -45,7 +43,6 @@ import static org.awaitility.Awaitility.*;
 		EndlessConsumer.class,
 		KafkaAutoConfiguration.class,
 		ApplicationTests.Configuration.class })
-@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 @TestPropertySource(
 		properties = {
 				"spring.kafka.consumer.bootstrap-servers=${spring.embedded.kafka.brokers}",
@@ -86,7 +83,6 @@ class ApplicationTests
 	/** Tests methods */
 
 	@Test
-	@Order(1) // << The poistion pill is not skipped. Hence, this test must run first
 	void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
 	{
 		send100Messages((key, counter) -> serialize(key, counter));
@@ -109,8 +105,7 @@ class ApplicationTests
 	}
 
 	@Test
-	@Order(2)
-	void commitsCurrentOffsetsOnError()
+	void commitsCurrentOffsetsOnDeserializationError()
 	{
 		send100Messages((key, counter) ->
 				counter == 77
@@ -141,6 +136,34 @@ class ApplicationTests
 				.isTrue();
 	}
 
+	@Test
+	void commitsOffsetOnProgramLogicErrorFoo()
+	{
+		recordHandler.testHandler = (record) ->
+		{
+			if (Integer.parseInt(record.value().message)%10 ==0)
+				throw new RuntimeException("BOOM: " + record.value().message + "%10 == 0");
+		};
+
+		send100Messages((key, counter) -> serialize(key, counter));
+
+		await("80 records received")
+				.atMost(Duration.ofSeconds(30))
+				.until(() -> receivedRecords.size() == 100);
+
+		await("Offsets committed")
+				.atMost(Duration.ofSeconds(10))
+				.untilAsserted(() ->
+				{
+					checkSeenOffsetsForProgress();
+					compareToCommitedOffsets(newOffsets);
+				});
+
+		assertThat(endlessConsumer.isRunning())
+				.describedAs("Consumer should still be running")
+				.isTrue();
+	}
+
 	/** Helper methods for the verification of expectations */
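Taken together, the tests now cover both failure modes that have to share the DLQ: commitsCurrentOffsetsOnDeserializationError produces a value that cannot be deserialized and therefore reaches the DLQ producer as a raw byte[], while commitsOffsetOnProgramLogicErrorFoo lets every tenth record blow up in the business logic, so the already deserialized ClientMessage is republished; this is exactly what the two entries of the DelegatingByTypeSerializer above are for. The snippet below is an illustration only, not part of the patch; it merely demonstrates how that serializer routes the two value types. The topic name "foo.DLQ" is made up, and the class is assumed to sit in the de.juplo.kafka package so that ClientMessage is visible.

// Illustration only, not part of the commit.
package de.juplo.kafka;

import java.util.Map;

import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
import org.springframework.kafka.support.serializer.JsonSerializer;

public class DlqSerializationIllustration
{
  public static void main(String[] args)
  {
    // Same mapping as in the ProducerFactory above: raw bytes pass through unchanged,
    // ClientMessage instances are rendered as JSON.
    DelegatingByTypeSerializer serializer = new DelegatingByTypeSerializer(Map.of(
        byte[].class, new ByteArraySerializer(),
        ClientMessage.class, new JsonSerializer<>()));

    // A poison pill is recovered as byte[] and written to the DLQ exactly as it was read
    // from the source topic.
    byte[] poisonPill = serializer.serialize("foo.DLQ", new byte[] { 0x23, 0x42 });
    System.out.println(poisonPill.length);   // prints 2: the original two bytes, unchanged

    // A ClientMessage that failed in the business logic would instead be routed to the
    // JsonSerializer, e.g. serializer.serialize("foo.DLQ", clientMessage).
  }
}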