From 1709f0e4f41be7e3b955d19769697a517633827d Mon Sep 17 00:00:00 2001
From: Kai Moritz <kai@juplo.de>
Date: Wed, 11 May 2022 19:23:40 +0200
Subject: [PATCH] =?utf8?q?Springify:=20Gemeinsame=20DLQ=20f=C3=BCr=20Poiso?=
 =?utf8?q?n=20Pills=20und=20Fachlogik-Fehler=20konfiguriert?=
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

---
 .../juplo/kafka/ApplicationConfiguration.java | 25 +++++++++++--
 .../java/de/juplo/kafka/ApplicationTests.java | 35 +++++++++++++++----
 2 files changed, 52 insertions(+), 8 deletions(-)

diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
index 6ab716e8..4923b09e 100644
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -2,16 +2,20 @@ package de.juplo.kafka;
 
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.kafka.core.KafkaOperations;
+import org.springframework.kafka.core.*;
 import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
 import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.kafka.support.serializer.DelegatingByTypeSerializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.util.backoff.FixedBackOff;
 
+import java.util.Map;
 import java.util.function.Consumer;
 
 
@@ -28,6 +32,23 @@ public class ApplicationConfiguration
     };
   }
 
+  @Bean
+  public ProducerFactory<String, Object> producerFactory(KafkaProperties properties) {
+    return new DefaultKafkaProducerFactory<>(
+        properties.getProducer().buildProperties(),
+        new StringSerializer(),
+        new DelegatingByTypeSerializer(Map.of(
+            byte[].class, new ByteArraySerializer(),
+            ClientMessage.class, new JsonSerializer<>())));
+  }
+
+  @Bean
+  public KafkaTemplate<String, Object> kafkaTemplate(
+      ProducerFactory<String, Object> producerFactory) {
+
+    return new KafkaTemplate<>(producerFactory);
+  }
+
   @Bean
   public DeadLetterPublishingRecoverer recoverer(
       ApplicationProperties properties,
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
index 3a10cd1b..43a4f612 100644
--- a/src/test/java/de/juplo/kafka/ApplicationTests.java
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -6,7 +6,6 @@ import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.errors.RecordDeserializationException;
 import org.apache.kafka.common.serialization.*;
 import org.apache.kafka.common.utils.Bytes;
 import org.junit.jupiter.api.*;
@@ -18,7 +17,6 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;
-import org.springframework.kafka.listener.MessageListenerContainer;
 import org.springframework.kafka.support.serializer.JsonSerializer;
 import org.springframework.kafka.test.context.EmbeddedKafka;
 import org.springframework.test.context.TestPropertySource;
@@ -45,7 +43,6 @@ import static org.awaitility.Awaitility.*;
 				EndlessConsumer.class,
 				KafkaAutoConfiguration.class,
 				ApplicationTests.Configuration.class })
-@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 @TestPropertySource(
 		properties = {
 				"spring.kafka.consumer.bootstrap-servers=${spring.embedded.kafka.brokers}",
@@ -86,7 +83,6 @@ class ApplicationTests
 	/** Tests methods */
 
 	@Test
-	@Order(1) // << The poistion pill is not skipped. Hence, this test must run first
 	void commitsCurrentOffsetsOnSuccess() throws ExecutionException, InterruptedException
 	{
 		send100Messages((key, counter) -> serialize(key, counter));
@@ -109,8 +105,7 @@ class ApplicationTests
 	}
 
 	@Test
-	@Order(2)
-	void commitsCurrentOffsetsOnError()
+	void commitsCurrentOffsetsOnDeserializationError()
 	{
 		send100Messages((key, counter) ->
 				counter == 77
@@ -141,6 +136,34 @@ class ApplicationTests
 				.isTrue();
 	}
 
+	@Test
+	void commitsCurrentOffsetsOnProgramLogicError()
+	{
+		recordHandler.testHandler = (record) ->
+		{
+			if (Integer.parseInt(record.value().message) % 10 == 0)
+				throw new RuntimeException("BOOM: " + record.value().message + "%10 == 0");
+		};
+
+		send100Messages((key, counter) -> serialize(key, counter));
+
+		await("100 records received")
+				.atMost(Duration.ofSeconds(30))
+				.until(() -> receivedRecords.size() == 100);
+
+		await("Offsets committed")
+				.atMost(Duration.ofSeconds(10))
+				.untilAsserted(() ->
+				{
+					checkSeenOffsetsForProgress();
+					compareToCommitedOffsets(newOffsets);
+				});
+
+		assertThat(endlessConsumer.isRunning())
+				.describedAs("Consumer should still be running")
+				.isTrue();
+	}
+
 
 	/** Helper methods for the verification of expectations */
 
-- 
2.20.1