`ExampleProducer` converted into a Spring Boot app (without Spring Kafka)
author    Kai Moritz <kai@juplo.de>
Mon, 28 Oct 2024 13:28:57 +0000 (14:28 +0100)
committer Kai Moritz <kai@juplo.de>
Tue, 29 Oct 2024 17:06:32 +0000 (18:06 +0100)
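
The producer no longer takes broker, topic, and client-id as command-line arguments.
It reads them as Spring-Boot configuration properties under the `producer` prefix
(see src/main/resources/application.yml and ApplicationProperties.java below).
As a minimal, illustrative sketch (the property values here are examples, not part
of this commit), the repackaged jar could be started with overridden properties:

  # assumed invocation; any other Spring-Boot property source works as well
  java -jar target/spring-producer-1.0-SNAPSHOT.jar \
    --producer.bootstrap-server=localhost:9092 \
    --producer.client-id=CLI \
    --producer.topic=test

In docker/docker-compose.yml the same properties are supplied as environment
variables, relying on Spring Boot's relaxed binding.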
13 files changed:
.dockerignore
.maven-dockerinclude
Dockerfile
README.sh
docker/docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/Application.java [new file with mode: 0644]
src/main/java/de/juplo/kafka/ApplicationConfiguration.java [new file with mode: 0644]
src/main/java/de/juplo/kafka/ApplicationProperties.java [new file with mode: 0644]
src/main/java/de/juplo/kafka/ExampleProducer.java
src/main/resources/application.yml [new file with mode: 0644]
src/main/resources/logback.xml
src/test/java/de/juplo/kafka/ApplicationTests.java [new file with mode: 0644]

diff --git a/.dockerignore b/.dockerignore
index 49f82a9..1ad9963 100644 (file)
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,3 +1,2 @@
 *
 !target/*.jar
-!target/libs/*.jar
diff --git a/.maven-dockerinclude b/.maven-dockerinclude
index a00c65f..fd6cecd 100644 (file)
--- a/.maven-dockerinclude
+++ b/.maven-dockerinclude
@@ -1,2 +1 @@
 target/*.jar
-target/libs/*.jar
diff --git a/Dockerfile b/Dockerfile
index 74e66ed..9e196ff 100644 (file)
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,5 @@
 FROM eclipse-temurin:21-jre
 VOLUME /tmp
 COPY target/*.jar /opt/app.jar
-COPY target/libs /opt/libs
 ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD [ "kafka:9092", "test", "DCKR" ]
+CMD []
diff --git a/README.sh b/README.sh
index 3d98ace..499780a 100755 (executable)
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/simple-producer:1.0-SNAPSHOT
+IMAGE=juplo/spring-producer:1.0-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
@@ -27,10 +27,16 @@ docker compose -f docker/docker-compose.yml up --remove-orphans setup || exit 1
 
 
 docker compose -f docker/docker-compose.yml up -d producer
-sleep 5
-
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -c 20 -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
+docker compose -f docker/docker-compose.yml up -d consumer-1 consumer-2
+sleep 15
 
 docker compose -f docker/docker-compose.yml stop producer
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -e -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
-docker compose -f docker/docker-compose.yml logs producer
+
+echo
+echo "Von consumer-1 empfangen:"
+docker compose -f docker/docker-compose.yml logs consumer-1 | grep '\ test\/.'
+echo
+echo "Von consumer-2 empfangen:"
+docker compose -f docker/docker-compose.yml logs consumer-2 | grep '\ test\/.'
+
+docker compose -f docker/docker-compose.yml stop consumer-1 consumer-2
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 80f0aec..69ac986 100644 (file)
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -190,8 +190,19 @@ services:
       - kafka-3
 
   producer:
-    image: juplo/simple-producer:1.0-SNAPSHOT
-    command: kafka:9092 test producer
+    image: juplo/spring-producer:1.0-SNAPSHOT
+    environment:
+      producer.bootstrap-server: kafka:9092
+      producer.client-id: producer
+      producer.topic: test
+
+  consumer-1:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
+    command: kafka:9092 test my-group consumer-1
+
+  consumer-2:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
+    command: kafka:9092 test my-group consumer-2
 
 volumes:
   zookeeper-data:
diff --git a/pom.xml b/pom.xml
index ad7f17a..02707b9 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -12,9 +12,9 @@
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>simple-producer</artifactId>
-  <name>Super Simple Producer</name>
-  <description>A Simple Producer, programmed with pure Java, that sends messages via Kafka</description>
+  <artifactId>spring-producer</artifactId>
+  <name>Spring Producer</name>
+  <description>A Simple Spring-Boot-Producer that takes messages via POST and confirms success</description>
   <version>1.0-SNAPSHOT</version>
 
   <properties>
   </properties>
 
   <dependencies>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-validation</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka-clients</artifactId>
       <artifactId>lombok</artifactId>
     </dependency>
     <dependency>
-      <groupId>ch.qos.logback</groupId>
-      <artifactId>logback-classic</artifactId>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.awaitility</groupId>
+      <artifactId>awaitility</artifactId>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 
   <build>
     <plugins>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
             <goals>
-              <goal>copy-dependencies</goal>
+              <goal>build-info</goal>
             </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/libs</outputDirectory>
-            </configuration>
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifest>
-              <addClasspath>true</addClasspath>
-              <classpathPrefix>libs/</classpathPrefix>
-              <mainClass>de.juplo.kafka.ExampleProducer</mainClass>
-            </manifest>
-          </archive>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>pl.project13.maven</groupId>
         <artifactId>git-commit-id-plugin</artifactId>
diff --git a/src/main/java/de/juplo/kafka/Application.java b/src/main/java/de/juplo/kafka/Application.java
new file mode 100644 (file)
index 0000000..0069257
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/Application.java
@@ -0,0 +1,14 @@
+package de.juplo.kafka;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+
+@SpringBootApplication
+public class Application
+{
+  public static void main(String[] args)
+  {
+    SpringApplication.run(Application.class, args);
+  }
+}
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
new file mode 100644 (file)
index 0000000..1c4262e
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -0,0 +1,46 @@
+package de.juplo.kafka;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Properties;
+
+
+@Configuration
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class ApplicationConfiguration
+{
+  @Bean
+  public ExampleProducer exampleProducer(
+      ApplicationProperties properties,
+      KafkaProducer<String, String> kafkaProducer)
+  {
+    return
+        new ExampleProducer(
+            properties.getClientId(),
+            properties.getTopic(),
+            kafkaProducer);
+  }
+
+  @Bean
+  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
+  {
+    Properties props = new Properties();
+    props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("client.id", properties.getClientId());
+    props.put("acks", properties.getAcks());
+    props.put("batch.size", properties.getBatchSize());
+    props.put("metadata.maxage.ms",   5000); //  5 Sekunden
+    props.put("delivery.timeout.ms", 20000); // 20 Sekunden
+    props.put("request.timeout.ms",  10000); // 10 Sekunden
+    props.put("linger.ms", properties.getLingerMs());
+    props.put("compression.type", properties.getCompressionType());
+    props.put("key.serializer", StringSerializer.class.getName());
+    props.put("value.serializer", StringSerializer.class.getName());
+
+    return new KafkaProducer<>(props);
+  }
+}
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
new file mode 100644 (file)
index 0000000..4bf66a8
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/ApplicationProperties.java
@@ -0,0 +1,35 @@
+package de.juplo.kafka;
+
+import jakarta.validation.constraints.NotEmpty;
+import jakarta.validation.constraints.NotNull;
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+
+
+@ConfigurationProperties(prefix = "producer")
+@Getter
+@Setter
+public class ApplicationProperties
+{
+  @NotNull
+  @NotEmpty
+  private String bootstrapServer;
+  @NotNull
+  @NotEmpty
+  private String clientId;
+  @NotNull
+  @NotEmpty
+  private String topic;
+  @NotNull
+  @NotEmpty
+  private String acks;
+  @NotNull
+  private Integer batchSize;
+  @NotNull
+  private Integer lingerMs;
+  @NotNull
+  @NotEmpty
+  private String compressionType;
+}
diff --git a/src/main/java/de/juplo/kafka/ExampleProducer.java b/src/main/java/de/juplo/kafka/ExampleProducer.java
index c12a75e..38bcb9f 100644 (file)
--- a/src/main/java/de/juplo/kafka/ExampleProducer.java
+++ b/src/main/java/de/juplo/kafka/ExampleProducer.java
@@ -2,41 +2,36 @@ package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-
-import java.util.Properties;
 
 
 @Slf4j
-public class ExampleProducer
+public class ExampleProducer implements Runnable
 {
   private final String id;
   private final String topic;
   private final Producer<String, String> producer;
+  private final Thread workerThread;
 
   private volatile boolean running = true;
-  private volatile boolean done = false;
   private long produced = 0;
 
+
   public ExampleProducer(
-    String broker,
+    String id,
     String topic,
-    String clientId)
+    Producer<String, String> producer)
   {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", broker);
-    props.put("client.id", clientId); // Nur zur Wiedererkennung
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-    props.put("metadata.maxage.ms", 5000);
-
-    this.id = clientId;
+    this.id = id;
     this.topic = topic;
-    producer = new KafkaProducer<>(props);
+    this.producer = producer;
+
+    workerThread = new Thread(this, "ExampleProducer Worker-Thread");
+    workerThread.start();
   }
 
+
+  @Override
   public void run()
   {
     long i = 0;
@@ -55,10 +50,7 @@ public class ExampleProducer
     }
     finally
     {
-      log.info("{}: Closing the KafkaProducer", id);
-      producer.close();
       log.info("{}: Produced {} messages in total, exiting!", id, produced);
-      done = true;
     }
   }
 
@@ -114,44 +106,10 @@ public class ExampleProducer
   }
 
 
-  public static void main(String[] args) throws Exception
+  public void shutdown() throws InterruptedException
   {
-    String broker = ":9092";
-    String topic = "test";
-    String clientId = "DEV";
-
-    switch (args.length)
-    {
-      case 3:
-        clientId = args[2];
-      case 2:
-        topic = args[1];
-      case 1:
-        broker = args[0];
-    }
-
-    ExampleProducer instance = new ExampleProducer(broker, topic, clientId);
-
-    Runtime.getRuntime().addShutdownHook(new Thread(() ->
-    {
-      instance.running = false;
-      while (!instance.done)
-      {
-        log.info("Waiting for main-thread...");
-        try
-        {
-          Thread.sleep(1000);
-        }
-        catch (InterruptedException e) {}
-      }
-      log.info("Shutdown completed.");
-    }));
-
-    log.info(
-        "Running ExampleProducer: broker={}, topic={}, client-id={}",
-        broker,
-        topic,
-        clientId);
-    instance.run();
+    log.info("{} joining the worker-thread...", id);
+    running = false;
+    workerThread.join();
   }
 }
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
new file mode 100644 (file)
index 0000000..d102dd0
--- /dev/null
+++ b/src/main/resources/application.yml
@@ -0,0 +1,12 @@
+producer:
+  bootstrap-server: :9092
+  client-id: DEV
+  topic: test
+  acks: -1
+  batch-size: 16384
+  linger-ms: 0
+  compression-type: gzip
+logging:
+  level:
+    root: INFO
+    de.juplo: TRACE
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
index 7a25e76..9c7af76 100644 (file)
--- a/src/main/resources/logback.xml
+++ b/src/main/resources/logback.xml
@@ -7,8 +7,6 @@
     </encoder>
   </appender>
 
-  <logger name="de.juplo" level="TRACE"/>
-
   <root level="INFO">
     <appender-ref ref="STDOUT" />
   </root>
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
new file mode 100644 (file)
index 0000000..71b9bf6
--- /dev/null
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -0,0 +1,82 @@
+package de.juplo.kafka;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+
+import java.time.Duration;
+import java.util.LinkedList;
+import java.util.List;
+
+import static de.juplo.kafka.ApplicationTests.PARTITIONS;
+import static de.juplo.kafka.ApplicationTests.TOPIC;
+import static org.awaitility.Awaitility.await;
+
+
+@SpringBootTest(
+    properties = {
+        "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+        "spring.kafka.consumer.auto-offset-reset=earliest",
+        "producer.bootstrap-server=${spring.embedded.kafka.brokers}",
+        "producer.topic=" + TOPIC})
+@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
+@Slf4j
+public class ApplicationTests
+{
+  static final String TOPIC = "FOO";
+  static final int PARTITIONS = 10;
+
+  @Autowired
+  Consumer consumer;
+
+
+  @BeforeEach
+  public void clear()
+  {
+    consumer.received.clear();
+  }
+
+
+  @Test
+  public void testApplicationStartup()
+  {
+  }
+
+  @Test
+  public void testSendMessage() throws Exception
+  {
+    await("Some messages were send")
+        .atMost(Duration.ofSeconds(5))
+        .until(() -> consumer.received.size() >= 1);
+  }
+
+
+  static class Consumer
+  {
+    final List<ConsumerRecord<String, String>> received = new LinkedList<>();
+
+    @KafkaListener(groupId = "TEST", topics = TOPIC)
+    public void receive(ConsumerRecord<String, String> record)
+    {
+      log.debug("Received message: {}", record);
+      received.add(record);
+    }
+  }
+
+  @TestConfiguration
+  static class Configuration
+  {
+    @Bean
+    Consumer consumer()
+    {
+      return new Consumer();
+    }
+  }
+}