A producer as simple as possible, based on the KafkaTemplate
author Kai Moritz <kai@juplo.de>
Fri, 11 Nov 2022 16:24:07 +0000 (17:24 +0100)
committer Kai Moritz <kai@juplo.de>
Sat, 12 Nov 2022 10:27:17 +0000 (11:27 +0100)
Dockerfile
README.sh
docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/Application.java
src/main/resources/application.yml [new file with mode: 0644]
src/main/resources/logback.xml [deleted file]
src/test/java/de/juplo/kafka/ApplicationTests.java [new file with mode: 0644]

index 4213525..9f3eac8 100644 (file)
@@ -1,6 +1,5 @@
 FROM openjdk:11-jre
 VOLUME /tmp
 COPY target/*.jar /opt/app.jar
-COPY target/libs /opt/libs
 ENTRYPOINT [ "java", "-jar", "/opt/app.jar", "kafka:9092", "test" ]
 CMD [ "DCKR" ]
index 30d4be2..270dfa8 100755 (executable)
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/simple-producer:1.0-SNAPSHOT
+IMAGE=juplo/supersimple-producer:1.0-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
@@ -24,11 +24,6 @@ else
 fi
 
 docker-compose up setup
-docker-compose up -d producer
-sleep 5
+docker-compose up producer
 
-docker-compose exec cli kafkacat -b kafka:9092 -t test -q -c 20 -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
-
-docker-compose stop producer
 docker-compose exec cli kafkacat -b kafka:9092 -t test -q -e -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
-docker-compose logs producer
index 19a1f19..4ddf59a 100644 (file)
@@ -102,5 +102,8 @@ services:
     command: sleep infinity
 
   producer:
-    image: juplo/simple-producer:1.0-SNAPSHOT
-    command: producer
+    image: juplo/supersimple-producer:1.0-SNAPSHOT
+    environment:
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: producer
+    command: --num=3
diff --git a/pom.xml b/pom.xml
index 179103a..058aa14 100644 (file)
--- a/pom.xml
+++ b/pom.xml
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>simple-producer</artifactId>
-  <name>Super Simple Producer</name>
+  <artifactId>supersimple-producer</artifactId>
   <version>1.0-SNAPSHOT</version>
+  <name>Super Simple Producer</name>
+  <description>Most minimal Kafka Producer ever!</description>
+
+  <properties>
+    <java.version>11</java.version>
+  </properties>
 
   <dependencies>
     <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka-clients</artifactId>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka</artifactId>
     </dependency>
     <dependency>
       <groupId>org.projectlombok</groupId>
       <artifactId>lombok</artifactId>
     </dependency>
     <dependency>
-      <groupId>ch.qos.logback</groupId>
-      <artifactId>logback-classic</artifactId>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka-test</artifactId>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 
   <build>
     <plugins>
       <plugin>
-        <groupId>pl.project13.maven</groupId>
-        <artifactId>git-commit-id-plugin</artifactId>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
             <goals>
-              <goal>copy-dependencies</goal>
+              <goal>build-info</goal>
             </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/libs</outputDirectory>
-            </configuration>
           </execution>
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifest>
-              <addClasspath>true</addClasspath>
-              <classpathPrefix>libs/</classpathPrefix>
-              <mainClass>de.juplo.kafka.Application</mainClass>
-            </manifest>
-          </archive>
-        </configuration>
+        <groupId>pl.project13.maven</groupId>
+        <artifactId>git-commit-id-plugin</artifactId>
       </plugin>
       <plugin>
         <groupId>io.fabric8</groupId>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <artifactId>maven-failsafe-plugin</artifactId>
+      </plugin>
     </plugins>
   </build>
 
-
 </project>
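
Note on the new build setup: the build-info goal of the spring-boot-maven-plugin writes a META-INF/build-info.properties into the repackaged jar, and Spring Boot exposes that file as a BuildProperties bean. A minimal sketch, not part of this commit, of how such a bean could be consumed (class and bean names are made up for illustration):

import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.info.BuildProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class BuildInfoLogging
{
  // Logs the coordinates recorded by the build-info goal once the app has started
  @Bean
  public ApplicationRunner buildInfoLogger(BuildProperties build)
  {
    return args ->
        System.out.println(
            build.getGroup() + ":" + build.getArtifact() + ":" + build.getVersion()
            + " built at " + build.getTime());
  }
}
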
index 9fcaa6e..dda9b11 100644 (file)
 package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.util.concurrent.ListenableFuture;
 
-import java.util.Properties;
+import java.util.List;
 
 
 @Slf4j
-public class SimpleProducer
+@SpringBootApplication
+public class Application implements ApplicationRunner
 {
-  private final String id;
-  private final String topic;
-  private final KafkaProducer<String, String> producer;
+  public final static String ARG_NUM = "num";
 
-  private long produced = 0;
-  private volatile boolean running = true;
-  private volatile boolean done = false;
+  @Autowired
+  KafkaTemplate<String, String> kafkaTemplate;
 
-  public SimpleProducer(String broker, String topic, String clientId)
-  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", broker);
-    props.put("client.id", clientId); // Nur zur Wiedererkennung
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    producer = new KafkaProducer<>(props);
 
-    this.topic = topic;
-    this.id = clientId;
+  void send(String key, String value)
+  {
+    ListenableFuture<SendResult<String, String>> listenableFuture =
+        kafkaTemplate.sendDefault(key, value);
+
+    listenableFuture.addCallback(
+        result -> log.debug(
+            "Sent {}={} to partition={}, offset={}",
+            result.getProducerRecord().key(),
+            result.getProducerRecord().value(),
+            result.getRecordMetadata().partition(),
+            result.getRecordMetadata().offset()),
+        e -> log.error("ERROR sending message", e));
   }
 
-  public void run()
+  @Override
+  public void run(ApplicationArguments args)
   {
-    long i = 0;
+    int num = 10;
 
-    try
+    if (args.containsOption(ARG_NUM))
     {
-      for (; running ; i++)
+      List<String> numArgs = args.getOptionValues(ARG_NUM);
+      if (numArgs.size() > 1)
       {
-        send(Long.toString(i%10), Long.toString(i));
-        Thread.sleep(500);
+        log.error(
+            "Only one occurence of argument {} is allowed, but found: {}",
+            ARG_NUM,
+            numArgs.size());
+        return;
       }
-    }
-    catch (InterruptedException e) {}
-    finally
-    {
-      log.info("{}: Closing the KafkaProducer", id);
-      producer.close();
-      log.info("{}: Produced {} messages in total, exiting!", id, produced);
-      done = true;
-    }
-  }
-
-  void send(String key, String value)
-  {
-    final long time = System.currentTimeMillis();
-
-    final ProducerRecord<String, String> record = new ProducerRecord<>(
-        topic,  // Topic
-        key,    // Key
-        value   // Value
-    );
 
-    producer.send(record, (metadata, e) ->
-    {
-      long now = System.currentTimeMillis();
-      if (e == null)
+      try
       {
-        // HANDLE SUCCESS
-        produced++;
-        log.debug(
-            "{} - Sent key={} message={} partition={}/{} timestamp={} latency={}ms",
-            id,
-            record.key(),
-            record.value(),
-            metadata.partition(),
-            metadata.offset(),
-            metadata.timestamp(),
-            now - time
-        );
+        num = Integer.parseInt(numArgs.get(0));
       }
-      else
+      catch (NumberFormatException e)
       {
-        // HANDLE ERROR
-        log.error(
-            "{} - ERROR key={} timestamp={} latency={}ms: {}",
-            id,
-            record.key(),
-            metadata == null ? -1 : metadata.timestamp(),
-            now - time,
-            e.toString()
-        );
+        log.error("{} is not a number: {}", numArgs.get(0), e.getMessage());
       }
-    });
-
-    long now = System.currentTimeMillis();
-    log.trace(
-        "{} - Queued #{} key={} latency={}ms",
-        id,
-        value,
-        record.key(),
-        now - time
-    );
-  }
-
-
-  public static void main(String[] args) throws Exception
-  {
-    String broker = ":9092";
-    String topic = "test";
-    String clientId = "DEV";
-
-    switch (args.length)
-    {
-      case 3:
-        clientId = args[2];
-      case 2:
-        topic = args[1];
-      case 1:
-        broker = args[0];
     }
 
-    SimpleProducer instance = new SimpleProducer(broker, topic, clientId);
-
-    Runtime.getRuntime().addShutdownHook(new Thread(() ->
+    for (int i = 0; i < num; i++)
     {
-      instance.running = false;
-      while (!instance.done)
+      send(Long.toString(i%10), Long.toString(i));
+      try
       {
-        log.info("Waiting for main-thread...");
-        try
-        {
-          Thread.sleep(1000);
-        }
-        catch (InterruptedException e) {}
+        Thread.sleep(500);
+      }
+      catch (InterruptedException e)
+      {
+        log.info("Interrupted after sending {} messages", i);
+        return;
       }
-      log.info("Shutdown completed.");
-    }));
+    }
+  }
+
 
-    log.info(
-        "Running SimpleProducer: broker={}, topic={}, client-id={}",
-        broker,
-        topic,
-        clientId);
-    instance.run();
+  public static void main(String[] args)
+  {
+    SpringApplication.run(Application.class, args);
   }
 }
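
The rewritten send() hands the record to Kafka asynchronously: KafkaTemplate.sendDefault() returns a ListenableFuture<SendResult> and the registered callback merely logs success or failure. Where a caller needs to block until the broker has acknowledged the record, a minimal sketch (assuming the same KafkaTemplate<String, String> bean; the class itself is hypothetical) could look like this:

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

class BlockingSender
{
  private final KafkaTemplate<String, String> kafkaTemplate;

  BlockingSender(KafkaTemplate<String, String> kafkaTemplate)
  {
    this.kafkaTemplate = kafkaTemplate;
  }

  // Sends to the configured default topic and waits for the broker acknowledgement
  SendResult<String, String> sendAndWait(String key, String value)
      throws ExecutionException, InterruptedException, TimeoutException
  {
    // ListenableFuture extends Future, so get() blocks until the send has completed
    return kafkaTemplate.sendDefault(key, value).get(10, TimeUnit.SECONDS);
  }
}
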
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
new file mode 100644 (file)
index 0000000..46825f4
--- /dev/null
@@ -0,0 +1,13 @@
+spring:
+  kafka:
+    bootstrap-servers: :9092
+    client-id: DEV
+    producer:
+      key-serializer: org.apache.kafka.common.serialization.StringSerializer
+      value-serializer: org.apache.kafka.common.serialization.StringSerializer
+    template:
+      default-topic: test
+logging:
+  level:
+    root: INFO
+    de.juplo: DEBUG
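
The spring.kafka.* properties above are picked up by Spring Boot's Kafka auto-configuration, which builds the ProducerFactory and the KafkaTemplate that the application injects. Roughly equivalent hand-wired beans would look like the following sketch (for illustration only; the project relies on the auto-configuration instead):

import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

class ManualKafkaTemplateSetup
{
  static KafkaTemplate<String, String> kafkaTemplate()
  {
    // Mirrors the spring.kafka.* entries from application.yml
    Map<String, Object> config = Map.of(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, ":9092",
        ProducerConfig.CLIENT_ID_CONFIG, "DEV",
        ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
        ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

    ProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(config);
    KafkaTemplate<String, String> template = new KafkaTemplate<>(factory);
    template.setDefaultTopic("test"); // spring.kafka.template.default-topic
    return template;
  }
}
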
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
deleted file mode 100644 (file)
index b8e6780..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-
-  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-      <Pattern>%highlight(%-5level) %m%n</Pattern>
-    </encoder>
-  </appender>
-
-  <logger name="de.juplo" level="TRACE"/>
-  <!-- logger name="org.apache.kafka.clients" level="DEBUG" / -->
-
-  <root level="INFO">
-    <appender-ref ref="STDOUT" />
-  </root>
-
-</configuration>
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
new file mode 100644 (file)
index 0000000..714175e
--- /dev/null
@@ -0,0 +1,24 @@
+package de.juplo.kafka;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+
+import static de.juplo.kafka.ApplicationTests.TOPIC;
+
+
+@SpringBootTest(
+    properties = {
+        "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+        "spring.kafka.template.default-topic=" + TOPIC
+    })
+@EmbeddedKafka(topics = TOPIC)
+public class ApplicationTests
+{
+  public final static String TOPIC = "out";
+
+  @Test
+  public void testApplicationStartup()
+  {
+  }
+}
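
The test above only verifies that the application context starts against the embedded broker. A possible extension, sketched here with made-up names and not part of this commit, would consume from the topic via spring-kafka-test and assert that the ApplicationRunner actually produced records:

import java.util.Map;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.utils.KafkaTestUtils;

class TopicAssertionSketch
{
  @Autowired
  EmbeddedKafkaBroker embeddedKafka;

  ConsumerRecords<String, String> pollTestTopic()
  {
    Map<String, Object> props =
        KafkaTestUtils.consumerProps("test-group", "true", embeddedKafka);
    Consumer<String, String> consumer =
        new DefaultKafkaConsumerFactory<>(
            props,
            new StringDeserializer(),
            new StringDeserializer()).createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, ApplicationTests.TOPIC);
    // Waits up to 10 seconds for records; the runner sends 10 messages by default
    return KafkaTestUtils.getRecords(consumer, 10_000L);
  }
}
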