Transformed `simple-producer` into `simple-consumer`   (grundlagen/simple-consumer--REBASE-ANFANG)
author      Kai Moritz <kai@juplo.de>
            Thu, 26 Sep 2024 13:21:01 +0000 (15:21 +0200)
committer   Kai Moritz <kai@juplo.de>
            Sat, 28 Sep 2024 08:38:58 +0000 (10:38 +0200)
Dockerfile
README.sh
docker/docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/ExampleConsumer.java [new file with mode: 0644]
src/main/java/de/juplo/kafka/ExampleProducer.java [deleted file]

diff --git a/Dockerfile b/Dockerfile
index 74e66ed..22819af 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -3,4 +3,4 @@ VOLUME /tmp
 COPY target/*.jar /opt/app.jar
 COPY target/libs /opt/libs
 ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD [ "kafka:9092", "test", "DCKR" ]
+CMD [ "kafka:9092", "test", "my-group", "DCKR" ]
diff --git a/README.sh b/README.sh
index 97ca7cc..ff0b58b 100755
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/simple-producer:1.0-SNAPSHOT
+IMAGE=juplo/simple-consumer:1.0-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
@@ -10,7 +10,7 @@ then
 fi
 
 docker compose -f docker/docker-compose.yml up -d --remove-orphans kafka-1 kafka-2 kafka-3
-docker compose -f docker/docker-compose.yml rm -svf producer
+docker compose -f docker/docker-compose.yml rm -svf consumer
 
 if [[
   $(docker image ls -q $IMAGE) == "" ||
@@ -30,11 +30,15 @@ sleep 1
 docker compose -f docker/docker-compose.yml logs setup
 
 docker compose -f docker/docker-compose.yml ps
+
 docker compose -f docker/docker-compose.yml up -d producer
+docker compose -f docker/docker-compose.yml up -d consumer
+
 sleep 5
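+# Stop the consumer briefly and start it again to show that it continues reading after a restart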
+docker compose -f docker/docker-compose.yml stop consumer
 
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -c 20 -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
+docker compose -f docker/docker-compose.yml start consumer
+sleep 5
 
-docker compose -f docker/docker-compose.yml stop producer
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -e -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
-docker compose -f docker/docker-compose.yml logs producer
+docker compose -f docker/docker-compose.yml stop producer consumer
+docker compose -f docker/docker-compose.yml logs consumer
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 531a116..f68b6fb 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -191,6 +191,10 @@ services:
     image: juplo/simple-producer:1.0-SNAPSHOT
     command: kafka:9092 test producer
 
+  consumer:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
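+    # Arguments: <bootstrap-server> <topic> <group-id> <client-id>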
+    command: kafka:9092 test my-group consumer
+
 volumes:
   zookeeper-data:
   zookeeper-log:
diff --git a/pom.xml b/pom.xml
index ad7f17a..2d81d24 100644
--- a/pom.xml
+++ b/pom.xml
@@ -12,9 +12,9 @@
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>simple-producer</artifactId>
-  <name>Super Simple Producer</name>
-  <description>A Simple Producer, programmed with pure Java, that sends messages via Kafka</description>
+  <artifactId>simple-consumer</artifactId>
+  <name>Simple Consumer-Group</name>
+  <description>A super simple consumer group, implemented as a plain Java program</description>
   <version>1.0-SNAPSHOT</version>
 
   <properties>
@@ -62,7 +62,7 @@
             <manifest>
               <addClasspath>true</addClasspath>
               <classpathPrefix>libs/</classpathPrefix>
-              <mainClass>de.juplo.kafka.ExampleProducer</mainClass>
+              <mainClass>de.juplo.kafka.ExampleConsumer</mainClass>
             </manifest>
           </archive>
         </configuration>
diff --git a/src/main/java/de/juplo/kafka/ExampleConsumer.java b/src/main/java/de/juplo/kafka/ExampleConsumer.java
new file mode 100644
index 0000000..1715813
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/ExampleConsumer.java
@@ -0,0 +1,137 @@
+package de.juplo.kafka;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.errors.WakeupException;
+import org.apache.kafka.common.serialization.StringDeserializer;
+
+import java.time.Duration;
+import java.util.Arrays;
+import java.util.Properties;
+
+
+@Slf4j
+public class ExampleConsumer
+{
+  private final String id;
+  private final String topic;
+  private final Consumer<String, String> consumer;
+
+  private volatile boolean running = false;
+  private long consumed = 0;
+
+  public ExampleConsumer(String broker, String topic, String groupId, String clientId)
+  {
+    Properties props = new Properties();
+    props.put("bootstrap.servers", broker);
+    props.put("group.id", groupId); // ID für die Offset-Commits
+    props.put("client.id", clientId); // Nur zur Wiedererkennung
+    props.put("auto.offset.reset", "earliest"); // Von Beginn an lesen
+    props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
+    props.put("key.deserializer", StringDeserializer.class.getName());
+    props.put("value.deserializer", StringDeserializer.class.getName());
+
+    this.id = clientId;
+    this.topic = topic;
+    consumer = new KafkaConsumer<>(props);
+  }
+
+
+  public void run()
+  {
+    try
+    {
+      log.info("{} - Subscribing to topic {}", id, topic);
+      consumer.subscribe(Arrays.asList(topic));
+      running = true;
+
+      while (true)
+      {
+        ConsumerRecords<String, String> records =
+            consumer.poll(Duration.ofSeconds(1));
+
+        log.info("{} - Received {} messages", id, records.count());
+        for (ConsumerRecord<String, String> record : records)
+        {
+          consumed++;
+          log.info(
+              "{} - {}: {}/{} - {}={}",
+              id,
+              record.offset(),
+              record.topic(),
+              record.partition(),
+              record.key(),
+              record.value()
+          );
+        }
+      }
+    }
+    catch(WakeupException e)
+    {
+      log.info("{} - Consumer was signaled to finish its work", id);
+    }
+    catch(Exception e)
+    {
+      log.error("{} - Unexpected error: {}, unsubscribing!", id, e.toString());
+      consumer.unsubscribe();
+    }
+    finally
+    {
+      running = false;
+      log.info("{} - Closing the KafkaConsumer", id);
+      consumer.close();
+      log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
+    }
+  }
+
+
+  public static void main(String[] args) throws Exception
+  {
+    String broker = ":9092";
+    String topic = "test";
+    String groupId = "my-group";
+    String clientId = "DEV";
+
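+    // The cases intentionally fall through: each additional argument overrides one more default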
+    switch (args.length)
+    {
+      case 4:
+        clientId = args[3];
+      case 3:
+        groupId = args[2];
+      case 2:
+        topic = args[1];
+      case 1:
+        broker = args[0];
+    }
+
+
+    ExampleConsumer instance = new ExampleConsumer(broker, topic, groupId, clientId);
+
+    Runtime.getRuntime().addShutdownHook(new Thread(() ->
+    {
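+      // wakeup() aborts the blocking poll() in run() with a WakeupException,
+      // so that run() can leave the poll loop and close the consumer cleanly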
+      instance.consumer.wakeup();
+
+      while (instance.running)
+      {
+        log.info("Waiting for main-thread...");
+        try
+        {
+          Thread.sleep(1000);
+        }
+        catch (InterruptedException e) {}
+      }
+      log.info("Shutdown completed.");
+    }));
+
+    log.info(
+        "Running SimpleConsumer: broker={}, topic={}, group-id={}, client-id={}",
+        broker,
+        topic,
+        groupId,
+        clientId);
+    instance.run();
+  }
+}
diff --git a/src/main/java/de/juplo/kafka/ExampleProducer.java b/src/main/java/de/juplo/kafka/ExampleProducer.java
deleted file mode 100644
index 06e14ef..0000000
--- a/src/main/java/de/juplo/kafka/ExampleProducer.java
+++ /dev/null
@@ -1,153 +0,0 @@
-package de.juplo.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-
-import java.util.Properties;
-
-
-@Slf4j
-public class ExampleProducer
-{
-  private final String id;
-  private final String topic;
-  private final Producer<String, String> producer;
-
-  private volatile boolean running = true;
-  private volatile boolean done = false;
-  private long produced = 0;
-
-  public ExampleProducer(String broker, String topic, String clientId)
-  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", broker);
-    props.put("client.id", clientId); // Nur zur Wiedererkennung
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    this.id = clientId;
-    this.topic = topic;
-    producer = new KafkaProducer<>(props);
-  }
-
-  public void run()
-  {
-    long i = 0;
-
-    try
-    {
-      for (; running; i++)
-      {
-        send(Long.toString(i%10), Long.toString(i));
-        Thread.sleep(500);
-      }
-    }
-    catch (Exception e)
-    {
-      log.error("{} - Unexpected error: {}!", id, e.toString());
-    }
-    finally
-    {
-      log.info("{}: Closing the KafkaProducer", id);
-      producer.close();
-      log.info("{}: Produced {} messages in total, exiting!", id, produced);
-      done = true;
-    }
-  }
-
-  void send(String key, String value)
-  {
-    final long time = System.currentTimeMillis();
-
-    final ProducerRecord<String, String> record = new ProducerRecord<>(
-        topic,  // Topic
-        key,    // Key
-        value   // Value
-    );
-
-    producer.send(record, (metadata, e) ->
-    {
-      long now = System.currentTimeMillis();
-      if (e == null)
-      {
-        // HANDLE SUCCESS
-        produced++;
-        log.debug(
-            "{} - Sent key={} message={} partition={}/{} timestamp={} latency={}ms",
-            id,
-            record.key(),
-            record.value(),
-            metadata.partition(),
-            metadata.offset(),
-            metadata.timestamp(),
-            now - time
-        );
-      }
-      else
-      {
-        // HANDLE ERROR
-        log.error(
-            "{} - ERROR key={} timestamp={} latency={}ms: {}",
-            id,
-            record.key(),
-            metadata == null ? -1 : metadata.timestamp(),
-            now - time,
-            e.toString()
-        );
-      }
-    });
-
-    long now = System.currentTimeMillis();
-    log.trace(
-        "{} - Queued message with key={} latency={}ms",
-        id,
-        record.key(),
-        now - time
-    );
-  }
-
-
-  public static void main(String[] args) throws Exception
-  {
-    String broker = ":9092";
-    String topic = "test";
-    String clientId = "DEV";
-
-    switch (args.length)
-    {
-      case 3:
-        clientId = args[2];
-      case 2:
-        topic = args[1];
-      case 1:
-        broker = args[0];
-    }
-
-    ExampleProducer instance = new ExampleProducer(broker, topic, clientId);
-
-    Runtime.getRuntime().addShutdownHook(new Thread(() ->
-    {
-      instance.running = false;
-      while (!instance.done)
-      {
-        log.info("Waiting for main-thread...");
-        try
-        {
-          Thread.sleep(1000);
-        }
-        catch (InterruptedException e) {}
-      }
-      log.info("Shutdown completed.");
-    }));
-
-    log.info(
-        "Running ExampleProducer: broker={}, topic={}, client-id={}",
-        broker,
-        topic,
-        clientId);
-    instance.run();
-  }
-}