Spring's `KafkaTemplate` is used for sending the messages; a sketch of how the template itself can be wired up follows after the diff.
demos/kafka/training: src/main/java/de/juplo/kafka/RestProducer.java
index e564a66..56f3382 100644
@@ -1,16 +1,14 @@
 package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.clients.producer.RecordMetadata;
 import org.springframework.http.HttpStatus;
-import org.springframework.kafka.support.serializer.JsonSerializer;
+import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.web.bind.annotation.*;
 import org.springframework.web.context.request.async.DeferredResult;
 
 import javax.annotation.PreDestroy;
-import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 
 
@@ -20,51 +18,74 @@ public class RestProducer
 {
   private final String id;
   private final String topic;
-  private final KafkaProducer<String, ClientMessage> producer;
+  private final KafkaTemplate<String, Object> kafkaTemplate;
 
   private long produced = 0;
 
-  public RestProducer(ApplicationProperties properties)
+  public RestProducer(
+      ApplicationProperties properties,
+      KafkaTemplate<String, Object> kafkaTemplate)
   {
     this.id = properties.getClientId();
     this.topic = properties.getTopic();
-
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("acks", properties.getAcks());
-    props.put("batch.size", properties.getBatchSize());
-    props.put("delivery.timeout.ms", 20000); // 20 Sekunden
-    props.put("request.timeout.ms",  10000); // 10 Sekunden
-    props.put("linger.ms", properties.getLingerMs());
-    props.put("compression.type", properties.getCompressionType());
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", JsonSerializer.class.getName());
-
-    this.producer = new KafkaProducer<>(props);
+    this.kafkaTemplate = kafkaTemplate;
   }
 
   @PostMapping(path = "{key}")
-  public DeferredResult<ProduceResult> send(
+  public DeferredResult<ProduceResult> message(
       @PathVariable String key,
       @RequestBody String value)
   {
-    DeferredResult<ProduceResult> result = new DeferredResult<>();
+    key = key.trim();
+    final ProducerRecord<String, Object> record = new ProducerRecord<>(
+        topic,  // Topic
+        key,    // Key
+        new ClientMessage(key, value) // Value
+    );
 
-    final long time = System.currentTimeMillis();
+    return send(record);
+  }
 
-    final ProducerRecord<String, ClientMessage> record = new ProducerRecord<>(
+  @PutMapping(path = "{key}")
+  public DeferredResult<ProduceResult> message(@PathVariable String key)
+  {
+    key = key.trim();
+    final ProducerRecord<String, Object> record = new ProducerRecord<>(
         topic,  // Topic
         key,    // Key
-        new ClientMessage(key, value) // Value
+        new FooMessage(key, System.currentTimeMillis()) // Value
     );
 
-    producer.send(record, (metadata, e) ->
-    {
-      long now = System.currentTimeMillis();
-      if (e == null)
+    return send(record);
+  }
+
+  @PostMapping(path = "/")
+  public DeferredResult<ProduceResult> greeting(
+      @RequestBody String name)
+  {
+    name = name.trim();
+    final ProducerRecord<String, Object> record = new ProducerRecord<>(
+        topic,  // Topic
+        name,    // Key
+        new Greeting(name) // Value
+    );
+
+    return send(record);
+  }
+
+  private DeferredResult<ProduceResult> send(ProducerRecord<String, Object> record)
+  {
+    DeferredResult<ProduceResult> result = new DeferredResult<>();
+
+    final long time = System.currentTimeMillis();
+
+    kafkaTemplate.send(record).addCallback(
+      (sendResult) ->
       {
+        long now = System.currentTimeMillis();
+
         // HANDLE SUCCESS
+        RecordMetadata metadata = sendResult.getRecordMetadata();
         produced++;
         result.setResult(new ProduceSuccess(metadata.partition(), metadata.offset()));
         log.debug(
@@ -77,27 +98,26 @@ public class RestProducer
             metadata.timestamp(),
             now - time
         );
-      }
-      else
+      },
+      (e) ->
       {
+        long now = System.currentTimeMillis();
+
         // HANDLE ERROR
         result.setErrorResult(new ProduceFailure(e));
         log.error(
-            "{} - ERROR key={} timestamp={} latency={}ms: {}",
+            "{} - ERROR key={} timestamp=-1 latency={}ms: {}",
             id,
             record.key(),
-            metadata == null ? -1 : metadata.timestamp(),
             now - time,
             e.toString()
         );
-      }
-    });
+      });
 
     long now = System.currentTimeMillis();
     log.trace(
-        "{} - Queued #{} key={} latency={}ms",
+        "{} - Queued key={} latency={}ms",
         id,
-        value,
         record.key(),
         now - time
     );
@@ -116,8 +136,6 @@ public class RestProducer
   public void destroy() throws ExecutionException, InterruptedException
   {
     log.info("{} - Destroy!", id);
-    log.info("{} - Closing the KafkaProducer", id);
-    producer.close();
     log.info("{}: Produced {} messages in total, exiting!", id, produced);
   }
 }
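
The commit drops the hand-built `Properties`; those settings now have to reach the `KafkaTemplate` through its `ProducerFactory` instead, either via Spring Boot's `spring.kafka.*` auto-configuration properties or via an explicit configuration class. The following is only a minimal sketch of such a configuration class, assuming the same `ApplicationProperties` getters that the removed constructor code used; the class name is hypothetical and it is not part of the commit shown above:

```java
package de.juplo.kafka;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.HashMap;
import java.util.Map;


@Configuration
public class KafkaTemplateConfiguration // hypothetical name, not part of the commit
{
  @Bean
  public ProducerFactory<String, Object> producerFactory(ApplicationProperties properties)
  {
    // The same settings the old constructor passed to the KafkaProducer directly
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer());
    props.put(ProducerConfig.CLIENT_ID_CONFIG, properties.getClientId());
    props.put(ProducerConfig.ACKS_CONFIG, properties.getAcks());
    props.put(ProducerConfig.BATCH_SIZE_CONFIG, properties.getBatchSize());
    props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 20000); // 20 seconds
    props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 10000);  // 10 seconds
    props.put(ProducerConfig.LINGER_MS_CONFIG, properties.getLingerMs());
    props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, properties.getCompressionType());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    return new DefaultKafkaProducerFactory<>(props);
  }

  @Bean
  public KafkaTemplate<String, Object> kafkaTemplate(ProducerFactory<String, Object> producerFactory)
  {
    // This is the template that gets injected into RestProducer in the diff above
    return new KafkaTemplate<>(producerFactory);
  }
}
```

In a plain Spring Boot setup the same effect is usually achieved without any code at all, by setting the corresponding `spring.kafka.producer.*` properties and letting the auto-configured `KafkaTemplate` be injected; the class above merely illustrates where the settings removed by the diff end up.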