refactor: `RestProducer` is created explicitly
[demos/kafka/training] src/main/java/de/juplo/kafka/RestProducer.java
index dea49f0..debe366 100644
@@ -1,52 +1,33 @@
 package de.juplo.kafka;
 
+import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.http.MediaType;
+import org.springframework.http.HttpStatus;
 import org.springframework.web.bind.annotation.*;
 import org.springframework.web.context.request.async.DeferredResult;
 
-import javax.annotation.PreDestroy;
-import java.util.Properties;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
+import java.math.BigInteger;
 
 
 @Slf4j
-@RestController
+@RequestMapping
+@ResponseBody
+@RequiredArgsConstructor
 public class RestProducer
 {
   private final String id;
   private final String topic;
+  private final Integer partition;
   private final KafkaProducer<String, String> producer;
 
   private long produced = 0;
 
-  public RestProducer(ApplicationProperties properties)
-  {
-    this.id = properties.getClientId();
-    this.topic = properties.getTopic();
-
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("acks", properties.getAcks());
-    props.put("batch.size", properties.getBatchSize());
-    props.put("delivery.timeout.ms", 20000); // 20 Sekunden
-    props.put("request.timeout.ms",  10000); // 10 Sekunden
-    props.put("linger.ms", properties.getLingerMs());
-    props.put("compression.type", properties.getCompressionType());
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    this.producer = new KafkaProducer<>(props);
-  }
-
   @PostMapping(path = "{key}")
   public DeferredResult<ProduceResult> send(
       @PathVariable String key,
+      @RequestHeader(name = "X-id", required = false) Long correlationId,
       @RequestBody String value)
   {
     DeferredResult<ProduceResult> result = new DeferredResult<>();
@@ -55,10 +36,17 @@ public class RestProducer
 
     final ProducerRecord<String, String> record = new ProducerRecord<>(
         topic,  // Topic
+        partition, // Partition
         key,    // Key
         value   // Value
     );
 
+    record.headers().add("source", id.getBytes());
+    if (correlationId != null)
+    {
+      record.headers().add("id", BigInteger.valueOf(correlationId).toByteArray());
+    }
+
     producer.send(record, (metadata, e) ->
     {
       long now = System.currentTimeMillis();
@@ -95,9 +83,8 @@ public class RestProducer
 
     long now = System.currentTimeMillis();
     log.trace(
-        "{} - Queued #{} key={} latency={}ms",
+        "{} - Queued message with key={} latency={}ms",
         id,
-        value,
         record.key(),
         now - time
     );
@@ -105,12 +92,10 @@ public class RestProducer
     return result;
   }
 
-  @PreDestroy
-  public void destroy() throws ExecutionException, InterruptedException
+  @ExceptionHandler
+  @ResponseStatus(HttpStatus.BAD_REQUEST)
+  public ErrorResponse illegalStateException(IllegalStateException e)
   {
-    log.info("{} - Destroy!", id);
-    log.info("{} - Closing the KafkaProducer", id);
-    producer.close();
-    log.info("{}: Produced {} messages in total, exiting!", id, produced);
+    return new ErrorResponse(e.getMessage(), HttpStatus.BAD_REQUEST.value());
   }
 }
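
The diff drops the constructor that built and configured the `KafkaProducer` as well as the `@PreDestroy` hook, so both the producer and the `RestProducer` itself presumably have to be wired up explicitly elsewhere in the commit (that part is not shown above). A minimal sketch of what such an explicit configuration could look like, assuming a Spring Boot `Application` class, an `ApplicationProperties` bean with a `getPartition()` accessor for the new field, and the Lombok-generated constructor taking `id`, `topic`, `partition` and `producer` in field order:

```java
package de.juplo.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;

import java.util.Properties;


@SpringBootApplication
@EnableConfigurationProperties(ApplicationProperties.class)
public class Application
{
  // Producer configuration that used to live in the RestProducer constructor;
  // Spring calls close() on shutdown, replacing the removed @PreDestroy method.
  @Bean(destroyMethod = "close")
  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
  {
    Properties props = new Properties();
    props.put("bootstrap.servers", properties.getBootstrapServer());
    props.put("client.id", properties.getClientId());
    props.put("acks", properties.getAcks());
    props.put("batch.size", properties.getBatchSize());
    props.put("delivery.timeout.ms", 20000); // 20 seconds
    props.put("request.timeout.ms",  10000); // 10 seconds
    props.put("linger.ms", properties.getLingerMs());
    props.put("compression.type", properties.getCompressionType());
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", StringSerializer.class.getName());

    return new KafkaProducer<>(props);
  }

  // RestProducer is no longer a scanned @RestController, so it is created explicitly.
  // getPartition() is an assumed accessor on ApplicationProperties for the new field.
  @Bean
  public RestProducer restProducer(
      ApplicationProperties properties,
      KafkaProducer<String, String> kafkaProducer)
  {
    return new RestProducer(
        properties.getClientId(),
        properties.getTopic(),
        properties.getPartition(),
        kafkaProducer);
  }

  public static void main(String[] args)
  {
    SpringApplication.run(Application.class, args);
  }
}
```

Replacing `@RestController` with `@RequestMapping`/`@ResponseBody` removes the `@Controller` stereotype, so the class is no longer picked up by component scanning; its handler methods are still mapped because a type-level `@RequestMapping` marks the bean as a request handler, but the bean itself now has to be declared explicitly, roughly as sketched above.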