Commit 19f70ba2692b7bb319fdb9a7709bf9f5c267869d
[demos/kafka/training] src/main/java/de/juplo/kafka/RestProducer.java
package de.juplo.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.context.request.async.DeferredResult;

import javax.annotation.PreDestroy;
import java.util.Properties;
import java.util.concurrent.ExecutionException;


@Slf4j
@RestController
public class RestProducer
{
  private final String id;
  private final String topic;
  private final KafkaProducer<String, String> producer;

  private long produced = 0;

  public RestProducer(ApplicationProperties properties)
  {
    this.id = properties.getClientId();
    this.topic = properties.getTopic();

    // Configure and create the KafkaProducer from the externally supplied settings
    Properties props = new Properties();
    props.put("bootstrap.servers", properties.getBootstrapServer());
    props.put("client.id", properties.getClientId());
    props.put("acks", properties.getAcks());
    props.put("batch.size", properties.getBatchSize());
    props.put("linger.ms", properties.getLingerMs());
    props.put("compression.type", properties.getCompressionType());
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", StringSerializer.class.getName());

    this.producer = new KafkaProducer<>(props);
  }

  // POST {key}: sends the request body as a record with the given key to the configured topic
  @PostMapping(path = "{key}")
  public DeferredResult<ProduceResult> send(
      @PathVariable String key,
      @RequestBody String value)
  {
    // The HTTP response is produced asynchronously: the DeferredResult is
    // completed later, from the producer callback
    DeferredResult<ProduceResult> result = new DeferredResult<>();

    final long time = System.currentTimeMillis();

    final ProducerRecord<String, String> record = new ProducerRecord<>(
        topic,  // Topic
        key,    // Key
        value   // Value
    );

    // The callback runs on the producer's I/O thread, once the send has been
    // acknowledged or has failed
    producer.send(record, (metadata, e) ->
    {
      long now = System.currentTimeMillis();
      if (e == null)
      {
        // HANDLE SUCCESS
        produced++;
        result.setResult(new ProduceSuccess(metadata.partition(), metadata.offset()));
        log.debug(
            "{} - Sent key={} message={} partition={}/{} timestamp={} latency={}ms",
            id,
            record.key(),
            record.value(),
            metadata.partition(),
            metadata.offset(),
            metadata.timestamp(),
            now - time
        );
      }
      else
      {
        // HANDLE ERROR
        result.setErrorResult(new ProduceFailure(e));
        log.error(
            "{} - ERROR key={} timestamp={} latency={}ms: {}",
            id,
            record.key(),
            metadata == null ? -1 : metadata.timestamp(),
            now - time,
            e.toString()
        );
      }
    });

    long now = System.currentTimeMillis();
    log.trace(
        "{} - Queued #{} key={} latency={}ms",
        id,
        value,
        record.key(),
        now - time
    );

    return result;
  }

  @PreDestroy
  public void destroy() throws ExecutionException, InterruptedException
  {
    log.info("{} - Destroy!", id);
    log.info("{} - Closing the KafkaProducer", id);
    // close() blocks until all buffered records have been sent or have failed
    producer.close();
    log.info("{}: Produced {} messages in total, exiting!", id, produced);
  }
}
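
The ApplicationProperties class injected into the constructor is defined elsewhere in the project. A minimal sketch of what it might look like, assuming a Spring Boot @ConfigurationProperties binding; only the getter names are taken from RestProducer, the prefix and the field types are assumptions:

// Hypothetical sketch; only the getters used by RestProducer are given by the
// code above, the prefix "producer" and the field types are assumptions.
package de.juplo.kafka;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;

@ConfigurationProperties(prefix = "producer")
@Getter
@Setter
public class ApplicationProperties
{
  private String bootstrapServer;
  private String clientId;
  private String topic;
  private String acks;
  private Integer batchSize;
  private Integer lingerMs;
  private String compressionType;
}

Such a class would typically also have to be enabled, for example via @EnableConfigurationProperties(ApplicationProperties.class) on the application class; how the project actually wires this up is not visible here.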
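
The response types ProduceResult, ProduceSuccess and ProduceFailure are likewise defined in separate files. Judging only from how they are instantiated in the callback, they could be shaped roughly like this; all field and accessor names are assumptions:

// Hypothetical sketch, inferred from the constructor calls in the callback above;
// the real classes may carry additional fields or serialization annotations.
package de.juplo.kafka;

public interface ProduceResult {}

class ProduceSuccess implements ProduceResult
{
  private final int partition;
  private final long offset;

  ProduceSuccess(int partition, long offset)
  {
    this.partition = partition;
    this.offset = offset;
  }

  public int getPartition() { return partition; }
  public long getOffset() { return offset; }
}

class ProduceFailure implements ProduceResult
{
  private final String error;

  ProduceFailure(Exception e)
  {
    this.error = e.toString();
  }

  public String getError() { return error; }
}

With pieces like these in place, a POST to the application (for example to /foo with a plain-text body) would be answered asynchronously with the partition and offset of the stored record, or with an error representation if the send failed.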