X-Git-Url: http://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FRestProducer.java;h=423a8a3cdbec74975e33fb69c16b9d8b94d04b64;hb=56bf19f4f150e7ab97eed32f01a1f470b9f896a6;hp=19f70ba2692b7bb319fdb9a7709bf9f5c267869d;hpb=50da4f6f74a8f4f567b7af8556480f81256c61dc;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/RestProducer.java b/src/main/java/de/juplo/kafka/RestProducer.java
index 19f70ba..423a8a3 100644
--- a/src/main/java/de/juplo/kafka/RestProducer.java
+++ b/src/main/java/de/juplo/kafka/RestProducer.java
@@ -1,17 +1,14 @@
 package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.http.MediaType;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.springframework.http.HttpStatus;
+import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.web.bind.annotation.*;
 import org.springframework.web.context.request.async.DeferredResult;
 
 import javax.annotation.PreDestroy;
-import java.util.Properties;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
 
 
 @Slf4j
@@ -19,96 +16,105 @@ import java.util.concurrent.ExecutorService;
 public class RestProducer
 {
   private final String id;
-  private final String topic;
-  private final KafkaProducer producer;
+  private final KafkaTemplate kafkaTemplate;
 
   private long produced = 0;
 
-  public RestProducer(ApplicationProperties properties)
+  public RestProducer(
+      ApplicationProperties properties,
+      KafkaTemplate kafkaTemplate)
   {
     this.id = properties.getClientId();
-    this.topic = properties.getTopic();
-
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("acks", properties.getAcks());
-    props.put("batch.size", properties.getBatchSize());
-    props.put("linger.ms", properties.getLingerMs());
-    props.put("compression.type", properties.getCompressionType());
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    this.producer = new KafkaProducer<>(props);
+    this.kafkaTemplate = kafkaTemplate;
   }
 
   @PostMapping(path = "{key}")
-  public DeferredResult send(
+  public DeferredResult message(
       @PathVariable String key,
       @RequestBody String value)
+  {
+    key = key.trim();
+    return send(key, new ClientMessage(key, value));
+  }
+
+  @PutMapping(path = "{key}")
+  public DeferredResult message(@PathVariable String key)
+  {
+    key = key.trim();
+    return send(key, new FooMessage(key, System.currentTimeMillis()));
+  }
+
+  @PostMapping(path = "/")
+  public DeferredResult greeting(
+      @RequestBody String name)
+  {
+    name = name.trim();
+    return send(name, new Greeting(name));
+  }
+
+  private DeferredResult send(String key, Object value)
   {
     DeferredResult result = new DeferredResult<>();
 
     final long time = System.currentTimeMillis();
 
-    final ProducerRecord record = new ProducerRecord<>(
-        topic,  // Topic
-        key,    // Key
-        value   // Value
-    );
-
-    producer.send(record, (metadata, e) ->
-    {
-      long now = System.currentTimeMillis();
-      if (e == null)
+    kafkaTemplate.sendDefault(key, value).addCallback(
+      (sendResult) ->
       {
+        long now = System.currentTimeMillis();
         // HANDLE SUCCESS
+        RecordMetadata metadata = sendResult.getRecordMetadata();
         produced++;
         result.setResult(new ProduceSuccess(metadata.partition(), metadata.offset()));
         log.debug(
             "{} - Sent key={} message={} partition={}/{} timestamp={} latency={}ms",
             id,
-            record.key(),
-            record.value(),
+            key,
+            value,
             metadata.partition(),
             metadata.offset(),
             metadata.timestamp(),
             now - time
        );
-      }
-      else
+      },
+      (e) ->
      {
+        long now = System.currentTimeMillis();
        // HANDLE ERROR
        result.setErrorResult(new ProduceFailure(e));
        log.error(
-            "{} - ERROR key={} timestamp={} latency={}ms: {}",
            id,
-            record.key(),
-            metadata == null ? -1 : metadata.timestamp(),
+            "{} - ERROR key={} timestamp=-1 latency={}ms: {}",
+            key,
            now - time,
            e.toString()
        );
-      }
-    });
+      });
 
    long now = System.currentTimeMillis();
    log.trace(
-        "{} - Queued #{} key={} latency={}ms",
+        "{} - Queued key={} latency={}ms",
        id,
-        value,
-        record.key(),
+        key,
        now - time
    );
 
    return result;
  }
 
+  @ExceptionHandler
+  @ResponseStatus(HttpStatus.BAD_REQUEST)
+  public ErrorResponse illegalStateException(IllegalStateException e)
+  {
+    return new ErrorResponse(e.getMessage(), HttpStatus.BAD_REQUEST.value());
+  }
+
   @PreDestroy
   public void destroy() throws ExecutionException, InterruptedException
   {
     log.info("{} - Destroy!", id);
-    log.info("{} - Closing the KafkaProducer", id);
-    producer.close();
     log.info("{}: Produced {} messages in total, exiting!", id, produced);
   }
 }
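
The refactored controller no longer builds its own KafkaProducer; it calls kafkaTemplate.sendDefault(key, value), so it depends on a KafkaTemplate bean that has a default topic configured and a value serializer able to handle ClientMessage, FooMessage and Greeting. That wiring is not part of this diff. Below is a minimal sketch of what such a configuration might look like, reusing the same ApplicationProperties getters the removed code called; the class name KafkaTemplateConfiguration and the choice of Spring Kafka's JsonSerializer are assumptions, not taken from this commit.

// Sketch only -- not part of the diff above. Assumes Spring Kafka on the
// classpath and the ApplicationProperties getters seen in the removed code.
package de.juplo.kafka;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;

import java.util.Map;

@Configuration
public class KafkaTemplateConfiguration // hypothetical name, for illustration
{
  @Bean
  public ProducerFactory<String, Object> producerFactory(ApplicationProperties properties)
  {
    // The same producer settings the removed hand-rolled KafkaProducer used,
    // handed to Spring's producer factory instead.
    Map<String, Object> props = Map.of(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServer(),
        ProducerConfig.CLIENT_ID_CONFIG, properties.getClientId(),
        ProducerConfig.ACKS_CONFIG, properties.getAcks(),
        ProducerConfig.BATCH_SIZE_CONFIG, properties.getBatchSize(),
        ProducerConfig.LINGER_MS_CONFIG, properties.getLingerMs(),
        ProducerConfig.COMPRESSION_TYPE_CONFIG, properties.getCompressionType());

    return new DefaultKafkaProducerFactory<>(
        props,
        new StringSerializer(),
        new JsonSerializer<>()); // JSON lets the three message types share one producer
  }

  @Bean
  public KafkaTemplate<String, Object> kafkaTemplate(
      ProducerFactory<String, Object> producerFactory,
      ApplicationProperties properties)
  {
    KafkaTemplate<String, Object> kafkaTemplate = new KafkaTemplate<>(producerFactory);
    // sendDefault() in RestProducer only works if a default topic is set here.
    kafkaTemplate.setDefaultTopic(properties.getTopic());
    return kafkaTemplate;
  }
}

With a setup along these lines, a POST to /{key} with a text body produces a ClientMessage, a PUT to /{key} a FooMessage, and a POST to / a Greeting, each keyed and sent to the default topic.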