X-Git-Url: https://juplo.de/gitweb/?a=blobdiff_plain;f=src%2Fmain%2Fjava%2Fde%2Fjuplo%2Fkafka%2FRestProducer.java;h=4be2dcd74d23c6666f3c208f6aa73e6c9eb41002;hb=afe6ed701888b20b659d1a788108a842c7307f09;hp=59d2c77e8da56e8a4325ab88233f8bf6e61de158;hpb=1b978296e798614b3ca8317b43acd1a44a774ecd;p=demos%2Fkafka%2Ftraining

diff --git a/src/main/java/de/juplo/kafka/RestProducer.java b/src/main/java/de/juplo/kafka/RestProducer.java
index 59d2c77..4be2dcd 100644
--- a/src/main/java/de/juplo/kafka/RestProducer.java
+++ b/src/main/java/de/juplo/kafka/RestProducer.java
@@ -1,74 +1,75 @@
 package de.juplo.kafka;
 
+import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.http.HttpStatus;
 import org.springframework.web.bind.annotation.*;
 import org.springframework.web.context.request.async.DeferredResult;
 
-import javax.annotation.PreDestroy;
-import java.util.Properties;
-import java.util.concurrent.ExecutionException;
+import java.math.BigInteger;
 
 
 @Slf4j
-@RestController
+@RequestMapping
+@ResponseBody
+@RequiredArgsConstructor
 public class RestProducer
 {
   private final String id;
   private final String topic;
   private final Integer partition;
-  private final KafkaProducer<String, String> producer;
+  private final KafkaProducer<String, Object> producer;
 
   private long produced = 0;
 
-  public RestProducer(ApplicationProperties properties)
-  {
-    this.id = properties.getClientId();
-    this.topic = properties.getTopic();
-    this.partition = properties.getPartition();
-
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("acks", properties.getAcks());
-    props.put("batch.size", properties.getBatchSize());
-    props.put("delivery.timeout.ms", 20000); // 20 seconds
-    props.put("request.timeout.ms", 10000); // 10 seconds
-    props.put("linger.ms", properties.getLingerMs());
-    props.put("compression.type", properties.getCompressionType());
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    this.producer = new KafkaProducer<>(props);
-  }
-
   @PostMapping(path = "{key}")
   public DeferredResult<ProduceResult> send(
       @PathVariable String key,
       @RequestHeader(name = "X-id", required = false) Long correlationId,
-      @RequestBody String value)
+      @RequestBody Integer number)
   {
-    DeferredResult<ProduceResult> result = new DeferredResult<>();
+    ResultRecorder result = new ResultRecorder(number+1);
+
+    for (int i = 1; i <= number; i++)
+    {
+      send(key, new AddNumberMessage(number, i), correlationId, result);
+    }
+    send(key, new CalculateSumMessage(number), correlationId, result);
+
+    return result.getDeferredResult();
+  }
 
+  private void send(
+      String key,
+      Object value,
+      Long correlationId,
+      ResultRecorder result)
+  {
     final long time = System.currentTimeMillis();
 
-    final ProducerRecord<String, String> record = new ProducerRecord<>(
+    final ProducerRecord<String, Object> record = new ProducerRecord<>(
         topic,  // Topic
+        partition, // Partition
         key,    // Key
         value   // Value
     );
 
+    record.headers().add("source", id.getBytes());
+    if (correlationId != null)
+    {
+      record.headers().add("id", BigInteger.valueOf(correlationId).toByteArray());
+    }
+
     producer.send(record, (metadata, e) ->
     {
       long now = System.currentTimeMillis();
       if (e == null)
       {
         // HANDLE SUCCESS
+        result.addSuccess(metadata);
         produced++;
-        result.setResult(new ProduceSuccess(metadata.partition(), metadata.offset()));
         log.debug(
             "{} - Sent key={} message={} partition={}/{} timestamp={} latency={}ms",
             id,
@@ -83,7 +84,7 @@ public class RestProducer
       else
       {
         // HANDLE ERROR
-        result.setErrorResult(new ProduceFailure(e));
+        result.addFailure(e);
         log.error(
             "{} - ERROR key={} timestamp={} latency={}ms: {}",
             id,
@@ -102,8 +103,6 @@ public class RestProducer
         record.key(),
         now - time
     );
-
-    return result;
   }
 
   @ExceptionHandler
@@ -112,13 +111,4 @@ public class RestProducer
   {
     return new ErrorResponse(e.getMessage(), HttpStatus.BAD_REQUEST.value());
   }
-
-  @PreDestroy
-  public void destroy() throws ExecutionException, InterruptedException
-  {
-    log.info("{} - Destroy!", id);
-    log.info("{} - Closing the KafkaProducer", id);
-    producer.close();
-    log.info("{}: Produced {} messages in total, exiting!", id, produced);
-  }
 }
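
Note: the ResultRecorder, AddNumberMessage and CalculateSumMessage types referenced above are not part of this file; they are introduced elsewhere in the commit. For orientation only, here is a minimal, hypothetical sketch of a recorder that would satisfy the calls visible in this diff: a constructor taking the number of expected acknowledgements (number+1), addSuccess()/addFailure() invoked from the producer callback, and getDeferredResult() as the controller's return value. It reuses the ProduceResult/ProduceSuccess/ProduceFailure types seen in the removed lines; everything else is an assumption, not the implementation shipped with commit afe6ed7.

package de.juplo.kafka;

import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.web.context.request.async.DeferredResult;

// Hypothetical sketch, not the ResultRecorder class contained in this commit.
// Counts one callback per record sent and completes the DeferredResult once
// all expected acknowledgements have arrived (or with the recorded error).
public class ResultRecorder
{
  private final int expected;
  private final DeferredResult<ProduceResult> deferredResult = new DeferredResult<>();

  private int received = 0;
  private RecordMetadata lastMetadata;
  private Exception error;

  public ResultRecorder(int expected)
  {
    this.expected = expected;
  }

  public synchronized void addSuccess(RecordMetadata metadata)
  {
    received++;
    lastMetadata = metadata;
    completeIfDone();
  }

  public synchronized void addFailure(Exception e)
  {
    received++;
    error = e;
    completeIfDone();
  }

  public DeferredResult<ProduceResult> getDeferredResult()
  {
    return deferredResult;
  }

  private void completeIfDone()
  {
    if (received < expected)
      return;

    if (error != null)
      deferredResult.setErrorResult(new ProduceFailure(error));
    else
      deferredResult.setResult(new ProduceSuccess(lastMetadata.partition(), lastMetadata.offset()));
  }
}

Completing the DeferredResult only after every callback has fired keeps the HTTP request open until all records of the batch are acknowledged, or until one of them fails.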