#!/bin/bash
-IMAGE=juplo/rest-producer:1.0-SNAPSHOT
+IMAGE=juplo/springified-producer:1.0-SNAPSHOT
if [ "$1" = "cleanup" ]
then
sleep 15
-echo foo | http -v :8080/bar
+echo 'Hallo Welt!' | http -v :8080/peter
+echo peter | http -v :8080/
+http -v PUT :8080/peter
dd if=/dev/zero bs=1024 count=1024 | http -v :8080/fehler
http -v :8081/seen
command: sleep infinity
producer:
- image: juplo/rest-producer:1.0-SNAPSHOT
+ image: juplo/springified-producer:1.0-SNAPSHOT
ports:
- 8080:8080
environment:
+ spring.kafka.bootstrap-servers: kafka:9092
server.port: 8080
- producer.bootstrap-server: kafka:9092
producer.client-id: producer
- producer.topic: test
+ spring.kafka.template.default-topic: test
consumer:
image: juplo/endless-consumer:1.0-SNAPSHOT
</parent>
<groupId>de.juplo.kafka</groupId>
- <artifactId>rest-producer</artifactId>
- <name>REST Producer</name>
- <description>A Simple Producer that takes messages via POST and confirms successs</description>
+ <artifactId>springified-producer</artifactId>
+ <name>Springified REST Producer</name>
+    <description>A Simple Producer that is implemented with the help of Spring Kafka and takes messages via POST and confirms success</description>
<version>1.0-SNAPSHOT</version>
<dependencies>
<optional>true</optional>
</dependency>
<dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka-clients</artifactId>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka</artifactId>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
package de.juplo.kafka;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.annotation.Bean;
-import org.springframework.util.Assert;
-
-import java.util.concurrent.Executors;
@SpringBootApplication
@Setter
public class ApplicationProperties
{
- private String bootstrapServer;
private String clientId;
private String topic;
- private String acks;
- private Integer batchSize;
- private Integer lingerMs;
- private String compressionType;
}
--- /dev/null
+package de.juplo.kafka;
+
+import lombok.Value;
+
+
+@Value
+public class ClientMessage
+{
+ private final String client;
+ private final String message;
+}
--- /dev/null
+package de.juplo.kafka;
+
+import lombok.Value;
+
+
+@Value
+public class FooMessage
+{
+ private final String client;
+ private final Long timestamp;
+}
--- /dev/null
+package de.juplo.kafka;
+
+import lombok.Value;
+
+import java.time.LocalDateTime;
+
+
+@Value
+public class Greeting
+{
+ private final String name;
+ private final LocalDateTime when = LocalDateTime.now();
+}
private final Integer status;
- public ProduceFailure(Exception e)
+ public ProduceFailure(Throwable e)
{
status = 500;
exception = e.getClass().getSimpleName();
package de.juplo.kafka;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.http.HttpStatus;
-import org.springframework.http.MediaType;
+import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.context.request.async.DeferredResult;
import javax.annotation.PreDestroy;
-import java.util.Properties;
import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
@Slf4j
public class RestProducer
{
private final String id;
- private final String topic;
- private final KafkaProducer<String, String> producer;
+ private final KafkaTemplate<String, Object> kafkaTemplate;
private long produced = 0;
- public RestProducer(ApplicationProperties properties)
+ public RestProducer(
+ ApplicationProperties properties,
+ KafkaTemplate<String, Object> kafkaTemplate)
{
this.id = properties.getClientId();
- this.topic = properties.getTopic();
-
- Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("client.id", properties.getClientId());
- props.put("acks", properties.getAcks());
- props.put("batch.size", properties.getBatchSize());
- props.put("delivery.timeout.ms", 20000); // 20 Sekunden
- props.put("request.timeout.ms", 10000); // 10 Sekunden
- props.put("linger.ms", properties.getLingerMs());
- props.put("compression.type", properties.getCompressionType());
- props.put("key.serializer", StringSerializer.class.getName());
- props.put("value.serializer", StringSerializer.class.getName());
-
- this.producer = new KafkaProducer<>(props);
+ this.kafkaTemplate = kafkaTemplate;
}
@PostMapping(path = "{key}")
- public DeferredResult<ProduceResult> send(
+ public DeferredResult<ProduceResult> message(
@PathVariable String key,
@RequestBody String value)
+ {
+ key = key.trim();
+ return send(key, new ClientMessage(key, value));
+ }
+
+ @PutMapping(path = "{key}")
+ public DeferredResult<ProduceResult> message(@PathVariable String key)
+ {
+ key = key.trim();
+ return send(key, new FooMessage(key, System.currentTimeMillis()));
+ }
+
+ @PostMapping(path = "/")
+ public DeferredResult<ProduceResult> greeting(
+ @RequestBody String name)
+ {
+ name = name.trim();
+ return send(name, new Greeting(name));
+ }
+
+ private DeferredResult<ProduceResult> send(String key, Object value)
{
DeferredResult<ProduceResult> result = new DeferredResult<>();
final long time = System.currentTimeMillis();
- final ProducerRecord<String, String> record = new ProducerRecord<>(
- topic, // Topic
- key, // Key
- value // Value
- );
-
- producer.send(record, (metadata, e) ->
- {
- long now = System.currentTimeMillis();
- if (e == null)
+ kafkaTemplate.sendDefault(key, value).addCallback(
+ (sendResult) ->
{
+ long now = System.currentTimeMillis();
+
// HANDLE SUCCESS
+ RecordMetadata metadata = sendResult.getRecordMetadata();
produced++;
result.setResult(new ProduceSuccess(metadata.partition(), metadata.offset()));
log.debug(
"{} - Sent key={} message={} partition={}/{} timestamp={} latency={}ms",
id,
- record.key(),
- record.value(),
+ key,
+ value,
metadata.partition(),
metadata.offset(),
metadata.timestamp(),
now - time
);
- }
- else
+ },
+ (e) ->
{
+ long now = System.currentTimeMillis();
+
// HANDLE ERROR
result.setErrorResult(new ProduceFailure(e));
log.error(
- "{} - ERROR key={} timestamp={} latency={}ms: {}",
+ "{} - ERROR key={} timestamp=-1 latency={}ms: {}",
id,
- record.key(),
- metadata == null ? -1 : metadata.timestamp(),
+ key,
now - time,
e.toString()
);
- }
- });
+ });
long now = System.currentTimeMillis();
log.trace(
- "{} - Queued #{} key={} latency={}ms",
+ "{} - Queued key={} latency={}ms",
id,
- value,
- record.key(),
+ key,
now - time
);
public void destroy() throws ExecutionException, InterruptedException
{
log.info("{} - Destroy!", id);
- log.info("{} - Closing the KafkaProducer", id);
- producer.close();
log.info("{}: Produced {} messages in total, exiting!", id, produced);
}
}
producer:
- bootstrap-server: :9092
client-id: DEV
topic: test
- acks: -1
- batch-size: 16384
- linger-ms: 0
- compression-type: gzip
management:
endpoint:
shutdown:
enabled: true
info:
kafka:
- bootstrap-server: ${producer.bootstrap-server}
+ bootstrap-servers: ${spring.kafka.bootstrap-servers}
client-id: ${producer.client-id}
- topic: ${producer.topic}
- acks: ${producer.acks}
- batch-size: ${producer.batch-size}
- linger-ms: ${producer.linger-ms}
- compression-type: ${producer.compression-type}
+ topic: ${spring.kafka.template.default-topic}
+ acks: ${spring.kafka.producer.acks}
+ batch-size: ${spring.kafka.producer.batch-size}
+ linger-ms: ${spring.kafka.producer.properties.linger.ms}
+ compression-type: ${spring.kafka.producer.compression-type}
+spring:
+ kafka:
+ bootstrap-servers: :9092
+ producer:
+ acks: -1
+ batch-size: 16384
+ compression-type: gzip
+ key-serializer: org.apache.kafka.common.serialization.StringSerializer
+ value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
+ properties:
+ linger.ms: 0
+ delivery.timeout.ms: 20000 # 20 Sekunden
+ request.timeout.ms: 10000 # 10 Sekunden
+ spring.json.type.mapping: >
+ message:de.juplo.kafka.ClientMessage,
+ foo:de.juplo.kafka.FooMessage,
+ greeting:de.juplo.kafka.Greeting
+ template:
+ default-topic: test
logging:
level:
root: INFO
@SpringBootTest(
properties = {
- "spring.kafka.consumer.bootstrap-servers=${spring.embedded.kafka.brokers}",
- "producer.bootstrap-server=${spring.embedded.kafka.brokers}",
- "producer.topic=" + TOPIC})
+ "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+ "spring.kafka.template.default-topic=" + TOPIC})
@AutoConfigureMockMvc
@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
@Slf4j
@Test
- void testSendMessage() throws Exception
+ void testSendClientMessage() throws Exception
{
mockMvc
.perform(post("/peter").content("Hallo Welt!"))
.until(() -> consumer.received.size() == 1);
}
+ @Test
+ void testSendFooMessage() throws Exception
+ {
+ mockMvc
+ .perform(put("/peter"))
+ .andExpect(status().isOk());
+    await("Message was sent")
+ .atMost(Duration.ofSeconds(5))
+ .until(() -> consumer.received.size() == 1);
+ }
+
+ @Test
+ void testSendGreeting() throws Exception
+ {
+ mockMvc
+ .perform(post("/").content("peter"))
+ .andExpect(status().isOk());
+    await("Message was sent")
+ .atMost(Duration.ofSeconds(5))
+ .until(() -> consumer.received.size() == 1);
+ }
+
static class Consumer
{