#!/bin/bash
# NOTE(review): this chunk is unified-diff residue — lines beginning with
# '+' or '-' are patch markers, not executable shell. Original indentation
# was lost in extraction. Comments below are review notes only; no code
# token has been changed.
-IMAGE=juplo/spring-consumer:1.0-SNAPSHOT
+IMAGE=juplo/spring-consumer-json:1.0-SNAPSHOT
# Branch 1: "cleanup" argument selects the tear-down path (its body is lost
# at the hunk boundary below).
if [ "$1" = "cleanup" ]
then
# NOTE(review): the hunk is truncated here — the cleanup-branch body, its
# closing 'fi', and the opening 'if [[' of a multi-line build condition are
# missing; the two stray lines below are the tail of that '[[ ... ]]'
# condition. Verify against the complete script before applying this patch.
"$1" = "build"
]]
then
# Added: remove stale consumer containers before rebuilding the image.
+ docker-compose rm -svf consumer-1 consumer-2
mvn clean install || exit
else
# NOTE(review): message text reads oddly ("Using image existing images:")
# — presumably meant "Using existing images:"; left untouched because it
# is a runtime string inside a patch hunk.
echo "Using image existing images:"
fi
docker-compose up setup
# Replaced: the old flow ran the producer container plus two foreground
# 'mvn spring-boot:run' consumers; the new flow runs producer and both
# consumers as containers, waits for their health endpoints, posts a few
# records via httpie, then stops the consumers and dumps their logs.
-docker-compose up -d producer
-
-mvn spring-boot:run &
-sleep 10
-kill $(jobs -p)
-mvn spring-boot:run &
-sleep 10
-docker-compose stop producer
-kill $(jobs -p)
+docker-compose up -d producer consumer-1 consumer-2
+
+while ! [[ $(http 0:8080/actuator/health 2> /dev/null) =~ "UP" ]]; do echo "Waiting for producer..."; sleep 1; done
+while ! [[ $(http 0:8081/actuator/health 2> /dev/null) =~ "UP" ]]; do echo "Waiting for consumer-1..."; sleep 1; done
+while ! [[ $(http 0:8082/actuator/health 2> /dev/null) =~ "UP" ]]; do echo "Waiting for consumer-2..."; sleep 1; done
+
+echo 6 | http -v :8080/peter
+echo 77 | http -v :8080/klaus
+echo 66 | http -v :8080/peter
+echo 7 | http -v :8080/klaus
+
+docker-compose stop consumer-1 consumer-2
+
+docker-compose logs consumer-1
+docker-compose logs consumer-2
# NOTE(review): diff hunk from a docker-compose file; '+'/'-' are patch
# markers and the original YAML nesting was flattened during extraction.
# The service owning 'command: sleep infinity' (presumably a cli/setup
# container) begins above this chunk — TODO confirm against the full file.
command: sleep infinity
producer:
# Replaced: the simple CLI producer becomes a REST producer publishing
# JSON, exposed on host port 8080 (matches the http calls in the script).
- image: juplo/simple-producer:1.0-SNAPSHOT
- command: producer
+ image: juplo/rest-producer-json:1.0-SNAPSHOT
+ ports:
+ - 8080:8080
+ environment:
+ server.port: 8080
+ producer.bootstrap-server: kafka:9092
+ producer.client-id: producer
+ producer.topic: test
+
+ # consumer-1 and consumer-2 share group-id 'my-group', so Kafka splits
+ # the partitions of topic 'test' between them. Host ports 8081/8082 map
+ # to the health endpoints polled by the run script.
+ consumer-1:
+ image: juplo/spring-consumer-json:1.0-SNAPSHOT
+ ports:
+ - 8081:8080
+ environment:
+ server.port: 8080
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: consumer-1
+ spring.kafka.consumer.group-id: my-group
+ simple.consumer.topic: test
+
+ consumer-2:
+ image: juplo/spring-consumer-json:1.0-SNAPSHOT
+ ports:
+ - 8082:8080
+ environment:
+ server.port: 8080
+ spring.kafka.bootstrap-servers: kafka:9092
+ spring.kafka.client-id: consumer-2
+ spring.kafka.consumer.group-id: my-group
+ simple.consumer.topic: test
<!-- NOTE(review): diff hunk from pom.xml; the artifactId is renamed to the
     JSON variant to match the docker image tag used in the compose file. -->
</parent>
<groupId>de.juplo.kafka</groupId>
- <artifactId>spring-consumer</artifactId>
+ <artifactId>spring-consumer-json</artifactId>
<version>1.0-SNAPSHOT</version>
<!-- NOTE(review): <name> still reads "Spring Consumer" while the
     artifactId now says spring-consumer-json — possibly intentional, but
     confirm whether the display name should be renamed as well. -->
<name>Spring Consumer</name>
<description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
{
@Bean
// Bean factory for the consumer; only the generic value type changes in
// this hunk: String -> Message, matching the switch to the JsonDeserializer
// configured in application.yml. The method body is cut off below — the
// rest of the hunk lies outside this chunk.
public SimpleConsumer simpleConsumer(
- Consumer<String, String> kafkaConsumer,
+ Consumer<String, Message> kafkaConsumer,
KafkaProperties kafkaProperties,
ApplicationProperties applicationProperties)
{
--- /dev/null
+package de.juplo.kafka;
+
+
+/**
+ * Abstract base of the deserialized message hierarchy.
+ *
+ * Each concrete subclass reports its discriminator via {@link #getType()};
+ * the ADD / CALC tokens correspond to the spring.json.type.mapping entries
+ * in the consumer's application.yml.
+ */
+public abstract class Message
+{
+ public enum Type {ADD, CALC}
+
+ // Discriminator identifying the concrete payload kind.
+ public abstract Type getType();
+}
--- /dev/null
+package de.juplo.kafka;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import lombok.Data;
+
+
+/**
+ * Payload for {@link Message.Type#ADD}: carries the next number to add.
+ *
+ * ignoreUnknown = true lets producers send extra JSON properties (e.g.
+ * "number", exercised in MessageTest) without breaking deserialization.
+ */
+@Data
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class MessageAddNumber extends Message
+{
+ // NOTE(review): presumably the next number to add to a running sum —
+ // confirm against the producer that emits this message.
+ private Integer next;
+
+
+ @Override
+ public Type getType()
+ {
+ return Type.ADD;
+ }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import lombok.Data;
+
+
+/**
+ * Payload for {@link Message.Type#CALC}: a marker message with no fields
+ * of its own. ignoreUnknown = true tolerates any extra JSON properties
+ * (e.g. "number", exercised in MessageTest).
+ */
+@Data
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class MessageCalculateSum extends Message
+{
+ @Override
+ public Type getType()
+ {
+ return Type.CALC;
+ }
+}
// NOTE(review): three disjoint diff hunks from SimpleConsumer.java. The
// only change in each is the value generic parameter String -> Message,
// consistent with the JsonDeserializer switch in application.yml.
{
private final String id;
private final String topic;
- private final Consumer<String, String> consumer;
+ private final Consumer<String, Message> consumer;
private long consumed = 0;
// Poll loop: fetch up to 1s worth of records, log the batch size, then
// hand each record to handleRecord().
while (true)
{
- ConsumerRecords<String, String> records =
+ ConsumerRecords<String, Message> records =
consumer.poll(Duration.ofSeconds(1));
log.info("{} - Received {} messages", id, records.count());
- for (ConsumerRecord<String, String> record : records)
+ for (ConsumerRecord<String, Message> record : records)
{
handleRecord(
record.topic(),
// Tail of the handleRecord() signature and body: counts and logs each
// consumed record (the opening of the signature lies outside this chunk).
Integer partition,
Long offset,
String key,
- String value)
+ Message value)
{
consumed++;
log.info("{} - {}: {}/{} - {}={}", id, offset, topic, partition, key, value);
# NOTE(review): diff hunk from the consumer's application.yml (Spring Kafka
# consumer section); original YAML nesting was flattened during extraction.
auto-offset-reset: earliest
auto-commit-interval: 5s
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
# Values are now JSON instead of plain strings.
- value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+ value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
properties:
partition.assignment.strategy: org.apache.kafka.clients.consumer.StickyAssignor
metadata.max.age.ms: 1000
+ # Maps the producer's type-id tokens ADD / CALC to the concrete classes
+ # the JsonDeserializer instantiates (see the Message hierarchy).
+ # NOTE(review): '>' is a YAML folded scalar, so the value reaches Spring
+ # as a single line with spaces after the commas — spring-kafka trims the
+ # mapping entries, but confirm with the version in use.
+ spring.json.type.mapping: >
+ ADD:de.juplo.kafka.MessageAddNumber,
+ CALC:de.juplo.kafka.MessageCalculateSum
logging:
level:
root: INFO
--- /dev/null
+package de.juplo.kafka;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+
+/**
+ * Verifies that the concrete Message payloads deserialize from JSON,
+ * including JSON carrying properties the target class does not declare
+ * (tolerated via @JsonIgnoreProperties(ignoreUnknown = true)).
+ */
+public class MessageTest
+{
+ ObjectMapper mapper = new ObjectMapper();
+
+ @Test
+ @DisplayName("Deserialize a MessageAddNumber message")
+ public void testDeserializeMessageAddNumber()
+ {
+ // Second case carries the extra property "number", which must be ignored.
+ Assertions.assertDoesNotThrow(() -> mapper.readValue("{\"next\":42}", MessageAddNumber.class));
+ Assertions.assertDoesNotThrow(() -> mapper.readValue("{\"number\":666,\"next\":42}", MessageAddNumber.class));
+ }
+
+ @Test
+ @DisplayName("Deserialize a MessageCalculateSum message")
+ // NOTE(review): the 'throws JsonProcessingException' clause is redundant —
+ // assertDoesNotThrow wraps the lambda — but it is harmless.
+ public void testDeserializeMessageCalculateSum() throws JsonProcessingException
+ {
+ Assertions.assertDoesNotThrow(() -> mapper.readValue("{}", MessageCalculateSum.class));
+ Assertions.assertDoesNotThrow(() -> mapper.readValue("{\"number\":666}", MessageCalculateSum.class));
+ }
+}