From: Kai Moritz
Date: Mon, 28 Oct 2024 13:28:57 +0000 (+0100)
Subject: `ExampleProducer` converted into a Spring Boot app (without Spring Kafka)
X-Git-Tag: producer/spring-producer--fixedsharding--null~8
X-Git-Url: http://juplo.de/gitweb/?a=commitdiff_plain;h=b842626705f0cd040cf24449fa48d894f0760d6b;p=demos%2Fkafka%2Ftraining

`ExampleProducer` converted into a Spring Boot app (without Spring Kafka)
---
diff --git a/.dockerignore b/.dockerignore
index 49f82a9..1ad9963 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,3 +1,2 @@
 *
 !target/*.jar
-!target/libs/*.jar
diff --git a/.maven-dockerinclude b/.maven-dockerinclude
index a00c65f..fd6cecd 100644
--- a/.maven-dockerinclude
+++ b/.maven-dockerinclude
@@ -1,2 +1 @@
 target/*.jar
-target/libs/*.jar
diff --git a/Dockerfile b/Dockerfile
index 74e66ed..9e196ff 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,5 @@
 FROM eclipse-temurin:21-jre
 VOLUME /tmp
 COPY target/*.jar /opt/app.jar
-COPY target/libs /opt/libs
 ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD [ "kafka:9092", "test", "DCKR" ]
+CMD []
diff --git a/README.sh b/README.sh
index 3d98ace..499780a 100755
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/simple-producer:1.0-SNAPSHOT
+IMAGE=juplo/spring-producer:1.0-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
@@ -27,10 +27,16 @@ docker compose -f docker/docker-compose.yml up --remove-orphans setup || exit 1
 
 docker compose -f docker/docker-compose.yml up -d producer
 
-sleep 5
-
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -c 20 -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
+docker compose -f docker/docker-compose.yml up -d consumer-1 consumer-2
+sleep 15
 
 docker compose -f docker/docker-compose.yml stop producer
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -e -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
-docker compose -f docker/docker-compose.yml logs producer
+
+echo
+echo "Von consumer-1 empfangen:"
+docker compose -f docker/docker-compose.yml logs consumer-1 | grep '\ test\/.'
+echo
+echo "Von consumer-2 empfangen:"
+docker compose -f docker/docker-compose.yml logs consumer-2 | grep '\ test\/.'
+
+docker compose -f docker/docker-compose.yml stop consumer-1 consumer-2
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 80f0aec..69ac986 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -190,8 +190,19 @@ services:
       - kafka-3
 
   producer:
-    image: juplo/simple-producer:1.0-SNAPSHOT
-    command: kafka:9092 test producer
+    image: juplo/spring-producer:1.0-SNAPSHOT
+    environment:
+      producer.bootstrap-server: kafka:9092
+      producer.client-id: producer
+      producer.topic: test
+
+  consumer-1:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
+    command: kafka:9092 test my-group consumer-1
+
+  consumer-2:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
+    command: kafka:9092 test my-group consumer-2
 
 volumes:
   zookeeper-data:
diff --git a/pom.xml b/pom.xml
index ad7f17a..02707b9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -12,9 +12,9 @@
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>simple-producer</artifactId>
-  <name>Super Simple Producer</name>
-  <description>A Simple Producer, programmed with pure Java, that sends messages via Kafka</description>
+  <artifactId>spring-producer</artifactId>
+  <name>Spring Producer</name>
+  <description>A Simple Spring-Boot-Producer, that takes messages via POST and confirms successs</description>
   <version>1.0-SNAPSHOT</version>
 
   <properties>
@@ -22,6 +22,15 @@
   </properties>
 
   <dependencies>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-configuration-processor</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-validation</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka-clients</artifactId>
@@ -31,42 +40,40 @@
       <artifactId>lombok</artifactId>
     </dependency>
     <dependency>
-      <groupId>ch.qos.logback</groupId>
-      <artifactId>logback-classic</artifactId>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.awaitility</groupId>
+      <artifactId>awaitility</artifactId>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 
   <build>
     <plugins>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
            <goals>
-              <goal>copy-dependencies</goal>
+              <goal>build-info</goal>
            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/libs</outputDirectory>
-            </configuration>
          </execution>
        </executions>
      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifest>
-              <addClasspath>true</addClasspath>
-              <classpathPrefix>libs/</classpathPrefix>
-              <mainClass>de.juplo.kafka.ExampleProducer</mainClass>
-            </manifest>
-          </archive>
-        </configuration>
-      </plugin>
      <plugin>
        <groupId>pl.project13.maven</groupId>
        <artifactId>git-commit-id-plugin</artifactId>
diff --git a/src/main/java/de/juplo/kafka/Application.java b/src/main/java/de/juplo/kafka/Application.java
new file mode 100644
index 0000000..0069257
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/Application.java
@@ -0,0 +1,14 @@
+package de.juplo.kafka;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+
+@SpringBootApplication
+public class Application
+{
+  public static void main(String[] args)
+  {
+    SpringApplication.run(Application.class, args);
+  }
+}
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
new file mode 100644
index 0000000..1c4262e
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
@@ -0,0 +1,46 @@
+package de.juplo.kafka;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Properties;
+
+
+@Configuration
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class ApplicationConfiguration
+{
+  @Bean
+  public ExampleProducer exampleProducer(
+      ApplicationProperties properties,
+      KafkaProducer<String, String> kafkaProducer)
+  {
+    return
+        new ExampleProducer(
+            properties.getClientId(),
+            properties.getTopic(),
+            kafkaProducer);
+  }
+
+  @Bean
+  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
+  {
+    Properties props = new Properties();
+    props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("client.id", properties.getClientId());
+    props.put("acks", properties.getAcks());
+    props.put("batch.size", properties.getBatchSize());
+    props.put("metadata.maxage.ms", 5000); // 5 Sekunden
+    props.put("delivery.timeout.ms", 20000); // 20 Sekunden
+    props.put("request.timeout.ms", 10000); // 10 Sekunden
+    props.put("linger.ms", properties.getLingerMs());
+    props.put("compression.type", properties.getCompressionType());
+    props.put("key.serializer", StringSerializer.class.getName());
+    props.put("value.serializer", StringSerializer.class.getName());
+
+    return new KafkaProducer<>(props);
+  }
+}
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
new file mode 100644
index 0000000..4bf66a8
--- /dev/null
+++ b/src/main/java/de/juplo/kafka/ApplicationProperties.java
@@ -0,0 +1,35 @@
+package de.juplo.kafka;
+
+import jakarta.validation.constraints.NotEmpty;
+import jakarta.validation.constraints.NotNull;
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+
+
+@ConfigurationProperties(prefix = "producer")
+@Getter
+@Setter
+public class ApplicationProperties
+{
+  @NotNull
+  @NotEmpty
+  private String bootstrapServer;
+  @NotNull
+  @NotEmpty
+  private String clientId;
+  @NotNull
+  @NotEmpty
+  private String topic;
+  @NotNull
+  @NotEmpty
+  private String acks;
+  @NotNull
+  private Integer batchSize;
+  @NotNull
+  private Integer lingerMs;
+  @NotNull
+  @NotEmpty
+  private String compressionType;
+}
diff --git a/src/main/java/de/juplo/kafka/ExampleProducer.java b/src/main/java/de/juplo/kafka/ExampleProducer.java
index c12a75e..38bcb9f 100644
--- a/src/main/java/de/juplo/kafka/ExampleProducer.java
+++ b/src/main/java/de/juplo/kafka/ExampleProducer.java
@@ -2,41 +2,36 @@ package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-
-import java.util.Properties;
 
 
 @Slf4j
-public class ExampleProducer
+public class ExampleProducer implements Runnable
 {
   private final String id;
   private final String topic;
   private final Producer<String, String> producer;
+  private final Thread workerThread;
 
   private volatile boolean running = true;
-  private volatile boolean done = false;
   private long produced = 0;
 
+
   public ExampleProducer(
-    String broker,
+    String id,
     String topic,
-    String clientId)
+    Producer<String, String> producer)
   {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", broker);
-    props.put("client.id", clientId); // Nur zur Wiedererkennung
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-    props.put("metadata.maxage.ms", 5000);
-
-    this.id = clientId;
+    this.id = id;
     this.topic = topic;
-    producer = new KafkaProducer<>(props);
+    this.producer = producer;
+
+    workerThread = new Thread(this, "ExampleProducer Worker-Thread");
+    workerThread.start();
   }
 
+
+  @Override
   public void run()
   {
     long i = 0;
@@ -55,10 +50,7 @@ public class ExampleProducer
     }
     finally
     {
-      log.info("{}: Closing the KafkaProducer", id);
-      producer.close();
       log.info("{}: Produced {} messages in total, exiting!", id, produced);
-      done = true;
     }
   }
 
@@ -114,44 +106,10 @@ public class ExampleProducer
   }
 
 
-  public static void main(String[] args) throws Exception
+  public void shutdown() throws InterruptedException
   {
-    String broker = ":9092";
-    String topic = "test";
-    String clientId = "DEV";
-
-    switch (args.length)
-    {
-      case 3:
-        clientId = args[2];
-      case 2:
-        topic = args[1];
-      case 1:
-        broker = args[0];
-    }
-
-    ExampleProducer instance = new ExampleProducer(broker, topic, clientId);
-
-    Runtime.getRuntime().addShutdownHook(new Thread(() ->
-    {
-      instance.running = false;
-      while (!instance.done)
-      {
-        log.info("Waiting for main-thread...");
-        try
-        {
-          Thread.sleep(1000);
-        }
-        catch (InterruptedException e) {}
-      }
-      log.info("Shutdown completed.");
-    }));
-
-    log.info(
-        "Running ExampleProducer: broker={}, topic={}, client-id={}",
-        broker,
-        topic,
-        clientId);
-    instance.run();
+    log.info("{} joining the worker-thread...", id);
+    running = false;
+    workerThread.join();
   }
 }
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
new file mode 100644
index 0000000..d102dd0
--- /dev/null
+++ b/src/main/resources/application.yml
@@ -0,0 +1,12 @@
+producer:
+  bootstrap-server: :9092
+  client-id: DEV
+  topic: test
+  acks: -1
+  batch-size: 16384
+  linger-ms: 0
+  compression-type: gzip
+logging:
+  level:
+    root: INFO
+    de.juplo: TRACE
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
index 7a25e76..9c7af76 100644
--- a/src/main/resources/logback.xml
+++ b/src/main/resources/logback.xml
@@ -7,8 +7,6 @@
-
-
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
new file mode 100644
index 0000000..71b9bf6
--- /dev/null
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -0,0 +1,82 @@
+package de.juplo.kafka;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+
+import java.time.Duration;
+import java.util.LinkedList;
+import java.util.List;
+
+import static de.juplo.kafka.ApplicationTests.PARTITIONS;
+import static de.juplo.kafka.ApplicationTests.TOPIC;
+import static org.awaitility.Awaitility.await;
+
+
+@SpringBootTest(
+    properties = {
+        "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+        "spring.kafka.consumer.auto-offset-reset=earliest",
+        "producer.bootstrap-server=${spring.embedded.kafka.brokers}",
+        "producer.topic=" + TOPIC})
+@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
+@Slf4j
+public class ApplicationTests
+{
+  static final String TOPIC = "FOO";
+  static final int PARTITIONS = 10;
+
+  @Autowired
+  Consumer consumer;
+
+
+  @BeforeEach
+  public void clear()
+  {
+    consumer.received.clear();
+  }
+
+
+  @Test
+  public void testApplicationStartup()
+  {
+  }
+
+  @Test
+  public void testSendMessage() throws Exception
+  {
+    await("Some messages were send")
+        .atMost(Duration.ofSeconds(5))
+        .until(() -> consumer.received.size() >= 1);
+  }
+
+
+  static class Consumer
+  {
+    final List<ConsumerRecord<String, String>> received = new LinkedList<>();
+
+    @KafkaListener(groupId = "TEST", topics = TOPIC)
+    public void receive(ConsumerRecord<String, String> record)
+    {
+      log.debug("Received message: {}", record);
+      received.add(record);
+    }
+  }
+
+  @TestConfiguration
+  static class Configuration
+  {
+    @Bean
+    Consumer consumer()
+    {
+      return new Consumer();
+    }
+  }
+}
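
Note on shutdown behaviour (an editorial sketch, not part of the commit above): the worker thread is now started in the constructor and the old JVM shutdown hook from main() is gone, so tearing the producer down is left to the Spring bean lifecycle. For @Bean methods Spring infers a destroy method by looking for a public close() or shutdown() method, which should pick up ExampleProducer.shutdown() and KafkaProducer.close() automatically when the context is closed. The hypothetical configuration below spells that wiring out explicitly instead of relying on the inference; the class name ExplicitShutdownConfiguration is made up for this example.

package de.juplo.kafka;

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical alternative to the ApplicationConfiguration introduced above:
// the destroy methods are named explicitly instead of being left to Spring's
// default inference of close()/shutdown().
@Configuration
public class ExplicitShutdownConfiguration
{
  @Bean(destroyMethod = "shutdown")
  public ExampleProducer exampleProducer(
      ApplicationProperties properties,
      KafkaProducer<String, String> kafkaProducer)
  {
    return new ExampleProducer(
        properties.getClientId(),
        properties.getTopic(),
        kafkaProducer);
  }

  @Bean(destroyMethod = "close")
  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
  {
    // Only the settings needed for a working example; the commit above
    // additionally configures acks, batching, timeouts and compression.
    Properties props = new Properties();
    props.put("bootstrap.servers", properties.getBootstrapServer());
    props.put("client.id", properties.getClientId());
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", StringSerializer.class.getName());
    return new KafkaProducer<>(props);
  }
}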
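
Note on configuration (again only a sketch assuming standard Spring Boot behaviour, not part of the commit): the producer.* keys bound by ApplicationProperties can be overridden at start-up without editing application.yml, because Spring Boot turns command-line arguments into properties and maps them onto the fields via relaxed binding; the same mechanism binds the docker-compose environment entry producer.bootstrap-server: kafka:9092 to getBootstrapServer(). The launcher class LocalRun below is hypothetical.

package de.juplo.kafka;

import org.springframework.boot.SpringApplication;

// Hypothetical local launcher: overrides the defaults from application.yml
// (bootstrap-server ":9092", client-id "DEV") for a run against a broker on
// localhost:9092. Any other producer.* property can be overridden the same way.
public class LocalRun
{
  public static void main(String[] args)
  {
    SpringApplication.run(
        Application.class,
        "--producer.bootstrap-server=localhost:9092",
        "--producer.client-id=LOCAL",
        "--producer.topic=test");
  }
}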