+++ /dev/null
-*
-!target/*.jar
+++ /dev/null
-target/*.jar
+++ /dev/null
-FROM eclipse-temurin:21-jre
-VOLUME /tmp
-COPY target/*.jar /opt/app.jar
-ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD []
     image: juplo/simple-consumer:1.0-SNAPSHOT
     command: kafka:9092 test my-group consumer-2
+  consumer-3:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
+    command: kafka:9092 test my-group consumer-3
+
+  consumer-4:
+    image: juplo/simple-consumer:1.0-SNAPSHOT
+    command: kafka:9092 test my-group consumer-4
+
 volumes:
   zookeeper-data:
   zookeeper-log:
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-parent</artifactId>
- <version>3.3.4</version>
- <relativePath/> <!-- lookup parent from repository -->
- </parent>
-
- <groupId>de.juplo.kafka</groupId>
- <artifactId>spring-producer</artifactId>
- <name>Spring Producer</name>
- <description>A simple Spring Boot producer that takes messages via POST and confirms success</description>
- <version>1.0-SNAPSHOT</version>
-
- <properties>
- <java.version>21</java.version>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-web</artifactId>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-actuator</artifactId>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-configuration-processor</artifactId>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-validation</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka-clients</artifactId>
- </dependency>
- <dependency>
- <groupId>org.projectlombok</groupId>
- <artifactId>lombok</artifactId>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-test</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka-test</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.awaitility</groupId>
- <artifactId>awaitility</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-maven-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>build-info</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>pl.project13.maven</groupId>
- <artifactId>git-commit-id-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>io.fabric8</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>0.45.0</version>
- <configuration>
- <images>
- <image>
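- <!-- %a resolves to the artifactId, %v to the project version -->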
- <name>juplo/%a:%v</name>
- </image>
- </images>
- </configuration>
- <executions>
- <execution>
- <id>build</id>
- <phase>package</phase>
- <goals>
- <goal>build</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
-</project>
+++ /dev/null
-package de.juplo.kafka;
-
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-
-
-@SpringBootApplication
-public class Application
-{
- public static void main(String[] args)
- {
- SpringApplication.run(Application.class, args);
- }
-}
+++ /dev/null
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.time.Duration;
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
- @Bean
- public ExampleProducer exampleProducer(
- ApplicationProperties properties,
- Producer<String, String> kafkaProducer,
- ConfigurableApplicationContext applicationContext)
- {
- return
- new ExampleProducer(
- properties.getClientId(),
- properties.getProducerProperties().getTopic(),
- properties.getProducerProperties().getThrottle() == null
- ? Duration.ofMillis(500)
- : properties.getProducerProperties().getThrottle(),
- kafkaProducer,
- () -> applicationContext.close());
- }
-
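- // The empty destroyMethod keeps Spring from closing the producer a second
- // time: ExampleProducer.run() already closes it in its finally block.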
- @Bean(destroyMethod = "")
- public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
- {
- Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("client.id", properties.getClientId());
- props.put("acks", properties.getProducerProperties().getAcks());
- props.put("delivery.timeout.ms", (int)properties.getProducerProperties().getDeliveryTimeout().toMillis());
- props.put("max.block.ms", (int)properties.getProducerProperties().getMaxBlock().toMillis());
- props.put("buffer.memory", properties.getProducerProperties().getBufferMemory());
- props.put("batch.size", properties.getProducerProperties().getBatchSize());
- props.put("metadata.max.age.ms", 5000); // 5 Sekunden
- props.put("request.timeout.ms", 5000); // 5 Sekunden
- props.put("linger.ms", properties.getProducerProperties().getLinger().toMillis());
- props.put("compression.type", properties.getProducerProperties().getCompressionType());
- props.put("key.serializer", StringSerializer.class.getName());
- props.put("value.serializer", StringSerializer.class.getName());
-
- return new KafkaProducer<>(props);
- }
-}
+++ /dev/null
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
- @NotNull
- @NotEmpty
- private String bootstrapServer;
- @NotNull
- @NotEmpty
- private String clientId;
-
- @NotNull
- private ProducerProperties producer;
-
-
- public ProducerProperties getProducerProperties()
- {
- return producer;
- }
-
-
- @Validated
- @Getter
- @Setter
- static class ProducerProperties
- {
- @NotNull
- @NotEmpty
- private String topic;
- @NotNull
- @NotEmpty
- private String acks;
- @NotNull
- private Duration deliveryTimeout;
- @NotNull
- private Duration maxBlock;
- @NotNull
- private Long bufferMemory;
- @NotNull
- private Integer batchSize;
- @NotNull
- private Duration linger;
- @NotNull
- @NotEmpty
- private String compressionType;
- private Duration throttle;
- }
-}
+++ /dev/null
-package de.juplo.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-
-import java.time.Duration;
-
-
-@Slf4j
-public class ExampleProducer implements Runnable
-{
- private final String id;
- private final String topic;
- private final Duration throttle;
- private final Producer<String, String> producer;
- private final Thread workerThread;
- private final Runnable closeCallback;
-
- private volatile boolean running = true;
- private long produced = 0;
-
-
- public ExampleProducer(
- String id,
- String topic,
- Duration throttle,
- Producer<String, String> producer,
- Runnable closeCallback)
- {
- this.id = id;
- this.topic = topic;
- this.throttle = throttle;
- this.producer = producer;
-
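- // Note: the worker thread starts producing as soon as this bean is constructed.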
- workerThread = new Thread(this, "ExampleProducer Worker-Thread");
- workerThread.start();
-
- this.closeCallback = closeCallback;
- }
-
-
- @Override
- public void run()
- {
- long i = 0;
-
- try
- {
- for (; running; i++)
- {
- send(Long.toString(i%10), Long.toString(i));
-
- if (throttle.isPositive())
- {
- try
- {
- Thread.sleep(throttle);
- }
- catch (InterruptedException e)
- {
- log.warn("{} - Interrupted while throttling!", e);
- }
- }
- }
- }
- catch (Exception e)
- {
- log.error("{} - Unexpected error!", id, e);
- log.info("{} - Triggering exit of application!", id);
- new Thread(closeCallback).start();
- }
- finally
- {
- log.info("{}: Closing the KafkaProducer", id);
- producer.close();
- log.info("{}: Produced {} messages in total, exiting!", id, produced);
- }
- }
-
- void send(String key, String value)
- {
- final long time = System.currentTimeMillis();
-
- final ProducerRecord<String, String> record = new ProducerRecord<>(
- topic, // Topic
- key, // Key
- value // Value
- );
-
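- // send() only enqueues the record; the callback below runs later on the
- // producer's I/O thread, once the broker has responded (or delivery failed).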
- producer.send(record, (metadata, e) ->
- {
- long now = System.currentTimeMillis();
- if (e == null)
- {
- // HANDLE SUCCESS
- produced++;
- log.debug(
- "{} - Sent message {}={}, partition={}:{}, timestamp={}, latency={}ms",
- id,
- record.key(),
- record.value(),
- metadata.partition(),
- metadata.offset(),
- metadata.timestamp(),
- now - time
- );
- }
- else
- {
- // HANDLE ERROR
- log.error(
- "{} - ERROR for message {}={}, timestamp={}, latency={}ms: {}",
- id,
- record.key(),
- record.value(),
- metadata == null ? -1 : metadata.timestamp(),
- now - time,
- e.toString()
- );
- }
- });
-
- long now = System.currentTimeMillis();
- log.trace(
- "{} - Queued message {}={}, latency={}ms",
- id,
- record.key(),
- record.value(),
- now - time
- );
- }
-
-
- public void shutdown() throws InterruptedException
- {
- log.info("{} joining the worker-thread...", id);
- running = false;
- workerThread.join();
- }
-}
+++ /dev/null
-juplo:
-  bootstrap-server: :9092
-  client-id: DEV
-  producer:
-    topic: test
-    acks: -1
-    delivery-timeout: 10s
-    max-block: 5s
-    buffer-memory: 33554432
-    batch-size: 16384
-    linger: 0
-    compression-type: gzip
-    throttle: 500
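-    # throttle and linger bind to java.time.Duration; Spring Boot reads bare
-    # numbers as milliseconds, so throttle: 500 means 500 ms between sends.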
-management:
-  endpoint:
-    shutdown:
-      enabled: true
-  endpoints:
-    web:
-      exposure:
-        include: "*"
-  info:
-    env:
-      enabled: true
-    java:
-      enabled: true
-info:
-  kafka:
-    bootstrap-server: ${juplo.bootstrap-server}
-    client-id: ${juplo.client-id}
-    producer:
-      topic: ${juplo.producer.topic}
-      acks: ${juplo.producer.acks}
-      delivery-timeout: ${juplo.producer.delivery-timeout}
-      max-block: ${juplo.producer.max-block}
-      buffer-memory: ${juplo.producer.buffer-memory}
-      batch-size: ${juplo.producer.batch-size}
-      linger: ${juplo.producer.linger}
-      compression-type: ${juplo.producer.compression-type}
-      throttle: ${juplo.producer.throttle}
-logging:
-  level:
-    root: INFO
-    de.juplo: TRACE
-server:
-  port: 8880
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
- <Pattern>%d{HH:mm:ss.SSS} | %highlight(%-5level) %msg%n</Pattern>
- </encoder>
- </appender>
-
- <root level="INFO">
- <appender-ref ref="STDOUT" />
- </root>
-
-</configuration>
+++ /dev/null
-package de.juplo.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.context.annotation.Bean;
-import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.test.web.servlet.MockMvc;
-
-import java.time.Duration;
-import java.util.LinkedList;
-import java.util.List;
-
-import static de.juplo.kafka.ApplicationTests.PARTITIONS;
-import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.awaitility.Awaitility.await;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-
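-// @EmbeddedKafka (below) starts an in-memory broker and publishes its address
-// in the spring.embedded.kafka.brokers property that the test properties reference.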
-@SpringBootTest(
- properties = {
- "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
- "spring.kafka.consumer.auto-offset-reset=earliest",
- "juplo.bootstrap-server=${spring.embedded.kafka.brokers}",
- "juplo.producer.topic=" + TOPIC})
-@AutoConfigureMockMvc
-@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
-@Slf4j
-public class ApplicationTests
-{
- static final String TOPIC = "FOO";
- static final int PARTITIONS = 10;
-
- @Autowired
- MockMvc mockMvc;
- @Autowired
- Consumer consumer;
-
-
- @BeforeEach
- public void clear()
- {
- consumer.received.clear();
- }
-
-
- @Test
- public void testApplicationStartup()
- {
- await("Application is healthy")
- .atMost(Duration.ofSeconds(5))
- .untilAsserted(() -> mockMvc
- .perform(get("/actuator/health"))
- .andExpect(status().isOk())
- .andExpect(jsonPath("status").value("UP")));
- }
-
- @Test
- public void testSendMessage() throws Exception
- {
- await("Some messages were send")
- .atMost(Duration.ofSeconds(5))
- .until(() -> consumer.received.size() >= 1);
- }
-
-
- static class Consumer
- {
- final List<ConsumerRecord<String, String>> received = new LinkedList<>();
-
- @KafkaListener(groupId = "TEST", topics = TOPIC)
- public void receive(ConsumerRecord<String, String> record)
- {
- log.debug("Received message: {}", record);
- received.add(record);
- }
- }
-
- @TestConfiguration
- static class Configuration
- {
- @Bean
- Consumer consumer()
- {
- return new Consumer();
- }
- }
-}