*
!Dockerfile
!target/*.jar
-!target/libs/*.jar
target/*.jar
-target/libs/*.jar
FROM eclipse-temurin:21-jre
VOLUME /tmp
COPY target/*.jar /opt/app.jar
-COPY target/libs /opt/libs
ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD [ "kafka:9092", "test", "DCKR" ]
+CMD []
#!/bin/bash
-IMAGE=juplo/simple-producer:1.0-SNAPSHOT
+IMAGE=juplo/spring-producer:1.0-SNAPSHOT
if [ "$1" = "cleanup" ]
then
docker compose -f docker/docker-compose.yml up -d producer
-sleep 5
-
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -c 20 -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
+docker compose -f docker/docker-compose.yml up -d peter ute
+sleep 15
docker compose -f docker/docker-compose.yml stop producer
-docker compose -f docker/docker-compose.yml exec cli kafkacat -b kafka:9092 -t test -e -f'topic=%t\tpartition=%p\toffset=%o\tkey=%k\tvalue=%s\n'
-docker compose -f docker/docker-compose.yml logs producer
+
+echo
+echo "Von peter empfangen:"
+docker compose -f docker/docker-compose.yml logs peter | grep '\ test\/.'
+echo
+echo "Von ute empfangen:"
+docker compose -f docker/docker-compose.yml logs ute | grep '\ test\/.'
+
+docker compose -f docker/docker-compose.yml stop peter ute
dependencies {
	implementation 'org.apache.kafka:kafka-clients'
-	implementation 'ch.qos.logback:logback-classic'
+	implementation 'org.springframework.boot:spring-boot-starter-actuator'
+	implementation 'org.springframework.boot:spring-boot-starter-validation'
+	implementation 'org.springframework.boot:spring-boot-starter-web'
	compileOnly 'org.projectlombok:lombok'
+	developmentOnly 'org.springframework.boot:spring-boot-devtools'
+	annotationProcessor 'org.springframework.boot:spring-boot-configuration-processor'
	annotationProcessor 'org.projectlombok:lombok'
+	testImplementation 'org.springframework.boot:spring-boot-starter-test'
+	testImplementation 'org.springframework.kafka:spring-kafka'
+	testImplementation 'org.springframework.kafka:spring-kafka-test'
+	// Awaitility is used by ApplicationTests (org.awaitility.Awaitility.await) and is
+	// declared explicitly in the Maven build as well; spring-boot-starter-test does
+	// NOT pull it in transitively, so the Gradle test compile needs it, too.
+	testImplementation 'org.awaitility:awaitility'
+	testCompileOnly 'org.projectlombok:lombok'
+	testAnnotationProcessor 'org.projectlombok:lombok'
+	testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
+}
+
+tasks.named('test') {
+	useJUnitPlatform()
}
docker {
- kafka-3
producer:
- image: juplo/simple-producer:1.0-SNAPSHOT
- command: kafka:9092 test producer
+ image: juplo/spring-producer:1.0-SNAPSHOT
+ environment:
+ juplo.bootstrap-server: kafka:9092
+ juplo.client-id: producer
+ juplo.producer.topic: test
+
+ consumer:
+ image: juplo/simple-consumer:1.0-SNAPSHOT
+ command: kafka:9092 test my-group consumer
+
+ peter:
+ image: juplo/simple-consumer:1.0-SNAPSHOT
+ command: kafka:9092 test my-group peter
+
+ ute:
+ image: juplo/simple-consumer:1.0-SNAPSHOT
+ command: kafka:9092 test my-group ute
volumes:
zookeeper-data:
</parent>
<groupId>de.juplo.kafka</groupId>
- <artifactId>simple-producer</artifactId>
- <name>Super Simple Producer</name>
- <description>A Simple Producer, programmed with pure Java, that sends messages via Kafka</description>
+ <artifactId>spring-producer</artifactId>
+ <name>Spring Producer</name>
+ <description>A Simple Producer, based on Spring Boot, that sends messages via Kafka</description>
<version>1.0-SNAPSHOT</version>
<properties>
</properties>
<dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-web</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-actuator</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-configuration-processor</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-validation</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.awaitility</groupId>
+ <artifactId>awaitility</artifactId>
+ <scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
<goals>
- <goal>copy-dependencies</goal>
+ <goal>build-info</goal>
</goals>
- <configuration>
- <outputDirectory>${project.build.directory}/libs</outputDirectory>
- </configuration>
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <configuration>
- <archive>
- <manifest>
- <addClasspath>true</addClasspath>
- <classpathPrefix>libs/</classpathPrefix>
- <mainClass>de.juplo.kafka.ExampleProducer</mainClass>
- </manifest>
- </archive>
- </configuration>
- </plugin>
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
-rootProject.name = 'simple-producer'
+rootProject.name = 'spring-producer'
--- /dev/null
+package de.juplo.kafka;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+
+/**
+ * Spring Boot entry point.
+ * Bootstraps the application context, which creates the {@code KafkaProducer}
+ * and {@code ExampleProducer} beans (see {@code ApplicationConfiguration}).
+ */
+@SpringBootApplication
+public class Application
+{
+  public static void main(String[] args)
+  {
+    SpringApplication.run(Application.class, args);
+  }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.time.Duration;
+import java.util.Properties;
+
+
+/**
+ * Wires the typed {@code juplo.*} settings ({@link ApplicationProperties})
+ * into the {@code KafkaProducer} and the {@code ExampleProducer} beans.
+ */
+@Configuration
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class ApplicationConfiguration
+{
+  /**
+   * Creates the message-sending worker.
+   * Passes a callback that closes the application context, so the producer
+   * can shut down the whole application when it hits an unexpected error.
+   * If no throttle is configured, a default of 500 ms is used.
+   */
+  @Bean
+  public ExampleProducer exampleProducer(
+      ApplicationProperties properties,
+      Producer<String, String> kafkaProducer,
+      ConfigurableApplicationContext applicationContext)
+  {
+    return
+        new ExampleProducer(
+            properties.getClientId(),
+            properties.getProducerProperties().getTopic(),
+            properties.getProducerProperties().getThrottle() == null
+                ? Duration.ofMillis(500)
+                : properties.getProducerProperties().getThrottle(),
+            kafkaProducer,
+            () -> applicationContext.close());
+  }
+
+  /**
+   * Builds the {@code KafkaProducer} from the configured properties.
+   * {@code destroyMethod = ""} disables Spring's automatic {@code close()}
+   * call on context shutdown — ExampleProducer closes the producer itself.
+   */
+  @Bean(destroyMethod = "")
+  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
+  {
+    Properties props = new Properties();
+    props.put("bootstrap.servers", properties.getBootstrapServer());
+    props.put("client.id", properties.getClientId());
+    props.put("acks", properties.getProducerProperties().getAcks());
+    props.put("delivery.timeout.ms", (int)properties.getProducerProperties().getDeliveryTimeout().toMillis());
+    props.put("max.block.ms", (int)properties.getProducerProperties().getMaxBlock().toMillis());
+    props.put("buffer.memory", properties.getProducerProperties().getBufferMemory());
+    props.put("batch.size", properties.getProducerProperties().getBatchSize());
+    props.put("metadata.max.age.ms", 5000); // 5 seconds (hardcoded, not configurable)
+    props.put("request.timeout.ms", 5000); // 5 seconds (hardcoded, not configurable)
+    props.put("linger.ms", properties.getProducerProperties().getLinger().toMillis());
+    props.put("compression.type", properties.getProducerProperties().getCompressionType());
+    props.put("key.serializer", StringSerializer.class.getName());
+    props.put("value.serializer", StringSerializer.class.getName());
+
+    return new KafkaProducer<>(props);
+  }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import jakarta.validation.constraints.NotEmpty;
+import jakarta.validation.constraints.NotNull;
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.validation.annotation.Validated;
+
+import java.time.Duration;
+
+
+/**
+ * Typed, validated binding of the {@code juplo.*} configuration
+ * (see {@code application.yml}); all fields are mandatory except
+ * {@code producer.throttle}.
+ */
+@ConfigurationProperties(prefix = "juplo")
+@Validated
+@Getter
+@Setter
+public class ApplicationProperties
+{
+  @NotNull
+  @NotEmpty
+  private String bootstrapServer;
+  @NotNull
+  @NotEmpty
+  private String clientId;
+
+  @NotNull
+  private ProducerProperties producer;
+
+
+  // Readability alias for getProducer(); used by ApplicationConfiguration.
+  public ProducerProperties getProducerProperties()
+  {
+    return producer;
+  }
+
+
+  /** Settings under {@code juplo.producer.*}. */
+  @Validated
+  @Getter
+  @Setter
+  static class ProducerProperties
+  {
+    @NotNull
+    @NotEmpty
+    private String topic;
+    @NotNull
+    @NotEmpty
+    private String acks;
+    @NotNull
+    private Duration deliveryTimeout;
+    @NotNull
+    private Duration maxBlock;
+    @NotNull
+    private Long bufferMemory;
+    @NotNull
+    private Integer batchSize;
+    @NotNull
+    private Duration linger;
+    @NotNull
+    @NotEmpty
+    private String compressionType;
+    // Optional: pause between records; defaults to 500 ms in ApplicationConfiguration.
+    private Duration throttle;
+  }
+}
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-import java.util.Properties;
+import java.time.Duration;
+// Sends numbered messages in a loop on a dedicated worker thread until
+// shutdown() is called; the KafkaProducer is injected and closed here.
@Slf4j
-public class ExampleProducer
+public class ExampleProducer implements Runnable
{
  private final String id;
  private final String topic;
+  private final Duration throttle;
  private final Producer<String, String> producer;
+  private final Thread workerThread;
+  private final Runnable closeCallback;
  private volatile boolean running = true;
-  private volatile boolean done = false;
  private long produced = 0;
+
  public ExampleProducer(
-    String broker,
+    String id,
    String topic,
-    String clientId)
+    Duration throttle,
+    Producer<String, String> producer,
+    Runnable closeCallback)
  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", broker);
-    props.put("client.id", clientId); // Nur zur Wiedererkennung
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    this.id = clientId;
+    this.id = id;
    this.topic = topic;
-    producer = new KafkaProducer<>(props);
+    this.throttle = throttle;
+    this.producer = producer;
+    this.closeCallback = closeCallback;
+
+    // Assign ALL fields before starting the worker thread: run() may fail
+    // immediately and dereference closeCallback, which would otherwise
+    // still be null at that point.
+    workerThread = new Thread(this, "ExampleProducer Worker-Thread");
+    workerThread.start();
  }
+
+  @Override
  public void run()
  {
    long i = 0;
    for (; running; i++)
    {
      send(Long.toString(i%10), Long.toString(i));
-      Thread.sleep(500);
+
+      if (throttle.isPositive())
+      {
+        try
+        {
+          Thread.sleep(throttle);
+        }
+        catch (InterruptedException e)
+        {
+          // Fill the "{}" with the client-id; passing the exception as the
+          // last argument lets SLF4J log its stack-trace.
+          log.warn("{} - Interrupted while throttling!", id, e);
+        }
+      }
    }
  }
  catch (Exception e)
  {
    log.error("{} - Unexpected error!", id, e);
+    log.info("{} - Triggering exit of application!", id);
+    new Thread(closeCallback).start();
  }
  finally
  {
    log.info("{}: Closing the KafkaProducer", id);
    producer.close();
    log.info("{}: Produced {} messages in total, exiting!", id, produced);
-    done = true;
  }
}
}
-  public static void main(String[] args) throws Exception
+  public void shutdown() throws InterruptedException
  {
-    String broker = ":9092";
-    String topic = "test";
-    String clientId = "DEV";
-
-    switch (args.length)
-    {
-      case 3:
-        clientId = args[2];
-      case 2:
-        topic = args[1];
-      case 1:
-        broker = args[0];
-    }
-
-    ExampleProducer instance = new ExampleProducer(broker, topic, clientId);
-
-    Runtime.getRuntime().addShutdownHook(new Thread(() ->
-    {
-      instance.running = false;
-      while (!instance.done)
-      {
-        log.info("Waiting for main-thread...");
-        try
-        {
-          Thread.sleep(1000);
-        }
-        catch (InterruptedException e) {}
-      }
-      log.info("Shutdown completed.");
-    }));
-
-    log.info(
-      "Running ExampleProducer: broker={}, topic={}, client-id={}",
-      broker,
-      topic,
-      clientId);
-    instance.run();
+    log.info("{} joining the worker-thread...", id);
+    running = false;
+    // NOTE(review): the worker is not interrupted here, so a shutdown may
+    // wait up to one full throttle interval while the worker sleeps —
+    // consider workerThread.interrupt() before join(). TODO confirm intent.
+    workerThread.join();
  }
}
--- /dev/null
+# Default (developer) configuration for the spring-producer.
+# Everything under "juplo" is bound to ApplicationProperties.
+juplo:
+  bootstrap-server: :9092
+  client-id: DEV
+  producer:
+    topic: test
+    acks: -1
+    delivery-timeout: 10s
+    max-block: 5s
+    buffer-memory: 33554432
+    batch-size: 16384
+    linger: 0 # bound to a Duration — a bare number is read as milliseconds
+    compression-type: gzip
+    throttle: 500 # bound to a Duration — 500 ms pause between records
+management:
+  endpoint:
+    shutdown:
+      enabled: true # NOTE(review): remote shutdown + "*" exposure suits demos, not production
+  endpoints:
+    web:
+      exposure:
+        include: "*"
+  info:
+    env:
+      enabled: true
+    java:
+      enabled: true
+# Mirror the effective juplo.* configuration into the /actuator/info endpoint.
+info:
+  kafka:
+    bootstrap-server: ${juplo.bootstrap-server}
+    client-id: ${juplo.client-id}
+    producer:
+      topic: ${juplo.producer.topic}
+      acks: ${juplo.producer.acks}
+      delivery-timeout: ${juplo.producer.delivery-timeout}
+      max-block: ${juplo.producer.max-block}
+      buffer-memory: ${juplo.producer.buffer-memory}
+      batch-size: ${juplo.producer.batch-size}
+      linger: ${juplo.producer.linger}
+      compression-type: ${juplo.producer.compression-type}
+      throttle: ${juplo.producer.throttle}
+logging:
+  level:
+    root: INFO
+    de.juplo: TRACE
+server:
+  port: 8880
</encoder>
</appender>
- <logger name="de.juplo" level="TRACE"/>
-
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>
--- /dev/null
+package de.juplo.kafka;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.test.web.servlet.MockMvc;
+
+import java.time.Duration;
+import java.util.LinkedList;
+import java.util.List;
+
+import static de.juplo.kafka.ApplicationTests.PARTITIONS;
+import static de.juplo.kafka.ApplicationTests.TOPIC;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+
+/**
+ * Integration test against an embedded Kafka broker: redirects
+ * juplo.bootstrap-server to the embedded broker, overrides the topic, and
+ * verifies via a test-local listener that the producer actually sends.
+ */
+@SpringBootTest(
+    properties = {
+        "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+        "spring.kafka.consumer.auto-offset-reset=earliest",
+        "juplo.bootstrap-server=${spring.embedded.kafka.brokers}",
+        "juplo.producer.topic=" + TOPIC})
+@AutoConfigureMockMvc
+@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
+@Slf4j
+public class ApplicationTests
+{
+  static final String TOPIC = "FOO";
+  static final int PARTITIONS = 10;
+
+  @Autowired
+  MockMvc mockMvc;
+  @Autowired
+  Consumer consumer;
+
+
+  // Reset the captured records so each test only sees its own messages.
+  @BeforeEach
+  public void clear()
+  {
+    consumer.received.clear();
+  }
+
+
+  @Test
+  public void testApplicationStartup()
+  {
+    await("Application is healthy")
+        .atMost(Duration.ofSeconds(5))
+        .untilAsserted(() -> mockMvc
+            .perform(get("/actuator/health"))
+            .andExpect(status().isOk())
+            .andExpect(jsonPath("status").value("UP")));
+  }
+
+  @Test
+  public void testSendMessage() throws Exception
+  {
+    await("Some messages were send")
+        .atMost(Duration.ofSeconds(5))
+        .until(() -> consumer.received.size() >= 1);
+  }
+
+
+  // Captures everything the application produces on TOPIC.
+  static class Consumer
+  {
+    // NOTE(review): LinkedList is not thread-safe and is touched by both the
+    // Kafka listener thread and the test/awaitility threads — consider a
+    // synchronized or concurrent collection. TODO confirm.
+    final List<ConsumerRecord<String, String>> received = new LinkedList<>();
+
+    @KafkaListener(groupId = "TEST", topics = TOPIC)
+    public void receive(ConsumerRecord<String, String> record)
+    {
+      log.debug("Received message: {}", record);
+      received.add(record);
+    }
+  }
+
+  @TestConfiguration
+  static class Configuration
+  {
+    @Bean
+    Consumer consumer()
+    {
+      return new Consumer();
+    }
+  }
+}