*
!target/*.jar
-!target/libs/*.jar
target/*.jar
-target/libs/*.jar
FROM eclipse-temurin:21-jre
VOLUME /tmp
COPY target/*.jar /opt/app.jar
-COPY target/libs /opt/libs
ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD [ "kafka:9092", "test", "my-group", "DCKR" ]
+CMD []
#!/bin/bash
-IMAGE=juplo/simple-consumer:1.0-SNAPSHOT
+IMAGE=juplo/spring-consumer:1.1-SNAPSHOT
if [ "$1" = "cleanup" ]
then
    command: kafka:9092 test producer

  consumer:
-    image: juplo/simple-consumer:1.0-SNAPSHOT
-    command: kafka:9092 test my-group consumer
+    image: juplo/spring-consumer:1.1-SNAPSHOT
+    environment:
+      juplo.consumer.bootstrap-server: kafka:9092
+      juplo.consumer.client-id: consumer
+      juplo.consumer.topic: test
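+      # group-id is not set here, so the default (my-group) from application.yml applies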
volumes:
  zookeeper-data:
</parent>
<groupId>de.juplo.kafka</groupId>
- <artifactId>simple-consumer</artifactId>
- <name>Simple Consumer-Group</name>
- <description>Super Simple Consumer-Group, that is implemented as a plain Java-program</description>
- <version>1.0-SNAPSHOT</version>
+ <artifactId>spring-consumer</artifactId>
+ <name>Spring Consumer</name>
+  <description>Super Simple Consumer-Group, implemented as a Spring Boot application and configured through Spring Boot configuration properties</description>
+ <version>1.1-SNAPSHOT</version>
<properties>
<java.version>21</java.version>
</properties>
<dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-web</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-actuator</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-configuration-processor</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-validation</artifactId>
+ </dependency>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
    </dependency>
    <dependency>
      <groupId>org.projectlombok</groupId>
      <artifactId>lombok</artifactId>
</dependency>
<dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka-test</artifactId>
+ <scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
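+        <!-- the build-info goal generates build-info.properties, which the actuator /info endpoint exposes -->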
<executions>
<execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
<goals>
- <goal>copy-dependencies</goal>
+ <goal>build-info</goal>
</goals>
- <configuration>
- <outputDirectory>${project.build.directory}/libs</outputDirectory>
- </configuration>
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <configuration>
- <archive>
- <manifest>
- <addClasspath>true</addClasspath>
- <classpathPrefix>libs/</classpathPrefix>
- <mainClass>de.juplo.kafka.ExampleConsumer</mainClass>
- </manifest>
- </archive>
- </configuration>
- </plugin>
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
--- /dev/null
+package de.juplo.kafka;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+
+@SpringBootApplication
+public class Application
+{
+ public static void main(String[] args)
+ {
+ SpringApplication.run(Application.class, args);
+ }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.StickyAssignor;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Properties;
+
+
+@Configuration
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class ApplicationConfiguration
+{
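+  // Wires the example consumer to the KafkaConsumer bean and the typed configuration properties.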
+ @Bean
+ public ExampleConsumer exampleConsumer(
+ Consumer<String, String> kafkaConsumer,
+ ApplicationProperties properties)
+ {
+ return
+ new ExampleConsumer(
+ properties.getClientId(),
+ properties.getTopic(),
+ kafkaConsumer);
+ }
+
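+  // Hand-built KafkaConsumer, configured from the validated juplo.consumer.* properties.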
+ @Bean
+ public KafkaConsumer<String, String> kafkaConsumer(ApplicationProperties properties)
+ {
+ Properties props = new Properties();
+ props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("client.id", properties.getClientId());
+ props.put("group.id", properties.getGroupId());
+ if (properties.getAutoOffsetReset() != null)
+ {
+ props.put("auto.offset.reset", properties.getAutoOffsetReset().name());
+ }
+    if (properties.getAutoCommitInterval() != null)
+    {
+      props.put("auto.commit.interval.ms", (int) properties.getAutoCommitInterval().toMillis());
+    }
+    props.put("metadata.max.age.ms", 5000); // 5 seconds
+ props.put("partition.assignment.strategy", StickyAssignor.class.getName());
+ props.put("key.deserializer", StringDeserializer.class.getName());
+ props.put("value.deserializer", StringDeserializer.class.getName());
+
+ return new KafkaConsumer<>(props);
+ }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import jakarta.validation.constraints.NotEmpty;
+import jakarta.validation.constraints.NotNull;
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.validation.annotation.Validated;
+
+import java.time.Duration;
+
+
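+// Type-safe binding for the juplo.consumer.* configuration tree, validated at startup.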
+@ConfigurationProperties(prefix = "juplo.consumer")
+@Validated
+@Getter
+@Setter
+public class ApplicationProperties
+{
+ @NotNull
+ @NotEmpty
+ private String bootstrapServer;
+ @NotNull
+ @NotEmpty
+ private String clientId;
+ @NotNull
+ @NotEmpty
+ private String groupId;
+ @NotNull
+ @NotEmpty
+ private String topic;
+  private OffsetReset autoOffsetReset;
+  private Duration autoCommitInterval;
+
+  enum OffsetReset { latest, earliest, none }
+}
package de.juplo.kafka;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
-import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
-import java.util.Properties;
@Slf4j
-public class ExampleConsumer
+public class ExampleConsumer implements Runnable
{
private final String id;
private final String topic;
private final Consumer<String, String> consumer;
+ private final Thread workerThread;
private volatile boolean running = false;
private long consumed = 0;
+
public ExampleConsumer(
- String broker,
+ String clientId,
String topic,
- String groupId,
- String clientId)
+ Consumer<String, String> consumer)
{
- Properties props = new Properties();
- props.put("bootstrap.servers", broker);
- props.put("group.id", groupId); // ID für die Offset-Commits
- props.put("client.id", clientId); // Nur zur Wiedererkennung
- props.put("auto.offset.reset", "earliest"); // Von Beginn an lesen
- props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", StringDeserializer.class.getName());
- props.put("metadata.maxage.ms", 5000);
-
this.id = clientId;
this.topic = topic;
- consumer = new KafkaConsumer<>(props);
+ this.consumer = consumer;
+
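+    // The consumer starts working on its own thread right away, so application startup is not blocked.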
+ workerThread = new Thread(this, "ExampleConsumer Worker-Thread");
+ workerThread.start();
}
+ @Override
public void run()
{
    try
    {
      consumer.subscribe(Arrays.asList(topic));
running = true;
- while (true)
+ while (running)
{
ConsumerRecords<String, String> records =
consumer.poll(Duration.ofSeconds(1));
}
finally
{
- running = false;
- log.info("{} - Closing the KafkaConsumer", id);
- consumer.close();
log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
}
}
}
- public static void main(String[] args) throws Exception
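+  // Invoked by Spring as the inferred destroy-method when the application context is closed.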
+ public void shutdown() throws InterruptedException
{
- String broker = ":9092";
- String topic = "test";
- String groupId = "my-group";
- String clientId = "DEV";
-
- switch (args.length)
- {
- case 4:
- clientId = args[3];
- case 3:
- groupId = args[2];
- case 2:
- topic = args[1];
- case 1:
- broker = args[0];
- }
-
-
- ExampleConsumer instance = new ExampleConsumer(broker, topic, groupId, clientId);
-
- Runtime.getRuntime().addShutdownHook(new Thread(() ->
- {
- instance.consumer.wakeup();
-
- while (instance.running)
- {
- log.info("Waiting for main-thread...");
- try
- {
- Thread.sleep(1000);
- }
- catch (InterruptedException e) {}
- }
- log.info("Shutdown completed.");
- }));
-
- log.info(
- "Running ExampleConsumer: broker={}, topic={}, group-id={}, client-id={}",
- broker,
- topic,
- groupId,
- clientId);
- instance.run();
+ log.info("{} joining the worker-thread...", id);
+ running = false;
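+    // wakeup() aborts a blocking poll() with a WakeupException, so the worker thread can exit promptly.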
+ consumer.wakeup();
+ workerThread.join();
}
}
--- /dev/null
+juplo:
+  consumer:
+    bootstrap-server: :9092
+    client-id: DEV
+    group-id: my-group
+    topic: test
+    auto-offset-reset: earliest
+    auto-commit-interval: 5s
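+# Expose all actuator endpoints, including the shutdown endpoint, for the example setup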
+management:
+  endpoint:
+    shutdown:
+      enabled: true
+  endpoints:
+    web:
+      exposure:
+        include: "*"
+  info:
+    env:
+      enabled: true
+    java:
+      enabled: true
+info:
+  kafka:
+    bootstrap-server: ${juplo.consumer.bootstrap-server}
+    client-id: ${juplo.consumer.client-id}
+    group-id: ${juplo.consumer.group-id}
+    topic: ${juplo.consumer.topic}
+    auto-offset-reset: ${juplo.consumer.auto-offset-reset}
+    auto-commit-interval: ${juplo.consumer.auto-commit-interval}
+logging:
+  level:
+    root: INFO
+    de.juplo: DEBUG
+    org.springframework.kafka: INFO
+server:
+  port: 8881
</encoder>
</appender>
- <logger name="de.juplo" level="TRACE"/>
-
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>
--- /dev/null
+package de.juplo.kafka;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.test.web.servlet.MockMvc;
+
+import java.time.Duration;
+
+import static de.juplo.kafka.ApplicationTests.PARTITIONS;
+import static de.juplo.kafka.ApplicationTests.TOPIC;
+import static org.awaitility.Awaitility.await;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+
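+// Boots the complete application against an embedded Kafka broker provided by spring-kafka-test.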
+@SpringBootTest(
+    properties = {
+        "juplo.consumer.bootstrap-server=${spring.embedded.kafka.brokers}",
+        "juplo.consumer.auto-offset-reset=earliest",
+        "juplo.consumer.topic=" + TOPIC })
+@AutoConfigureMockMvc
+@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
+public class ApplicationTests
+{
+ static final String TOPIC = "FOO";
+ static final int PARTITIONS = 10;
+
+ @Autowired
+ MockMvc mockMvc;
+
+
+
+ @Test
+ public void testApplicationStartup()
+ {
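+    // Poll the health endpoint: the application may still be starting up when the test begins.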
+ await("Application is healthy")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() -> mockMvc
+ .perform(get("/actuator/health"))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("status").value("UP")));
+ }
+}