--- /dev/null
+*
+!target/*.jar
--- /dev/null
+target/*.jar
FROM openjdk:11-jre
VOLUME /tmp
COPY target/*.jar /opt/app.jar
-COPY target/libs /opt/libs
ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD [ "DCKR" ]
+CMD []
#!/bin/bash
-IMAGE=juplo/simple-consumer:1.0-SNAPSHOT
+IMAGE=juplo/spring-consumer:1.0-SNAPSHOT
if [ "$1" = "cleanup" ]
then
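As a quick smoke test, the resulting image can also be built and started by hand. The following is only a sketch: it assumes a local Docker installation and a reachable Kafka broker, kafka:9092 is a placeholder for that broker's address, and the published port 8881 is the server.port configured in application.yml below.

mvn clean package
docker build -t juplo/spring-consumer:1.0-SNAPSHOT .
docker run --rm -p 8881:8881 juplo/spring-consumer:1.0-SNAPSHOT --simple.consumer.bootstrap-servers=kafka:9092

Because the CMD is empty now, everything appended to docker run is handed to the Spring Boot application as program arguments, so configuration properties can be overridden on the command line as shown. The image may also be built by the fabric8 docker-maven-plugin during the regular Maven build.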
</parent>
<groupId>de.juplo.kafka</groupId>
- <artifactId>simple-consumer</artifactId>
+ <artifactId>spring-consumer</artifactId>
<version>1.0-SNAPSHOT</version>
- <name>Simple Consumer-Group</name>
- <description>Super Simple Consumer-Group, that is implemented as a plain Java-program</description>
+ <name>Spring Consumer</name>
+  <description>Super simple consumer group, implemented as a Spring Boot application</description>
+
+ <properties>
+ <java.version>11</java.version>
+ </properties>
<dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-web</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-validation</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-actuator</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-configuration-processor</artifactId>
+ <optional>true</optional>
+ </dependency>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
    </dependency>
    <dependency>
      <groupId>org.projectlombok</groupId>
      <artifactId>lombok</artifactId>
    </dependency>
<dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.kafka</groupId>
+ <artifactId>spring-kafka-test</artifactId>
+ <scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
- <groupId>pl.project13.maven</groupId>
- <artifactId>git-commit-id-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
<goals>
- <goal>copy-dependencies</goal>
+ <goal>build-info</goal>
</goals>
- <configuration>
- <outputDirectory>${project.build.directory}/libs</outputDirectory>
- </configuration>
</execution>
</executions>
</plugin>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <configuration>
- <archive>
- <manifest>
- <addClasspath>true</addClasspath>
- <classpathPrefix>libs/</classpathPrefix>
- <mainClass>de.juplo.kafka.SimpleConsumer</mainClass>
- </manifest>
- </archive>
- </configuration>
+ <groupId>pl.project13.maven</groupId>
+ <artifactId>git-commit-id-plugin</artifactId>
</plugin>
<plugin>
<groupId>io.fabric8</groupId>
</execution>
</executions>
</plugin>
+ <plugin>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
</plugins>
</build>
-
</project>
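With the spring-boot-maven-plugin in place, the build produces an executable fat jar, and the build-info goal records build metadata that the actuator later serves under /info. A minimal sketch of a plain local build and run (the jar name assumes Maven's default artifactId-version naming scheme):

mvn clean package
java -jar target/spring-consumer-1.0-SNAPSHOT.jar

The maven-failsafe-plugin only comes into play during mvn verify, and only if integration tests are present.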
--- /dev/null
+package de.juplo.kafka;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+import javax.annotation.PreDestroy;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+
+
+@SpringBootApplication
+@Slf4j
+public class Application implements ApplicationRunner
+{
+ @Autowired
+ ThreadPoolTaskExecutor taskExecutor;
+ @Autowired
+ Consumer<?, ?> kafkaConsumer;
+ @Autowired
+ SimpleConsumer simpleConsumer;
+
+ Future<?> consumerJob;
+
+ @Override
+ public void run(ApplicationArguments args) throws Exception
+ {
+ log.info("Starting SimpleConsumer");
+ consumerJob = taskExecutor.submit(simpleConsumer);
+ }
+
+ @PreDestroy
+ public void shutdown() throws ExecutionException, InterruptedException
+ {
+ log.info("Signaling SimpleConsumer to quit its work");
+ kafkaConsumer.wakeup();
+ log.info("Waiting for SimpleConsumer to finish its work");
+ consumerJob.get();
+ log.info("SimpleConsumer finished its work");
+ }
+
+
+ public static void main(String[] args)
+ {
+ SpringApplication.run(Application.class, args);
+ }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Properties;
+
+
+@Configuration
+@EnableConfigurationProperties({ ApplicationProperties.class })
+public class ApplicationConfiguration
+{
+ @Bean
+ public SimpleConsumer simpleConsumer(
+ Consumer<String, String> kafkaConsumer,
+ ApplicationProperties applicationProperties)
+ {
+ return
+ new SimpleConsumer(
+ applicationProperties.getClientId(),
+ applicationProperties.getTopic(),
+ kafkaConsumer);
+ }
+
+ @Bean
+ public Consumer<String, String> kafkaConsumer(ApplicationProperties properties)
+ {
+ Properties props = new Properties();
+ props.put("bootstrap.servers", properties.getBootstrapServers());
+    props.put("group.id", properties.getGroupId()); // ID for the offset commits
+    props.put("client.id", properties.getClientId()); // Only used to identify the instance
+ props.put("auto.offset.reset", properties.getAutoOffsetReset());
+ props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
+ props.put("key.deserializer", StringDeserializer.class.getName());
+ props.put("value.deserializer", StringDeserializer.class.getName());
+
+ return new KafkaConsumer<>(props);
+ }
+}
--- /dev/null
+package de.juplo.kafka;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.validation.annotation.Validated;
+
+import javax.validation.constraints.NotEmpty;
+import javax.validation.constraints.NotNull;
+
+
+@ConfigurationProperties(prefix = "simple.consumer")
+@Validated
+@Getter
+@Setter
+public class ApplicationProperties
+{
+ @NotNull
+ @NotEmpty
+ private String bootstrapServers;
+ @NotNull
+ @NotEmpty
+ private String groupId;
+ @NotNull
+ @NotEmpty
+ private String clientId;
+ @NotNull
+ @NotEmpty
+ private String topic;
+ @NotNull
+ @NotEmpty
+ private String autoOffsetReset;
+}
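Because all settings are now bound and validated through this properties class, they can be changed at startup without touching the code, for example as command-line arguments. A sketch (the values consumer-2 and 8882 are arbitrary examples):

java -jar target/spring-consumer-1.0-SNAPSHOT.jar \
  --simple.consumer.client-id=consumer-2 \
  --server.port=8882

Started this way, a second instance keeps the default group-id and therefore joins the same consumer group, only under a different client-id and HTTP port.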
package de.juplo.kafka;
+import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
-import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
-import java.util.Properties;
@Slf4j
-public class SimpleConsumer
+@RequiredArgsConstructor
+public class SimpleConsumer implements Runnable
{
private final String id;
private final String topic;
- private final KafkaConsumer<String, String> consumer;
+ private final Consumer<String, String> consumer;
- private volatile boolean running = false;
private long consumed = 0;
- public SimpleConsumer(String broker, String topic, String groupId, String clientId)
- {
- Properties props = new Properties();
- props.put("bootstrap.servers", broker);
-    props.put("group.id", groupId); // ID for the offset commits
-    props.put("client.id", clientId); // Only used to identify the instance
-    props.put("auto.offset.reset", "earliest"); // Read from the beginning
- props.put("partition.assignment.strategy", "org.apache.kafka.clients.consumer.CooperativeStickyAssignor");
- props.put("key.deserializer", StringDeserializer.class.getName());
- props.put("value.deserializer", StringDeserializer.class.getName());
-
- consumer = new KafkaConsumer<>(props);
-
- this.topic = topic;
- this.id = clientId;
- }
-
+ @Override
public void run()
{
try
{
log.info("{} - Subscribing to topic {}", id, topic);
consumer.subscribe(Arrays.asList(topic));
- running = true;
while (true)
{
}
finally
{
- running = false;
log.info("{} - Closing the KafkaConsumer", id);
consumer.close();
log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
}
}
-
-
- public static void main(String[] args) throws Exception
- {
- String broker = ":9092";
- String topic = "test";
- String groupId = "my-group";
- String clientId = "DEV";
-
- switch (args.length)
- {
- case 4:
- clientId = args[3];
- case 3:
- groupId = args[2];
- case 2:
- topic = args[1];
- case 1:
- broker = args[0];
- }
-
-
- SimpleConsumer instance = new SimpleConsumer(broker, topic, groupId, clientId);
-
- Runtime.getRuntime().addShutdownHook(new Thread(() ->
- {
- instance.consumer.wakeup();
-
- while (instance.running)
- {
- log.info("Waiting for main-thread...");
- try
- {
- Thread.sleep(1000);
- }
- catch (InterruptedException e) {}
- }
- log.info("Shutdown completed.");
- }));
-
- log.info(
- "Running SimpleConsumer: broker={}, topic={}, group-id={}, client-id={}",
- broker,
- topic,
- groupId,
- clientId);
- instance.run();
- }
}
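To see the consumer do something, a few records can be fed into the subscribed topic, for example with the console producer that ships with Kafka. A sketch only: the script name and the broker address depend on the local installation.

echo "Hello World" | kafka-console-producer.sh --bootstrap-server localhost:9092 --topic test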
--- /dev/null
+simple:
+ consumer:
+ bootstrap-servers: ":9092"
+ group-id: "my-group"
+ client-id: "DEV"
+ topic: test
+ auto-offset-reset: earliest
+management:
+ endpoint:
+ shutdown:
+ enabled: true
+ endpoints:
+ web:
+ exposure:
+ include: "*"
+ info:
+ env:
+ enabled: true
+ java:
+ enabled: true
+info:
+ kafka:
+ bootstrap-server: ${simple.consumer.bootstrap-servers}
+ client-id: ${simple.consumer.client-id}
+ group-id: ${simple.consumer.group-id}
+ topic: ${simple.consumer.topic}
+ auto-offset-reset: ${simple.consumer.auto-offset-reset}
+logging:
+ level:
+ root: INFO
+ de.juplo: DEBUG
+server:
+ port: 8881
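Once the application is up, the actuator endpoints enabled above can be exercised with curl (8881 is the server.port configured above):

curl http://localhost:8881/actuator/health
curl http://localhost:8881/actuator/info
curl -X POST http://localhost:8881/actuator/shutdown

The last call triggers a graceful shutdown: Spring invokes the @PreDestroy method of Application, which wakes up the Kafka consumer and waits for the polling loop to finish before the JVM exits.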