From b47ddb40db9534dcc55bc297c6b6e6d2a4d85540 Mon Sep 17 00:00:00 2001
From: Kai Moritz
Date: Sun, 2 Feb 2025 16:48:19 +0100
Subject: [PATCH] Converted the app into a library that contains the reused
 messages
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

---
 .dockerignore                                 |   3 -
 .maven-dockerexclude                          |   1 -
 .maven-dockerinclude                          |   1 -
 Dockerfile                                    |   5 -
 README.sh                                     |  39 ----
 build.gradle                                  |  57 +-----
 docker/docker-compose.yml                     | 173 ------------------
 pom.xml                                       |  71 ++-----
 settings.gradle                               |   2 +-
 src/main/java/de/juplo/kafka/Application.java |  14 --
 .../juplo/kafka/ApplicationConfiguration.java |  57 ------
 .../de/juplo/kafka/ApplicationProperties.java |  52 ------
 .../java/de/juplo/kafka/ExampleConsumer.java  | 119 ------------
 src/main/resources/application.yml            |  37 ----
 src/main/resources/logback.xml                |  14 --
 .../java/de/juplo/kafka/ApplicationTests.java |  46 -----
 16 files changed, 22 insertions(+), 669 deletions(-)
 delete mode 100644 .dockerignore
 delete mode 100644 .maven-dockerexclude
 delete mode 100644 .maven-dockerinclude
 delete mode 100644 Dockerfile
 delete mode 100755 README.sh
 delete mode 100644 docker/docker-compose.yml
 delete mode 100644 src/main/java/de/juplo/kafka/Application.java
 delete mode 100644 src/main/java/de/juplo/kafka/ApplicationConfiguration.java
 delete mode 100644 src/main/java/de/juplo/kafka/ApplicationProperties.java
 delete mode 100644 src/main/java/de/juplo/kafka/ExampleConsumer.java
 delete mode 100644 src/main/resources/application.yml
 delete mode 100644 src/main/resources/logback.xml
 delete mode 100644 src/test/java/de/juplo/kafka/ApplicationTests.java

diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index 6de8137a..00000000
--- a/.dockerignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!Dockerfile
-!target/*.jar
diff --git a/.maven-dockerexclude b/.maven-dockerexclude
deleted file mode 100644
index 72e8ffc0..00000000
--- a/.maven-dockerexclude
+++ /dev/null
@@ -1 +0,0 @@
-*
diff --git a/.maven-dockerinclude b/.maven-dockerinclude
deleted file mode 100644
index fd6cecd2..00000000
--- a/.maven-dockerinclude
+++ /dev/null
@@ -1 +0,0 @@
-target/*.jar
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 9e196ff0..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,5 +0,0 @@
-FROM eclipse-temurin:21-jre
-VOLUME /tmp
-COPY target/*.jar /opt/app.jar
-ENTRYPOINT [ "java", "-jar", "/opt/app.jar" ]
-CMD []
diff --git a/README.sh b/README.sh
deleted file mode 100755
index 7152ec9d..00000000
--- a/README.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-IMAGE=juplo/spring-consumer:1.1-json-SNAPSHOT
-
-if [ "$1" = "cleanup" ]
-then
-  docker compose -f docker/docker-compose.yml down -t0 -v --remove-orphans
-  mvn clean
-  exit
-fi
-
-docker compose -f docker/docker-compose.yml up -d --remove-orphans kafka-1 kafka-2 kafka-3
-docker compose -f docker/docker-compose.yml rm -svf consumer
-
-if [[
-  $(docker image ls -q $IMAGE) == "" ||
-  "$1" = "build"
-]]
-then
-  mvn clean install || exit
-else
-  echo "Using existing image:"
-  docker image ls $IMAGE
-fi
-
-docker compose -f docker/docker-compose.yml up --remove-orphans setup || exit 1
-
-
-docker compose -f docker/docker-compose.yml up -d producer
-docker compose -f docker/docker-compose.yml up -d consumer
-
-sleep 5
-docker compose -f docker/docker-compose.yml stop consumer
-
-docker compose -f docker/docker-compose.yml start consumer
-sleep 5
-
-docker compose -f docker/docker-compose.yml stop producer consumer
-docker compose -f docker/docker-compose.yml logs consumer
diff --git a/build.gradle b/build.gradle
index 3ddca4bb..0d3fdb9b 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,14 +1,10 @@
-import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage
-
 plugins {
   id 'java'
-  id 'org.springframework.boot' version '3.4.1'
   id 'io.spring.dependency-management' version '1.1.7'
-  id 'com.bmuschko.docker-remote-api' version '9.3.3'
 }

-group = 'de.juplo.kafka'
-version = '1.1-json-SNAPSHOT'
+group = 'de.juplo.messages'
+version = '1.0-SNAPSHOT'

 java {
   toolchain {
@@ -26,19 +22,18 @@
 repositories {
   mavenCentral()
 }

+dependencyManagement {
+  imports {
+    mavenBom "org.springframework.boot:spring-boot-dependencies:3.1.4"
+  }
+}
+
 dependencies {
-  implementation 'org.springframework.kafka:spring-kafka'
-  implementation 'org.springframework.boot:spring-boot-starter-actuator'
-  implementation 'org.springframework.boot:spring-boot-starter-validation'
-  implementation 'org.springframework.boot:spring-boot-starter-web'
+  implementation 'com.fasterxml.jackson.core:jackson-databind'
+  implementation 'com.fasterxml.jackson.core:jackson-annotations'
   compileOnly 'org.projectlombok:lombok'
-  developmentOnly 'org.springframework.boot:spring-boot-devtools'
-  annotationProcessor 'org.springframework.boot:spring-boot-configuration-processor'
   annotationProcessor 'org.projectlombok:lombok'
   testImplementation 'org.springframework.boot:spring-boot-starter-test'
-  testImplementation 'org.springframework.kafka:spring-kafka-test'
-  testCompileOnly 'org.projectlombok:lombok'
-  testAnnotationProcessor 'org.projectlombok:lombok'
   testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
 }

@@ -46,35 +41,3 @@ tasks.named('test') {
   useJUnitPlatform()
 }

-docker {
-  // Optional: Konfiguriere den Docker-Host, falls nötig
-  // url = 'unix:///var/run/docker.sock' (Standard)
-}
-
-
-def targetDir = file("${projectDir}/target")
-def appJarName = "${project.name}-${project.version}.jar"
-
-// Task zum Bereinigen des `target`-Verzeichnisses bei `clean`
-clean {
-  delete targetDir
-}
-
-// Task zum Kopieren des Haupt-JARs
-task copyJar(type: Copy) {
-  from "$buildDir/libs/${appJarName}"
-  into targetDir
-  dependsOn build
-
-  doFirst {
-    def libs = file("${targetDir}/libs")
-    mkdir(libs)
-  }
-}
-
-// Docker-Task
-task buildDockerImage(type: DockerBuildImage) {
-  inputDir = file('.')
-  images = ["juplo/${project.name}:${project.version}"]
-  dependsOn copyJar
-}
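Note on the new dependencies: the reusable message classes themselves are not touched by this patch, so they do not show up in the diff. As a rough orientation, a minimal sketch of what such a class presumably looks like follows, assuming plain Jackson data binding. The names Message, MessageAddNumber, the Type enum and the getNext() accessor are taken from the consumer code removed further down; the field name and the annotation are assumptions, not the actual library code.

  package de.juplo.kafka;

  import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

  // Hypothetical sketch of the shared base type. Only Type and getType()
  // are visible in the removed ExampleConsumer; everything else is assumed.
  public abstract class Message
  {
    public enum Type { ADD, CALC }

    public abstract Type getType();
  }

  // Hypothetical sketch of one concrete message. getNext() is called by the
  // removed ExampleConsumer; the field name is an assumption.
  @JsonIgnoreProperties(ignoreUnknown = true)
  class MessageAddNumber extends Message
  {
    private Integer next;

    public Integer getNext()
    {
      return next;
    }

    public void setNext(Integer next)
    {
      this.next = next;
    }

    @Override
    public Type getType()
    {
      return Type.ADD;
    }
  }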
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
deleted file mode 100644
index 9fb105f3..00000000
--- a/docker/docker-compose.yml
+++ /dev/null
@@ -1,173 +0,0 @@
-services:
-  zookeeper:
-    image: confluentinc/cp-zookeeper:7.8.0
-    environment:
-      ZOOKEEPER_CLIENT_PORT: 2181
-    ports:
-      - 2181:2181
-    volumes:
-      - zookeeper-data:/var/lib/zookeeper/data
-      - zookeeper-log:/var/lib/zookeeper/log
-
-  kafka-1:
-    image: confluentinc/cp-kafka:7.8.0
-    environment:
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_LISTENERS: BROKER://:9092, LOCALHOST://:9081
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: BROKER:PLAINTEXT, LOCALHOST:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: BROKER://kafka-1:9092, LOCALHOST://localhost:9081
-      KAFKA_BROKER_ID: 1
-      KAFKA_INTER_BROKER_LISTENER_NAME: BROKER
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
-      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 1000
-    volumes:
-      - kafka-1-data:/var/lib/kafka/data
-    ports:
-      - 9081:9081
-    stop_grace_period: 120s
-    depends_on:
-      - zookeeper
-
-  kafka-2:
-    image: confluentinc/cp-kafka:7.8.0
-    environment:
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_LISTENERS: BROKER://:9092, LOCALHOST://:9082
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: BROKER:PLAINTEXT, LOCALHOST:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: BROKER://kafka-2:9092, LOCALHOST://localhost:9082
-      KAFKA_BROKER_ID: 2
-      KAFKA_INTER_BROKER_LISTENER_NAME: BROKER
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
-      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 10000
-    volumes:
-      - kafka-2-data:/var/lib/kafka/data
-    ports:
-      - 9092:9082
-      - 9082:9082
-    networks:
-      default:
-        aliases:
-          - kafka
-    stop_grace_period: 120s
-    depends_on:
-      - zookeeper
-
-  kafka-3:
-    image: confluentinc/cp-kafka:7.8.0
-    environment:
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_LISTENERS: BROKER://:9092, LOCALHOST://:9083
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: BROKER:PLAINTEXT, LOCALHOST:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: BROKER://kafka-3:9092, LOCALHOST://localhost:9083
-      KAFKA_BROKER_ID: 3
-      KAFKA_INTER_BROKER_LISTENER_NAME: BROKER
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
-      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 10000
-    volumes:
-      - kafka-3-data:/var/lib/kafka/data
-    ports:
-      - 9083:9083
-    stop_grace_period: 120s
-    depends_on:
-      - zookeeper
-
-  cli:
-    image: juplo/toolbox
-    command: sleep infinity
-    stop_grace_period: 0s
-    depends_on:
-      - kafka-1
-      - kafka-2
-      - kafka-3
-
-  setup:
-    image: juplo/toolbox
-    command:
-      - bash
-      - -c
-      - |
-        cub kafka-ready -b kafka-1:9092,kafka-2:9092,kafka-3:9092 3 60 > /dev/null 2>&1 || exit 1
-        if [ -e INITIALIZED ]
-        then
-          echo -n Bereits konfiguriert:
-          cat INITIALIZED
-          kafka-topics --bootstrap-server kafka:9092 --describe --topic test
-        else
-          kafka-topics --bootstrap-server kafka:9092 \
-            --delete \
-            --if-exists \
-            --topic test
-          kafka-topics --bootstrap-server kafka:9092 \
-            --create \
-            --topic test \
-            --partitions 2 \
-            --replication-factor 3 \
-            --config min.insync.replicas=2 \
-          && echo Das Topic \'test\' wurde erfolgreich angelegt: \
-          && kafka-topics --bootstrap-server kafka:9092 --describe --topic test \
-          && date > INITIALIZED
-        fi
-    stop_grace_period: 0s
-    depends_on:
-      - cli
-
-  akhq:
-    image: tchiotludo/akhq:0.23.0
-    ports:
-      - 8888:8080
-    environment:
-      AKHQ_CONFIGURATION: |
-        akhq:
-          connections:
-            docker-kafka-server:
-              properties:
-                bootstrap.servers: "kafka:9092"
-              schema-registry:
-                url: "http://schema-registry:8085"
-              connect:
-                - name: "connect"
-                  url: "http://connect:8083"
-    depends_on:
-      - kafka-1
-      - kafka-2
-      - kafka-3
-
-  producer:
-    image: juplo/spring-producer:1.0-json-SNAPSHOT
-    environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: producer
-      juplo.producer.topic: test
-      juplo.producer.linger-ms: 666
-      juplo.producer.throttle-ms: 100
-
-  consumer:
-    image: juplo/spring-consumer:1.1-json-SNAPSHOT
-    environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: consumer
-      juplo.consumer.topic: test
-
-  peter:
-    image: juplo/spring-consumer:1.1-json-SNAPSHOT
-    environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: peter
-      juplo.consumer.topic: test
-
-  ute:
-    image: juplo/spring-consumer:1.1-json-SNAPSHOT
-    environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: ute
-      juplo.consumer.topic: test
-
-volumes:
-  zookeeper-data:
-  zookeeper-log:
-  kafka-1-data:
-  kafka-2-data:
-  kafka-3-data:
diff --git a/pom.xml b/pom.xml
index 8dd99a57..e6327588 100644
--- a/pom.xml
+++ b/pom.xml
@@ -11,93 +11,44 @@
-  <groupId>de.juplo.kafka</groupId>
-  <artifactId>spring-consumer</artifactId>
-  <name>Spring Consumer</name>
-  <description>Super Simple Consumer-Group, that is implemented as Spring-Boot application and configured by Spring Kafka</description>
-  <version>1.1-json-SNAPSHOT</version>
+  <groupId>de.juplo.messages</groupId>
+  <artifactId>sumup-messages</artifactId>
+  <name>Messages for the SumUp-Example</name>
+  <description>Some reusable messages for the SumUp example</description>
+  <version>1.0-SNAPSHOT</version>

   <properties>
     <java.version>21</java.version>
+    <spring-boot.version>3.4.1</spring-boot.version>
   </properties>

   <dependencies>
     <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-web</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-actuator</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-configuration-processor</artifactId>
-      <optional>true</optional>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-validation</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework.kafka</groupId>
-      <artifactId>spring-kafka</artifactId>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
     </dependency>
     <dependency>
       <groupId>org.projectlombok</groupId>
       <artifactId>lombok</artifactId>
-      <scope>compile</scope>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
       <artifactId>spring-boot-starter-test</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.springframework.kafka</groupId>
-      <artifactId>spring-kafka-test</artifactId>
-      <scope>test</scope>
-    </dependency>
   </dependencies>

   <build>
     <plugins>
       <plugin>
-        <groupId>org.springframework.boot</groupId>
-        <artifactId>spring-boot-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>build-info</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
         <groupId>pl.project13.maven</groupId>
         <artifactId>git-commit-id-plugin</artifactId>
       </plugin>
-      <plugin>
-        <groupId>io.fabric8</groupId>
-        <artifactId>docker-maven-plugin</artifactId>
-        <version>0.45.0</version>
-        <configuration>
-          <images>
-            <image>
-              <name>juplo/%a:%v</name>
-            </image>
-          </images>
-        </configuration>
-        <executions>
-          <execution>
-            <id>build</id>
-            <phase>package</phase>
-            <goals>
-              <goal>build</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
     </plugins>
   </build>

 </project>
diff --git a/settings.gradle b/settings.gradle
index cacb2d5a..47e01c11 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -1 +1 @@
-rootProject.name = 'spring-consumer'
+rootProject.name = 'sumup-messages'
diff --git a/src/main/java/de/juplo/kafka/Application.java b/src/main/java/de/juplo/kafka/Application.java
deleted file mode 100644
index 0069257f..00000000
--- a/src/main/java/de/juplo/kafka/Application.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package de.juplo.kafka;
-
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-
-
-@SpringBootApplication
-public class Application
-{
-  public static void main(String[] args)
-  {
-    SpringApplication.run(Application.class, args);
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
deleted file mode 100644
index 33022bf6..00000000
--- a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.consumer.StickyAssignor;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.springframework.kafka.support.serializer.JsonDeserializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
-  @Bean
-  public ExampleConsumer exampleConsumer(
-      Consumer<String, Message> kafkaConsumer,
-      ApplicationProperties properties,
-      ConfigurableApplicationContext applicationContext)
-  {
-    return
-        new ExampleConsumer(
-            properties.getClientId(),
-            properties.getConsumerProperties().getTopic(),
-            kafkaConsumer,
-            () -> applicationContext.close());
-  }
-
-  @Bean(destroyMethod = "")
-  public KafkaConsumer<String, Message> kafkaConsumer(ApplicationProperties properties)
-  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("group.id", properties.getConsumerProperties().getGroupId());
-    if (properties.getConsumerProperties().getAutoOffsetReset() != null)
-    {
-      props.put("auto.offset.reset", properties.getConsumerProperties().getAutoOffsetReset().name());
-    }
-    if (properties.getConsumerProperties().getAutoCommitInterval() != null)
-    {
-      props.put("auto.commit.interval", properties.getConsumerProperties().getAutoCommitInterval());
-    }
-    props.put("metadata.maxage.ms", 5000); // 5 Sekunden
-    props.put("partition.assignment.strategy", StickyAssignor.class.getName());
-    props.put("key.deserializer", StringDeserializer.class.getName());
-    props.put("value.deserializer", JsonDeserializer.class.getName());
-    props.put("spring.json.type.mapping", "ADD:de.juplo.kafka.MessageAddNumber,CALC:de.juplo.kafka.MessageCalculateSum");
-
-    return new KafkaConsumer<>(props);
-  }
-}
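Note on the removed consumer configuration: the value of spring.json.type.mapping has to match on both sides of the topic, otherwise the type header written by the producer cannot be resolved against the classes in this library. The producer module is not part of this patch; the following is only a hedged sketch of what its serializer setup roughly looks like, assuming a plain KafkaProducer configured analogously to the consumer above. The class name ProducerConfigSketch and the bootstrap address are placeholders.

  package de.juplo.kafka;

  import org.apache.kafka.clients.producer.KafkaProducer;
  import org.apache.kafka.common.serialization.StringSerializer;
  import org.springframework.kafka.support.serializer.JsonSerializer;

  import java.util.Properties;

  public class ProducerConfigSketch
  {
    public static KafkaProducer<String, Message> kafkaProducer()
    {
      Properties props = new Properties();
      props.put("bootstrap.servers", "localhost:9092"); // placeholder
      props.put("client.id", "producer");
      props.put("key.serializer", StringSerializer.class.getName());
      props.put("value.serializer", JsonSerializer.class.getName());
      // Same type ids as in the consumer configuration above, so that the
      // type information written into the record headers maps back onto
      // MessageAddNumber and MessageCalculateSum.
      props.put(
          "spring.json.type.mapping",
          "ADD:de.juplo.kafka.MessageAddNumber,CALC:de.juplo.kafka.MessageCalculateSum");

      return new KafkaProducer<>(props);
    }
  }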
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
deleted file mode 100644
index c8193c9f..00000000
--- a/src/main/java/de/juplo/kafka/ApplicationProperties.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
-  @NotNull
-  @NotEmpty
-  private String bootstrapServer;
-  @NotNull
-  @NotEmpty
-  private String clientId;
-
-  @NotNull
-  private ConsumerProperties consumer;
-
-
-  public ConsumerProperties getConsumerProperties()
-  {
-    return consumer;
-  }
-
-
-  @Validated
-  @Getter
-  @Setter
-  static class ConsumerProperties
-  {
-    @NotNull
-    @NotEmpty
-    private String groupId;
-    @NotNull
-    @NotEmpty
-    private String topic;
-    private OffsetReset autoOffsetReset;
-    private Duration autoCommitInterval;
-
-    enum OffsetReset { latest, earliest, none }
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ExampleConsumer.java b/src/main/java/de/juplo/kafka/ExampleConsumer.java
deleted file mode 100644
index a03fece4..00000000
--- a/src/main/java/de/juplo/kafka/ExampleConsumer.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package de.juplo.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.errors.WakeupException;
-
-import java.time.Duration;
-import java.util.Arrays;
-
-
-@Slf4j
-public class ExampleConsumer implements Runnable
-{
-  private final String id;
-  private final String topic;
-  private final Consumer<String, Message> consumer;
-  private final Thread workerThread;
-  private final Runnable closeCallback;
-
-  private long consumed = 0;
-
-
-  public ExampleConsumer(
-    String clientId,
-    String topic,
-    Consumer<String, Message> consumer,
-    Runnable closeCallback)
-  {
-    this.id = clientId;
-    this.topic = topic;
-    this.consumer = consumer;
-
-    workerThread = new Thread(this, "ExampleConsumer Worker-Thread");
-    workerThread.start();
-
-    this.closeCallback = closeCallback;
-  }
-
-
-  @Override
-  public void run()
-  {
-    try
-    {
-      log.info("{} - Subscribing to topic {}", id, topic);
-      consumer.subscribe(Arrays.asList(topic));
-
-      while (true)
-      {
-        ConsumerRecords<String, Message> records = consumer.poll(Duration.ofSeconds(1));
-
-        log.info("{} - Received {} messages", id, records.count());
-        for (ConsumerRecord<String, Message> record : records)
-        {
-          handleRecord(
-            record.topic(),
-            record.partition(),
-            record.offset(),
-            record.key(),
-            record.value());
-        }
-      }
-    }
-    catch(WakeupException e)
-    {
-      log.info("{} - Consumer was signaled to finish its work", id);
-    }
-    catch(Exception e)
-    {
-      log.error("{} - Unexpected error, unsubscribing!", id, e);
-      consumer.unsubscribe();
-      log.info("{} - Triggering exit of application!", id);
-      new Thread(closeCallback).start();
-    }
-    finally
-    {
-      log.info("{} - Closing the KafkaConsumer", id);
-      consumer.close();
-      log.info("{}: Consumed {} messages in total, exiting!", id, consumed);
-    }
-  }
-
-  private void handleRecord(
-    String topic,
-    Integer partition,
-    Long offset,
-    String key,
-    Message value)
-  {
-    consumed++;
-    log.info("{} - partition={}-{}, offset={}: {}={}", id, topic, partition, offset, key, value);
-    switch (value.getType())
-    {
-      case ADD -> addNumber((MessageAddNumber)value);
-      case CALC -> calcSum((MessageCalculateSum)value);
-      default -> log.error("{} - Ignoring message of unknown type {}", id, value.getType());
-    }
-  }
-
-  private void addNumber(MessageAddNumber addNumber)
-  {
-    log.info("{} - Adding number {}", id, addNumber.getNext());
-  }
-
-  private void calcSum(MessageCalculateSum calculateSum)
-  {
-    log.info("{} - Calculating sum", id);
-  }
-
-  public void shutdown() throws InterruptedException
-  {
-    log.info("{} - Waking up the consumer", id);
-    consumer.wakeup();
-    log.info("{} - Joining the worker thread", id);
-    workerThread.join();
-  }
-}
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
deleted file mode 100644
index 7a06731c..00000000
--- a/src/main/resources/application.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-juplo:
-  bootstrap-server: :9092
-  client-id: DEV
-  consumer:
-    group-id: my-group
-    topic: test
-    auto-offset-reset: earliest
-    auto-commit-interval: 5s
-management:
-  endpoint:
-    shutdown:
-      enabled: true
-  endpoints:
-    web:
-      exposure:
-        include: "*"
-  info:
-    env:
-      enabled: true
-    java:
-      enabled: true
-info:
-  kafka:
-    bootstrap-server: ${juplo.bootstrap-server}
-    client-id: ${juplo.client-id}
-    consumer:
-      group-id: ${juplo.consumer.group-id}
-      topic: ${juplo.consumer.topic}
-      auto-offset-reset: ${juplo.consumer.auto-offset-reset}
-      auto-commit-interval: ${juplo.consumer.auto-commit-interval}
-logging:
-  level:
-    root: INFO
-    de.juplo: DEBUG
-    org.springframework.kafka: INFO
-server:
-  port: 8881
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
deleted file mode 100644
index 9c7af767..00000000
--- a/src/main/resources/logback.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-
-  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder>
-      <pattern>%d{HH:mm:ss.SSS} | %highlight(%-5level) %msg%n</pattern>
-    </encoder>
-  </appender>
-
-  <root level="INFO">
-    <appender-ref ref="STDOUT" />
-  </root>
-
-</configuration>
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
deleted file mode 100644
index ae119bff..00000000
--- a/src/test/java/de/juplo/kafka/ApplicationTests.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package de.juplo.kafka;
-
-import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.test.web.servlet.MockMvc;
-
-import java.time.Duration;
-
-import static de.juplo.kafka.ApplicationTests.PARTITIONS;
-import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.awaitility.Awaitility.await;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-
-@SpringBootTest(
-  properties = {
-    "juplo.bootstrap-server=${spring.embedded.kafka.brokers}",
-    "juplo.consumer.topic=" + TOPIC })
-@AutoConfigureMockMvc
-@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
-public class ApplicationTests
-{
-  static final String TOPIC = "FOO";
-  static final int PARTITIONS = 10;
-
-  @Autowired
-  MockMvc mockMvc;
-
-
-
-  @Test
-  public void testApplicationStartup()
-  {
-    await("Application is healthy")
-      .atMost(Duration.ofSeconds(5))
-      .untilAsserted(() -> mockMvc
-        .perform(get("/actuator/health"))
-        .andExpect(status().isOk())
-        .andExpect(jsonPath("status").value("UP")));
-  }
-}
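Note on the removed test: the Spring Boot integration test above only made sense for the deleted application. For the remaining library, a plain Jackson round-trip test is the more natural fit; spring-boot-starter-test stays a test dependency in both build files and already brings JUnit and AssertJ. The following is only a sketch and builds on the hypothetical MessageAddNumber shape outlined earlier; the JSON field name "next" is an assumption.

  package de.juplo.kafka;

  import com.fasterxml.jackson.databind.ObjectMapper;
  import org.junit.jupiter.api.Test;

  import static org.assertj.core.api.Assertions.assertThat;

  public class MessageAddNumberTest
  {
    private final ObjectMapper objectMapper = new ObjectMapper();

    @Test
    public void deserializesFromJson() throws Exception
    {
      // Assumed JSON shape: a single field "next", matching getNext() in the
      // removed ExampleConsumer.
      MessageAddNumber message =
          objectMapper.readValue("{\"next\":42}", MessageAddNumber.class);

      assertThat(message.getNext()).isEqualTo(42);
      assertThat(message.getType()).isEqualTo(Message.Type.ADD);
    }
  }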
-- 
2.20.1