Separate `README.sh` for Maven and Gradle
author Kai Moritz <kai@juplo.de>
Tue, 25 Mar 2025 17:25:47 +0000 (18:25 +0100)
committer Kai Moritz <kai@juplo.de>
Tue, 25 Mar 2025 17:25:47 +0000 (18:25 +0100)
README-gradle.sh [new file with mode: 0755]
README-maven.sh [new file with mode: 0755]
README.sh [deleted file]
docker/docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/ApplicationConfiguration.java [deleted file]
src/main/java/de/juplo/kafka/ApplicationProperties.java [deleted file]
src/main/java/de/juplo/kafka/ExampleProducer.java [deleted file]
src/test/java/de/juplo/kafka/ApplicationTests.java [deleted file]

diff --git a/README-gradle.sh b/README-gradle.sh
new file mode 100755 (executable)
index 0000000..bc56615
--- /dev/null
+++ b/README-gradle.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+IMAGE=juplo/technick-check:1.0-SNAPSHOT
+
+if [ "$1" = "cleanup" ]
+then
+  docker compose -f docker/docker-compose.yml down -t0 -v --remove-orphans
+  mvn clean
+  exit
+fi
+
+docker compose -f docker/docker-compose.yml up -d --remove-orphans kafka-1 kafka-2 kafka-3
+docker compose -f docker/docker-compose.yml rm -svf technick-check
+
+if [[
+  $(docker image ls -q $IMAGE) == "" ||
+  "$1" = "build"
+]]
+then
+  mvn clean install || exit
+else
+  echo "Using image existing images:"
+  docker image ls $IMAGE
+fi
+
+docker compose -f docker/docker-compose.yml up --remove-orphans setup || exit 1
+
+
+docker compose -f docker/docker-compose.yml up -d technick-check
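
At this point both new scripts still build with Maven; a Gradle counterpart of the cleanup and build steps would presumably look roughly like the sketch below (the gradlew wrapper and the Spring Boot Gradle plugin's bootBuildImage task are assumptions, not taken from this commit):

  # Hypothetical Gradle equivalents of the mvn calls used in the script above
  ./gradlew clean                              # cleanup step, instead of: mvn clean
  ./gradlew bootBuildImage --imageName=$IMAGE  # build step, instead of: mvn clean install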
diff --git a/README-maven.sh b/README-maven.sh
new file mode 100755 (executable)
index 0000000..bc56615
--- /dev/null
+++ b/README-maven.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+IMAGE=juplo/technick-check:1.0-SNAPSHOT
+
+if [ "$1" = "cleanup" ]
+then
+  docker compose -f docker/docker-compose.yml down -t0 -v --remove-orphans
+  mvn clean
+  exit
+fi
+
+docker compose -f docker/docker-compose.yml up -d --remove-orphans kafka-1 kafka-2 kafka-3
+docker compose -f docker/docker-compose.yml rm -svf technick-check
+
+if [[
+  $(docker image ls -q $IMAGE) == "" ||
+  "$1" = "build"
+]]
+then
+  mvn clean install || exit
+else
+  echo "Using image existing images:"
+  docker image ls $IMAGE
+fi
+
+docker compose -f docker/docker-compose.yml up --remove-orphans setup || exit 1
+
+
+docker compose -f docker/docker-compose.yml up -d technick-check
diff --git a/README.sh b/README.sh
deleted file mode 100755 (executable)
index 501349a..0000000
--- a/README.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-IMAGE=juplo/spring-producer:1.0-kafkatemplate-SNAPSHOT
-
-if [ "$1" = "cleanup" ]
-then
-  docker compose -f docker/docker-compose.yml down -t0 -v --remove-orphans
-  mvn clean
-  exit
-fi
-
-docker compose -f docker/docker-compose.yml up -d --remove-orphans kafka-1 kafka-2 kafka-3
-docker compose -f docker/docker-compose.yml rm -svf producer
-
-if [[
-  $(docker image ls -q $IMAGE) == "" ||
-  "$1" = "build"
-]]
-then
-  mvn clean install || exit
-else
-  echo "Using image existing images:"
-  docker image ls $IMAGE
-fi
-
-docker compose -f docker/docker-compose.yml up --remove-orphans setup || exit 1
-
-
-docker compose -f docker/docker-compose.yml up -d producer
-docker compose -f docker/docker-compose.yml up -d peter ute
-sleep 15
-
-docker compose -f docker/docker-compose.yml stop producer
-
-echo
-echo "Von peter empfangen:"
-docker compose -f docker/docker-compose.yml logs peter | grep '\ test\/.'
-echo
-echo "Von ute empfangen:"
-docker compose -f docker/docker-compose.yml logs ute | grep '\ test\/.'
-
-docker compose -f docker/docker-compose.yml stop peter ute
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 2bb942b..8b6cab3 100644 (file)
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -135,24 +135,8 @@ services:
       - kafka-2
       - kafka-3
 
-  producer:
-    image: juplo/spring-producer:1.0-kafkatemplate-SNAPSHOT
-    environment:
-      spring.kafka.bootstrap-servers: kafka:9092
-      spring.kafka.client-id: producer
-      juplo.producer.topic: test
-
-  consumer:
-    image: juplo/simple-consumer:1.0-SNAPSHOT
-    command: kafka:9092 test my-group consumer
-
-  peter:
-    image: juplo/simple-consumer:1.0-SNAPSHOT
-    command: kafka:9092 test my-group peter
-
-  ute:
-    image: juplo/simple-consumer:1.0-SNAPSHOT
-    command: kafka:9092 test my-group ute
+  technick-check:
+    image: juplo/technick-check:1.0-SNAPSHOT
 
 volumes:
   zookeeper-data:
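
The new technick-check service is declared with nothing but its image. If it needs broker access like the removed producer service, an environment block along these lines could be added (a sketch reusing the settings deleted above; which properties the new image actually reads is an assumption):

  technick-check:
    image: juplo/technick-check:1.0-SNAPSHOT
    environment:
      # assumed: the image reads the usual Spring-Kafka settings, as the removed producer did
      spring.kafka.bootstrap-servers: kafka:9092
      spring.kafka.client-id: technick-check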
diff --git a/pom.xml b/pom.xml
index 2795143..20a8c2a 100644 (file)
--- a/pom.xml
+++ b/pom.xml
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>spring-producer</artifactId>
-  <name>Spring Producer</name>
-  <description>A Simple Producer, based on the KafkaTemplate and Spring Boot, that sends messages via Kafka</description>
-  <version>1.0-kafkatemplate-SNAPSHOT</version>
+  <artifactId>technick-check</artifactId>
+  <version>1.0-SNAPSHOT</version>
 
   <properties>
     <java.version>21</java.version>
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
deleted file mode 100644 (file)
index 4a5c8da..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-package de.juplo.kafka;
-
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.core.KafkaTemplate;
-
-import java.time.Duration;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
-  @Bean
-  public ExampleProducer exampleProducer(
-    ApplicationProperties properties,
-    KafkaProperties kafkaProperties,
-    KafkaTemplate<String, String> kafkaTemplate,
-    ConfigurableApplicationContext applicationContext)
-  {
-    return
-      new ExampleProducer(
-        kafkaProperties.getClientId(),
-        properties.getProducerProperties().getTopic(),
-        properties.getProducerProperties().getThrottle() == null
-          ? Duration.ofMillis(500)
-          : properties.getProducerProperties().getThrottle(),
-        kafkaTemplate,
-        () -> applicationContext.close());
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
deleted file mode 100644 (file)
index 908072c..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
-  @NotNull
-  private ProducerProperties producer;
-
-
-  public ProducerProperties getProducerProperties()
-  {
-    return producer;
-  }
-
-
-  @Validated
-  @Getter
-  @Setter
-  static class ProducerProperties
-  {
-    @NotNull
-    @NotEmpty
-    private String topic;
-    private Duration throttle;
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ExampleProducer.java b/src/main/java/de/juplo/kafka/ExampleProducer.java
deleted file mode 100644 (file)
index 09eaa11..0000000
+++ /dev/null
@@ -1,135 +0,0 @@
-package de.juplo.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.RecordMetadata;
-import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.support.SendResult;
-
-import java.time.Duration;
-import java.util.concurrent.CompletableFuture;
-
-
-@Slf4j
-public class ExampleProducer implements Runnable
-{
-  private final String id;
-  private final String topic;
-  private final Duration throttle;
-  private final KafkaTemplate<String, String> kafkaTemplate;
-  private final Thread workerThread;
-  private final Runnable closeCallback;
-
-  private volatile boolean running = true;
-  private long produced = 0;
-
-
-  public ExampleProducer(
-    String id,
-    String topic,
-    Duration throttle,
-    KafkaTemplate<String, String> kafkaTemplate,
-    Runnable closeCallback)
-  {
-    this.id = id;
-    this.topic = topic;
-    this.throttle = throttle;
-    this.kafkaTemplate = kafkaTemplate;
-
-    workerThread = new Thread(this, "ExampleProducer Worker-Thread");
-    workerThread.start();
-
-    this.closeCallback = closeCallback;
-  }
-
-
-  @Override
-  public void run()
-  {
-    long i = 0;
-
-    try
-    {
-      for (; running; i++)
-      {
-        send(Long.toString(i%10), Long.toString(i));
-
-        if (throttle.isPositive())
-        {
-          try
-          {
-            Thread.sleep(throttle);
-          }
-          catch (InterruptedException e)
-          {
-            log.warn("{} - Interrupted while throttling!", e);
-          }
-        }
-      }
-    }
-    catch (Exception e)
-    {
-      log.error("{} - Unexpected error!", id, e);
-      log.info("{} - Triggering exit of application!", id);
-      new Thread(closeCallback).start();
-    }
-    finally
-    {
-      log.info("{}: Produced {} messages in total, exiting!", id, produced);
-    }
-  }
-
-  void send(String key, String value)
-  {
-    final long time = System.currentTimeMillis();
-
-    kafkaTemplate.send(topic, key, value).whenComplete((result, e) ->
-    {
-      long now = System.currentTimeMillis();
-      if (e == null)
-      {
-        // HANDLE SUCCESS
-        RecordMetadata metadata = result.getRecordMetadata();
-        log.debug(
-          "{} - Sent message {}={}, partition={}, offset={}, timestamp={}, latency={}ms",
-          id,
-          key,
-          value,
-          metadata.partition(),
-          metadata.offset(),
-          metadata.timestamp(),
-          now - time
-        );
-      }
-      else
-      {
-        // HANDLE ERROR
-        log.error(
-          "{} - ERROR for message {}={}, latency={}ms: {}",
-          id,
-          key,
-          value,
-          now - time,
-          e.toString()
-        );
-      }
-    });
-
-    long now = System.currentTimeMillis();
-    produced++;
-    log.trace(
-      "{} - Queued message {}={}, latency={}ms",
-      id,
-      key,
-      value,
-      now - time
-    );
-  }
-
-
-  public void shutdown() throws InterruptedException
-  {
-    log.info("{} joining the worker-thread...", id);
-    running = false;
-    workerThread.join();
-  }
-}
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
deleted file mode 100644 (file)
index 7687e9c..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-package de.juplo.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.context.annotation.Bean;
-import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.test.web.servlet.MockMvc;
-
-import java.time.Duration;
-import java.util.LinkedList;
-import java.util.List;
-
-import static de.juplo.kafka.ApplicationTests.PARTITIONS;
-import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.awaitility.Awaitility.await;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-
-@SpringBootTest(
-  properties = {
-    "spring.kafka.consumer.auto-offset-reset=earliest",
-    "juplo.producer.topic=" + TOPIC})
-@AutoConfigureMockMvc
-@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
-@Slf4j
-public class ApplicationTests
-{
-  static final String TOPIC = "FOO";
-  static final int PARTITIONS = 10;
-
-  @Autowired
-  MockMvc mockMvc;
-  @Autowired
-  Consumer consumer;
-
-
-  @BeforeEach
-  public void clear()
-  {
-    consumer.received.clear();
-  }
-
-
-  @Test
-  public void testApplicationStartup()
-  {
-    await("Application is healthy")
-      .atMost(Duration.ofSeconds(5))
-      .untilAsserted(() -> mockMvc
-        .perform(get("/actuator/health"))
-        .andExpect(status().isOk())
-        .andExpect(jsonPath("status").value("UP")));
-  }
-
-  @Test
-  public void testSendMessage() throws Exception
-  {
-    await("Some messages were send")
-      .atMost(Duration.ofSeconds(5))
-      .until(() -> consumer.received.size() >= 1);
-  }
-
-
-  static class Consumer
-  {
-    final List<ConsumerRecord<String, String>> received = new LinkedList<>();
-
-    @KafkaListener(groupId = "TEST", topics = TOPIC)
-    public void receive(ConsumerRecord<String, String> record)
-    {
-      log.debug("Received message: {}", record);
-      received.add(record);
-    }
-  }
-
-  @TestConfiguration
-  static class Configuration
-  {
-    @Bean
-    Consumer consumer()
-    {
-      return new Consumer();
-    }
-  }
-}