Conversion to `supersimple-producer`, adopted from the old branch spring/supersimple-producer spring/supersimple-producer--2024-11-13--si
author     Kai Moritz <kai@juplo.de>
           Tue, 12 Nov 2024 11:36:21 +0000 (12:36 +0100)
committer  Kai Moritz <kai@juplo.de>
           Thu, 14 Nov 2024 18:52:18 +0000 (19:52 +0100)
14 files changed:
.dockerignore [deleted file]
.editorconfig
.maven-dockerexclude [deleted file]
.maven-dockerinclude [deleted file]
README.sh
docker/docker-compose.yml
pom.xml
src/main/java/de/juplo/kafka/Application.java [deleted file]
src/main/java/de/juplo/kafka/ApplicationConfiguration.java [deleted file]
src/main/java/de/juplo/kafka/ApplicationProperties.java [deleted file]
src/main/java/de/juplo/kafka/ExampleProducer.java
src/main/resources/application.yml
src/main/resources/logback.xml [deleted file]
src/test/java/de/juplo/kafka/ApplicationTests.java

diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644 (file)
index 1ad9963..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-*
-!target/*.jar
diff --git a/.editorconfig b/.editorconfig
index c71516c..633c98a 100644 (file)
--- a/.editorconfig
+++ b/.editorconfig
@@ -7,7 +7,7 @@ tab_width = 2
 charset = utf-8
 end_of_line = lf
 trim_trailing_whitespace = true
-insert_final_newline = true
+insert_final_newline = false
 
 [*.properties]
-charset = latin1
+charset = latin1
\ No newline at end of file
diff --git a/.maven-dockerexclude b/.maven-dockerexclude
deleted file mode 100644 (file)
index 72e8ffc..0000000
+++ /dev/null
@@ -1 +0,0 @@
-*
diff --git a/.maven-dockerinclude b/.maven-dockerinclude
deleted file mode 100644 (file)
index fd6cecd..0000000
+++ /dev/null
@@ -1 +0,0 @@
-target/*.jar
diff --git a/README.sh b/README.sh
index 499780a..929b422 100755 (executable)
--- a/README.sh
+++ b/README.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-IMAGE=juplo/spring-producer:1.0-SNAPSHOT
+IMAGE=juplo/supersimple-producer:1.0-SNAPSHOT
 
 if [ "$1" = "cleanup" ]
 then
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index c417a7f..3591900 100644 (file)
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -190,11 +190,11 @@ services:
       - kafka-3
 
   producer:
-    image: juplo/spring-producer:1.0-SNAPSHOT
+    image: juplo/supersimple-producer:1.0-SNAPSHOT
     environment:
-      juplo.bootstrap-server: kafka:9092
-      juplo.client-id: producer
-      juplo.producer.topic: test
+      spring.kafka.bootstrap-servers: kafka:9092
+      spring.kafka.client-id: producer
+      spring.kafka.template.default-topic: test
 
   consumer-1:
     image: juplo/simple-consumer:1.0-SNAPSHOT
diff --git a/pom.xml b/pom.xml
index 841299b..8f6258c 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -12,9 +12,9 @@
   </parent>
 
   <groupId>de.juplo.kafka</groupId>
-  <artifactId>spring-producer</artifactId>
-  <name>Spring Producer</name>
-  <description>A Simple Spring-Boot-Producer, that takes messages via POST and confirms successs</description>
+  <artifactId>supersimple-producer</artifactId>
+  <name>Super Simple Producer</name>
+  <description>Most minimal Kafka Producer ever!</description>
   <version>1.0-SNAPSHOT</version>
 
   <properties>
   <dependencies>
     <dependency>
       <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-web</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-actuator</artifactId>
+      <artifactId>spring-boot-starter</artifactId>
     </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
       <optional>true</optional>
     </dependency>
     <dependency>
-      <groupId>org.springframework.boot</groupId>
-      <artifactId>spring-boot-starter-validation</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka-clients</artifactId>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka</artifactId>
     </dependency>
     <dependency>
       <groupId>org.projectlombok</groupId>
       <artifactId>spring-boot-starter-test</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.springframework.kafka</groupId>
-      <artifactId>spring-kafka</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.springframework.kafka</groupId>
       <artifactId>spring-kafka-test</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.awaitility</groupId>
-      <artifactId>awaitility</artifactId>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
 
   <build>
diff --git a/src/main/java/de/juplo/kafka/Application.java b/src/main/java/de/juplo/kafka/Application.java
deleted file mode 100644 (file)
index 0069257..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-package de.juplo.kafka;
-
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-
-
-@SpringBootApplication
-public class Application
-{
-  public static void main(String[] args)
-  {
-    SpringApplication.run(Application.class, args);
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ApplicationConfiguration.java b/src/main/java/de/juplo/kafka/ApplicationConfiguration.java
deleted file mode 100644 (file)
index 7540dd3..0000000
+++ /dev/null
@@ -1,56 +0,0 @@
-package de.juplo.kafka;
-
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.time.Duration;
-import java.util.Properties;
-
-
-@Configuration
-@EnableConfigurationProperties(ApplicationProperties.class)
-public class ApplicationConfiguration
-{
-  @Bean
-  public ExampleProducer exampleProducer(
-      ApplicationProperties properties,
-      Producer<String, String> kafkaProducer,
-      ConfigurableApplicationContext applicationContext)
-  {
-    return
-        new ExampleProducer(
-            properties.getClientId(),
-            properties.getProducerProperties().getTopic(),
-            properties.getProducerProperties().getThrottle() == null
-              ? Duration.ofMillis(500)
-              : properties.getProducerProperties().getThrottle(),
-            kafkaProducer,
-            () -> applicationContext.close());
-  }
-
-  @Bean(destroyMethod = "")
-  public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
-  {
-    Properties props = new Properties();
-    props.put("bootstrap.servers", properties.getBootstrapServer());
-    props.put("client.id", properties.getClientId());
-    props.put("acks", properties.getProducerProperties().getAcks());
-    props.put("delivery.timeout.ms", (int)properties.getProducerProperties().getDeliveryTimeout().toMillis());
-    props.put("max.block.ms", (int)properties.getProducerProperties().getMaxBlock().toMillis());
-    props.put("buffer.memory", properties.getProducerProperties().getBufferMemory());
-    props.put("batch.size", properties.getProducerProperties().getBatchSize());
-    props.put("metadata.max.age.ms",  5000); //  5 Sekunden
-    props.put("request.timeout.ms",   5000); //  5 Sekunden
-    props.put("linger.ms", properties.getProducerProperties().getLinger().toMillis());
-    props.put("compression.type", properties.getProducerProperties().getCompressionType());
-    props.put("key.serializer", StringSerializer.class.getName());
-    props.put("value.serializer", StringSerializer.class.getName());
-
-    return new KafkaProducer<>(props);
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ApplicationProperties.java b/src/main/java/de/juplo/kafka/ApplicationProperties.java
deleted file mode 100644 (file)
index 4323262..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-package de.juplo.kafka;
-
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.validation.annotation.Validated;
-
-import java.time.Duration;
-
-
-@ConfigurationProperties(prefix = "juplo")
-@Validated
-@Getter
-@Setter
-public class ApplicationProperties
-{
-  @NotNull
-  @NotEmpty
-  private String bootstrapServer;
-  @NotNull
-  @NotEmpty
-  private String clientId;
-
-  @NotNull
-  private ProducerProperties producer;
-
-
-  public ProducerProperties getProducerProperties()
-  {
-    return producer;
-  }
-
-
-  @Validated
-  @Getter
-  @Setter
-  static class ProducerProperties
-  {
-    @NotNull
-    @NotEmpty
-    private String topic;
-    @NotNull
-    @NotEmpty
-    private String acks;
-    @NotNull
-    private Duration deliveryTimeout;
-    @NotNull
-    private Duration maxBlock;
-    @NotNull
-    private Long bufferMemory;
-    @NotNull
-    private Integer batchSize;
-    @NotNull
-    private Duration linger;
-    @NotNull
-    @NotEmpty
-    private String compressionType;
-    private Duration throttle;
-  }
-}
diff --git a/src/main/java/de/juplo/kafka/ExampleProducer.java b/src/main/java/de/juplo/kafka/ExampleProducer.java
index bc5cf89..300d11e 100644 (file)
--- a/src/main/java/de/juplo/kafka/ExampleProducer.java
+++ b/src/main/java/de/juplo/kafka/ExampleProducer.java
 package de.juplo.kafka;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.ProducerRecord;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
 
-import java.time.Duration;
+import java.util.concurrent.CompletableFuture;
 
 
 @Slf4j
-public class ExampleProducer implements Runnable
+// tag::supersimple[]
+@SpringBootApplication
+public class ExampleProducer implements ApplicationRunner
 {
-  private final String id;
-  private final String topic;
-  private final Duration throttle;
-  private final Producer<String, String> producer;
-  private final Thread workerThread;
-  private final Runnable closeCallback;
-
-  private volatile boolean running = true;
-  private long produced = 0;
-
-
-  public ExampleProducer(
-    String id,
-    String topic,
-    Duration throttle,
-    Producer<String, String> producer,
-    Runnable closeCallback)
-  {
-    this.id = id;
-    this.topic = topic;
-    this.throttle = throttle;
-    this.producer = producer;
-
-    workerThread = new Thread(this, "ExampleProducer Worker-Thread");
-    workerThread.start();
-
-    this.closeCallback = closeCallback;
-  }
-
+  @Autowired
+  KafkaTemplate<String, String> kafkaTemplate;
 
   @Override
-  public void run()
+  public void run(ApplicationArguments args)
   {
-    long i = 0;
-
-    try
-    {
-      for (; running; i++)
-      {
-        send(Long.toString(i%10), Long.toString(i));
-
-        if (throttle.isPositive())
-        {
-          try
-          {
-            Thread.sleep(throttle);
-          }
-          catch (InterruptedException e)
-          {
-            log.warn("{} - Interrupted while throttling!", e);
-          }
-        }
-      }
-    }
-    catch (Exception e)
-    {
-      log.error("{} - Unexpected error!", id, e);
-      log.info("{} - Triggering exit of application!", id);
-      new Thread(closeCallback).start();
-    }
-    finally
+    for (int i = 0; true; i++)
     {
-      log.info("{}: Closing the KafkaProducer", id);
-      producer.close();
-      log.info("{}: Produced {} messages in total, exiting!", id, produced);
+      // end::supersimple[]
+      // tag::callback[]
+      CompletableFuture<SendResult<String, String>> completableFuture =
+      // tag::supersimple[]
+          kafkaTemplate.sendDefault(Long.toString(i%10), Long.toString(i));
+      // end::supersimple[]
+
+      completableFuture.thenAccept(result ->
+        log.info(
+          "Sent {}={} to partition={}, offset={}",
+          result.getProducerRecord().key(),
+          result.getProducerRecord().value(),
+          result.getRecordMetadata().partition(),
+          result.getRecordMetadata().offset()));
+
+      completableFuture.exceptionally(e -> {
+        log.error("ERROR sending message", e);
+        return null;
+      });
+      // end::callback[]
+      // tag::supersimple[]
     }
   }
 
-  void send(String key, String value)
-  {
-    final long time = System.currentTimeMillis();
-
-    final ProducerRecord<String, String> record = new ProducerRecord<>(
-        topic,  // Topic
-        key,    // Key
-        value   // Value
-    );
-
-    producer.send(record, (metadata, e) ->
-    {
-      long now = System.currentTimeMillis();
-      if (e == null)
-      {
-        // HANDLE SUCCESS
-        produced++;
-        log.debug(
-            "{} - Sent message {}={}, partition={}:{}, timestamp={}, latency={}ms",
-            id,
-            record.key(),
-            record.value(),
-            metadata.partition(),
-            metadata.offset(),
-            metadata.timestamp(),
-            now - time
-        );
-      }
-      else
-      {
-        // HANDLE ERROR
-        log.error(
-            "{} - ERROR for message {}={}, timestamp={}, latency={}ms: {}",
-            id,
-            record.key(),
-            record.value(),
-            metadata == null ? -1 : metadata.timestamp(),
-            now - time,
-            e.toString()
-        );
-      }
-    });
-
-    long now = System.currentTimeMillis();
-    log.trace(
-        "{} - Queued message {}={}, latency={}ms",
-        id,
-        record.key(),
-        record.value(),
-        now - time
-    );
-  }
-
-
-  public void shutdown() throws InterruptedException
+  public static void main(String[] args)
   {
-    log.info("{} joining the worker-thread...", id);
-    running = false;
-    workerThread.join();
+    SpringApplication.run(ExampleProducer.class, args);
   }
 }
+// end::supersimple[]
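
Note: the deleted ApplicationConfiguration above is not replaced by any hand-written code. The spring-boot-starter together with spring-kafka auto-configures a KafkaTemplate from the spring.kafka.* properties, which is what makes sendDefault() work in the runner. For orientation only, a roughly equivalent manual configuration might look like the following sketch; the class name and the hard-coded values are illustrative assumptions, not code from this repository.

package de.juplo.kafka;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.Map;

@Configuration
public class ManualKafkaConfiguration // hypothetical: auto-configuration does this for you
{
  @Bean
  public ProducerFactory<String, String> producerFactory()
  {
    // Mirrors spring.kafka.bootstrap-servers and Spring Boot's default String serializers
    return new DefaultKafkaProducerFactory<>(Map.of(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, ":9092",
        ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
        ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class));
  }

  @Bean
  public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory)
  {
    KafkaTemplate<String, String> template = new KafkaTemplate<>(producerFactory);
    // Mirrors spring.kafka.template.default-topic, used by sendDefault()
    template.setDefaultTopic("test");
    return template;
  }
}
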
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
index 98ea128..51cc46c 100644 (file)
--- a/src/main/resources/application.yml
+++ b/src/main/resources/application.yml
@@ -1,46 +1,5 @@
-juplo:
-  bootstrap-server: :9092
-  client-id: DEV
-  producer:
-    topic: test
-    acks: -1
-    delivery-timeout: 10s
-    max-block: 5s
-    buffer-memory: 33554432
-    batch-size: 16384
-    linger: 0
-    compression-type: gzip
-    throttle: 500
-management:
-  endpoint:
-    shutdown:
-      enabled: true
-  endpoints:
-    web:
-      exposure:
-        include: "*"
-  info:
-    env:
-      enabled: true
-    java:
-      enabled: true
-info:
+spring:
   kafka:
-    bootstrap-server: ${juplo.bootstrap-server}
-    client-id: ${juplo.client-id}
-    producer:
-      topic: ${juplo.producer.topic}
-      acks: ${juplo.producer.acks}
-      delivery-timeout: ${juplo.producer.delivery-timeout}
-      max-block: ${juplo.producer.max-block}
-      buffer-memory: ${juplo.producer.buffer-memory}
-      batch-size: ${juplo.producer.batch-size}
-      linger: ${juplo.producer.linger}
-      compression-type: ${juplo.producer.compression-type}
-      throttle: ${juplo.producer.throttle}
-logging:
-  level:
-    root: INFO
-    de.juplo: TRACE
-server:
-  port: 8880
+    bootstrap-servers: :9092
+    template:
+      default-topic: test
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
deleted file mode 100644 (file)
index 9c7af76..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-
-  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-      <Pattern>%d{HH:mm:ss.SSS} | %highlight(%-5level) %msg%n</Pattern>
-    </encoder>
-  </appender>
-
-  <root level="INFO">
-    <appender-ref ref="STDOUT" />
-  </root>
-
-</configuration>
diff --git a/src/test/java/de/juplo/kafka/ApplicationTests.java b/src/test/java/de/juplo/kafka/ApplicationTests.java
index fe8609e..714175e 100644 (file)
--- a/src/test/java/de/juplo/kafka/ApplicationTests.java
+++ b/src/test/java/de/juplo/kafka/ApplicationTests.java
@@ -1,96 +1,24 @@
 package de.juplo.kafka;
 
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
 import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.context.annotation.Bean;
-import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.test.web.servlet.MockMvc;
 
-import java.time.Duration;
-import java.util.LinkedList;
-import java.util.List;
-
-import static de.juplo.kafka.ApplicationTests.PARTITIONS;
 import static de.juplo.kafka.ApplicationTests.TOPIC;
-import static org.awaitility.Awaitility.await;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 
 
 @SpringBootTest(
     properties = {
         "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
-        "spring.kafka.consumer.auto-offset-reset=earliest",
-        "juplo.bootstrap-server=${spring.embedded.kafka.brokers}",
-        "juplo.producer.topic=" + TOPIC})
-@AutoConfigureMockMvc
-@EmbeddedKafka(topics = TOPIC, partitions = PARTITIONS)
-@Slf4j
+        "spring.kafka.template.default-topic=" + TOPIC
+    })
+@EmbeddedKafka(topics = TOPIC)
 public class ApplicationTests
 {
-  static final String TOPIC = "FOO";
-  static final int PARTITIONS = 10;
-
-  @Autowired
-  MockMvc mockMvc;
-  @Autowired
-  Consumer consumer;
-
-
-  @BeforeEach
-  public void clear()
-  {
-    consumer.received.clear();
-  }
-
+  public final static String TOPIC = "out";
 
   @Test
   public void testApplicationStartup()
   {
-    await("Application is healthy")
-      .atMost(Duration.ofSeconds(5))
-      .untilAsserted(() -> mockMvc
-        .perform(get("/actuator/health"))
-        .andExpect(status().isOk())
-        .andExpect(jsonPath("status").value("UP")));
-  }
-
-  @Test
-  public void testSendMessage() throws Exception
-  {
-    await("Some messages were send")
-        .atMost(Duration.ofSeconds(5))
-        .until(() -> consumer.received.size() >= 1);
-  }
-
-
-  static class Consumer
-  {
-    final List<ConsumerRecord<String, String>> received = new LinkedList<>();
-
-    @KafkaListener(groupId = "TEST", topics = TOPIC)
-    public void receive(ConsumerRecord<String, String> record)
-    {
-      log.debug("Received message: {}", record);
-      received.add(record);
-    }
-  }
-
-  @TestConfiguration
-  static class Configuration
-  {
-    @Bean
-    Consumer consumer()
-    {
-      return new Consumer();
-    }
   }
 }
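
Note: the slimmed-down ApplicationTests only verifies that the application context starts against the embedded broker. A possible extension, sketched here under the assumption that spring-kafka-test and AssertJ (via spring-boot-starter-test) are on the classpath, would consume from the embedded topic and assert that the ApplicationRunner actually produced records; the class name, test name, and group id are hypothetical and not part of this commit.

package de.juplo.kafka;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;

import java.time.Duration;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;


@SpringBootTest(
    properties = {
        "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
        "spring.kafka.template.default-topic=out"
    })
@EmbeddedKafka(topics = "out")
public class ProducedRecordsTests // hypothetical test class
{
  @Autowired
  EmbeddedKafkaBroker embeddedKafka;

  @Test
  public void testRecordsAreProduced()
  {
    // Build a throw-away consumer against the embedded broker
    Map<String, Object> props = KafkaTestUtils.consumerProps("test-group", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    try (Consumer<String, String> consumer =
        new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new StringDeserializer())
            .createConsumer())
    {
      embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "out");

      // The ApplicationRunner sends continuously, so some records should arrive quickly
      ConsumerRecords<String, String> records = KafkaTestUtils.getRecords(consumer, Duration.ofSeconds(10));
      assertThat(records.count()).isGreaterThan(0);
    }
  }
}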