* `ApplicationConfiguration` eingeführt.
* Um eine bessere Kontrolle über die verwendeten Parameter zu erhalten,
wird `RestProducer` jetzt explizit in `ApplicationConfiguration` erzeugt.
* `KafkaProducer` wird in `ApplicationConfiguration` erzeugt und in
`RestProducer` hereingereicht.
package de.juplo.kafka;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.annotation.Bean;
-import org.springframework.util.Assert;
-
-import java.util.concurrent.Executors;
@SpringBootApplication
-@EnableConfigurationProperties(ApplicationProperties.class)
public class Application
{
public static void main(String[] args)
--- /dev/null
+package de.juplo.kafka;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Properties;
+
+
+/**
+ * Central Spring configuration for the application.
+ * <p>
+ * The {@link KafkaProducer} and the {@code RestProducer} are created here as
+ * explicit beans, so that all producer settings are controlled in one place
+ * and the {@code KafkaProducer} is handed into the {@code RestProducer}
+ * instead of being built inside it.
+ */
+@Configuration
+@EnableConfigurationProperties(ApplicationProperties.class)
+public class ApplicationConfiguration
+{
+ // Creates the RestProducer, wiring in the separately managed KafkaProducer
+ // bean together with the configured client-id, topic and partition.
+ @Bean
+ public RestProducer restProducer(
+ ApplicationProperties properties,
+ KafkaProducer<String, String> kafkaProducer)
+ {
+ return
+ new RestProducer(
+ properties.getClientId(),
+ properties.getTopic(),
+ properties.getPartition(),
+ kafkaProducer);
+ }
+
+ // The KafkaProducer is managed by Spring; destroyMethod = "close" makes the
+ // container close it (flushing buffered records) on context shutdown.
+ @Bean(destroyMethod = "close")
+ public KafkaProducer<String, String> kafkaProducer(ApplicationProperties properties)
+ {
+ Properties props = new Properties();
+ props.put("bootstrap.servers", properties.getBootstrapServer());
+ props.put("client.id", properties.getClientId());
+ props.put("acks", properties.getAcks());
+ props.put("batch.size", properties.getBatchSize());
+ props.put("delivery.timeout.ms", 20000); // 20 seconds
+ props.put("request.timeout.ms", 10000); // 10 seconds
+ props.put("linger.ms", properties.getLingerMs());
+ props.put("compression.type", properties.getCompressionType());
+ // String keys and values; serializers are configured by class name.
+ props.put("key.serializer", StringSerializer.class.getName());
+ props.put("value.serializer", StringSerializer.class.getName());
+
+ return new KafkaProducer<>(props);
+ }
+}
package de.juplo.kafka;
+import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.context.request.async.DeferredResult;
import javax.annotation.PreDestroy;
-import java.util.Properties;
import java.util.concurrent.ExecutionException;
@Slf4j
-@RestController
+@RequestMapping
+@ResponseBody
+@RequiredArgsConstructor
public class RestProducer
{
private final String id;
private long produced = 0;
- public RestProducer(ApplicationProperties properties)
- {
- this.id = properties.getClientId();
- this.topic = properties.getTopic();
- this.partition = properties.getPartition();
-
- Properties props = new Properties();
- props.put("bootstrap.servers", properties.getBootstrapServer());
- props.put("client.id", properties.getClientId());
- props.put("acks", properties.getAcks());
- props.put("batch.size", properties.getBatchSize());
- props.put("delivery.timeout.ms", 20000); // 20 Sekunden
- props.put("request.timeout.ms", 10000); // 10 Sekunden
- props.put("linger.ms", properties.getLingerMs());
- props.put("compression.type", properties.getCompressionType());
- props.put("key.serializer", StringSerializer.class.getName());
- props.put("value.serializer", StringSerializer.class.getName());
-
- this.producer = new KafkaProducer<>(props);
- }
-
@PostMapping(path = "{key}")
public DeferredResult<ProduceResult> send(
@PathVariable String key,
{
return new ErrorResponse(e.getMessage(), HttpStatus.BAD_REQUEST.value());
}
-
- @PreDestroy
- public void destroy() throws ExecutionException, InterruptedException
- {
- log.info("{} - Destroy!", id);
- log.info("{} - Closing the KafkaProducer", id);
- producer.close();
- log.info("{}: Produced {} messages in total, exiting!", id, produced);
- }
}
import static de.juplo.kafka.ApplicationTests.TOPIC;
import static org.awaitility.Awaitility.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;