From: Kai Moritz
Date: Wed, 24 Apr 2024 15:01:58 +0000 (+0200)
Subject: Generating suitable test data with the Confluent Datagen Connector
X-Git-Url: http://juplo.de/gitweb/?a=commitdiff_plain;h=ad5e1605f6150c7747164d8bb5c73dcc4a7801a3;p=demos%2Fkafka%2Ftraining

Generating suitable test data with the Confluent Datagen Connector
---

diff --git a/.gitignore b/.gitignore
index 6268bca..260deee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 docker/confluentinc-kafka-connect-s3-*
+docker/confluentinc-kafka-connect-datagen-*
diff --git a/README.sh b/README.sh
index 9dfefa0..ba773b3 100755
--- a/README.sh
+++ b/README.sh
@@ -22,31 +22,18 @@ echo
 cat s3-sink-connector.json | jq .config | http -v put :8083/connector-plugins/io.confluent.connect.s3.S3SinkConnector/config/validate
 
 echo
-echo "Schreibe Nachrichtem mit Avro-Schema in das Topic \"test\""
+echo "Simuliere einen Producer mit dem Confluent Datagen Connector"
 echo
-docker compose -f docker/docker-compose.yml exec -T cli \
-  kafka-avro-console-producer \
-    --broker-list kafka:9092 \
-    --topic test \
-    --property value.schema='{"type":"record","name":"myrecord","fields":[{"name":"f1","type":"string"}]}' \
-    --property schema.registry.url=http://schema-registry:8085 << EOF
-{"f1":"foo"}
-{"f1":"bar"}
-{"f1":"foofoo"}
-{"f1":"barbar"}
-{"f1":"foobar"}
-{"f1":"barfoo"}
-EOF
+cat datagen-source-connector.json | http -v post :8083/connectors
 
 echo
-echo "Lese Nachrichtem mit Avro-Schema aus dem Topic \"test\""
+echo "Lese probeweise 5 Nachrichten aus den erzeugten Daten ein"
 echo
 docker compose -f docker/docker-compose.yml exec cli \
   kafka-avro-console-consumer \
     --bootstrap-server kafka:9092 \
     --topic test \
     --property schema.registry.url=http://schema-registry:8085 \
-    --from-beginning \
     --max-messages 5
 
 echo
@@ -59,6 +46,17 @@ echo "Schlafe für 10 Sekunden..."
 echo
 sleep 10
 
+echo
+echo "Prüfe den Status des Confluent S3 Sink Connector"
+echo
+http -v :8083/connectors/s3-sink/status
+
+echo
+echo "Entferne datagen-source und s3-sink"
+echo
+http -v delete :8083/connectors/datagen-source
+http -v delete :8083/connectors/s3-sink
+
 echo
 echo "Liste die im S3-Bucket erzeugten Dateien auf"
 echo
diff --git a/datagen-source-connector.json b/datagen-source-connector.json
new file mode 100644
index 0000000..e9bc9bc
--- /dev/null
+++ b/datagen-source-connector.json
@@ -0,0 +1,9 @@
+{
+  "name": "datagen-source",
+  "config": {
+    "name": "datagen-source",
+    "connector.class": "io.confluent.kafka.connect.datagen.DatagenConnector",
+    "kafka.topic": "test",
+    "schema.string": "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"
+  }
+}
diff --git a/docker/confluentinc-kafka-connect-datagen b/docker/confluentinc-kafka-connect-datagen
new file mode 120000
index 0000000..6b068b8
--- /dev/null
+++ b/docker/confluentinc-kafka-connect-datagen
@@ -0,0 +1 @@
+confluentinc-kafka-connect-datagen-0.6.5
\ No newline at end of file
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 5a5ee55..9dc76ad 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -114,6 +114,7 @@ services:
       - 8083:8083
     volumes:
       - ./confluentinc-kafka-connect-s3:/usr/share/java/plugins/confluentinc-kafka-connect-s3:ro
+      - ./confluentinc-kafka-connect-datagen:/usr/share/java/plugins/confluentinc-kafka-connect-datagen:ro
     depends_on:
      - schema-registry
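
Note: a quick way to verify that the datagen-source connector registered above is actually producing records is to query the Kafka Connect REST API, analogous to the s3-sink status check that README.sh already performs. The commands below are only a sketch against the setup from this commit (httpie as HTTP client, Connect REST interface on port 8083, connector name "datagen-source"); the /connectors and /connectors/<name>/status endpoints are part of the standard Kafka Connect REST API.

  # List all registered connectors; should include "datagen-source" and "s3-sink"
  http -v :8083/connectors

  # Check that the datagen-source connector and its task report the state RUNNING
  http -v :8083/connectors/datagen-source/status

With the schema.string from datagen-source-connector.json, every generated record should be an Avro "myrecord" with a single, randomly filled string field "f1", which is what the kafka-avro-console-consumer call in README.sh prints for the first 5 messages.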