adding kafka connect profile to dev-toolkit - #261 (#291)
hifly81 authored Dec 18, 2024
1 parent c31368e commit ea26882
Showing 7 changed files with 101 additions and 2 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -182,6 +182,7 @@ Currently supported profiles:
- _replicator_: it will add a Kafka Connect cluster with Confluent Replicator between _kafka1-kafka2-kafka3-kafka4_ and a new cluster with 1 broker _broker-dest_
- _schema-registry_: it will add Confluent Schema Registry.
- _schema-registry-primary-secondary_: it will add 2 Confluent Schema Registries, primary and secondary.
- _connect_: it will add Kafka Connect with a datagen source connector and a file sink connector (see the usage sketch after this list).
- _ksqldb_: it will add a ksqlDB server. It requires the _schema-registry_ profile.
- _consumer_: it will add a demo application implemented with Spring with full client metrics
- _consumer-minimal_: it will add a demo application implemented with Spring with a limited number of client metrics
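A hypothetical usage sketch for the new profile. The exact invocation depends on how start.sh collects profile names, which this diff does not show; the `--profile connect` flag below is an assumption based on the `profiles:` entry in docker-compose.connect.yaml:

```bash
# Hypothetical: bring up the dev-toolkit with the Kafka Connect profile enabled.
# Assumes start.sh forwards --profile flags to docker compose.
cd dev-toolkit
./start.sh --profile connect
```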
13 changes: 13 additions & 0 deletions dev-toolkit/connect/connector_datagen.json
@@ -0,0 +1,13 @@
{
"name" : "datagen-sample",
"config": {
"connector.class": "io.confluent.kafka.connect.datagen.DatagenConnector",
"kafka.topic" : "pageviews",
"quickstart" : "pageviews",
"tasks.max" : "4",
"max.interval": 1000,
"iterations": 10000000,
"topic.creation.default.replication.factor": 1,
"topic.creation.default.partitions": 10
}
}
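After start.sh registers this connector (see the curl call in start.sh further down), its state can be verified through the Connect REST API exposed on port 8083. A quick check sketch, assuming the stack is running locally and `jq` is available on the host:

```bash
# Confirm the datagen-sample connector and its tasks are RUNNING
curl -s http://localhost:8083/connectors/datagen-sample/status | \
  jq '{connector: .connector.state, tasks: [.tasks[].state]}'

# Peek at a few generated records on the pageviews topic
docker exec kafka1 bash -c \
  "KAFKA_OPTS= kafka-console-consumer --bootstrap-server kafka1:29092 --topic pageviews --from-beginning --max-messages 5"
```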
10 changes: 10 additions & 0 deletions dev-toolkit/connect/connector_filesink.json
@@ -0,0 +1,10 @@
{
"name" : "filesink-sample",
"config": {
"connector.class": "FileStreamSink",
"topics" : "pageviews",
"file": "/tmp/pageviews.txt",
"name" : "filesink-sample",
"tasks.max": "1"
}
}
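The sink writes to a plain file inside the `connect` container defined in docker-compose.connect.yaml, so a quick way to confirm records are flowing end to end (assuming the stack is up) is:

```bash
# FileStreamSink appends records from the pageviews topic to this file
docker exec connect tail -n 5 /tmp/pageviews.txt
```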
53 changes: 53 additions & 0 deletions dev-toolkit/docker-compose.connect.yaml
@@ -0,0 +1,53 @@
services:
connect:
image: confluentinc/cp-enterprise-replicator:${CFLT_TAG}
profiles:
- connect
hostname: connect
container_name: connect
depends_on:
- kafka1
- kafka2
- kafka3
- kafka4
ports:
- "8083:8083"
environment:
CONNECT_REST_ADVERTISED_PORT: 8083
CONNECT_REST_PORT: 8083
CONNECT_LISTENERS: http://0.0.0.0:8083
CONNECT_BOOTSTRAP_SERVERS: kafka1:29092,kafka2:29092,kafka3:29092,kafka4:29092
CONNECT_REST_ADVERTISED_HOST_NAME: connect
CONNECT_GROUP_ID: compose-connect-group
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 2
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 2
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 2
CONNECT_CONNECTOR_CLIENT_CONFIG_OVERRIDE_POLICY: All
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
CONNECT_LOG4J_LOGGERS: org.reflections=ERROR
CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n"
CONNECT_PLUGIN_PATH: /usr/share/java,/usr/share/confluent-hub-components,/usr/share/filestream-connectors
CONNECT_REST_EXTENSION_CLASSES: io.confluent.connect.replicator.monitoring.ReplicatorMonitoringExtension
KAFKA_OPTS: "-javaagent:/tmp/jmx_prometheus_javaagent-1.1.0.jar=1234:/tmp/kafka_connect.yml"
volumes:
- $PWD/jmx-exporter/jmx_prometheus_javaagent-1.1.0.jar:/tmp/jmx_prometheus_javaagent-1.1.0.jar
- $PWD/jmx-exporter/kafka_connect.yml:/tmp/kafka_connect.yml
command:
- bash
- -c
- |
echo "Installing Connector"
confluent-hub install --no-prompt confluentinc/kafka-connect-datagen:0.6.6
confluent-hub install --no-prompt confluentinc/kafka-connect-replicator:7.8.0
cp /usr/share/java/kafka-connect-replicator/replicator-rest-extension-*.jar /etc/kafka-connect/jars/
#
echo "Launching Kafka Connect worker"
/etc/confluent/docker/run &
#
sleep infinity
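The container command installs the datagen and Replicator plugins with confluent-hub, launches the worker in the background, and keeps the container alive with `sleep infinity`. Once the stack is up, a short sketch to confirm the worker is reachable and the plugins landed on the plugin path (assumes `jq` is installed on the host):

```bash
# List the connector plugins known to the worker on port 8083
curl -s http://localhost:8083/connector-plugins | jq -r '.[].class'
# Expected to include, among others:
#   io.confluent.kafka.connect.datagen.DatagenConnector
#   org.apache.kafka.connect.file.FileStreamSinkConnector
```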
23 changes: 22 additions & 1 deletion dev-toolkit/start.sh
@@ -146,8 +146,30 @@ $DOCKER_COMPOSE_CMD ${docker_args[@]} \
-f docker-compose.schema-registry-primary-secondary.yaml \
-f docker-compose.jr.yaml \
-f docker-compose.clusterlinking.yaml \
-f docker-compose.connect.yaml \
up -d

# if docker_args contains connect, then start the connectors
if [[ " ${docker_args[@]} " =~ " connect " ]]; then

echo -e "\nWaiting 60 seconds before starting datagen connector..."
sleep 60

echo -e "\nCreating topic pageviews..."
docker exec kafka1 bash -c "KAFKA_OPTS= kafka-topics --bootstrap-server kafka1:29092 --create --topic pageviews --replication-factor 1 --partitions 1"
sleep 3

echo -e "\nStarting datagen connector..."

curl -X POST -H Accept:application/json -H Content-Type:application/json http://localhost:8083/connectors/ -d @connect/connector_datagen.json

echo -e "\nStarting filesink connector..."

curl -X POST -H Accept:application/json -H Content-Type:application/json http://localhost:8083/connectors/ -d @connect/connector_filesink.json


fi
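The fixed 60-second sleep is usually enough for confluent-hub to finish installing the plugins, but a readiness poll against the Connect REST API is more robust. A possible alternative sketch, not part of this commit:

```bash
# Poll the Connect REST API instead of sleeping a fixed 60 seconds
max_wait=180
waited=0
until curl -sf http://localhost:8083/connectors > /dev/null; do
  sleep 5
  waited=$((waited + 5))
  if [ "$waited" -ge "$max_wait" ]; then
    echo "Kafka Connect did not become ready within ${max_wait}s" >&2
    break
  fi
done
```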

# if docker_args contains replicator, then start the replicator
if [[ " ${docker_args[@]} " =~ " replicator " ]]; then

@@ -163,7 +185,6 @@ if [[ " ${docker_args[@]} " =~ " replicator " ]]; then
curl --request PUT \
--url http://localhost:8083/connectors/replicator/config \
--header 'content-type: application/json' \
--header 'user-agent: vscode-restclient' \
--data '{"connector.class": "io.confluent.connect.replicator.ReplicatorSourceConnector","topic.regex": "quotes","key.converter": "io.confluent.connect.replicator.util.ByteArrayConverter","value.converter": "io.confluent.connect.replicator.util.ByteArrayConverter","header.converter": "io.confluent.connect.replicator.util.ByteArrayConverter","src.kafka.bootstrap.servers": "kafka1:29092,kafka2:29092,kafka3:29092,kafka4:29092","dest.kafka.bootstrap.servers": "broker-replicator-dst:29092","error.tolerance": "all","errors.log.enable": "true","errors.log.include.messages": "true","confluent.topic.replication.factor": 1,"provenance.header.enable": "true","topic.timestamp.type": "LogAppendTime","topic.rename.format": "replica-${topic}","tasks.max": "1"}'

echo -e "\nWaiting 45 seconds to initialize replicator connector..."
1 change: 1 addition & 0 deletions dev-toolkit/stop.sh
@@ -26,6 +26,7 @@ $DOCKER_COMPOSE_CMD \
-f docker-compose.schema-registry-primary-secondary.yaml \
-f docker-compose.jr.yaml \
-f docker-compose.clusterlinking.yaml \
-f docker-compose.connect.yaml \
down -v
rm -rf jmx-exporter
rm -rf assets
2 changes: 1 addition & 1 deletion jmxexporter-prometheus-grafana/README.md
@@ -27,7 +27,7 @@ List of provided dashboards:
- [Kafka lag exporter _(cp-demo,dev-toolkit)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#kafka-lag-exporter)
- [Kafka transaction coordinator _(cp-demo,dev-toolkit)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#kafka-transaction-coordinator)
- [Schema Registry cluster _(cp-demo,dev-toolkit)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#schema-registry-cluster)
- [Kafka Connect cluster _(cp-demo)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#kafka-connect-cluster)
- [Kafka Connect cluster _(cp-demo,dev-toolkit)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#kafka-connect-cluster)
- [ksqlDB cluster _(cp-demo,dev-toolkit)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#ksqldb-cluster)
- [Kafka streams _(cp-demo)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#kafka-streams)
- [Kafka streams RocksDB _(cp-demo)_](https://github.com/confluentinc/jmx-monitoring-stacks/blob/main/jmxexporter-prometheus-grafana/README.md#kafka-streams-rocksdb)
