Skip to content

giocosmiano/exploring_kafka

Repository files navigation

   $ tar xvzf kafka_2.12-2.3.0.tgz
   $ mv ~/Downloads/kafka_2.12-2.3.0 ~/Documents/_applications/
  • Add these settings to .bashrc for personal preference
   export APPLICATIONS_HOME="${HOME}/Documents/_applications"
   export KAFKA_HOME="${APPLICATIONS_HOME}/kafka_2.12-2.3.0"
   export KAFKA_BIN="${KAFKA_HOME}/bin"
   export KAFKA_CONFIG="${KAFKA_HOME}/config"

   alias kafka-restart='sudo systemctl restart confluentKafka'
   alias kafka-start='sudo systemctl start confluentKafka'
   alias kafka-stop='sudo systemctl stop confluentKafka'
   alias kafka-status='sudo systemctl status confluentKafka'
   alias kafka-enable='sudo systemctl enable confluentKafka'
   alias kafka-disable='sudo systemctl disable confluentKafka'
  • Configure to start kafka automatically with the server
   $ sudo cp kafka.service /etc/systemd/system
   $ sudo cp zookeeper.service /etc/systemd/system
  • Update the systemd service after copying kafka/zookeeper services
   $ sudo systemctl daemon-reload
  • Enable to auto-start kafka service
   $ kafka-enable
  • Start/Re-Start kafka service
   $ kafka-start
   $ kafka-restart
  • Check kafka service status (OR sudo systemctl status confluentKafka)
   $ kafka-status
  $ $HOME/jdk14.sh
  $ $HOME/Documents/_applications/cmak-3.0.0.5/bin/cmak -Dhttp.port=9990 -Dconfig.file=$HOME/Documents/_applications/cmak-3.0.0.5/conf/application.conf -Dcmak.zkhosts="localhost:2181"
  • change the following in conf/application.conf
kafka-manager.zkhosts="localhost:2181"
basicAuthentication.username="admin"
basicAuthentication.password="password"
  • Adding new cluster in Kafka manager
zk-host1:2181,zk-host2:2181,zk-host3:2181
  $ . "$HOME/jdk8.sh"
  $ java -jar "$HOME/Documents/_applications/trifecta-bundle-0.18.13.bin.jar" --http-start -Dtrifecta.web.port=9980

Kafka UI Tools

  $ docker run -d --rm -p 9000:9000 \
      -e KAFKA_BROKERCONNECT=localhost:9092 \
      -e JVM_OPTS="-Xms32M -Xmx64M" \
      -e SERVER_SERVLET_CONTEXTPATH="/" \
      obsidiandynamics/kafdrop:latest
  $ docker pull landoop/fast-data-dev
  $ docker run --rm --net=host landoop/fast-data-dev
  $ docker pull landoop/schema-registry-ui
  alias confluentZookeeperStart='cd ${CONFLUENT_HOME}; bin/zookeeper-server-start etc/kafka/zookeeper.properties'
  alias confluentKafkaStart='cd ${CONFLUENT_HOME}; bin/kafka-server-start etc/kafka/server.properties'
  alias confluentSchemaStart='cd ${CONFLUENT_HOME}; bin/schema-registry-start etc/schema-registry/schema-registry.properties'
  alias dockerLensesIOSchemaUI='docker run --rm -it -p 127.0.0.1:8710:8000 -e "SCHEMAREGISTRY_URL=http://localhost:8081" landoop/schema-registry-ui'
  $ confluentZookeeperStart
  $ confluentKafkaStart
  $ confluentSchemaStart
  $ dockerLensesIOSchemaUI
  $ docker pull landoop/kafka-topics-ui
  alias confluentZookeeperStart='cd ${CONFLUENT_HOME}; bin/zookeeper-server-start etc/kafka/zookeeper.properties'
  alias confluentKafkaStart='cd ${CONFLUENT_HOME}; bin/kafka-server-start etc/kafka/server.properties'
  alias confluentRestStart='cd ${CONFLUENT_HOME}; bin/kafka-rest-start etc/kafka-rest/kafka-rest.properties'
  alias dockerLensesIOTopicsUI='docker run --rm -it -p 127.0.0.1:8720:8000 -e "KAFKA_REST_PROXY_URL=http://localhost:8082" -e "PROXY=false" landoop/kafka-topics-ui'
  $ confluentZookeeperStart
  $ confluentKafkaStart
  $ confluentRestStart
  $ dockerLensesIOTopicsUI
  $ docker pull landoop/kafka-connect-ui
  alias confluentKafkaStart='cd ${CONFLUENT_HOME}; bin/kafka-server-start etc/kafka/server.properties'
  alias confluentZookeeperStart='cd ${CONFLUENT_HOME}; bin/zookeeper-server-start etc/kafka/zookeeper.properties'
  alias confluentConnectStart='cd ${CONFLUENT_HOME}; bin/connect-distributed etc/kafka/connect-distributed.properties'
  alias dockerLensesIOConnectUI='docker run --rm -it -p 127.0.0.1:8730:8000 -e "CONNECT_URL=http://localhost:8083" -e "PROXY=false" landoop/kafka-connect-ui'
  $ confluentZookeeperStart
  $ confluentKafkaStart
  $ confluentConnectStart
  $ dockerLensesIOConnectUI

Kafka sample commands

# create topics
  $ kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 3 --topic sample-input-topic
  $ kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 3 --topic sample-output-topic

# check/describe/delete topics
  $ kafka-topics.sh --list --zookeeper localhost:2181
  $ kafka-topics.sh --list --zookeeper localhost:2181 --topic sample-input-topic
  $ kafka-topics.sh --describe --zookeeper localhost:2181 --topic sample-input-topic
  $ kafka-topics.sh --delete --zookeeper localhost:2181 --topic sample-input-topic

# start kafka producer to manually enter some data
  $ kafka-console-producer.sh --broker-list localhost:9092 --topic sample-input-topic

# enter
  > kafka hello world
  > kafka sample data processing
  > kafka the quick brown fox jumps over the lazy dog
# exit

# start kafka producer to pipe in some file
  $ kafka-console-producer.sh --broker-list localhost:9092 --topic sample-input-topic < sampleTextFile.txt

# start kafka consumer
  $ kafka-console-consumer.sh --topic sample-input-topic --bootstrap-server localhost:9092 --from-beginning

  $ kafka-console-consumer.sh --bootstrap-server localhost:9092 \
      --topic sample-input-topic \
      --from-beginning \
      --formatter kafka.tools.DefaultMessageFormatter \
      --property print.key=true \
      --property print.value=true \
      --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
      --property value.deserializer=org.apache.kafka.common.serialization.IntegerDeserializer
/tmp/confluent.0YtCGnLS/

Confluent Control Center and LensesIO UIs

  $ confluentControlCenterStart
  $ dockerLensesIOSchemaUI
  $ dockerLensesIOTopicsUI
  $ dockerLensesIOConnectUI

Connecting Kafka to MongoDB Source

  $ kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic mongoConn.sampleGioDB.books
  $ kafka-topics --list --zookeeper localhost:2181
  $ cd $CONFLUENT_HOME
  $ bin/connect-standalone  etc/schema-registry/connect-avro-standalone-mongodb-source.properties  etc/kafka/connect-mongodb-source.properties  ### This worked so use this
  $ bin/connect-distributed etc/schema-registry/connect-avro-distributed-mongodb-source.properties etc/kafka/connect-mongodb-source.properties  ### Doesn't work. Still needs to investigate why??? 
  $ kafka-console-consumer --bootstrap-server localhost:9092 --topic mongoConn.sampleGioDB.books  --from-beginning

Connecting Kafka to Elasticsearch Sink

  $ kafka-topics --list --zookeeper localhost:2181
  $ cd $CONFLUENT_HOME
  $ bin/connect-standalone  etc/schema-registry/connect-avro-standalone-elasticsearch-sink.properties  etc/kafka-connect-elasticsearch/connect-elasticsearch-sink.properties  ### This worked so use this
  $ bin/connect-distributed etc/schema-registry/connect-avro-distributed-elasticsearch-sink.properties etc/kafka-connect-elasticsearch/connect-elasticsearch-sink.properties  ### Doesn't work. Still needs to investigate why??? 
  $ kafka-console-consumer --bootstrap-server localhost:9092 --topic mongoConn.sampleGioDB.books  --from-beginning

Setting up Confluent Kafka Connect Plugins such as Kafka Connector MongoDB Source

   $ confluent-hub install confluentinc/kafka-connect-elasticsearch:5.5.1
   $ confluent-hub install debezium/debezium-connector-mongodb:1.2.1
   $ confluent-hub install debezium/debezium-connector-mysql:1.2.1
   $ confluent-hub install mongodb/kafka-connect-mongodb:1.2.0
   $ confluent-hub install jcustenborder/kafka-connect-redis:0.0.2.11
   $ confluent-hub install hpgrahsl/kafka-connect-mongodb:1.4.0
  • Create plugins directory under $CONFLUENT_HOME, then create symlink from lib directory where the .jar files are
   $ cd $CONFLUENT_HOME
   $ mkdir plugins
   $ cd plugins
   $ ln -s ../share/confluent-hub-components/confluentinc-kafka-connect-elasticsearch/lib elasticsearch
   $ ln -s ../share/confluent-hub-components/debezium-debezium-connector-mongodb/lib debezium-mongodb
   $ ln -s ../share/confluent-hub-components/debezium-debezium-connector-mysql/lib debezium-mysql
   $ ln -s ../share/confluent-hub-components/mongodb-kafka-connect-mongodb/lib mongodb
   $ ln -s ../share/confluent-hub-components/jcustenborder-kafka-connect-redis/lib redis
  • Add the plugins path in the following files
    • $CONFLUENT_HOME/etc/kafka/connect-distributed.properties
    • $CONFLUENT_HOME/etc/kafka/connect-standalone.properties
    • $CONFLUENT_HOME/etc/schema-registry/connect-avro-distributed.properties
    • $CONFLUENT_HOME/etc/schema-registry/connect-avro-standalone.properties
plugin.path=$HOME/Documents/_applications/confluent-5.5.1/share/java,$HOME/Documents/_applications/confluent-5.5.1/share/confluent-hub-components,$HOME/Documents/_applications/confluent-5.5.1/plugins
  • The above plugins .jar files should work but in case it didn't get added to classpath then manually add the $CLASSPATH to .bashrc e.g.
   export CLASSPATH="$HOME/Documents/_applications/confluent-5.5.1/share/confluent-hub-components/debezium-debezium-connector-mongodb/*"
   $ trifectaStart
   $ confluentKSqlStart
   $ ksql
   ksql> CREATE STREAM sampleGioBooks (id VARCHAR) WITH (kafka_topic='mongoConn.sampleGioDB.books', value_format='JSON');
   ksql> describe extended sampleGioBooks;
   ksql> print 'mongoConn.sampleGioDB.books' from beginning;

Kafka Blogs

Kafka Diagrams

  $ confluent local list
  $ confluent local start # to start `all` services
  $ confluent local start ksql-server # to start `ksql-server` service only

OR

  alias confluentZookeeperStart='cd ${CONFLUENT_HOME}; bin/zookeeper-server-start etc/kafka/zookeeper.properties'
  alias confluentKafkaStart='cd ${CONFLUENT_HOME}; bin/kafka-server-start etc/kafka/server.properties'
  alias confluentKSqlStart='cd ${CONFLUENT_HOME}; bin/ksql-server-start etc/ksqldb/ksql-server.properties'
  $ confluentKSqlStart
  $ ksql
  $ confluent local list
  $ confluent local start # to start `all` services
  $ confluent local start connect # to start `connect` service only

About

No description, website, or topics provided.

Resources

Stars

Watchers

Forks

Releases

No releases published

Packages

No packages published

Languages