Build a Kafka development environment using Apache Kafka

Source: Internet
Author: User
Reprinted with attribution to the source. Next we will build a Kafka development environment.
Add dependency

To build a development environment, you need to introduce the jar package of Kafka. One way is to add the jar package under Lib in the Kafka installation package to the classpath of the project, which is relatively simple. However, we use another more popular method: Using Maven to manage jar package dependencies. After creating a Maven project, add the following dependency to Pom. xml:
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.10</artifactId>
    <version>0.8.0</version>
</dependency>

After adding the dependency, you will find that two of the transitive jar packages cannot be resolved. Don't worry — they have been prepared for you: click here to download the two jar packages. After decompressing them, you have two options. The first is to install each jar package into the local repository using Maven's install command; the other is to copy the decompressed folder directly into the com folder of the local Maven repository. For example, my local repository is D:\mvn, and my directory structure is as follows:


Configuration program
The first is an interface that acts as a configuration file and configures the various connection parameters of Kafka:
package com.sohu.kafkademon;

/**
 * Central place for the Kafka connection settings used by the demo
 * classes: ZooKeeper address, consumer group, topic names, broker
 * host/port and a few client tuning values.
 *
 * <p>All fields of an interface are implicitly {@code public static final},
 * so no modifiers are needed.
 */
public interface KafkaProperties
{
    // ZooKeeper and consumer-group settings
    String zkConnect = "10.22.10.139:2181";
    String groupId = "group1";

    // Topics used by the demos
    String topic = "topic1";
    String topic2 = "topic2";
    String topic3 = "topic3";

    // Broker location
    String kafkaServerURL = "10.22.10.139";
    int kafkaServerPort = 9092;

    // Client tuning values (sizes in bytes, times in milliseconds)
    int kafkaProducerBufferSize = 64 * 1024;
    int connectionTimeOut = 20000;
    int reconnectInterval = 10000;

    // Client id reported by the SimpleConsumer demo
    String clientId = "SimpleConsumerDemoClient";
}


Producer
package com.sohu.kafkademon;import java.util.Properties;import kafka.producer.KeyedMessage;import kafka.producer.ProducerConfig;/** * @author leicui [email protected] */public class KafkaProducer extends Thread{    private final kafka.javaapi.producer.Producer<Integer, String> producer;    private final String topic;    private final Properties props = new Properties();    public KafkaProducer(String topic)    {        props.put("serializer.class", "kafka.serializer.StringEncoder");        props.put("metadata.broker.list", "10.22.10.139:9092");        producer = new kafka.javaapi.producer.Producer<Integer, String>(new ProducerConfig(props));        this.topic = topic;    }    @Override    public void run() {        int messageNo = 1;        while (true)        {            String messageStr = new String("Message_" + messageNo);            System.out.println("Send:" + messageStr);            producer.send(new KeyedMessage<Integer, String>(topic, messageStr));            messageNo++;            try {                sleep(3000);            } catch (InterruptedException e) {                // TODO Auto-generated catch block                e.printStackTrace();            }        }    }}



Consumer
package com.sohu.kafkademon;import java.util.HashMap;import java.util.List;import java.util.Map;import java.util.Properties;import kafka.consumer.ConsumerConfig;import kafka.consumer.ConsumerIterator;import kafka.consumer.KafkaStream;import kafka.javaapi.consumer.ConsumerConnector;/** * @author leicui [email protected] */public class KafkaConsumer extends Thread{    private final ConsumerConnector consumer;    private final String topic;    public KafkaConsumer(String topic)    {        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(                createConsumerConfig());        this.topic = topic;    }    private static ConsumerConfig createConsumerConfig()    {        Properties props = new Properties();        props.put("zookeeper.connect", KafkaProperties.zkConnect);        props.put("group.id", KafkaProperties.groupId);        props.put("zookeeper.session.timeout.ms", "40000");        props.put("zookeeper.sync.time.ms", "200");        props.put("auto.commit.interval.ms", "1000");        return new ConsumerConfig(props);    }    @Override    public void run() {        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();        topicCountMap.put(topic, new Integer(1));        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);        ConsumerIterator<byte[], byte[]> it = stream.iterator();        while (it.hasNext()) {            System.out.println("receive:" + new String(it.next().message()));            try {                sleep(3000);            } catch (InterruptedException e) {                e.printStackTrace();            }        }    }}


Simple sending and receiving
Run the following program to send and receive messages:
package com.sohu.kafkademon;

/**
 * Entry point wiring one producer thread and one consumer thread to the
 * same topic to demonstrate a simple send/receive round trip.
 *
 * @author leicui [email protected]
 */
public class KafkaConsumerProducerDemo
{
    public static void main(String[] args)
    {
        // Both threads share the topic configured in KafkaProperties.
        Thread producer = new KafkaProducer(KafkaProperties.topic);
        Thread consumer = new KafkaConsumer(KafkaProperties.topic);
        producer.start();
        consumer.start();
    }
}


High-level consumer
The following is a program that demonstrates sending and receiving with the high-level consumer:
package com.sohu.kafkademon;import java.util.HashMap;import java.util.List;import java.util.Map;import java.util.Properties;import kafka.consumer.ConsumerConfig;import kafka.consumer.ConsumerIterator;import kafka.consumer.KafkaStream;import kafka.javaapi.consumer.ConsumerConnector;/** * @author leicui [email protected] */public class KafkaConsumer extends Thread{    private final ConsumerConnector consumer;    private final String topic;    public KafkaConsumer(String topic)    {        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(                createConsumerConfig());        this.topic = topic;    }    private static ConsumerConfig createConsumerConfig()    {        Properties props = new Properties();        props.put("zookeeper.connect", KafkaProperties.zkConnect);        props.put("group.id", KafkaProperties.groupId);        props.put("zookeeper.session.timeout.ms", "40000");        props.put("zookeeper.sync.time.ms", "200");        props.put("auto.commit.interval.ms", "1000");        return new ConsumerConfig(props);    }    @Override    public void run() {        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();        topicCountMap.put(topic, new Integer(1));        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);        ConsumerIterator<byte[], byte[]> it = stream.iterator();        while (it.hasNext()) {            System.out.println("receive:" + new String(it.next().message()));            try {                sleep(3000);            } catch (InterruptedException e) {                e.printStackTrace();            }        }    }}





Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.