1. Flume configuration

Flume version 1.6 or later is required.

flume-conf.properties file configuration content; the sink outputs to Kafka as a producer:
# Flume agent "a1": exec source -> memory channel -> Kafka sink.
# NOTE: Flume component names are case-sensitive, so the declared names
# must match the names used below (r1/k1/c1).
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# Describe/configure the source: tail the application log
a1.sources.r1.type = exec
a1.sources.r1.command = tail -f /home/airib/work/log.log

# Describe the sink
#a1.sinks.k1.type = logger
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
# Topic must match the consumer; Kafka topics are case-sensitive
a1.sinks.k1.topic = test
a1.sinks.k1.brokerList = localhost:9092
a1.sinks.k1.requiredAcks = 1
a1.sinks.k1.batchSize = 20

# Use a channel which buffers events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100

# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
Flume start command:
bin/flume-ng agent --conf conf --conf-file conf/flume-conf.properties --name a1 -Dflume.root.logger=INFO,console
2. Kafka consumer Java source code
package com.hgp.kafka.kafka;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

/**
 * High-level Kafka (0.8.x) consumer that joins the "jd-group" consumer
 * group via ZooKeeper, subscribes to the "test" topic, and prints every
 * received message to stdout. Runs until the process is killed.
 */
public class KafkaConsumer {

    private final ConsumerConnector consumer;

    private KafkaConsumer() {
        Properties props = new Properties();
        // ZooKeeper connection for the old high-level consumer API
        props.put("zookeeper.connect", "localhost:2181");
        // Consumer group id; members of a group share the topic's partitions
        props.put("group.id", "jd-group");
        // ZooKeeper session timeout
        props.put("zookeeper.session.timeout.ms", "4000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        // With no committed offset, start from the earliest available message
        props.put("auto.offset.reset", "smallest");
        // Serialization class
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        ConsumerConfig config = new ConsumerConfig(props);
        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
    }

    void consume() {
        // Request one stream (one consumer thread) for the "test" topic.
        // Topic name must match the Flume sink / console producer exactly
        // (Kafka topics are case-sensitive).
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put("test", Integer.valueOf(1));

        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get("test").get(0);
        ConsumerIterator<String, String> it = stream.iterator();
        // hasNext() blocks until the next message arrives; loops forever
        while (it.hasNext()) {
            System.out.println(it.next().message());
        }
    }

    public static void main(String[] args) {
        new KafkaConsumer().consume();
    }
}
3. Kafka start commands

Start the ZooKeeper server:
bin/zookeeper-server-start.sh config/zookeeper.properties &
Start the Kafka server:
bin/kafka-server-start.sh config/server.properties &
Run the console producer:
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
Run the console consumer:
bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning