The previous article covered consuming Kafka data from Node.js; this article covers producing Kafka data.
Previous article link: http://blog.csdn.net/xiedong9857/article/details/55506266
The setup is simple: I use Express to build a small backend that accepts posted data and forwards it to Kafka (connecting through ZooKeeper). There are just two files:
1, Server.js
/**
 * server.js — HTTP front end that forwards posted data to Kafka.
 * Created by John on 2016/11/10.
 *
 * Accepts application/x-www-form-urlencoded POSTs, hands the payload to
 * the producer in ./tokafka.js, and echoes the broker's response as JSON.
 */
"use strict";
var express = require('express');
var app = express();
var kafka = require('./tokafka.js');
var bodyParser = require('body-parser');

// Parse application/x-www-form-urlencoded request bodies into req.body.
app.use(bodyParser.urlencoded({ extended: false }));

app.post('/', function (req, res) {
  // NOTE(review): the original listing was truncated here — assumes the
  // form fields are named `key` and `message`; confirm against the client.
  kafka.produce(req.body.key, req.body.message, function (result) {
    // Output the Kafka send result in JSON format.
    res.send(result);
  });
});

var server = app.listen(3000, function () {
  var host = server.address().address;
  var port = server.address().port;
  console.log('Application instance, access address is http://%s:%s', host, port);
});
2, Tokafka.js
/**
 * tokafka.js — thin Kafka producer wrapper used by server.js.
 *
 * Connects to Kafka via the ZooKeeper ensemble below and exposes a single
 * static produce(key, message, cb) that sends one keyed message to the
 * 'datacloudlevel' topic and invokes cb with the broker's response.
 */
"use strict";
var kafka = require('kafka-node');
var KeyedMessage = kafka.KeyedMessage;
var Producer = kafka.Producer;
// NOTE(review): these are hard-coded ZooKeeper hosts from the original
// article — move to configuration before real use.
var client = new kafka.Client('122.225.108.94:12181,122.225.108.94:12182,60.191.137.38:12181');
var producer = new Producer(client);
console.log('connecting kafka');

// Register the ready listener ONCE at module load. The original listing
// attached a new 'ready' listener inside every produce() call, which leaks
// listeners and does nothing useful per-send.
producer.on('ready', function () {
  console.log('kafka producer ready');
});

class ToKafka {
  /**
   * Send one keyed message to the 'datacloudlevel' topic.
   * @param {string} key     - message key (used for partitioning)
   * @param {string} message - message payload
   * @param {function} cb    - called with the broker response data
   */
  static produce(key, message, cb) {
    var payloads = [{
      topic: 'datacloudlevel',
      messages: new KeyedMessage(key, message)
    }];
    producer.send(payloads, function (err, data) {
      if (err) {
        // Log and continue — the caller still gets whatever data came back,
        // matching the original best-effort behavior.
        console.log(err);
      }
      console.log(data);
      console.log(key + message);
      cb(data);
    });
  }
}

module.exports = ToKafka;