Conclusion:

1. In Kafka, the same message can be consumed multiple times, as long as the consumers belong to different consumer groups.
2. Kafka stores consumer-group metadata in ZooKeeper. Observing it with the ZooKeeper shell:

```
[zk: air00:2181(CONNECTED) 8] ls /
[consumers, config, controller, admin, brokers, zookeeper, controller_epoch]
[zk: air00:2181(CONNECTED) 9] ls /consumers
[test01, test02]
[zk: air00:2181(CONNECTED) 10] ls /consumers/test01
[offsets, owners, ids]
[zk: air00:2181(CONNECTED) 11] ls /consumers/test01/offsets
[test]
[zk: air00:2181(CONNECTED) 12] ls /consumers/test01/offsets/test
[1, 0]
```

   As shown above, each consumer group's information (including its per-partition offsets) lives under the `/consumers` node in ZooKeeper.
3. A new consumer (in a new group) cannot obtain old data: by default it starts reading from the latest offset, not from the beginning of the topic.

Producer:

```java
package com.kafka.test;

import java.util.*;

import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.javaapi.producer.Producer;

public class Producer01 {
    public static void main(String[] args) {
        String topic = "test";
        Properties props = new Properties();
        // broker listens on port 9092
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        props.put("metadata.broker.list", "air00:9092");
        ProducerConfig config = new ProducerConfig(props);
        Producer<String, String> producer = new Producer<String, String>(config);
        producer.send(new KeyedMessage<String, String>(topic, "test"));
        producer.close();
    }
}
```

Consumer:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;

public class Consumer01 {
    static String groupId = "test01";
    static String topic = "test";

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "air00:2181,air01:2181,air02:2181");
        props.put("group.id", groupId);
        props.put("zookeeper.session.timeout.ms", "400");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        kafka.javaapi.consumer.ConsumerConnector consumer =
                kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, new Integer(1));
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<byte[], byte[]> it = stream.iterator();
        while (it.hasNext())
            System.out.println(new String(it.next().message()));
    }
}
```
Kafka Basics 01