First, create a Maven project and add the required Kafka dependencies:
<dependencies>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>0.10.2.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.10</artifactId>
        <version>0.10.2.1</version>
    </dependency>
</dependencies>
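These coordinates go inside the <dependencies> section of the project's pom.xml. A minimal sketch of the surrounding pom is shown below; the groupId, artifactId, and version of the project itself are placeholders for illustration, not taken from the original:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- placeholder project coordinates, not from the original article -->
    <groupId>com.example</groupId>
    <artifactId>kafka-demo</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <!-- the two Kafka dependencies listed above go here -->
    </dependencies>
</project>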
Then you can implement the producer and the consumer. When creating the topic, the code first deletes any existing topic with the same name; for that to work, the brokers must be configured with delete.topic.enable=true, otherwise the topic cannot be deleted.
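delete.topic.enable is a broker-side setting. A minimal sketch, assuming the brokers are configured through the usual config/server.properties file:

# config/server.properties (broker side)
# Without this, a delete request only marks the topic for deletion and it is never removed.
delete.topic.enable=true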
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

/**
 * Consumer
 */
public class KafkaConsumerDemo {

    private final KafkaConsumer<String, String> consumer;

    private KafkaConsumerDemo() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "10.xxx.xxx.149:9092,10.xxx.xxx.182:9092,10.xxx.xxx.190:9092");
        props.put("group.id", "test");
        props.put("enable.auto.commit", "true");
        // auto-commit interval in ms (the original value was garbled; 1000 is assumed here)
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer = new KafkaConsumer<String, String>(props);
    }

    void consume() {
        consumer.subscribe(Arrays.asList(KafkaProducerDemo.TOPIC));
        while (true) {
            // poll timeout in ms (the original value was garbled; 100 is assumed here)
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
        }
    }

    public static void main(String[] args) {
        new KafkaConsumerDemo().consume();
    }
}
import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Producer
 */
public class KafkaProducerDemo {

    private final Producer<String, String> kafkaProducer;

    public final static String TOPIC = "java_topic";

    private KafkaProducerDemo() {
        kafkaProducer = createKafkaProducer();
    }

    private Producer<String, String> createKafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "10.185.156.149:9092,10.185.156.182:9092,10.185.156.190:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<String, String>(props);
    }

    void produce() {
        for (int i = 1; i < 1000; i++) {
            try {
                // send one message per second
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            String key = String.valueOf("key" + i);
            String data = "hello kafka message:" + key;
            kafkaProducer.send(new ProducerRecord<>(TOPIC, key, data), new Callback() {
                @Override
                public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                    // handle the send result here
                }
            });
            System.out.println(data);
        }
    }

    public static void main(String[] args) {
        KafkaCreateTopic.createTopic("java_topic", 3, 1);
        new KafkaProducerDemo().produce();
    }
}
import java.util.Properties;

import kafka.admin.AdminUtils;
import kafka.admin.RackAwareMode;
import kafka.utils.ZkUtils;
import org.apache.kafka.common.security.JaasUtils;

/**
 * Create topic
 */
public class KafkaCreateTopic {

    public static void createTopic(String topic, int partitions, int replicationFactor) {
        // connect to ZooKeeper with 30s session and connection timeouts
        ZkUtils zkUtils = ZkUtils.apply("10.xxx.xxx.149:2181", 30000, 30000, JaasUtils.isZkSecurityEnabled());
        if (AdminUtils.topicExists(zkUtils, topic)) {
            deleteTopic(zkUtils, topic);
        }
        AdminUtils.createTopic(zkUtils, topic, partitions, replicationFactor,
                new Properties(), RackAwareMode.Enforced$.MODULE$);
        zkUtils.close();
    }

    public static void deleteTopic(ZkUtils zkUtils, String topic) {
        // requires delete.topic.enable=true on the brokers
        AdminUtils.deleteTopic(zkUtils, topic);
        System.out.println("delete the topic " + topic);
    }
}
These classes make up the Java implementations of the Kafka producer and consumer.
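To check that the producer's messages are actually reaching the topic, one option (assuming the standard command-line tools shipped with the Kafka broker installation) is the console consumer:

# run from the Kafka installation directory on a machine that can reach a broker
bin/kafka-console-consumer.sh \
    --bootstrap-server 10.xxx.xxx.149:9092 \
    --topic java_topic \
    --from-beginning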