1. Producer Code
Importcom.twitter.bijection.Injection;ImportCom.twitter.bijection.avro.GenericAvroCodecs;ImportOrg.apache.avro.Schema;ImportOrg.apache.avro.generic.GenericData;ImportOrg.apache.avro.generic.GenericRecord;ImportOrg.apache.kafka.clients.producer.KafkaProducer;ImportOrg.apache.kafka.clients.producer.ProducerRecord;Importjava.util.Properties;/*** Created by P on 2018/10/8.*/ Public classAvrokafkaproducer { Public Static FinalString User_schema = "{\ n" + "\" type\ ": \" record\ ", \ n" + "\" name\ ": \" Customer\ ", \ n" + "\" fields\ ": [\ n" + "{\" name\ ": \" id\ ", \" type\ ": \" int\ "},\n" + "{\" name\ ": \" NA Me\ ", \" type\ ": \" string\ "},\n" + "{\" name\ ": \" email\ ", \" type\ ": [\" Null\ ", \" string\ "],\" default\ ": \" null\ "}\n" + "]\n" + "}"; Public Static voidMain (string[] args) {Properties kafkaprops=NewProperties (); Kafkaprops.put ("Bootstrap.servers", "ip:9092"); Kafkaprops.put ("Key.serializer", "Org.apache.kafka.common.serialization.StringSerializer"); Kafkaprops.put ("Value.serializer", "Org.apache.kafka.common.serialization.ByteArraySerializer"); Kafkaprops.put ("Partitioner.class", "Mypartitioner"); Schema.parser Parser=NewSchema.parser (); Schema schema=Parser.parse (User_schema); Injection<genericrecord,byte[]> injection =genericavrocodecs.tobinary (Schema); Kafkaproducer producer=NewKafkaproducer<string,byte[]>(Kafkaprops); for(inti = 0;i < 1000;i++) {Genericdata.record Record=NewGenericdata.record (Schema); Record.put ("id", i); Record.put ("Name", "name-" +i); Record.put ("Email", "email-" +i); byte[] bytes =injection.apply (record); Producerrecord<string,byte[]> record1 =NewProducerrecord<string,byte[]> ("Customer", "customer-" +i,bytes); Producer.send (RECORD1); } producer.close (); System.out.println (User_schema); }}
2. Consumer Code
Importcom.twitter.bijection.Injection;ImportCom.twitter.bijection.avro.GenericAvroCodecs;ImportOrg.apache.avro.Schema;ImportOrg.apache.avro.generic.GenericRecord;ImportOrg.apache.kafka.clients.consumer.ConsumerRecord;Importorg.apache.kafka.clients.consumer.ConsumerRecords;ImportOrg.apache.kafka.clients.consumer.KafkaConsumer;Importjava.util.Collections;Importjava.util.Properties;/*** Created by P on 2018/10/14.*/ Public classAvrokafkaconsumer { Public Static FinalString User_schema = "{\ n" + "\" type\ ": \" record\ ", \ n" + "\" name\ ": \" Customer\ ", \ n" + "\" fields\ ": [\ n" + "{\" name\ ": \" id\ ", \" type\ ": \" int\ "},\n" + "{\" name\ ": \" NA Me\ ", \" type\ ": \" string\ "},\n" + "{\" name\ ": \" email\ ", \" type\ ": [\" Null\ ", \" string\ "],\" default\ ": \" null\ "}\n" + "]\n" + "}"; Public Static voidMain (string[] args) {Properties kafkaprops=NewProperties (); Kafkaprops.put ("Bootstrap.servers", "ip:9092"); Kafkaprops.put ("Key.deserializer", "Org.apache.kafka.common.serialization.StringDeserializer"); Kafkaprops.put ("Value.deserializer", "Org.apache.kafka.common.serialization.ByteArrayDeserializer"); Kafkaprops.put ("Group.id", "Demoavrokafkaconsumer"); Kafkaprops.put ("Auto.offset.reset", "earliest"); Kafkaconsumer<string,byte[]> consumer =NewKafkaconsumer<string,byte[]>(Kafkaprops); Consumer.subscribe (Collections.singletonlist ("Customer")); Schema.parser Parser=NewSchema.parser (); Schema schema=Parser.parse (User_schema); Injection<genericrecord,byte[]> injection =genericavrocodecs.tobinary (Schema); Try { while(true) {consumerrecords<string,byte[]> records = Consumer.poll (10); for(Consumerrecord<string,byte[]>record:records) {Genericrecord Record1=Injection.invert (Record.value ()). get (); System.out.println (Record.key ()+ ":" + record1.get ("id") + "\ T" + record1.get ("name") + "\ T" + record1.get ("email"))); } } } finally{consumer.close (); } }}
3. Pom Dependency
<!-- XML tags are case-sensitive: artifactId/groupId must match exactly,
     and closing tags may not contain spaces. -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.11</artifactId>
    <version>1.0.0</version>
</dependency>
<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro</artifactId>
    <version>1.7.6-cdh5.9.1</version>
</dependency>
<dependency>
    <groupId>com.twitter</groupId>
    <artifactId>bijection-avro_2.11</artifactId>
    <version>0.9.6</version>
</dependency>
Using Avro to encode and decode messages in Kafka