Java example of Kafka communication

Source: Internet
Author: User
Tags: serialization

    • Dependencies:

kafka_2.12-2.0.0.jar, kafka-clients-2.0.0.jar, log4j-1.2.17.jar, slf4j-api-1.7.25.jar, slf4j-log4j12-1.7.25.jar

    • IKafkaConstants.java
package kafka_proj;

public interface IKafkaConstants {
    public static String KAFKA_BROKERS = "192.168.65.130:9092";
    // public static String KAFKA_BROKERS = "192.168.65.130:9092,192.168.65.131:9092,192.168.65.132:9092";
    public static Integer MESSAGE_COUNT = 1000;
    public static String CLIENT_ID = "0";
    public static String TOPIC_NAME = "java";
    public static String GROUP_ID_CONFIG = "Group1";
    public static Integer MAX_NO_MESSAGE_FOUND_COUNT = 100;
    public static String OFFSET_RESET_LATEST = "latest";
    public static String OFFSET_RESET_EARLIER = "earliest";
    public static Integer MAX_POLL_RECORDS = 1;
}
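The topic named by TOPIC_NAME has to exist before the producer and consumer run. The original example does not show topic creation; the following is a minimal sketch using the AdminClient that ships with kafka-clients 2.0.0, assuming the broker address above and six partitions so that the custom partitioner further below always targets an existing partition. The class name TopicCreator is hypothetical.

package kafka_proj;

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

// Hypothetical helper, not part of the original files.
public class TopicCreator {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, IKafkaConstants.KAFKA_BROKERS);

        try (AdminClient admin = AdminClient.create(props)) {
            // 6 partitions to match CustomPartitioner (key % 6); replication factor 1
            // is enough for the single-broker setup above. Fails if the topic exists.
            NewTopic topic = new NewTopic(IKafkaConstants.TOPIC_NAME, 6, (short) 1);
            admin.createTopics(Collections.singleton(topic)).all().get();
        }
    }
}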
    • ConsumerCreator.java
package kafka_proj;

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import kafka_proj.IKafkaConstants;

public class ConsumerCreator {
    public static Consumer<Long, String> createConsumer() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, IKafkaConstants.KAFKA_BROKERS);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, IKafkaConstants.GROUP_ID_CONFIG);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, IKafkaConstants.MAX_POLL_RECORDS);
        // Auto-commit is disabled; offsets are committed manually in the poll loop.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, IKafkaConstants.OFFSET_RESET_EARLIER);

        Consumer<Long, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(IKafkaConstants.TOPIC_NAME));
        return consumer;
    }
}
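A quick usage sketch, not part of the original files: because enable.auto.commit is "false", every batch must be committed explicitly, and kafka-clients 2.0.0 also offers the non-deprecated poll(Duration) overload used here. The class name QuickConsumerCheck is hypothetical; App.java below shows the full loop with commitAsync().

package kafka_proj;

import java.time.Duration;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;

// Hypothetical one-shot reader, not part of the original example.
public class QuickConsumerCheck {
    public static void main(String[] args) {
        Consumer<Long, String> consumer = ConsumerCreator.createConsumer();
        // A single short poll may return nothing while the group is still joining;
        // with MAX_POLL_RECORDS = 1 it returns at most one record.
        ConsumerRecords<Long, String> records = consumer.poll(Duration.ofMillis(1000));
        records.forEach(r -> System.out.println(r.key() + " -> " + r.value()));
        // Blocking commit of whatever was returned; App.java uses commitAsync() instead.
        consumer.commitSync();
        consumer.close();
    }
}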
    • CustomPartitioner.java
package kafka_proj;

import java.util.Map;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

public class CustomPartitioner implements Partitioner {

    private static final int PARTITION_COUNT = 6;

    @Override
    public void configure(Map<String, ?> configs) {
    }

    @Override
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        // Route the record by its numeric key, modulo the number of partitions.
        Integer keyInt = Integer.parseInt(key.toString());
        return keyInt % PARTITION_COUNT;
    }

    @Override
    public void close() {
    }
}
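Note that CustomPartitioner parses the record key as an integer, so it only works for records that actually carry a numeric key; the records sent by App.java below have no key, and key.toString() would throw a NullPointerException if the partitioner were enabled. A hedged sketch of a keyed send the partitioner can handle, assuming ProducerCreator below with its partitioner line uncommented (the class name KeyedSendExample is hypothetical):

package kafka_proj;

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

// Hypothetical keyed-send example, not part of the original files.
public class KeyedSendExample {
    public static void main(String[] args) {
        Producer<Long, String> producer = ProducerCreator.createProducer();
        for (long key = 0; key < 12; key++) {
            // key % 6 in CustomPartitioner maps keys 0..11 onto partitions 0..5 twice over.
            producer.send(new ProducerRecord<>(IKafkaConstants.TOPIC_NAME, key, "keyed record " + key));
        }
        producer.close();
    }
}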
    • ProducerCreator.java
package kafka_proj;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import kafka_proj.IKafkaConstants;

public class ProducerCreator {
    public static Producer<Long, String> createProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, IKafkaConstants.KAFKA_BROKERS);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, IKafkaConstants.CLIENT_ID);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Uncomment to route records with the custom partitioner defined above.
        // props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, CustomPartitioner.class.getName());
        return new KafkaProducer<>(props);
    }
}
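The factory above relies on the producer defaults for delivery guarantees. If reliability matters more than throughput, a few standard settings can be added before building the producer; this is a hedged variant, not part of the original example, and the class name ReliableProducerCreator is hypothetical.

package kafka_proj;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

// Hypothetical variant of ProducerCreator with common reliability settings.
public class ReliableProducerCreator {
    public static Producer<Long, String> createProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, IKafkaConstants.KAFKA_BROKERS);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, IKafkaConstants.CLIENT_ID);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.ACKS_CONFIG, "all");                // wait for all in-sync replicas
        props.put(ProducerConfig.RETRIES_CONFIG, 3);                 // retry transient send failures
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true"); // avoid duplicates on retry
        return new KafkaProducer<>(props);
    }
}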
    • App.java
package kafka_proj;

import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import kafka_proj.IKafkaConstants;
import kafka_proj.ConsumerCreator;
import kafka_proj.ProducerCreator;

public class App {

    public static void main(String[] args) {
        // runProducer();
        runConsumer();
    }

    static void runConsumer() {
        Consumer<Long, String> consumer = ConsumerCreator.createConsumer();
        int noMessageFound = 0;

        while (true) {
            // The time in milliseconds the consumer waits if no record is found at the broker.
            ConsumerRecords<Long, String> consumerRecords = consumer.poll(1000);

            if (consumerRecords.count() == 0) {
                noMessageFound++;
                // If the no-message count reaches the threshold, exit the loop.
                if (noMessageFound > IKafkaConstants.MAX_NO_MESSAGE_FOUND_COUNT)
                    break;
                else
                    continue;
            }

            // Print each record.
            consumerRecords.forEach(record -> {
                System.out.println("Record key " + record.key());
                System.out.println("Record value " + record.value());
                System.out.println("Record partition " + record.partition());
                System.out.println("Record offset " + record.offset());
            });

            // Commit the offsets of the consumed records back to the broker.
            consumer.commitAsync();
        }
        consumer.close();
    }

    static void runProducer() {
        Producer<Long, String> producer = ProducerCreator.createProducer();
        for (int index = 0; index < IKafkaConstants.MESSAGE_COUNT; index++) {
            ProducerRecord<Long, String> record =
                    new ProducerRecord<Long, String>(IKafkaConstants.TOPIC_NAME, "This is record " + index);
            try {
                RecordMetadata metadata = producer.send(record).get();
                System.out.println("Record sent with key " + index + " to partition " + metadata.partition()
                        + " with offset " + metadata.offset());
            } catch (ExecutionException e) {
                System.out.println("Error in sending record");
                System.out.println(e);
            } catch (InterruptedException e) {
                System.out.println("Error in sending record");
                System.out.println(e);
            }
        }
    }
}
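In main() above, the runProducer() call is commented out, so nothing is published before the consumer starts polling and the consumer will exit once the idle threshold is reached. To see end-to-end traffic, re-enable the producer call first; a minimal sketch of the replacement main() in App.java:

// Hedged driver order: publish the records, then read them back.
public static void main(String[] args) {
    runProducer();   // writes MESSAGE_COUNT records to the "java" topic
    runConsumer();   // reads them back from the earliest offset, then exits after the idle threshold
}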
