Kafka (eight) Python producer and consumer API usage


Single-threaded producer

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import random
import sys
import time

from kafka import KafkaProducer
from kafka.client import log

__metaclass__ = type


class Producer:
    def __init__(self, kafkaServer='127.0.0.1', kafkaPort='9092', clientId='Producer01', topic='test'):
        """
        Sets up the producer configuration. All configuration items can be found in the
        source code; the parameters below are the required ones.
        :param kafkaServer: Kafka server IP
        :param kafkaPort: Kafka listening port
        :param clientId: producer name
        :param topic: topic
        """
        self._bootstrap_server = '{host}:{port}'.format(host=kafkaServer, port=kafkaPort)
        self._topic = topic
        self._clientId = clientId
        """
        Initialize a producer instance. The producer is thread-safe, and sharing one
        instance across threads is more efficient than giving each thread its own.
        acks: consumers can only read committed messages, and a message only counts as
            committed once all replicas have it. This setting controls how many replica
            acknowledgements the producer waits for. The default is 1: the send returns
            once the partition leader has written the message to its log. 0 means the
            producer does not wait at all; 'all' means it waits until every replica has
            the message. 'all' gives the highest reliability but the lowest throughput,
            0 the opposite, so 1 is generally used.
        retries: number of automatic retries when a request fails under the acks
            criterion above. The default is 0 (no retry). Retrying may produce
            duplicate messages.
        key_serializer: key serializer; not set by default, so raw bytes are expected.
        value_serializer: value serializer; not set by default (bytes). Setting it here
            lets us send either a plain string or a key-value message.
        """
        try:
            self._producer = KafkaProducer(bootstrap_servers=self._bootstrap_server,
                                           client_id=self._clientId, acks=1,
                                           value_serializer=lambda m: json.dumps(m).encode('utf-8'))
        except Exception as err:
            print err

    def _timestamp(self):
        t = time.time()
        return int(round(t * 1000))

    # Convert a timestamp to a human-readable time string
    def getNormalTime(self, temp_timeStamp, timeSize=10):
        timeStamp = temp_timeStamp
        if timeSize == 13:  # a 13-digit timestamp is in milliseconds
            timeStamp = int(temp_timeStamp / 1000)
        timeArray = time.localtime(timeStamp)
        otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
        return otherStyleTime

    # Callback invoked when a send succeeds
    def _on_send_success(self, record_metadata):
        print "Topic: %s Partition: %d Offset: %s" % (record_metadata.topic,
                                                      record_metadata.partition,
                                                      record_metadata.offset)

    # Callback invoked when a send fails
    def _on_send_error(self, excp):
        log.error('I am an errback', exc_info=excp)

    def sendmsg(self, msg, partition=None):
        """
        Send a message.
        :param msg: the message
        :param partition: the partition; may be omitted
        :return:
        """
        if not msg:
            print "The message cannot be empty."
            return None
        # The message must be serialized, i.e. sent as bytes
        message = json.dumps(msg, encoding='utf-8', ensure_ascii=False)
        try:
            timestamp = self._timestamp()
            # send() is asynchronous and returns immediately, because the message only
            # goes to a buffer here; you can call it several times and flush them together.
            self._producer.send(self._topic, partition=partition, key=self._clientId,
                                value=message, timestamp_ms=timestamp
                                ).add_callback(self._on_send_success).add_errback(self._on_send_error)
            # flush() blocks; only flush() actually pushes the buffered data over the
            # network. Without it the data is sent when the linger time expires or the
            # buffer fills up.
            self._producer.flush()
            print self.getNormalTime(timestamp, timeSize=13) + " send msg: " + message
        except Exception as err:
            print err


def main():
    p = Producer(kafkaServer='172.16.48.171', kafkaPort='9092', topic='aaa')
    for i in range(10):
        time.sleep(1)
        closePrice = random.randint(1, 500)
        msg = {
            "Stock code": 60000 + i,
            "Yesterday's closing price": closePrice,
            "Today's opening price": 0,
            "Today's closing price": 0,
        }
        p.sendmsg(msg)


if __name__ == "__main__":
    try:
        main()
    finally:
        sys.exit()
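
The producer above sends asynchronously: send() only hands the message to a buffer, and the callbacks plus flush() take care of delivery and error reporting. kafka-python also lets you block on the future that send() returns, which is handy when you want the broker's acknowledgement before continuing. Below is a minimal sketch of that pattern; it reuses the broker address and topic from the example above, and acks='all' with retries=3 is only there to illustrate the reliability trade-off described in the comments, not a recommended setting.

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Minimal sketch: synchronous send by blocking on the future returned by send().
# Broker address and topic are assumed to match the example above; adjust as needed.
import json

from kafka import KafkaProducer
from kafka.errors import KafkaError

producer = KafkaProducer(
    bootstrap_servers='172.16.48.171:9092',
    acks='all',    # wait for all in-sync replicas: highest reliability, lowest throughput
    retries=3,     # retry failed requests; note that retries may produce duplicates
    value_serializer=lambda m: json.dumps(m).encode('utf-8'))

future = producer.send('aaa', value={'hello': 'kafka'})
try:
    # get() blocks until the broker acknowledges the message or the timeout expires
    record_metadata = future.get(timeout=10)
    print "Topic: %s Partition: %d Offset: %s" % (record_metadata.topic,
                                                  record_metadata.partition,
                                                  record_metadata.offset)
except KafkaError as err:
    print err

producer.close()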


Consumers

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import sys

from kafka import KafkaConsumer

__metaclass__ = type


class Consumer:
    def __init__(self, kafkaServer='127.0.0.1', kafkaPort='9092', groupId='testGroup', clientId='Test', topic='test'):
        """
        Sets up the consumer configuration. All configuration items can be found in the
        source code; the parameters below are the required ones.
        :param kafkaServer: Kafka server IP
        :param kafkaPort: Kafka listening port
        :param groupId: consumer group ID
        :param clientId: consumer name
        :param topic: topic
        """
        self._bootstrap_server = '{host}:{port}'.format(host=kafkaServer, port=kafkaPort)
        self._groupId = groupId
        self._topic = topic
        self._clientId = clientId

    def consumeMsg(self):
        try:
            """
            Initialize a consumer instance. The consumer is NOT thread-safe, so it is
            recommended that each thread owns its own consumer rather than having
            several threads share one.
            Optional parameters that can be passed when creating the KafkaConsumer:
            enable_auto_commit: whether to auto-commit offsets, default True
            auto_commit_interval_ms: auto-commit interval in milliseconds
            """
            consumer = KafkaConsumer(self._topic, bootstrap_servers=self._bootstrap_server,
                                     group_id=self._groupId, client_id=self._clientId,
                                     enable_auto_commit=True, auto_commit_interval_ms=5000,
                                     value_deserializer=lambda m: json.loads(m.decode('utf-8')))
            """
            There is no need to call subscribe() explicitly: the topic is passed when the
            KafkaConsumer object is created, and subscription happens automatically when
            the topic argument is not empty. Which partition this consumer reads is also
            assigned automatically. To pick partitions by hand, use assign() instead and
            do not pass a topic to the constructor.
            """
            # consumer.subscribe(self._topicList)
            # Both calls below return a set
            print "Partitions of the current topic:", consumer.partitions_for_topic(self._topic)
            print "Current subscription:", consumer.subscription()
            while True:
                for msg in consumer:
                    if msg:
                        print "Topic: %s Partition: %d Offset: %s Key: %s Message: %s" % (
                            msg.topic, msg.partition, msg.offset, msg.key, msg.value)
        except Exception as err:
            print err


def main():
    try:
        c = Consumer(kafkaServer='172.16.48.171', topic='aaa')
        c.consumeMsg()
    except Exception as err:
        print err


if __name__ == "__main__":
    try:
        main()
    finally:
        sys.exit()
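
The consumer above relies on automatic subscription and partition assignment. As the comments note, partitions can instead be pinned by hand with assign(), in which case no topic is passed to the constructor. A minimal sketch, assuming the same broker and topic as above and that partition 0 exists:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Minimal sketch: manual partition assignment with assign() instead of automatic
# subscription. Broker address, topic, and partition number are assumptions.
import json

from kafka import KafkaConsumer, TopicPartition

# No topic is passed here, so nothing is subscribed automatically
consumer = KafkaConsumer(
    bootstrap_servers='172.16.48.171:9092',
    auto_offset_reset='earliest',   # start from the beginning when there is no committed offset
    value_deserializer=lambda m: json.loads(m.decode('utf-8')))

# Pin this consumer to partition 0 of the topic
consumer.assign([TopicPartition('aaa', 0)])
print "Assigned partitions:", consumer.assignment()

for msg in consumer:
    print "Topic: %s Partition: %d Offset: %s Value: %s" % (msg.topic, msg.partition,
                                                            msg.offset, msg.value)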

Execution results

