Kafka is a high-performance publish-subscribe message queue originally developed at LinkedIn; it is now an Apache project. It supports a wide range of consumer clients, and many third-party clients exist as well (note 1), but below we use only the consumer classes shipped in Kafka's own package. Our example calls Kafka's consumer-related classes from a servlet to read messages from a remote Kafka broker.
The code is as follows:
protected voidDoget (HttpServletRequest request, httpservletresponse response)throwsservletexception, IOException {String topic= "Test"; List<Message> list =NewArraylist<message>(); StringBuilder Builder=NewStringBuilder (); Kafkahttpconsumer Consumer=NewKafkahttpconsumer (); List=consumer.consume (topic); Builder.append ("["); for(inti=0; I<list.size (); i++) {builder.append (List.get (i). message); Builder.append (","); } Builder.deletecharat (Builder.length ()-1); Builder.append ("]"); Response.getwriter (). Append (Builder.tostring ()); }
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import com.fasterxml.jackson.annotation.JsonInclude;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerTimeoutException;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
Public classKafkahttpconsumer { PublicList<message>consume (String topic) {Properties prop=NewProperties (); Try{prop.load ( This. GetClass (). getResourceAsStream ("/kafka-http.properties")); } Catch(FileNotFoundException e) {e.printstacktrace (); } Catch(IOException e) {e.printstacktrace (); } consumerconfig config=NewConsumerconfig (prop); Consumerconnector connector=consumer.createjavaconsumerconnector (config); Map<string, integer> streamcounts = Collections.singletonmap (topic, 1); Map<string, list<kafkastream<byte[],byte[]>>> streams =Connector.createmessagestreams (streamcounts); Kafkastream<byte[],byte[]> stream = Streams.get (topic). Get (0);
List<Message> messages =NewArraylist<>(); Try { for(messageandmetadata<byte[],byte[]>messageandmetadata:stream) Messages.add (NewMessage (messageandmetadata)); } Catch(Consumertimeoutexception ignore) {}finally{connector.commitoffsets (); Connector.shutdown (); } returnmessages; } /*For Test*/ Public Static voidMain (string[] args) {Properties prop=NewProperties (); Try{prop.load (kafkahttpconsumer).class. getResourceAsStream ("/kafka-http.properties")); Iterator<Object> ite =Prop.keyset (). iterator (); while(Ite.hasnext ()) {String key=(String) ite.next (); System.out.println ("Value:" +Prop.getproperty (key)); } } Catch(FileNotFoundException e) {e.printstacktrace (); } Catch(IOException e) {e.printstacktrace (); } } Public Static classMessage { PublicString topic; @JsonInclude (JsonInclude.Include.NON_NULL) PublicString Key; PublicString message; Public intpartition; Public Longoffset; PublicMessage (messageandmetadata<byte[],byte[]>message) { This. Topic =Message.topic (); This. Key = Message.key ()! =NULL?NewString (Message.key (), Charset.forname ("Utf-8")):NULL; This. Message =NewString (Message.message (), Charset.forname ("Utf-8")); This. partition =message.partition (); This. offset =Message.offset (); } }}
Note 1: https://cwiki.apache.org/confluence/display/kafka/clients
Consume messages from Kafka using native methods