import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class kafkaConsumer {

    public static void main(String[] args) {
        final String topic = "your_topic_name";

        // Old (0.8.x) high-level consumer configuration: the group's offsets are managed through ZooKeeper.
        final Properties props = new Properties();
        props.put("zookeeper.connect", "10.10.2.227:2181");
        props.put("auto.offset.reset", "smallest");
        props.put("zookeeper.connectiontimeout.ms", "1000000");
        props.put("group.id", "groupTest2");
        props.put("zookeeper.session.timeout.ms", "40000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        props.put("rebalance.max.retries", "5");
        props.put("rebalance.backoff.ms", "12000");

        ExecutorService threadPool = Executors.newFixedThreadPool(30);
        threadPool.execute(new Runnable() {
            public void run() {
                ConsumerConnector consumer =
                        kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

                // Request one stream (consumer thread) for this topic.
                Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
                topicCountMap.put(topic, Integer.valueOf(1));
                Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                        consumer.createMessageStreams(topicCountMap);

                // Take the single stream created for this topic and iterate over incoming messages.
                KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
                ConsumerIterator<byte[], byte[]> it = stream.iterator();

                System.out.println("# begin to fetch data");
                while (it.hasNext()) {
                    String msg = new String(it.next().message());
                    System.out.println("[****] kafka received msg: " + new Date().toString() + " receive: " + msg);
                }
            }
        });
    }
}
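
The consumer above uses the old Scala high-level consumer (kafka.consumer.*, kafka.javaapi.consumer.*), which reads offsets through ZooKeeper and was removed in Kafka 2.0. For comparison, here is a minimal sketch of the same logic with the newer org.apache.kafka.clients.consumer.KafkaConsumer. The kafka-clients 2.x dependency and the broker address 10.10.2.227:9092 are assumptions, since the original snippet only gives the ZooKeeper address.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class NewApiKafkaConsumer {

    public static void main(String[] args) {
        Properties props = new Properties();
        // The new client talks to the brokers directly; "10.10.2.227:9092" is an assumed broker address.
        props.put("bootstrap.servers", "10.10.2.227:9092");
        props.put("group.id", "groupTest2");
        props.put("auto.offset.reset", "earliest");   // replaces the old "smallest"
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props)) {
            consumer.subscribe(Collections.singletonList("your_topic_name"));
            while (true) {
                // Poll the brokers and print each received value, as the old loop did.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("[****] kafka received msg: " + record.value());
                }
            }
        }
    }
}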