當前位置: 首頁>>代碼示例>>Java>>正文


Java ConsumerConnector.createMessageStreams方法代碼示例

本文整理匯總了Java中kafka.javaapi.consumer.ConsumerConnector.createMessageStreams方法的典型用法代碼示例。如果您正苦於以下問題:Java ConsumerConnector.createMessageStreams方法的具體用法?Java ConsumerConnector.createMessageStreams怎麽用?Java ConsumerConnector.createMessageStreams使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在kafka.javaapi.consumer.ConsumerConnector的用法示例。


在下文中一共展示了ConsumerConnector.createMessageStreams方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。

示例1: open

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Storm spout lifecycle hook: builds a high-level Kafka consumer for {@code topic}
 * and stores an iterator over its single message stream in {@code kafkaIterator}.
 *
 * @param map                  storm topology configuration (unused here; ZK info comes from {@code conf})
 * @param topologyContext      storm topology context (unused)
 * @param spoutOutputCollector collector used later to emit tuples
 */
public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
    _collector = spoutOutputCollector;
    // High-level consumer configuration; ZooKeeper quorum comes from the ingest config.
    Properties props = new Properties();
    props.put("zookeeper.connect", conf.get(OSMIngest.ZOOKEEPERS));
    props.put("group.id", groupId);
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    // Request exactly one stream for the topic, decoding both key and value as Strings.
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap, new StringDecoder(new VerifiableProperties()), new StringDecoder(new VerifiableProperties()));
    List<KafkaStream<String, String>> streams = consumerMap.get(topic);
    // FIX: the original only logged on an unexpected stream count and then called
    // stream.iterator() on a null reference (guaranteed NullPointerException).
    // Fail fast with a descriptive exception instead.
    if (streams == null || streams.size() != 1) {
        log.error("Streams should be of size 1");
        throw new IllegalStateException("Expected exactly 1 Kafka stream for topic " + topic
                + " but got " + (streams == null ? 0 : streams.size()));
    }
    kafkaIterator = streams.get(0).iterator();
}
 
開發者ID:geomesa,項目名稱:geomesa-tutorials,代碼行數:22,代碼來源:OSMKafkaSpout.java

示例2: readTopicToList

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Reads up to {@code stopAfter} messages from {@code topicName} into a list,
 * using only the Kafka high-level consumer API.
 *
 * @param topicName topic to consume from
 * @param config    consumer configuration (group id, ZooKeeper quorum, ...)
 * @param stopAfter maximum number of messages to read before returning
 * @return the messages read; may be fewer than {@code stopAfter} if the iterator ends first
 */
private static List<MessageAndMetadata<byte[], byte[]>> readTopicToList(String topicName, ConsumerConfig config, final int stopAfter) {
	ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(config);
	// we request only one stream per consumer instance. Kafka will make sure that each consumer group
	// will see each message only once.
	Map<String,Integer> topicCountMap = Collections.singletonMap(topicName, 1);
	Map<String, List<KafkaStream<byte[], byte[]>>> streams = consumerConnector.createMessageStreams(topicCountMap);
	if (streams.size() != 1) {
		throw new RuntimeException("Expected only one message stream but got "+streams.size());
	}
	List<KafkaStream<byte[], byte[]>> kafkaStreams = streams.get(topicName);
	if (kafkaStreams == null) {
		throw new RuntimeException("Requested stream not available. Available streams: "+streams.toString());
	}
	if (kafkaStreams.size() != 1) {
		// FIX: corrected error-message typo ("bot got" -> "but got")
		throw new RuntimeException("Requested 1 stream from Kafka, but got "+kafkaStreams.size()+" streams");
	}
	LOG.info("Opening Consumer instance for topic '{}' on group '{}'", topicName, config.groupId());
	ConsumerIterator<byte[], byte[]> iteratorToRead = kafkaStreams.get(0).iterator();

	// Drain the iterator until we have read stopAfter messages (or it is exhausted).
	List<MessageAndMetadata<byte[], byte[]>> result = new ArrayList<>();
	int read = 0;
	while(iteratorToRead.hasNext()) {
		read++;
		result.add(iteratorToRead.next());
		if (read == stopAfter) {
			LOG.info("Read "+read+" elements");
			return result;
		}
	}
	return result;
}
 
開發者ID:axbaretto,項目名稱:flink,代碼行數:35,代碼來源:KafkaConsumerTestBase.java

示例3: addNewConsumer

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Registers {@code threads} consumer streams for {@code topic} on the already-connected
 * ConsumerConnector and submits one Consumer task per stream to a fresh thread pool.
 */
public void addNewConsumer(String topic, Integer threads){
	ConsumerConnector connector = consumerConnMap.get(topic);

	// One entry: this topic, consumed by `threads` parallel streams.
	Map<String, Integer> countByTopic = new HashMap<String, Integer>();
	countByTopic.put(topic, threads);
	Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic = connector.createMessageStreams(countByTopic);

	// One worker thread per stream.
	ExecutorService pool = Executors.newFixedThreadPool(threads);
	for (final KafkaStream<byte[], byte[]> stream : streamsByTopic.get(topic)) {
		pool.submit(new Consumer(stream, this));
	}

	// Remember the pool so it can be shut down later.
	executorMap.put(topic, pool);
}
 
開發者ID:DTStack,項目名稱:jlogstash-input-plugin,代碼行數:18,代碼來源:KafkaDistributed.java

示例4: consume

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/** Consume messages from a single topic using the high-level consumer.
 *
 * @param topicName queue (topic) name
 * @param groupId   consumer group name
 * @return an iterator over the topic's messages
 */
static MsgIterator consume(String topicName, String groupId) {
	ConsumerConnector consumerConnector = KafkaHelper.getConsumer(groupId);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();	// (topic, #stream) pair
	topicCountMap.put(topicName, Integer.valueOf(1));	// FIX: avoid deprecated new Integer(...)

	//TODO: could consume multiple topics at once
	Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector.createMessageStreams(topicCountMap);	// using the default decoder
	// One list entry per requested stream; each stream supports an iterator over message/metadata pairs.
	List<KafkaStream<byte[], byte[]>> streamList = consumerMap.get(topicName);
	KafkaStream<byte[], byte[]> stream = streamList.get(0);

	// KafkaStream<K, V>: K is the partition key type, V is the message value type.
	ConsumerIterator<byte[], byte[]> it = stream.iterator();
	return new MsgIterator(it);
}
 
開發者ID:linzhaoming,項目名稱:easyframe-msg,代碼行數:23,代碼來源:KafkaHelper.java

示例5: kafkaStream

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Spring bean: opens a single Kafka message stream (String keys, float[] values)
 * for the topic derived from the gateway address.
 */
@Bean
protected KafkaStream<String, float[]> kafkaStream() {

    final String topicName = retrieveTopicNameFromGatewayAddress(gatewayUrl());

    ConsumerConnector connector =
            Consumer.createJavaConsumerConnector(consumerConfig());

    // Exactly one stream for our topic.
    Map<String, Integer> streamCounts = new HashMap<>();
    streamCounts.put(topicName, 1);

    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    FeatureVectorDecoder valueDecoder = new FeatureVectorDecoder();

    Map<String, List<KafkaStream<String, float[]>>> allStreams =
            connector.createMessageStreams(streamCounts, keyDecoder, valueDecoder);
    List<KafkaStream<String, float[]>> topicStreams = allStreams.get(topicName);

    // Guard against a missing topic or an empty stream list before dereferencing.
    Preconditions.checkNotNull(topicStreams, String.format("Topic %s not found in streams map.", topicName));
    Preconditions.checkElementIndex(0, topicStreams.size(),
            String.format("List of streams of topic %s is empty.", topicName));
    return topicStreams.get(0);
}
 
開發者ID:trustedanalytics,項目名稱:space-shuttle-demo,代碼行數:21,代碼來源:KafkaConfiguration.java

示例6: createKafkaStream

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Creates {@code partitions} Kafka byte-array streams for {@code topic},
 * using a short-timeout test consumer connected to the given ZooKeeper quorum.
 */
public List<KafkaStream<byte[], byte[]>> createKafkaStream(
    String zookeeperConnectString,
    String topic,
    int partitions
) {
  // Test-client consumer configuration.
  Properties consumerProps = new Properties();
  consumerProps.put("zookeeper.connect", zookeeperConnectString);
  consumerProps.put("group.id", "testClient");
  consumerProps.put("zookeeper.session.timeout.ms", "6000");
  consumerProps.put("zookeeper.sync.time.ms", "200");
  consumerProps.put("auto.commit.interval.ms", "1000");
  consumerProps.put("consumer.timeout.ms", "500");
  ConsumerConnector connector =
      Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));

  // One stream per partition for the requested topic.
  Map<String, Integer> streamCounts = new HashMap<>();
  streamCounts.put(topic, partitions);
  return connector.createMessageStreams(streamCounts).get(topic);
}
 
開發者ID:streamsets,項目名稱:datacollector,代碼行數:21,代碼來源:SdcKafkaTestUtil.java

示例7: createKafkaStream

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Creates {@code partitions} Kafka byte-array streams for {@code topic},
 * using a short-timeout test consumer connected to the given ZooKeeper quorum.
 */
public static List<KafkaStream<byte[], byte[]>> createKafkaStream(String zookeeperConnectString, String topic, int partitions) {
  // Test-client consumer configuration.
  Properties props = new Properties();
  props.put("zookeeper.connect", zookeeperConnectString);
  props.put("group.id", "testClient");
  props.put("zookeeper.session.timeout.ms", "6000");
  props.put("zookeeper.sync.time.ms", "200");
  props.put("auto.commit.interval.ms", "1000");
  props.put("consumer.timeout.ms", "500");
  ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

  // One stream per partition for the requested topic.
  Map<String, Integer> streamCounts = new HashMap<>();
  streamCounts.put(topic, partitions);
  return connector.createMessageStreams(streamCounts).get(topic);
}
 
開發者ID:streamsets,項目名稱:datacollector,代碼行數:18,代碼來源:KafkaTestUtil.java

示例8: run

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Opens {@code threadsPerTopic} Kafka streams for the configured topic and
 * dispatches each stream to its own consumer task on a CPU-sized thread pool.
 */
@Override
public void run() {
	// Size the worker pool to the number of available cores.
	ExecutorService workers =
			Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

	ConsumerConnector connector = kafka.consumer.Consumer
			.createJavaConsumerConnector(this.consumerConfig);

	// Ask Kafka for threadsPerTopic streams of our single topic.
	Map<String, Integer> streamCounts = new HashMap<>();
	streamCounts.put(this.topic, threadsPerTopic);

	// One stream per thread per topic.
	Map<String, List<KafkaStream<String, TIn>>> streamsByTopic = connector
			.createMessageStreams(streamCounts, this.keyDecoder, this.valueDecoder);

	// Submit one consumer task per stream.
	for (final KafkaStream<String, TIn> stream : streamsByTopic.get(this.topic)) {
		workers.submit(new Consumer<String, TIn>(stream, dispatcherCommand));
	}

	// The producer must remain open while consumer threads are still running.
	// this.producer.close();
}
 
開發者ID:mpopp,項目名稱:MIB,代碼行數:25,代碼來源:EventDispatcher.java

示例9: main

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Demo entry point: connects a high-level consumer to "my-topic" and prints
 * every message payload to stdout until interrupted.
 */
public static void main(String[] args) {
	// High-level consumer configuration (ZooKeeper-based, Kafka 0.8 API).
	Properties props = new Properties();
	props.put("zookeeper.connect","10.15.62.76:2181");
	props.put("group.id","mygroup001");
	props.put("zookeeper.session.timeout.ms","40000");
	props.put("zookeeper.sync.time.ms","200");
	props.put("auto.commit.interval.ms","1000");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);

	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	// Request a single stream for "my-topic".
	Map<String,Integer> topicCountMap = new HashMap<String,Integer>();
	topicCountMap.put("my-topic", Integer.valueOf(1));	// FIX: avoid deprecated new Integer(...)
	System.out.println("zzzzzzzzzzzzz");
	Map<String,List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
	List<KafkaStream<byte[], byte[]>> streams = consumerMap.get("my-topic");

	KafkaStream<byte[], byte[]> stream = streams.get(0);

	// Blocks forever: hasNext() waits for new messages.
	ConsumerIterator<byte[], byte[]> it = stream.iterator();
	System.out.println("before while...");
	while(it.hasNext()){
		System.out.println(new String(it.next().message()));
	}
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:27,代碼來源:myConsumer.java

示例10: open

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Storm spout lifecycle hook: opens a single legacy Kafka message stream for
 * "newtopic" and keeps its iterator for nextTuple() to drain.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Map conf, TopologyContext context,
		SpoutOutputCollector collector) {
	this._collector = collector;

	// Legacy (pre-0.8) consumer property names: zk.connect / groupid.
	Properties props = new Properties();
	props.put("zk.connect", "10.15.62.104:2181");
	props.put("groupid", "group1");

	ConsumerConnector consumer = kafka.consumer.Consumer
			.createJavaConsumerConnector(new ConsumerConfig(props));

	// A single stream for the "newtopic" topic.
	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	topicCountMap.put("newtopic", Integer.valueOf(1));

	Map<String, List<KafkaMessageStream>> streamsByTopic = consumer
			.createMessageStreams(topicCountMap);

	this.it = streamsByTopic.get("newtopic").get(0).iterator();
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:26,代碼來源:SpoutKafka.java

示例11: open

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/**
 * Storm spout lifecycle hook: initializes the logger and opens a single legacy
 * Kafka message stream for "sec-stream-one", keeping its iterator for later use.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Map conf, TopologyContext context,
		SpoutOutputCollector collector) {
	this._collector = collector;
	// FIX: BoltCassandra.class.getClass() evaluates to java.lang.Class, so the
	// logger was named "java.lang.Class"; use the bolt's own class name instead.
	this.logger = Logger.getLogger(BoltCassandra.class.getName());

	// Construct kafka part (legacy pre-0.8 property names).
	Properties props = new Properties();
	props.put("zk.connect", "10.15.62.75:2181");
	props.put("groupid", "sec-group-1");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	// A single stream for the "sec-stream-one" topic.
	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	topicCountMap.put("sec-stream-one", Integer.valueOf(1));

	Map<String, List<KafkaMessageStream>> consumerMap = consumer.createMessageStreams(topicCountMap);

	KafkaMessageStream stream = consumerMap.get("sec-stream-one").get(0);

	this.it = stream.iterator();
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:25,代碼來源:SpoutKafka.java

示例12: KafkaInit

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/** Connects a high-level consumer to the "Rec-recPage" topic and stores its iterator. */
private void KafkaInit(){
	// High-level consumer over the three-node ZooKeeper ensemble.
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecRecPage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more streams enable parallel processing.
	topicCountMap.put("Rec-recPage", Integer.valueOf(1));	// FIX: avoid deprecated new Integer(...)

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-recPage").get(0);
	this.it = stream.iterator();
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:17,代碼來源:RecBookRecPageSpout.java

示例13: KafkaInit

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/** Connects a high-level consumer to the "Rec-recPageTagTag" topic and stores its iterator. */
private void KafkaInit(){
	// High-level consumer over the three-node ZooKeeper ensemble.
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecTagRecPage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more streams enable parallel processing.
	topicCountMap.put("Rec-recPageTagTag", Integer.valueOf(1));	// FIX: avoid deprecated new Integer(...)

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-recPageTagTag").get(0);
	this.it = stream.iterator();
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:17,代碼來源:RecTagRecPageSpout.java

示例14: KafkaInit

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/** Connects a high-level consumer to the "Rec-personalPage" topic and stores its iterator. */
private void KafkaInit(){
	// High-level consumer over the three-node ZooKeeper ensemble.
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecPersonalPage");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more streams enable parallel processing.
	topicCountMap.put("Rec-personalPage", Integer.valueOf(1));	// FIX: avoid deprecated new Integer(...)

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-personalPage").get(0);
	this.it = stream.iterator();
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:17,代碼來源:RecBookPersonalPageSpout.java

示例15: KafkaInit

import kafka.javaapi.consumer.ConsumerConnector; //導入方法依賴的package包/類
/** Connects a high-level consumer to the "Rec-recPageTagBook" topic and stores its iterator. */
private void KafkaInit(){
	// High-level consumer over the three-node ZooKeeper ensemble.
	Properties props = new Properties();
	props.put("zookeeper.connect", "10.15.62.75:2181,10.15.62.76:2181,10.15.62.77:2181");
	props.put("group.id", "RecTagBook");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	// The count is the number of streams; more streams enable parallel processing.
	topicCountMap.put("Rec-recPageTagBook", Integer.valueOf(1));	// FIX: avoid deprecated new Integer(...)

	Map<String, List<KafkaStream<byte[],byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);

	// Only one stream was requested, so take index 0.
	KafkaStream<byte[], byte[]> stream = consumerMap.get("Rec-recPageTagBook").get(0);
	this.it = stream.iterator();
}
 
開發者ID:YinYanfei,項目名稱:CadalWorkspace,代碼行數:17,代碼來源:RecTagBookSpout.java


注:本文中的kafka.javaapi.consumer.ConsumerConnector.createMessageStreams方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。