当前位置: 首页>>代码示例>>Java>>正文


Java FetchResponse类代码示例

本文整理汇总了Java中kafka.javaapi.FetchResponse的典型用法代码示例。如果您正苦于以下问题:Java FetchResponse类的具体用法?Java FetchResponse怎么用?Java FetchResponse使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


FetchResponse类属于kafka.javaapi包,在下文中一共展示了FetchResponse类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: main

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Low-level (SimpleConsumer) demo: fetches from three partitions of topic
 * "test2" starting at offset 0 and prints offset + payload of every message
 * read from partition 0.
 *
 * Fix: the consumer is now closed in a finally block, so the broker socket is
 * released even if the fetch or payload decoding throws.
 */
public static void main(String[] args) throws Exception {
	final String topic = "test2";
	String clientId = "LowLevelConsumerClient1";
	SimpleConsumer simpleConsumer = new SimpleConsumer(
			"192.168.1.186", 9092, 6000000, 64 * 1000000, clientId);
	try {
		FetchRequest req = new FetchRequestBuilder().clientId(clientId)
								.addFetch(topic, 0, 0L, 1000000)
								.addFetch(topic, 1, 0L, 1000000)
								.addFetch(topic, 2, 0L, 1000000)
								.build();
		FetchResponse rep = simpleConsumer.fetch(req);
		ByteBufferMessageSet messageSet = rep.messageSet(topic, 0);
		for (MessageAndOffset messageAndOffset : messageSet) {
			ByteBuffer payload = messageAndOffset.message().payload();
			long offset = messageAndOffset.offset();
			// Copy the payload bytes out of the buffer before decoding.
			byte[] bytes = new byte[payload.limit()];
			payload.get(bytes);
			System.out.println("Offset : " + offset + ", Payload : " + new String(bytes, "UTF-8"));
		}
	} finally {
		// Release the TCP connection to the broker.
		simpleConsumer.close();
	}
}
 
开发者ID:walle-liao,项目名称:jaf-examples,代码行数:21,代码来源:LowLevelConsumerDemo.java

示例2: main

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Low-level consumer demo against broker "kafka0": fetches three partitions of
 * "topic1" from offset 0 and prints each message in partition 0.
 *
 * Fixes: removed the redundant cast — {@code FetchResponse.messageSet} already
 * returns a {@code ByteBufferMessageSet} — and the consumer is now closed in a
 * finally block so the connection is not leaked on error.
 */
public static void main(String[] args) throws Exception {
	final String topic = "topic1";
	String clientID = "DemoLowLevelConsumer1";
	SimpleConsumer simpleConsumer = new SimpleConsumer("kafka0", 9092, 100000, 64 * 1000000, clientID);
	try {
		FetchRequest req = new FetchRequestBuilder().clientId(clientID)
				.addFetch(topic, 0, 0L, 50).addFetch(topic, 1, 0L, 5000).addFetch(topic, 2, 0L, 1000000).build();
		FetchResponse fetchResponse = simpleConsumer.fetch(req);
		ByteBufferMessageSet messageSet = fetchResponse.messageSet(topic, 0);
		for (MessageAndOffset messageAndOffset : messageSet) {
			ByteBuffer payload = messageAndOffset.message().payload();
			long offset = messageAndOffset.offset();
			byte[] bytes = new byte[payload.limit()];
			payload.get(bytes);
			System.out.println("Offset:" + offset + ", Payload:" + new String(bytes, "UTF-8"));
		}
	} finally {
		// Release the broker connection.
		simpleConsumer.close();
	}
}
 
开发者ID:habren,项目名称:KafkaExample,代码行数:17,代码来源:DemoLowLevelConsumer.java

示例3: fetchLatestRecordPayloadBytes

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Fetches partition 0 of {@code destinationTopic} from offset 0 and returns the
 * payload bytes of the message at the highest offset. If the topic is empty the
 * sentinel empty message yields a zero-length array.
 */
private byte[] fetchLatestRecordPayloadBytes(SimpleConsumer kafkaConsumer) {
  FetchRequest request = new FetchRequestBuilder().addFetch(destinationTopic, 0, 0, 1000000).build();
  FetchResponse response = kafkaConsumer.fetch(request);

  // Start from a sentinel empty message, then walk the whole set so we finish
  // holding the message at the latest offset.
  MessageAndOffset newest = new MessageAndOffset(new Message(new byte[] { }), 0L);
  for (Iterator<MessageAndOffset> it = response.messageSet(destinationTopic, 0).iterator(); it.hasNext(); ) {
    newest = it.next();
  }

  ByteBuffer body = newest.message().payload();
  byte[] data = new byte[body.limit()];
  body.get(data);
  return data;
}
 
开发者ID:verisign,项目名称:storm-graphite,代码行数:18,代码来源:BaseKafkaReporterTest.java

示例4: fetchNextMessageBuffer

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Fetches the next buffer of messages for the given partition starting at
 * {@code nextOffset}. Returns null once {@code nextOffset} has passed
 * {@code maxOffset}. On fetch failure, logs a warning, refreshes the topic
 * metadata and retries.
 */
@Override
protected Iterator<MessageAndOffset> fetchNextMessageBuffer(KafkaPartition partition, long nextOffset,
    long maxOffset) {
  if (nextOffset > maxOffset) {
    return null;
  }

  FetchRequest fetchRequest = createFetchRequest(partition, nextOffset);
  try {
    return getIteratorFromFetchResponse(getFetchResponseForFetchRequest(fetchRequest, partition), partition);
  } catch (Exception e) {
    LOG.warn(
        String.format("Fetch message buffer for partition %s has failed: %s. Will refresh topic metadata and retry",
            partition, e));
    return refreshTopicMetadataAndRetryFetch(partition, fetchRequest);
  }
}
 
开发者ID:Hanmourang,项目名称:Gobblin,代码行数:20,代码来源:KafkaWrapper.java

示例5: fetch

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Consumes messages from the given topics at the given partitions and offsets.
 *
 * @param simpleConsumer consumer used to issue the fetch
 * @param topics the topic names
 * @param partitions partition id for each topic (parallel to {@code topics})
 * @param offsets starting byte offset for each topic (parallel to {@code topics})
 * @return map of topic name to the list of message bodies, decoded as UTF-8
 * @throws Exception if the fetch or payload decoding fails
 */
static Map<String, List<String>> fetch(SimpleConsumer simpleConsumer, String[] topics, int[] partitions, long[] offsets) throws Exception{
	FetchRequest fetchRequest = getFetchRequest(simpleConsumer,topics, partitions, offsets);
	FetchResponse fetchResponse = simpleConsumer.fetch(fetchRequest);
	
	Map<String, List<String>> retMap = new HashMap<String, List<String>>();
	for (int i = 0; i < topics.length; i++) {
		String topic = topics[i];
		// Fix: parameterized list instead of the raw type `List`.
		List<String> list = new ArrayList<String>();
		retMap.put(topic, list);
		
		ByteBufferMessageSet messageSet = fetchResponse.messageSet(topic, partitions[i]);
		
		for (MessageAndOffset messageAndOffset : messageSet) {
			ByteBuffer payload = messageAndOffset.message().payload();
			byte[] bytes = new byte[payload.limit()];
			payload.get(bytes);
			list.add(new String(bytes, "UTF-8"));
		}
	}
	
	return retMap;
}
 
开发者ID:linzhaoming,项目名称:easyframe-msg,代码行数:33,代码来源:SimpleKafkaHelper.java

示例6: openFetchRequest

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Lazily opens the fetch: on first use, issues a fetch request for this split's
 * topic/partition at the current cursor offset and stores the resulting message
 * iterator. Subsequent calls are no-ops until the iterator is reset.
 *
 * @throws PrestoException if the broker reports an error in the fetch response
 */
private void openFetchRequest()
{
    if (messageAndOffsetIterator != null) {
        return; // already open
    }

    log.debug("Fetching %d bytes from offset %d (%d - %d). %d messages read so far", KAFKA_READ_BUFFER_SIZE, cursorOffset, split.getStart(), split.getEnd(), totalMessages);

    FetchRequest fetchRequest = new FetchRequestBuilder()
            .clientId("presto-worker-" + Thread.currentThread().getName())
            .addFetch(split.getTopicName(), split.getPartitionId(), cursorOffset, KAFKA_READ_BUFFER_SIZE)
            .build();

    // TODO - this should look at the actual node this is running on and prefer
    // that copy if running locally. - look into NodeInfo
    SimpleConsumer consumer = consumerManager.getConsumer(split.getNodes().get(0));

    FetchResponse response = consumer.fetch(fetchRequest);
    if (response.hasError()) {
        short errorCode = response.errorCode(split.getTopicName(), split.getPartitionId());
        log.warn("Fetch response has error: %d", errorCode);
        throw new PrestoException(KAFKA_SPLIT_ERROR, "could not fetch data from Kafka, error code is '" + errorCode + "'");
    }

    messageAndOffsetIterator = response.messageSet(split.getTopicName(), split.getPartitionId()).iterator();
}
 
开发者ID:y-lan,项目名称:presto,代码行数:24,代码来源:KafkaRecordSet.java

示例7: mockSimpleConsumerForRead

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Stubs the given mock consumer so that any fetch matching {@code IsFetchRequest}
 * returns a canned FetchResponse for the given topic/partition. The response
 * contains readSizeInBytes / eachEventInBytes serialized events with offsets
 * increasing from readOffset.
 */
private void mockSimpleConsumerForRead(SimpleConsumer mockConsumer,
		String topic, int partition, long readOffset, long readSizeInBytes) {
	long messageCount = readSizeInBytes / eachEventInBytes;
	List<MessageAndOffset> stubbedMessages = new ArrayList<MessageAndOffset>();
	for (int idx = 0; idx < messageCount; idx++) {
		JetstreamEvent event = new JetstreamEvent();
		byte[] encodedKey = serializer.encodeMessage(event);
		byte[] encodedPayload = serializer.encodeMessage(event);
		Message message = mock(Message.class);
		when(message.key()).thenReturn(ByteBuffer.wrap(encodedKey));
		when(message.payload()).thenReturn(ByteBuffer.wrap(encodedPayload));
		stubbedMessages.add(new MessageAndOffset(message, readOffset + idx));
	}
	ByteBufferMessageSet stubbedSet = mock(ByteBufferMessageSet.class);
	when(stubbedSet.iterator()).thenReturn(stubbedMessages.iterator());
	FetchResponse stubbedResponse = mock(FetchResponse.class);
	when(stubbedResponse.hasError()).thenReturn(false);
	when(stubbedResponse.messageSet(topic, partition)).thenReturn(stubbedSet);
	when(mockConsumer.fetch(argThat(new IsFetchRequest()))).thenReturn(
			stubbedResponse);
}
 
开发者ID:pulsarIO,项目名称:jetstream,代码行数:23,代码来源:PartitionReaderTest.java

示例8: readMessages

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Reads all messages currently in partition 0 of the given topic from the
 * local broker (localhost:6667) and returns their raw payload bytes.
 *
 * Fix: the consumer is now closed in a finally block so the broker connection
 * is not leaked when fetch() or payload copying throws.
 *
 * @param topic the topic to read from
 * @return payloads of all fetched messages, in offset order
 */
public List<byte[]> readMessages(String topic) {
  SimpleConsumer consumer = new SimpleConsumer("localhost", 6667, 100000, 64 * 1024, "consumer");
  try {
    FetchRequest req = new FetchRequestBuilder()
            .clientId("consumer")
            .addFetch(topic, 0, 0, 100000)
            .build();
    FetchResponse fetchResponse = consumer.fetch(req);
    Iterator<MessageAndOffset> results = fetchResponse.messageSet(topic, 0).iterator();
    List<byte[]> messages = new ArrayList<>();
    while (results.hasNext()) {
      ByteBuffer payload = results.next().message().payload();
      byte[] bytes = new byte[payload.limit()];
      payload.get(bytes);
      messages.add(bytes);
    }
    return messages;
  } finally {
    consumer.close();
  }
}
 
开发者ID:apache,项目名称:metron,代码行数:19,代码来源:KafkaComponent.java

示例9: consume

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Returns an iterator over the records between {@code nextOffset} and
 * {@code maxOffset} for the given partition, or null once {@code nextOffset}
 * has passed {@code maxOffset}. On fetch failure, logs a warning, refreshes
 * the topic metadata and retries.
 */
@Override
public Iterator<KafkaConsumerRecord> consume(KafkaPartition partition, long nextOffset, long maxOffset) {
  if (nextOffset > maxOffset) {
    return null;
  }

  FetchRequest fetchRequest = createFetchRequest(partition, nextOffset);
  try {
    FetchResponse response = getFetchResponseForFetchRequest(fetchRequest, partition);
    return getIteratorFromFetchResponse(response, partition);
  } catch (Exception e) {
    log.warn(String.format(
        "Fetch message buffer for partition %s has failed: %s. Will refresh topic metadata and retry", partition, e));
    return refreshTopicMetadataAndRetryFetch(partition, fetchRequest);
  }
}
 
开发者ID:apache,项目名称:incubator-gobblin,代码行数:18,代码来源:Kafka08ConsumerClient.java

示例10: getIteratorFromFetchResponse

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Adapts a FetchResponse into an iterator of Kafka08ConsumerRecord for the
 * given partition. On any failure reading the message set, logs a warning and
 * returns null, which causes the remainder of the partition to be skipped.
 */
private Iterator<KafkaConsumerRecord> getIteratorFromFetchResponse(FetchResponse fetchResponse, KafkaPartition partition) {
  try {
    Iterator<kafka.message.MessageAndOffset> messages =
        fetchResponse.messageSet(partition.getTopicName(), partition.getId()).iterator();
    Function<kafka.message.MessageAndOffset, KafkaConsumerRecord> toRecord =
        new Function<kafka.message.MessageAndOffset, KafkaConsumerRecord>() {
          @Override
          public KafkaConsumerRecord apply(kafka.message.MessageAndOffset input) {
            return new Kafka08ConsumerRecord(input);
          }
        };
    return Iterators.transform(messages, toRecord);
  } catch (Exception e) {
    log.warn(String.format("Failed to retrieve next message buffer for partition %s: %s."
        + "The remainder of this partition will be skipped.", partition, e));
    return null;
  }
}
 
开发者ID:apache,项目名称:incubator-gobblin,代码行数:17,代码来源:Kafka08ConsumerClient.java

示例11: nextMessageSet

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Issues a fetch at the current offset for this reader's partition and replaces
 * currentMessageSet / currentMessageSetIterator with the newly fetched batch.
 *
 * @throws Exception if the fetch response reports a broker-side error
 */
void nextMessageSet() throws Exception {
  FetchRequest request = new FetchRequestBuilder()
      .clientId(name)
      .addFetch(topic, partitionMetadata.partitionId(), currentOffset, fetchSize)
      .minBytes(1)
      .maxWait(1000)
      .build();

  FetchResponse response = consumer.fetch(request);
  if (response.hasError()) {
    throw new Exception("TODO: handle the error, reset the consumer....");
  }

  currentMessageSet = response.messageSet(topic, partitionMetadata.partitionId());
  currentMessageSetIterator = currentMessageSet.iterator();
}
 
开发者ID:DemandCube,项目名称:Scribengin,代码行数:18,代码来源:KafkaPartitionReader.java

示例12: receive

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Continuously fetches messages from this consumer's partition and hands them
 * to dealFetchRecord, reconnecting/retrying via dealFetchError on broker
 * errors. Loops until the tracked offset drops to -1.
 * NOTE(review): relies on fields (part, topic, offset, offsetInit, offsetTime,
 * FETCH_SIZE) and helpers defined elsewhere in the enclosing class.
 */
public void receive() {
    SimpleConsumer consumer = createConsumer();
    kafka.api.FetchRequest fetchRequest = null;
    FetchResponse fetchResponse = null;
    int partition = part.partitionId();
    int errorNum = 0;
    // On the first call only, resolve the starting offset from the broker.
    if (offsetInit) {
        offsetInit = false;
        this.offset = getLastOffset(consumer, topic, partition, offsetTime, consumer.clientId());
    }
    while (offset > -1) {
        if (consumer == null) {
            consumer = createConsumer();
        }
        // Build the fetch request: topic, partition, current offset, and the
        // maximum amount of data to pull per fetch.
        fetchRequest = new FetchRequestBuilder().clientId(consumer.clientId()).addFetch(topic, partition, offset,
                FETCH_SIZE).build();
        // Send the request to Kafka and collect the response.
        fetchResponse = consumer.fetch(fetchRequest);
        // If the response reports an error, handle it (which may reconnect the
        // consumer). Per the original author: after more than 5 consecutive
        // errors an exception is thrown -- presumably inside dealFetchError;
        // TODO confirm.
        if (fetchResponse.hasError()) {
            errorNum++;
            short errorCode = fetchResponse.errorCode(topic, partition);
            offset = dealFetchError(consumer, partition, offset, errorCode, errorNum);
            continue;
        }
        errorNum = 0;
        // Fetch succeeded: process the records and advance the offset.
        offset = dealFetchRecord(fetchResponse, partition, offset);
    }
}
 
开发者ID:wngn123,项目名称:wngn-jms-kafka,代码行数:33,代码来源:KafkaSimpleConsumer.java

示例13: main

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Demonstrates the SimpleConsumer API: a single-topic fetch followed by a
 * multi-topic fetch, printing the messages of each response.
 */
public static void main(String[] args) throws Exception {
    generateData();

    SimpleConsumer consumer = new SimpleConsumer(KafkaProperties.KAFKA_SERVER_URL,
        KafkaProperties.KAFKA_SERVER_PORT,
        KafkaProperties.CONNECTION_TIMEOUT,
        KafkaProperties.KAFKA_PRODUCER_BUFFER_SIZE,
        KafkaProperties.CLIENT_ID);

    System.out.println("Testing single fetch");
    FetchRequest singleFetch = new FetchRequestBuilder()
        .clientId(KafkaProperties.CLIENT_ID)
        .addFetch(KafkaProperties.TOPIC2, 0, 0L, 100)
        .build();
    printMessages(consumer.fetch(singleFetch).messageSet(KafkaProperties.TOPIC2, 0));

    System.out.println("Testing single multi-fetch");
    Map<String, List<Integer>> partitionsByTopic = new HashMap<>();
    partitionsByTopic.put(KafkaProperties.TOPIC2, Collections.singletonList(0));
    partitionsByTopic.put(KafkaProperties.TOPIC3, Collections.singletonList(0));
    FetchRequest multiFetch = new FetchRequestBuilder()
        .clientId(KafkaProperties.CLIENT_ID)
        .addFetch(KafkaProperties.TOPIC2, 0, 0L, 100)
        .addFetch(KafkaProperties.TOPIC3, 0, 0L, 100)
        .build();
    FetchResponse multiResponse = consumer.fetch(multiFetch);
    int requestNo = 0;
    for (Map.Entry<String, List<Integer>> entry : partitionsByTopic.entrySet()) {
        for (Integer partitionId : entry.getValue()) {
            requestNo++;
            System.out.println("Response from fetch request no: " + requestNo);
            printMessages(multiResponse.messageSet(entry.getKey(), partitionId));
        }
    }
}
 
开发者ID:YMCoding,项目名称:kafka-0.11.0.0-src-with-comment,代码行数:37,代码来源:SimpleConsumerDemo.java

示例14: fetchMessages

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * Fetches a message set for the given partition starting at {@code offset}.
 *
 * @param config Kafka config supplying topic, clientId, fetchSizeBytes and fetchMaxWait
 * @param consumer consumer used to issue the fetch
 * @param partition partition to read from
 * @param offset starting offset for the fetch
 * @return the fetched message set
 * @throws TopicOffsetOutOfRangeException if the broker reports OFFSET_OUT_OF_RANGE
 *         and config.useStartOffsetTimeIfOffsetOutOfRange is set
 * @throws FailedFetchException on network errors or other broker-side fetch errors
 * @throws RuntimeException for any other fetch failure
 */
public static ByteBufferMessageSet fetchMessages(KafkaConfig config, SimpleConsumer consumer, Partition partition, long offset) throws TopicOffsetOutOfRangeException, RuntimeException {
    ByteBufferMessageSet msgs = null;
    String topic = config.topic;
    int partitionId = partition.partition;
    FetchRequestBuilder builder = new FetchRequestBuilder();
    FetchRequest fetchRequest = builder.addFetch(topic, partitionId, offset, config.fetchSizeBytes).
            clientId(config.clientId).maxWait(config.fetchMaxWait).build();
    FetchResponse fetchResponse;
    try {
        fetchResponse = consumer.fetch(fetchRequest);
    } catch (Exception e) {
        // Treat transient network failures as retryable FailedFetchException;
        // everything else is wrapped as RuntimeException.
        // NOTE(review): ConnectException and SocketTimeoutException are both
        // subtypes of IOException, so those two checks look redundant — confirm
        // before simplifying.
        if (e instanceof ConnectException ||
                e instanceof SocketTimeoutException ||
                e instanceof IOException ||
                e instanceof UnresolvedAddressException
                ) {
            LOG.warn("Network error when fetching messages:", e);
            throw new FailedFetchException(e);
        } else {
            throw new RuntimeException(e);
        }
    }
    if (fetchResponse.hasError()) {
        KafkaError error = KafkaError.getError(fetchResponse.errorCode(topic, partitionId));
        // An out-of-range offset is recoverable when configured to fall back
        // to the start-offset time; signal it with a dedicated exception type.
        if (error.equals(KafkaError.OFFSET_OUT_OF_RANGE) && config.useStartOffsetTimeIfOffsetOutOfRange) {
            String msg = "Got fetch request with offset out of range: [" + offset + "]";
            LOG.warn(msg);
            throw new TopicOffsetOutOfRangeException(msg);
        } else {
            String message = "Error fetching data from [" + partition + "] for topic [" + topic + "]: [" + error + "]";
            LOG.error(message);
            throw new FailedFetchException(message);
        }
    } else {
        msgs = fetchResponse.messageSet(topic, partitionId);
    }
    return msgs;
}
 
开发者ID:redBorder,项目名称:rb-bi,代码行数:39,代码来源:KafkaUtils.java

示例15: main

import kafka.javaapi.FetchResponse; //导入依赖的package包/类
/**
 * SimpleConsumer demo: generates test data, then performs a single-topic fetch
 * and a multi-topic fetch and prints every message that comes back.
 */
public static void main(String[] args) throws Exception {
    generateData();

    SimpleConsumer simpleConsumer = new SimpleConsumer(KafkaProperties.KAFKA_SERVER_URL,
        KafkaProperties.KAFKA_SERVER_PORT,
        KafkaProperties.CONNECTION_TIMEOUT,
        KafkaProperties.KAFKA_PRODUCER_BUFFER_SIZE,
        KafkaProperties.CLIENT_ID);

    // Single-topic fetch: partition 0 of TOPIC2 from the beginning.
    System.out.println("Testing single fetch");
    FetchResponse singleResponse = simpleConsumer.fetch(
        new FetchRequestBuilder()
            .clientId(KafkaProperties.CLIENT_ID)
            .addFetch(KafkaProperties.TOPIC2, 0, 0L, 100)
            .build());
    printMessages(singleResponse.messageSet(KafkaProperties.TOPIC2, 0));

    // Multi-topic fetch: TOPIC2 and TOPIC3 in a single request.
    System.out.println("Testing single multi-fetch");
    Map<String, List<Integer>> topicMap = new HashMap<String, List<Integer>>();
    topicMap.put(KafkaProperties.TOPIC2, Collections.singletonList(0));
    topicMap.put(KafkaProperties.TOPIC3, Collections.singletonList(0));
    FetchResponse multiResponse = simpleConsumer.fetch(
        new FetchRequestBuilder()
            .clientId(KafkaProperties.CLIENT_ID)
            .addFetch(KafkaProperties.TOPIC2, 0, 0L, 100)
            .addFetch(KafkaProperties.TOPIC3, 0, 0L, 100)
            .build());
    int fetchReq = 0;
    for (Map.Entry<String, List<Integer>> entry : topicMap.entrySet()) {
        String topic = entry.getKey();
        for (Integer partitionId : entry.getValue()) {
            System.out.println("Response from fetch request no: " + ++fetchReq);
            printMessages(multiResponse.messageSet(topic, partitionId));
        }
    }
}
 
开发者ID:ggj2010,项目名称:javabase,代码行数:37,代码来源:SimpleConsumerDemo.java


注:本文中的kafka.javaapi.FetchResponse类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。