当前位置: 首页>>代码示例>>Java>>正文


Java KafkaConsumer.seek方法代码示例

本文整理汇总了Java中org.apache.kafka.clients.consumer.KafkaConsumer.seek方法的典型用法代码示例。如果您正苦于以下问题:Java KafkaConsumer.seek方法的具体用法?Java KafkaConsumer.seek怎么用?Java KafkaConsumer.seek使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.kafka.clients.consumer.KafkaConsumer的用法示例。


在下文中一共展示了KafkaConsumer.seek方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: main

import org.apache.kafka.clients.consumer.KafkaConsumer; //导入方法依赖的package包/类
/**
 * Demo consumer: assigns itself to partition 0 of a fixed topic, rewinds up to
 * 1000 records from the end, then polls forever, printing each record.
 *
 * @throws IOException          if the properties file cannot be loaded
 * @throws InterruptedException if the sleep between polls is interrupted
 */
public static void main(String[] args) throws IOException, InterruptedException{
    Properties properties = PropertiesUtils.getProps("consumer.properties");
    properties.setProperty("client.id","whtestconsumer");
    properties.setProperty("group.id","whtestconsumer");
    properties.setProperty("bootstrap.servers", "localhost:9092");
    //properties.setProperty("auto.offset.reset", "earliest");


    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
    String topic = "uav-test.monitor.result";
    TopicPartition topicPartition = new TopicPartition(topic, 0);
    List<TopicPartition> topics = Arrays.asList(topicPartition);
    consumer.assign(topics);
    consumer.seekToEnd(topics);
    long current = consumer.position(topicPartition);
    // Rewind at most 1000 records, clamped at 0: seek() throws
    // IllegalArgumentException for a negative offset, which the original
    // (current - 1000) hit whenever the partition held fewer than 1000 records.
    consumer.seek(topicPartition, Math.max(0L, current - 1000));

    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
        }
        Thread.sleep(1);
    }
}
 
开发者ID:BriData,项目名称:DBus,代码行数:26,代码来源:Kafka.java

示例2: resetToLastCommittedPositions

import org.apache.kafka.clients.consumer.KafkaConsumer; //导入方法依赖的package包/类
/**
 * Rewinds the consumer's position on every currently assigned partition to the
 * last committed offset, falling back to the beginning of the partition when
 * no commit exists for it yet.
 */
private static void resetToLastCommittedPositions(KafkaConsumer<String, String> consumer) {
    for (TopicPartition partition : consumer.assignment()) {
        OffsetAndMetadata committed = consumer.committed(partition);
        if (committed == null) {
            consumer.seekToBeginning(singleton(partition));
        } else {
            consumer.seek(partition, committed.offset());
        }
    }
}
 
开发者ID:YMCoding,项目名称:kafka-0.11.0.0-src-with-comment,代码行数:10,代码来源:TransactionalMessageCopier.java

示例3: readKafkaTopic

import org.apache.kafka.clients.consumer.KafkaConsumer; //导入方法依赖的package包/类
/**
 * REST endpoint that returns (up to) the last 1000 record values from
 * partition 0 of the topic named in {@code map.get("topic")}.
 *
 * @param map request payload; must contain a "topic" entry
 * @return 200 with the list of record values, or 204 with an error Result
 *         when anything goes wrong
 */
@GET
@Path("/readKafkaTopic")
public Response readKafkaTopic(Map<String, Object > map) {
    try {
        Properties properties = PropertiesUtils.getProps("consumer.properties");
        properties.setProperty("client.id","readKafkaTopic");
        properties.setProperty("group.id","readKafkaTopic");
        //properties.setProperty("bootstrap.servers", "localhost:9092");
        String topic = map.get("topic").toString();
        TopicPartition topicPartition = new TopicPartition(topic, 0);
        List<TopicPartition> topics = Arrays.asList(topicPartition);
        // try-with-resources: the original leaked the consumer whenever any
        // call between construction and consumer.close() threw.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            consumer.assign(topics);
            consumer.seekToEnd(topics);
            long end = consumer.position(topicPartition);
            // Read back at most 1000 records, clamped so we never seek < 0.
            long current = Math.max(0L, end - 1000);
            consumer.seek(topicPartition, current);
            List<String> result = new ArrayList<>();
            while (current < end) {
                ConsumerRecords<String, String> records = consumer.poll(1000);
                for (ConsumerRecord<String, String> record : records) {
                    result.add(record.value());
                }
                current = consumer.position(topicPartition);
            }
            return Response.ok().entity(result).build();
        }
    } catch (Exception e) {
        logger.error("Error encountered while readKafkaTopic with parameter:{}", JSON.toJSONString(map), e);
        return Response.status(204).entity(new Result(-1, e.getMessage())).build();
    }
}
 
开发者ID:BriData,项目名称:DBus,代码行数:38,代码来源:DataTableResource.java

示例4: retrieveOneMessage

import org.apache.kafka.clients.consumer.KafkaConsumer; //导入方法依赖的package包/类
/**
 * Seeks to {@code offset} on the given partition and polls until at least one
 * record for that partition arrives, returning the first one.
 *
 * <p>NOTE(review): this spins forever if the offset is past the end of the
 * partition and nothing new is produced — callers presumably pass a valid
 * existing offset; confirm.
 *
 * @param kafkaConsumer consumer already assigned to {@code topicPartition};
 *                      parameter generified from the raw {@code KafkaConsumer}
 *                      to match the return type and remove unchecked warnings
 * @param topicPartition partition to read from
 * @param offset         offset to seek to before polling
 * @return the first record returned for the partition
 */
private static ConsumerRecord<byte[], byte[]> retrieveOneMessage(KafkaConsumer<byte[], byte[]> kafkaConsumer,
                                                                 TopicPartition topicPartition,
                                                                 long offset) {
  kafkaConsumer.seek(topicPartition, offset);
  ConsumerRecord<byte[], byte[]> record = null;
  while (record == null) {
    ConsumerRecords<byte[], byte[]> records = kafkaConsumer.poll(100);
    if (!records.isEmpty()) {
      LOG.debug("records.count() = {}", records.count());
      List<ConsumerRecord<byte[], byte[]>> reclist = records.records(topicPartition);
      if (reclist != null && !reclist.isEmpty()) {
        record = reclist.get(0);
      } else {
        LOG.info("recList is null or empty");
      }
    }
  }
  return record;
}
 
开发者ID:pinterest,项目名称:doctorkafka,代码行数:22,代码来源:ReplicaStatsManager.java

示例5: KafkaSource

import org.apache.kafka.clients.consumer.KafkaConsumer; //导入方法依赖的package包/类
/**
 * Builds a manually-assigned consumer on partition 0 of the statistics topic
 * and positions it according to the "kafka.offset" config value:
 * "none" (keep current position), "begin", "end", or a numeric offset.
 *
 * @throws IOException       if a properties file cannot be loaded
 * @throws PropertyException if the statistics topic name is missing
 */
public KafkaSource () throws IOException, PropertyException  {
    Properties configs = ConfUtils.getProps(CONFIG_PROPERTIES);
    statTopic = configs.getProperty(Constants.STATISTIC_TOPIC);
    if (statTopic == null) {
        throw new PropertyException("配置参数文件内容不能为空! " + Constants.STATISTIC_TOPIC);
    }

    statTopicPartition = new TopicPartition(statTopic, 0);

    Properties statProps = ConfUtils.getProps(CONSUMER_PROPERTIES);
    statProps.setProperty("enable.auto.commit", "false");
    List<TopicPartition> topics = Arrays.asList(statTopicPartition);
    consumer = new KafkaConsumer(statProps);
    consumer.assign(topics);

    long beforeOffset = consumer.position(statTopicPartition);
    // Default to "none": the original dereferenced the property directly and
    // threw an uninformative NPE when "kafka.offset" was absent.
    String offset = configs.getProperty("kafka.offset", "none");
    if (offset.equalsIgnoreCase("none")) {
        ; // keep whatever position the consumer group already has
    } else if  (offset.equalsIgnoreCase("begin")) {
        consumer.seekToBeginning(Lists.newArrayList(statTopicPartition));
    } else if (offset.equalsIgnoreCase("end")) {
        consumer.seekToEnd(Lists.newArrayList(statTopicPartition));
    } else {
        long nOffset = Long.parseLong(offset);
        consumer.seek(statTopicPartition, nOffset);
    }
    long afterOffset = consumer.position(statTopicPartition);
    LOG.info(String.format("init kafkaSoure OK. beforeOffset %d, afterOffset=%d", beforeOffset, afterOffset));
}
 
开发者ID:BriData,项目名称:DBus,代码行数:31,代码来源:KafkaSource.java

示例6: resetTopicOffsets

import org.apache.kafka.clients.consumer.KafkaConsumer; //导入方法依赖的package包/类
/**
 * Moves the given consumer group's position on one topic partition to
 * {@code newOffsets}.
 *
 * @param groupId    consumer group whose cached consumer is used
 * @param topic      topic name
 * @param partition  partition number within the topic
 * @param newOffsets absolute offset to seek to
 */
public void resetTopicOffsets(String groupId, String topic, int partition, long newOffsets) {
    TopicPartition target = new TopicPartition(topic, partition);
    getConsumer(groupId).seek(target, newOffsets);
}
 
开发者ID:warlock-china,项目名称:azeroth,代码行数:5,代码来源:KafkaConsumerCommand.java


注:本文中的org.apache.kafka.clients.consumer.KafkaConsumer.seek方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。