

Python kafka.KafkaConsumer Code Examples

This article collects typical usage examples of the kafka.KafkaConsumer class in Python. If you have been wondering what kafka.KafkaConsumer is for, or how to use it in practice, the curated examples below should help. You can also explore further usage examples from the kafka package.


Below are 15 code examples of kafka.KafkaConsumer, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
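
Before the project excerpts, here is a minimal sketch of the basic consume loop. It assumes kafka-python, a broker at localhost:9092, and a topic named 'my-topic'; all three are placeholders, not values taken from the examples below.

from kafka import KafkaConsumer

# Minimal consume loop; broker address and topic name are placeholders.
consumer = KafkaConsumer(
    'my-topic',
    bootstrap_servers='localhost:9092',
    auto_offset_reset='earliest',   # start from the oldest retained message
    consumer_timeout_ms=10000)      # stop iterating after 10s without new messages
for message in consumer:
    print(message.topic, message.partition, message.offset, message.value)
consumer.close()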

Example 1: _query_backend

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def _query_backend(self):
        consumer = KafkaConsumer(
            bootstrap_servers=KAFKA_HOST, value_deserializer=lambda v: JSONSerializer().loads(v.decode('utf-8'))
        )

        tp = TopicPartition(self.topic, 0)
        consumer.assign([tp])

        # With nothing consumed yet, position() resolves to the log-end offset,
        # i.e. the number of messages in the partition.
        count = consumer.position(tp)

        consumer.seek(tp, 0)

        metrics = []
        for _ in range(count):
            metrics.append(next(consumer))

        return metrics 
Developer: kpn-digital, Project: py-timeexecution, Lines: 19, Source: test_kafka.py
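
The position()/seek() dance in example 1 works, but kafka-python (1.3 and later) also offers end_offsets(), which reports the log-end offset without moving the consumer. A sketch under the same assumptions, with a hypothetical topic name:

from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(bootstrap_servers='localhost:9092')  # placeholder broker
tp = TopicPartition('metrics', 0)  # hypothetical topic
consumer.assign([tp])
# end_offsets() returns {TopicPartition: log-end offset} without seeking
count = consumer.end_offsets([tp])[tp]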

Example 2: _create_consumer

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def _create_consumer(self):
        """Tries to establing the Kafka consumer connection"""
        if not self.closed:
            try:
                self.logger.debug("Creating new kafka consumer using brokers: " +
                                   str(self.settings['KAFKA_HOSTS']) + ' and topic ' +
                                   self.settings['KAFKA_TOPIC_PREFIX'] +
                                   ".outbound_firehose")

                return KafkaConsumer(
                    self.settings['KAFKA_TOPIC_PREFIX'] + ".outbound_firehose",
                    group_id=None,
                    bootstrap_servers=self.settings['KAFKA_HOSTS'],
                    consumer_timeout_ms=self.settings['KAFKA_CONSUMER_TIMEOUT'],
                    auto_offset_reset=self.settings['KAFKA_CONSUMER_AUTO_OFFSET_RESET'],
                    auto_commit_interval_ms=self.settings['KAFKA_CONSUMER_COMMIT_INTERVAL_MS'],
                    enable_auto_commit=self.settings['KAFKA_CONSUMER_AUTO_COMMIT_ENABLE'],
                    max_partition_fetch_bytes=self.settings['KAFKA_CONSUMER_FETCH_MESSAGE_MAX_BYTES'])
            except KeyError as e:
                self.logger.error('Missing setting named ' + str(e),
                                   {'ex': traceback.format_exc()})
            except:
                self.logger.error("Couldn't initialize kafka consumer for topic",
                                   {'ex': traceback.format_exc()})
                raise 
Developer: istresearch, Project: scrapy-cluster, Lines: 27, Source: rest_service.py

Example 3: _create_consumer

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def _create_consumer(self):
        """Tries to establing the Kafka consumer connection"""
        try:
            brokers = self.settings['KAFKA_HOSTS']
            self.logger.debug("Creating new kafka consumer using brokers: " +
                               str(brokers) + ' and topic ' +
                               self.settings['KAFKA_INCOMING_TOPIC'])

            return KafkaConsumer(
                self.settings['KAFKA_INCOMING_TOPIC'],
                group_id=self.settings['KAFKA_GROUP'],
                bootstrap_servers=brokers,
                consumer_timeout_ms=self.settings['KAFKA_CONSUMER_TIMEOUT'],
                auto_offset_reset=self.settings['KAFKA_CONSUMER_AUTO_OFFSET_RESET'],
                auto_commit_interval_ms=self.settings['KAFKA_CONSUMER_COMMIT_INTERVAL_MS'],
                enable_auto_commit=self.settings['KAFKA_CONSUMER_AUTO_COMMIT_ENABLE'],
                max_partition_fetch_bytes=self.settings['KAFKA_CONSUMER_FETCH_MESSAGE_MAX_BYTES'])
        except KeyError as e:
            self.logger.error('Missing setting named ' + str(e),
                               {'ex': traceback.format_exc()})
        except:
            self.logger.error("Couldn't initialize kafka consumer for topic",
                               {'ex': traceback.format_exc(),
                                'topic': self.settings['KAFKA_INCOMING_TOPIC']})
            raise 
Developer: istresearch, Project: scrapy-cluster, Lines: 27, Source: kafka_monitor.py

Example 4: check_kafka_msg

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def check_kafka_msg(topic='events', nbr_msg=100):

    ## Collect Messages from Bus
    consumer = KafkaConsumer(
        bootstrap_servers=get_external_ip()+':'+str(KAFKA_BROKER_PORT),
        auto_offset_reset='earliest')

    consumer.subscribe([topic])

    counter = 0
    for message in consumer:
        counter += 1
        if counter == nbr_msg:
            break

    return counter 
Developer: Juniper, Project: open-nti, Lines: 18, Source: open_nti_input_syslog_lib.py
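
One caveat with example 4: the for-loop blocks indefinitely if fewer than nbr_msg messages ever arrive. A sketch of the same check with consumer_timeout_ms set, reusing get_external_ip() and KAFKA_BROKER_PORT from the excerpt; the 30-second value is an arbitrary assumption:

    consumer = KafkaConsumer(
        bootstrap_servers=get_external_ip() + ':' + str(KAFKA_BROKER_PORT),
        auto_offset_reset='earliest',
        consumer_timeout_ms=30000)  # give up after 30s of silence instead of blocking forever
    consumer.subscribe([topic])

    counter = 0
    for message in consumer:  # iteration ends once the timeout elapses
        counter += 1
        if counter == nbr_msg:
            break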

Example 5: pull_datapoints_from_kafka

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def pull_datapoints_from_kafka(self, kafka_config, stop_threads):
        log.debug('Kafka datapoints puller thread starting..')

        consumer = KafkaConsumer(
            kafka_config['topic'],
            group_id=kafka_config['group_id'],
            bootstrap_servers=kafka_config['bootstrap_servers'])

        while not stop_threads.is_set():
            consumer.poll()
            for message in consumer:
                try:
                    json_message = json.loads(message.value.decode())
                    log.debug('Datapoint from kafka: %s', json_message)
                    if type(json_message) == list:
                        for datapoint in json_message:
                            self.register_datapoint(datapoint)
                    else:
                        self.register_datapoint(json_message)
                except json.JSONDecodeError:
                    log.exception("Failed to decode message from Kafka, skipping..")
                except Exception:
                    log.exception("Generic exception while pulling datapoints from Kafka")

        log.debug('Kafka datapoints puller thread shutting down..') 
Developer: wikimedia, Project: operations-software-druid_exporter, Lines: 27, Source: collector.py
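
A side note on example 5: it calls consumer.poll() but discards the returned records and relies on the iterator instead. A condensed sketch of the same shutdown-aware loop driven purely by poll(), which returns a dict mapping each TopicPartition to a batch of records; the 1-second timeout is an assumption and the original error handling is omitted:

        while not stop_threads.is_set():
            records = consumer.poll(timeout_ms=1000)  # {TopicPartition: [ConsumerRecord, ...]}
            for batch in records.values():
                for message in batch:
                    json_message = json.loads(message.value.decode())
                    self.register_datapoint(json_message)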

Example 6: run

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def run(self):
        self.logger.info("start bot:{}".format(self))

        funcs = set(dir(self)) & self.func_map_topic.keys()

        consumer = KafkaConsumer(bootstrap_servers=[KAFKA_HOST])
        current_topics = consumer.topics()

        for func in funcs:
            topic = self.func_map_topic.get(func)
            if topic not in current_topics:
                self.logger.error("func {} is implemented, but its topic {} does not exist".format(func, topic))
                continue

            self.threads.append(
                threading.Thread(target=self.consume_topic_with_func, args=(self.func_map_topic.get(func), func)))

        for the_thread in self.threads:
            the_thread.start()

        self.consume_topic_with_func(self.quote_topic, 'on_event')

        self.logger.info("finish bot:{}".format(self)) 
Developer: foolcage, Project: fooltrader, Lines: 25, Source: base_bot.py

Example 7: run

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def run(self):
        self.logger.info("start bot:{}".format(self))

        funcs = set(dir(self)) & self.func_map_topic.keys()

        consumer = KafkaConsumer(bootstrap_servers=[KAFKA_HOST])
        current_topics = consumer.topics()

        for func in funcs:
            topic = self.func_map_topic.get(func)
            if topic not in current_topics:
                self.logger.error("func {} is implemented, but its topic {} does not exist".format(func, topic))
                continue

            self._threads.append(
                threading.Thread(target=self.consume_topic_with_func, args=(self.func_map_topic.get(func), func)))

        for the_thread in self._threads:
            the_thread.start()

        self.consume_topic_with_func(self.quote_topic, 'on_event')

        self.logger.info("finish bot:{}".format(self)) 
Developer: foolcage, Project: fooltrader, Lines: 25, Source: bot.py

Example 8: run

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def run(self):
        from kafka import KafkaConsumer
        super().run()

        self.consumer = KafkaConsumer(self.topic, bootstrap_servers=self.server)
        self.logger.info('Initialized kafka backend - server: {}, topic: {}'
                     .format(self.server, self.topic))

        try:
            for msg in self.consumer:
                self._on_record(msg)
                if self.should_stop(): break
        except Exception as e:
            self.logger.warning('Kafka connection error, reconnecting in {} seconds'.
                            format(self._conn_retry_secs))
            self.logger.exception(e)
            time.sleep(self._conn_retry_secs)

# vim:sw=4:ts=4:et: 
Developer: BlackLight, Project: platypush, Lines: 21, Source: __init__.py

Example 9: setup_class

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def setup_class(cls):
        cls.broker = os.getenv('KAFKA_BROKER')
        if not cls.topic:
            topic = "%s-%s" % ('topic_test_', random_string(10))
            cls.topic = topic

        create_topic(cls.topic)
        cls._deserializer = ScrapyJSONDecoder()
        cls.consumer = KafkaConsumer(
            bootstrap_servers=[cls.broker],
            auto_offset_reset='earliest',
            group_id=None,
            value_deserializer=lambda x:
            cls._deserializer.decode(x.decode('utf8'))
        )
        cls.consumer.subscribe([cls.topic]) 
Developer: TeamHG-Memex, Project: scrapy-kafka-export, Lines: 18, Source: test_extension.py

Example 10: getOffsets

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def getOffsets(self, topic, partitions, group):
        """ 指定topic、partition和group, 返回offsets数据 """

        try:
            # First try the zookeeper-storage API to fetch the offsets;
            # if the group has no offsets stored there, UnknownTopicOrPartitionError is raised
            tp = self.client.send_offset_fetch_request(group, [OffsetRequestPayload(topic, p, -1, 1) for p in partitions])
            offsets = {p.partition: p.offset for p in tp}

        except UnknownTopicOrPartitionError:
            # On that exception, fall back to the kafka-storage API
            consumer = KafkaConsumer(group_id=group, bootstrap_servers=self.broker, enable_auto_commit=False)
            tp = [TopicPartition(topic, p) for p in partitions]
            consumer.assign(tp)
            offsets = {p.partition: consumer.position(p) for p in tp}

        return offsets 
Developer: tqlihuiqi, Project: kzmonitor, Lines: 19, Source: client.py
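
A closely related monitoring quantity is consumer-group lag: the log-end offset minus the committed offset. A sketch built on the same kafka-storage calls, assuming kafka-python 1.3+ for end_offsets(); committed() returns None when the group has never committed, hence the fallback to 0:

        consumer = KafkaConsumer(group_id=group, bootstrap_servers=self.broker, enable_auto_commit=False)
        tps = [TopicPartition(topic, p) for p in partitions]
        consumer.assign(tps)
        end = consumer.end_offsets(tps)
        lag = {tp.partition: end[tp] - (consumer.committed(tp) or 0) for tp in tps}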

Example 11: test_worker_properties

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def test_worker_properties(worker, hosts, topic, group):
    assert hosts in repr(worker)
    assert topic in repr(worker)
    assert group in repr(worker)

    assert worker.consumer.config['bootstrap_servers'] == hosts
    assert worker.consumer.config['group_id'] == group

    assert isinstance(worker.hosts, str) and worker.hosts == hosts
    assert isinstance(worker.topic, str) and worker.topic == topic
    assert isinstance(worker.group, str) and worker.group == group
    assert isinstance(worker.consumer, KafkaConsumer)
    assert callable(worker.deserializer)
    assert callable(worker.callback) or worker.callback is None


# noinspection PyTypeChecker 
Developer: joowani, Project: kq, Lines: 19, Source: test_worker.py

Example 12: test_worker_initialization_with_bad_args

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def test_worker_initialization_with_bad_args(hosts, consumer):
    with pytest.raises(AssertionError) as e:
        Worker(topic=True, consumer=consumer)
    assert str(e.value) == 'topic must be a str'

    with pytest.raises(AssertionError) as e:
        Worker(topic='topic', consumer='bar')
    assert str(e.value) == 'bad consumer instance'

    with pytest.raises(AssertionError) as e:
        bad_consumer = KafkaConsumer(bootstrap_servers=hosts)
        Worker(topic='topic', consumer=bad_consumer)
    assert str(e.value) == 'consumer must have group_id'

    with pytest.raises(AssertionError) as e:
        Worker(topic='topic', consumer=consumer, callback=1)
    assert str(e.value) == 'callback must be a callable'

    with pytest.raises(AssertionError) as e:
        Worker(topic='topic', consumer=consumer, deserializer=1)
    assert str(e.value) == 'deserializer must be a callable'

    with pytest.raises(AssertionError) as e:
        Worker(topic='topic', consumer=consumer, logger=1)
    assert str(e.value) == 'bad logger instance' 
Developer: joowani, Project: kq, Lines: 27, Source: test_worker.py

Example 13: debug

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def debug(self, topic):
        c=KafkaConsumer(bootstrap_servers=kafka_hosts, client_id=self._client_id , group_id=None, api_version=(0,10))

        # assign/subscribe topic
        partitions=c.partitions_for_topic(topic)
        if not partitions: raise Exception("Topic " + topic + " does not exist")
        c.assign([TopicPartition(topic,p) for p in partitions])

        # seek to beginning if needed
        c.seek_to_beginning()

        # fetch messages
        while True:
            partitions=c.poll(100)
            if partitions:
                for p in partitions:
                    for msg in partitions[p]:
                        yield msg.value.decode('utf-8')
            yield ""

        c.close() 
Developer: OpenVisualCloud, Project: Ad-Insertion-Sample, Lines: 23, Source: messaging.py

Example 14: create_kafka_consumer

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
async def create_kafka_consumer(self, fetch_min_bytes, group_name, internal_name, request_data):
        while True:
            try:
                c = KafkaConsumer(
                    bootstrap_servers=self.config["bootstrap_uri"],
                    client_id=internal_name,
                    security_protocol=self.config["security_protocol"],
                    ssl_cafile=self.config["ssl_cafile"],
                    ssl_certfile=self.config["ssl_certfile"],
                    ssl_keyfile=self.config["ssl_keyfile"],
                    group_id=group_name,
                    fetch_min_bytes=fetch_min_bytes,
                    fetch_max_bytes=self.config["consumer_request_max_bytes"],
                    request_timeout_ms=request_data["consumer.request.timeout.ms"],
                    enable_auto_commit=request_data["auto.commit.enable"],
                    auto_offset_reset=request_data["auto.offset.reset"]
                )
                return c
            except:  # pylint: disable=bare-except
                self.log.exception("Unable to create consumer, retrying")
                await asyncio.sleep(1) 
Developer: aiven, Project: karapace, Lines: 23, Source: consumer_manager.py

Example 15: start_data_processing

# Required module: import kafka [as alias]
# Or: from kafka import KafkaConsumer [as alias]
def start_data_processing(thread_number):
    # open consumer
    consumer = KafkaConsumer(**agent_config_vars['kafka_kwargs'])
    logger.info('Started consumer number ' + str(thread_number))
    # subscribe to given topics
    consumer.subscribe(agent_config_vars['topics'])
    logger.info('Successfully subscribed to topics: ' + str(agent_config_vars['topics']))
    # start consuming messages
    parse_messages_kafka(consumer)
    consumer.close()
    logger.info('Closed consumer number ' + str(thread_number)) 
Developer: insightfinder, Project: InsightAgent, Lines: 13, Source: getmessages_kafka2.py
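
Example 15 builds its consumer entirely from a configuration dict via keyword unpacking. A sketch of what such a kafka_kwargs dict could look like; the keys map directly to KafkaConsumer arguments, and every value here is a placeholder rather than the agent's actual default:

agent_config_vars = {
    'kafka_kwargs': {
        'bootstrap_servers': 'localhost:9092',
        'group_id': 'insight-agent',
        'auto_offset_reset': 'latest',
    },
    'topics': ['metrics'],
}
consumer = KafkaConsumer(**agent_config_vars['kafka_kwargs'])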


Note: the kafka.KafkaConsumer examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors; copyright in the source code remains with the original authors, and distribution or use should follow each project's license. Please do not reproduce without permission.