本文整理汇总了Python中kafka.SimpleConsumer方法的典型用法代码示例。如果您正苦于以下问题:Python kafka.SimpleConsumer方法的具体用法?Python kafka.SimpleConsumer怎么用?Python kafka.SimpleConsumer使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类kafka
的用法示例。
在下文中一共展示了kafka.SimpleConsumer方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: connect
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import SimpleConsumer [as 别名]
def connect(self):
    """Establish the kafka connection and build the consumer.

    Configuration parameters drive both the underlying kafka-python
    KafkaClient and the SimpleConsumer created here.
    """
    consumer_args = self.config.get_simple_consumer_args()
    # Kafka client connected to the configured broker list.
    self.client = KafkaClient(
        self.config.broker_list,
        client_id=self.config.client_id,
    )
    # Consumer bound to the requested topic and partitions.
    self.kafka_consumer = SimpleConsumer(
        client=self.client,
        topic=self.topic,
        partitions=self.partitions,
        **consumer_args
    )
    args_summary = ','.join(
        '{0} {1}'.format(key, value)
        for key, value in six.iteritems(consumer_args)
    )
    self.log.debug(
        "Connected to kafka. Topic %s, partitions %s, %s",
        self.topic,
        self.partitions,
        args_summary,
    )
    # Have kafka-python include the partition number with fetched messages.
    self.kafka_consumer.provide_partition_info()
示例2: __init__
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import SimpleConsumer [as 别名]
def __init__(self, *args, **kwargs):
    """Create the reader, connect to kafka and pre-fill the reservoir.

    Reads the ``brokers``, ``group`` and ``topic`` options, builds a
    SimpleConsumer with auto-commit disabled, then runs the random
    sampling pass up front so batches can be served afterwards.
    """
    import kafka
    super(KafkaRandomReader, self).__init__(*args, **kwargs)
    broker_hosts = self.read_option('brokers')
    consumer_group = self.read_option('group')
    topic_name = self.read_option('topic')
    kafka_client = kafka.KafkaClient(map(bytes, broker_hosts))
    # TODO: a MultiProcessConsumer variant (honouring a RESUME setting via
    # child_loads_initial_offsets) was sketched here; revisit once the
    # resume strategy is decided.
    self.consumer = kafka.SimpleConsumer(
        kafka_client, consumer_group, topic_name, auto_commit=False)
    # Payloads are decompressed with zlib before processing.
    self.decompress_fun = zlib.decompress
    self.processor = self.create_processor()
    self.partitions = kafka_client.get_partition_ids_for_topic(topic_name)
    self.logger.info(
        'KafkaRandomReader has been initiated. '
        'Topic: {}. Group: {}'.format(self.read_option('topic'), self.read_option('group')))
    self.logger.info('Running random sampling')
    self._reservoir = self.fill_reservoir()
    self.logger.info('Random sampling completed, ready to process batches')
示例3: get_kafka_consumer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import SimpleConsumer [as 别名]
def get_kafka_consumer(group, topic):
    """Return a SimpleConsumer for *topic* under consumer group *group*.

    The consumer uses the shared kafka client and stops iterating after
    the application's configured CONSUMER_TIMEOUT.
    """
    iteration_timeout = app.config["CONSUMER_TIMEOUT"]
    shared_client = get_kafka_client()
    return kafka.SimpleConsumer(
        shared_client,
        group,
        topic,
        iter_timeout=iteration_timeout,
    )
示例4: get_message
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import SimpleConsumer [as 别名]
def get_message(self, block=True, timeout=0.1):
    """Fetch a single message from kafka.

    Accepts the same arguments as get_message in kafka-python's
    SimpleConsumer.

    :param block: If True, the API will block till at least a message is fetched.
    :type block: boolean
    :param timeout: If block is True, the function will block for the specified
        time (in seconds). If None, it will block forever.
    :returns: a Kafka message
    :rtype: Message namedtuple, which consists of: partition number,
        offset, key, and message value
    """
    fetched = self.kafka_consumer.get_message(block, timeout)
    if fetched is None:
        # The underlying get_message call timed out without a message.
        return None
    partition, raw_message = fetched
    # raw_message is an (offset, message) pair from kafka-python.
    offset = raw_message[0]
    kafka_msg = raw_message[1]
    return Message(
        partition=partition,
        offset=offset,
        key=kafka_msg.key,
        value=kafka_msg.value,
    )
示例5: get_consumer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import SimpleConsumer [as 别名]
def get_consumer(containers, topic):
    """Build a SimpleConsumer for *topic* using the itest consumer group."""
    consumer_group = str('replication_handler_itest')
    kafka_connection = containers.get_kafka_connection()
    return SimpleConsumer(kafka_connection, consumer_group, topic)
示例6: setup_capture_new_messages_consumer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import SimpleConsumer [as 别名]
def setup_capture_new_messages_consumer(topic):
    """Yield a SimpleConsumer positioned at the tail of *topic*.

    Only messages published after this point are visible to the yielded
    consumer; the kafka client is closed when the generator resumes.
    """
    client = KafkaClient(get_config().cluster_config.broker_list)
    consumer_group = str('data_pipeline_clientlib_test')
    tail_consumer = SimpleConsumer(
        client, consumer_group, topic, max_buffer_size=_ONE_MEGABYTE)
    # seek(offset=0, whence=2): offset 0 relative to the partition tail.
    tail_consumer.seek(0, 2)
    yield tail_consumer
    client.close()