This article collects typical usage examples of pykafka.KafkaClient in Python. If you have been wondering what pykafka.KafkaClient does, how to use it, or where to find working examples, the curated code samples below should help. You can also explore other usage examples from the pykafka module in which it is defined.
Below are 15 code examples of pykafka.KafkaClient, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
Example 1: setUpClass
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def setUpClass(cls):
    cls.kafka = get_cluster()
    cls.topic_name = b'test-data'
    cls.kafka.create_topic(cls.topic_name, 3, 2)
    cls.client = KafkaClient(cls.kafka.brokers)
    topic = cls.client.topics[cls.topic_name]
    cls.producer = topic.get_producer(min_queued_messages=1)
    cls.total_messages = 99
    for i in range(cls.total_messages):
        cls.producer.produce("message {}".format(i).encode())
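For context, the messages produced by this fixture can be read back with a simple consumer. The following is a minimal sketch, not part of the original test, assuming the same fixture and topic:

# Hypothetical read-back check for the fixture above. Iteration stops once
# no message arrives within consumer_timeout_ms.
consumer = topic.get_simple_consumer(consumer_timeout_ms=1000)
received = [message.value for message in consumer]
assert len(received) == cls.total_messages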
Example 2: setUpClass
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def setUpClass(cls):
    cls.kafka = get_cluster()
    cls.topic_name = uuid4().hex.encode()
    cls.n_partitions = 3
    cls.kafka.create_topic(cls.topic_name, cls.n_partitions, 2)
    cls.client = KafkaClient(cls.kafka.brokers, use_greenlets=cls.USE_GEVENT)
    cls.prod = cls.client.topics[cls.topic_name].get_producer(
        min_queued_messages=1
    )
    for i in range(1000):
        cls.prod.produce('msg {num}'.format(num=i).encode())
Example 3: fetch_offsets
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def fetch_offsets(client, topic, offset):
    """Fetch raw offset data from a topic.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param topic: Topic to fetch offsets for.
    :type topic: :class:`pykafka.topic.Topic`
    :param offset: Offset to reset to. Can be 'earliest', 'latest' or a
        datetime string. Using a datetime will reset the offset to the latest
        message published *before* that datetime.
    :type offset: :class:`pykafka.common.OffsetType` or
        :class:`datetime.datetime`
    :returns: {partition_id: :class:`pykafka.protocol.OffsetPartitionResponse`}
    """
    # Assumes `import calendar` and `import datetime as dt` at module level.
    if offset.lower() == 'earliest':
        return topic.earliest_available_offsets()
    elif offset.lower() == 'latest':
        return topic.latest_available_offsets()
    else:
        # Convert the datetime string to milliseconds since the epoch (UTC).
        offset = dt.datetime.strptime(offset, "%Y-%m-%dT%H:%M:%S")
        offset = int(calendar.timegm(offset.utctimetuple()) * 1000)
        return topic.fetch_offset_limits(offset)
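A hedged usage sketch for fetch_offsets; the broker address and topic name are assumptions, and the function needs the calendar and datetime imports noted above:

from pykafka import KafkaClient

client = KafkaClient(hosts="127.0.0.1:9092")     # assumed broker address
topic = client.topics[b"test-data"]              # assumed topic name
print(fetch_offsets(client, topic, "earliest"))  # per-partition earliest offsets
# Offsets of the latest messages published *before* this time:
print(fetch_offsets(client, topic, "2020-01-01T00:00:00"))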
Example 4: desc_topic
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def desc_topic(client, args):
    """Print detailed information about a topic.

    :param client: KafkaClient connected to the cluster.
    :type client: :class:`pykafka.KafkaClient`
    :param args: Parsed CLI arguments; `args.topic` is the topic name.
    :type args: :class:`argparse.Namespace`
    """
    # Don't auto-create topics.
    if args.topic not in client.topics:
        raise ValueError('Topic {} does not exist.'.format(args.topic))
    topic = client.topics[args.topic]
    print('Topic: {}'.format(topic.name))
    print('Partitions: {}'.format(len(topic.partitions)))
    # dict.values() is not indexable in Python 3, so take the first
    # partition via an iterator.
    first_partition = next(iter(topic.partitions.values()))
    print('Replicas: {}'.format(len(first_partition.replicas)))
    print(tabulate.tabulate(
        [(p.id, p.leader.id, [r.id for r in p.replicas], [r.id for r in p.isr])
         for p in topic.partitions.values()],
        headers=['Partition', 'Leader', 'Replicas', 'ISR'],
        numalign='center',
    ))
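Since desc_topic takes parsed CLI arguments, one way to exercise it outside a CLI is with an argparse.Namespace. A minimal sketch, assuming a local broker and an existing topic:

from argparse import Namespace
from pykafka import KafkaClient

client = KafkaClient(hosts="127.0.0.1:9092")       # assumed broker address
desc_topic(client, Namespace(topic=b"test-data"))  # assumed topic name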
Example 5: send_to_kafka
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def send_to_kafka(topic_name, msg):
    kafka_host = get_config('KAFKA_HOST')
    if not kafka_host:
        raise Exception('Unable to get Kafka host address')
    client = KafkaClient(hosts=kafka_host)
    topic = client.topics[topic_name]
    with topic.get_producer(delivery_reports=True) as producer:
        # pykafka expects bytes, so encode the serialized JSON.
        producer.produce(json.dumps(msg, sort_keys=True).encode('utf-8'))
        msg, exc = producer.get_delivery_report(block=True)
        if exc is not None:
            raise exc
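A hedged usage sketch; the topic name and payload are illustrative, and get_config('KAFKA_HOST') must resolve to a reachable broker:

# Hypothetical call: the delivery report raises if the broker rejected the message.
send_to_kafka(b"events", {"event": "signup", "user_id": 42})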
Example 6: getKafkaTopic
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def getKafkaTopic(kafkaBrokers=["0.0.0.0:9092"], channel=endorser_util.SYS_CHANNEL_ID):
    kafkas = ",".join(kafkaBrokers)
    client = KafkaClient(hosts=kafkas)
    if client.topics == {} and channel is None:
        topic = client.topics[endorser_util.TEST_CHANNEL_ID]
    elif client.topics == {} and channel is not None:
        topic = client.topics[channel]
    elif channel is not None and channel in client.topics:
        topic = client.topics[channel]
    elif channel is None and client.topics != {}:
        # dict.keys() is not indexable in Python 3; materialize it first.
        topic_list = list(client.topics.keys())
        topic = client.topics[topic_list[0]]
    # Print the brokers in the ISR of partition 0.
    isr_set = ["kafka{}".format(broker.id) for broker in topic.partitions[0].isr]
    print("ISR: {}".format(isr_set))
    return topic, isr_set
Example 7: loops
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def loops():
    # Also assumes: from pykafka.exceptions import SocketDisconnectedError,
    # plus module-level sub_addr, topic, group_id, consumer_id, request_addr.
    logger.info(sub_addr)
    logger.info(topic)
    logger.info(request_addr)
    client_msg = KafkaClient(hosts=sub_addr)
    topic_id = client_msg.topics[topic]
    consumer = topic_id.get_simple_consumer(consumer_group=group_id, auto_commit_enable=True,
                                            auto_commit_interval_ms=1, consumer_id=consumer_id)
    logger.info("==========={topic}_consumer_run=================".format(topic=topic))
    while True:
        try:
            for message in consumer:
                if message.value:
                    logger.info("{topic}_consumer--->{message}".format(topic=topic, message=message.value))
                    _send(request_addr, message.value)
        except SocketDisconnectedError:
            # Recreate the consumer and keep looping after a disconnect.
            consumer = topic_id.get_simple_consumer(consumer_group=group_id, auto_commit_enable=True,
                                                    auto_commit_interval_ms=1, consumer_id=consumer_id)
            logger.error("{topic}_connect_again.....".format(topic=topic))
            logger.debug("{topic}_loop_run".format(topic=topic))
Example 8: write_partition_to_kafka
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def write_partition_to_kafka(df, zookeeper_hosts, kafka_topic):
    """Write a partition of a dataframe to Kafka.
    This runs in the worker processes."""
    # We must start our own logging for this worker process.
    # We will also see PyKafka messages in this log.
    logging.basicConfig(level=logging.INFO, format=LOGGING_FORMAT)
    logger = logging.getLogger('write_partition_to_kafka')
    logger.info('BEGIN')
    client = KafkaClient(zookeeper_hosts=zookeeper_hosts)
    topic = client.topics[kafka_topic]
    # Note that if we used the default linger_ms=5000, there would be a minimum
    # delay of 10 seconds to complete this function. With linger_ms=1, the delay
    # is 5-10 milliseconds.
    with topic.get_producer(delivery_reports=False, linger_ms=10.0) as producer:
        logger.info('Kafka client connected')
        for row in df:
            # pykafka expects bytes, so encode the serialized row.
            msg = json.dumps(row.asDict()).encode('utf-8')
            producer.produce(msg)
        logger.info('Produced all messages')
    logger.info('END')
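Because this function targets Spark worker processes, a typical driver-side invocation hands it each partition of a DataFrame. A minimal sketch, assuming an existing DataFrame `df` and an illustrative ZooKeeper connect string:

# Hypothetical driver-side call; each worker runs write_partition_to_kafka
# on its own iterator of rows.
zk_hosts = "zk1:2181,zk2:2181"  # assumed ZooKeeper connect string
df.rdd.foreachPartition(
    lambda rows: write_partition_to_kafka(rows, zk_hosts, b"my-topic"))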
Example 9: pykafka_producer_performance
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def pykafka_producer_performance(use_rdkafka=False, topic=topic):
    # Set up the client
    client = KafkaClient(hosts=bootstrap_servers)
    topic = client.topics[topic.encode('UTF-8')]
    producer = topic.get_producer(use_rdkafka=use_rdkafka)
    print("\n>>> Connecting to Kafka at {} via pykafka as a producer".
          format(bootstrap_servers))
    produce_start = time.time()
    # Start producing
    for i in range(msg_count):
        producer.produce(msg_payload)
    producer.stop()  # Will flush the background queue
    print("Produced {} messages".format(msg_count))
    return time.time() - produce_start
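The returned elapsed time makes throughput easy to derive; a short sketch, assuming the module-level `msg_count` used by the benchmark:

elapsed = pykafka_producer_performance()
print("pykafka producer: {:.0f} msgs/s".format(msg_count / elapsed))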
Example 10: pykafka_consumer_performance
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def pykafka_consumer_performance(use_rdkafka=False, topic=topic):
    # Set up the client
    client = KafkaClient(hosts=bootstrap_servers)
    topic = client.topics[topic.encode('UTF-8')]
    print("\n>>> Connecting to Kafka at {} via pykafka as a consumer".
          format(bootstrap_servers))
    msg_consumed_count = 0
    consumer_start = time.time()
    # The consumer starts polling messages in a background thread, so the
    # timer must start here.
    consumer = topic.get_simple_consumer(use_rdkafka=use_rdkafka)
    while True:
        msg = consumer.consume()
        if msg:
            msg_consumed_count += 1
        if msg_consumed_count >= msg_count:
            break
    consumer_timing = time.time() - consumer_start
    consumer.stop()
    return consumer_timing
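The producer and consumer benchmarks pair naturally. A sketch of comparing the pure-Python path with the librdkafka backend; the `use_rdkafka=True` case assumes pykafka was built with librdkafka support:

for rd in (False, True):
    p = pykafka_producer_performance(use_rdkafka=rd)
    c = pykafka_consumer_performance(use_rdkafka=rd)
    print("rdkafka={}: produce {:.2f}s, consume {:.2f}s".format(rd, p, c))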
Example 11: kafka_info
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def kafka_info(Topic=None):
    try:
        kafka_client = KafkaClient(hosts=KAFKA_HOSTS)
        # Kafka brokers
        BROKERS = [kafka_client.brokers[id].host for id in kafka_client.brokers]
        # Kafka topics
        TOPICS = [t for t in kafka_client.topics]
        if Topic:
            TOPIC = kafka_client.topics[str(Topic)]
            # Kafka ISR
            ISR = {id: [host.host for host in TOPIC.partitions[id].isr] for id in TOPIC.partitions}
            # Kafka replicas
            replicas = {id: [host.host for host in TOPIC.partitions[id].replicas] for id in TOPIC.partitions}
            # Kafka leader
            LEADER = {id: TOPIC.partitions[id].leader.host for id in TOPIC.partitions}
            # Kafka latest_offset
            latest_offset = {id: TOPIC.partitions[id].latest_available_offset() for id in TOPIC.partitions}
            # Kafka earliest_offset
            earliest_offset = {id: TOPIC.partitions[id].earliest_available_offset() for id in TOPIC.partitions}
            return render_template('kafka_partition_show.html', Main_Infos=g.main_infos, Topic=Topic, ISR=ISR,
                                   replicas=replicas, LEADER=LEADER, latest_offset=latest_offset,
                                   earliest_offset=earliest_offset)
        else:
            return render_template('kafka_show.html', Main_Infos=g.main_infos, topics=TOPICS, brokes=BROKERS)
    except Exception as e:
        logging.error(e)
        flash('Failed to fetch Kafka information!')
        return render_template('Message_static.html', Main_Infos=g.main_infos)
Example 12: setUpClass
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def setUpClass(cls):
    cls.kafka = get_cluster()
    cls.client = KafkaClient(cls.kafka.brokers)
Example 13: test_exclude_internal_topics
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def test_exclude_internal_topics(self):
    """Test the exclude_internal_topics setting.

    See also #277 for a related bug.
    """
    topic_name = b"__starts_with_underscores"
    with self.assertRaises(KeyError):
        topic = self.client.topics[topic_name]
    client = KafkaClient(self.kafka.brokers, exclude_internal_topics=False)
    topic = client.topics[topic_name]
    self.assertTrue(isinstance(topic, Topic))
Example 14: test_zk_connect
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def test_zk_connect(self):
    """Clusters started from broker lists and from ZooKeeper connect strings should see the same brokers."""
    zk_client = KafkaClient(zookeeper_hosts=self.kafka.zookeeper)
    kafka_client = KafkaClient(hosts=self.kafka.brokers)
    zk_brokers = ["{}:{}".format(b.host, b.port)
                  for b in itervalues(zk_client.brokers)]
    kafka_brokers = ["{}:{}".format(b.host, b.port)
                     for b in itervalues(kafka_client.brokers)]
    self.assertEqual(zk_brokers, kafka_brokers)
Example 15: test_ca_only
# Required import: import pykafka [as alias]
# Or: from pykafka import KafkaClient [as alias]
def test_ca_only(self):
    """Connect with the CA cert only (i.e. no client cert)."""
    config = SslConfig(cafile=self.kafka.certs.root_cert)
    client = KafkaClient(self.kafka.brokers_ssl, ssl_config=config)
    self.roundtrip_test(client)
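For mutual TLS, SslConfig also accepts a client certificate and key. A minimal sketch with illustrative file paths and broker address:

from pykafka import KafkaClient, SslConfig

config = SslConfig(cafile="/certs/ca.pem",        # CA certificate
                   certfile="/certs/client.pem",  # client certificate
                   keyfile="/certs/client.key")   # client private key
client = KafkaClient(hosts="broker1:9093", ssl_config=config)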