本文整理汇总了Python中kafka.KafkaConsumer.subscribe方法的典型用法代码示例。如果您正苦于以下问题:Python KafkaConsumer.subscribe方法的具体用法?Python KafkaConsumer.subscribe怎么用?Python KafkaConsumer.subscribe使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类kafka.KafkaConsumer的用法示例。
在下文中一共展示了KafkaConsumer.subscribe方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: KafkaReader
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
class KafkaReader():
    """Wrap a KafkaConsumer subscribed to a single topic.

    Messages are pulled one at a time via read_message(), or drained in
    bulk into an internal list via get_batch().
    """

    def __init__(self, topic):
        # Consume latest messages and auto-commit offsets.
        # NOTE(review): broker address is hard-coded to localhost:9092.
        self.consumer = KafkaConsumer(group_id='my-group',
                                      bootstrap_servers='localhost:9092',
                                      enable_auto_commit=True)
        self.consumer.subscribe(topics=[topic])
        self.buffer = []

    def read_message(self):
        """Block until the next message arrives and return it.

        Fix: the original called the Python-2-only ``.next()`` method;
        the ``next()`` builtin works on both Python 2 and 3.
        """
        return next(self.consumer)

    def get_batch(self):
        """Drain messages from the consumer into self.buffer (blocks)."""
        for message in self.consumer:
            # message value and key are raw bytes -- decode if necessary!
            # e.g., for unicode: `message.value.decode('utf-8')`
            self.buffer.append(message)
        print(self.buffer)
示例2: consume_messages
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def consume_messages(topics):
    """Consume JSON taxi-trip messages from *topics* and index each one
    into Elasticsearch under its originating topic.

    Relies on module-level globals: ``port`` (bootstrap server), ``es``
    (Elasticsearch client), ``signal_handler`` and ``_tostring``.
    """
    consumer = KafkaConsumer(bootstrap_servers=[port])
    consumer.subscribe(topics)
    # Register the Ctrl-C handler ONCE, up front; the original
    # re-registered it on every message, which is redundant work.
    signal.signal(signal.SIGINT, signal_handler)
    count = 0
    print(port)
    for message in consumer:
        incoming_message = json.loads(message.value, object_hook=_tostring)
        incoming_topic = message.topic
        count += 1
        print("--------------")
        print(incoming_message)
        print("--------------")
        new_entry = {
            "pick_location": {
                "lat": float(incoming_message["pickup_latitude"]),
                "lon": float(incoming_message["pickup_longitude"]),
            },
            "drop_location": {
                "lat": float(incoming_message["dropoff_latitude"]),
                "lon": float(incoming_message["dropoff_longitude"]),
            },
            "predicted_timedelta": float(incoming_message["prediction_mins"]),
            "@hour": incoming_message["pick_up_hour"],
        }
        print(new_entry)
        # doc_type is the topic minus its last character -- presumably
        # stripping a plural 's'; TODO(review) confirm index naming.
        es.index(index=incoming_topic, doc_type=incoming_topic[:-1],
                 id=str(count), body=new_entry)
示例3: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Generator that subscribes to 'tfidf' and yields each record."""
    tfidf_consumer = KafkaConsumer(bootstrap_servers='localhost:9092')
    tfidf_consumer.subscribe(['tfidf'])
    for record in tfidf_consumer:
        yield record
示例4: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Print every message arriving on the 'test' topic.

    Fix: the original body also called ``next(consumer)`` inside the
    for-loop, overwriting the record the loop had just delivered -- so
    every other message was silently dropped.
    """
    consumer = KafkaConsumer(bootstrap_servers='172.16.218.128:10021')
    consumer.subscribe(['test'])
    for msg in consumer:
        print(msg)
示例5: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Consume 'my-topic' from the earliest offset and print each record."""
    topic_consumer = KafkaConsumer(
        bootstrap_servers='localhost:9092',
        auto_offset_reset='earliest',
    )
    topic_consumer.subscribe(['my-topic'])
    for record in topic_consumer:
        print(record)
示例6: KafkaConsumerSpout
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
class KafkaConsumerSpout(Spout):
    """Storm spout that reads tweets from the 'badtopic' Kafka topic,
    stores each one in MongoDB, then emits it downstream."""

    def __init__(self):
        super(KafkaConsumerSpout, self).__init__(script=__file__)

    # Consumer for the 'badtopic' kafka topic.
    # Server localhost port 9092 -- can have multiple clusters for the
    # same topic on different ports.
    def initialize(self, conf, context):
        # earliest: re-process the topic from the beginning on restart.
        self.consumer = KafkaConsumer(bootstrap_servers='localhost:9092',auto_offset_reset='earliest')
        self.db = pymongo.MongoClient()
        self.topic='badtopic'
        self.consumer.subscribe([self.topic])

    # The emitted tuple fields: tuple['sentence'] = tweet text, tuple['user'].
    @classmethod
    def declareOutputFields(cls):
        return ['sentence','user']

    # Each tweet added to 'badtopic' becomes one Storm tuple.
    # For each tuple, data is saved to MongoDB: db = BOARD<digit>, collection = bad.
    def nextTuple(self):
        for message in self.consumer:
            algo = message.value
            # Payload appears to be "<digit><sep><tweet text>": first char
            # is a board id, the text starts at offset 2 (plus an optional
            # extra space). TODO(review): confirm the producer-side format.
            if(len(algo) >4):
                user = algo[:1]
                if user.isdigit():
                    aux = 'BOARD'+user
                    algo = algo[2:len(algo)]
                    # Trim a single leading space left by the separator.
                    if(algo[0] == ' '):
                        algo=algo[1:len(algo)]
                    self.db[aux].bad.insert_one({'tweet':algo})
                    # NOTE(review): encode assumes `algo` is text here.
                    algo=algo.encode('utf-8','ignore')
                    storm.emit([algo,user])
示例7: test_end_to_end
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def test_end_to_end(kafka_broker):
    """Round-trip 1000 messages through a live broker fixture and check
    that every payload comes back."""
    connect_str = 'localhost:' + str(kafka_broker.port)
    producer = KafkaProducer(
        bootstrap_servers=connect_str,
        max_block_ms=10000,
        value_serializer=str.encode,
    )
    consumer = KafkaConsumer(
        bootstrap_servers=connect_str,
        group_id=None,
        consumer_timeout_ms=10000,
        auto_offset_reset='earliest',
        value_deserializer=bytes.decode,
    )
    # Produce to a fresh random topic so runs don't interfere.
    topic = random_string(5)
    for seq in range(1000):
        producer.send(topic, 'msg %d' % seq)
    producer.flush()
    producer.close()

    consumer.subscribe([topic])
    received = set()
    for _ in range(1000):
        try:
            received.add(next(consumer).value)
        except StopIteration:
            # consumer_timeout_ms elapsed with no more messages.
            break

    assert received == set('msg %d' % seq for seq in range(1000))
示例8: main
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def main():
    """Demonstrate several KafkaConsumer configurations.

    The first consumer drains REPLICA['TOPIC'] and prints each record;
    the later constructor calls only illustrate configuration options
    (earliest offsets, JSON payloads, timeouts, regex subscription).
    """
    # Consume latest messages and auto-commit offsets.
    consumer = KafkaConsumer(REPLICA['TOPIC'],
                             group_id=REPLICA['GROUP_ID'],
                             metadata_broker_list=REPLICA['BROKER'])
    for record in consumer:
        # message value and key are raw bytes -- decode if necessary!
        # e.g., for unicode: `message.value.decode('utf-8')`
        print("%s:%d:%d: key=%s value=%s" % (record.topic,
                                             record.partition,
                                             record.offset,
                                             record.key,
                                             record.value))

    # Consume earliest available messages, don't commit offsets.
    KafkaConsumer(auto_offset_reset='earliest', enable_auto_commit=False)
    # Consume JSON messages.
    KafkaConsumer(value_deserializer=lambda m: json.loads(m.decode('ascii')))
    # StopIteration if no message arrives within 1 second.
    KafkaConsumer(consumer_timeout_ms=1000)
    # Subscribe to a regex topic pattern.
    consumer = KafkaConsumer()
    consumer.subscribe(pattern='^awesome.*')
示例9: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Print every value on 'archive_test', starting at the earliest offset."""
    archive_consumer = KafkaConsumer(bootstrap_servers=IP,
                                     auto_offset_reset='earliest')
    archive_consumer.subscribe(['archive_test'])
    for record in archive_consumer:
        print(record.value)
示例10: KafkaPythonClient
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
class KafkaPythonClient(PythonClient):
    """Benchmark client built on kafka-python.

    Produces and consumes on self.config['topic'], reporting progress
    through the ``prtProgress``/timer helpers inherited from PythonClient.
    """

    def __init__(self, topic=topic_name, kafkaHost=kafka_host, zookeeperHost=zookeeper_host):
        self.config["topic"] = topic
        self.config["kafkaHost"] = kafkaHost
        self.config["zookeeperHost"] = zookeeperHost
        super(KafkaPythonClient, self).__init__()

    def createProducer(self, kafkaSync):
        # kafkaSync True => block on every send ("synchronous" mode).
        self.config["kafkaSync"] = kafkaSync
        self.producer = KafkaProducer(bootstrap_servers=self.config["kafkaHost"])

    def createConsumer(self):
        # consumer_timeout_ms lets iteration terminate after 1s of silence.
        self.consumer = KafkaConsumer(bootstrap_servers=self.config["kafkaHost"],
                                      enable_auto_commit=True,
                                      auto_offset_reset='latest',
                                      consumer_timeout_ms=1000)
        self.consumer.subscribe([self.config["topic"]])

    def produce(self, num_msg=20000):
        """Send *num_msg* copies of self.msg, printing progress every 10000."""
        self.msgCount = num_msg
        for x in range(self.msgCount):
            self.prtProgress(x, 10000)
            result = self.producer.send(self.config["topic"], self.msg)
            if self.config["kafkaSync"]:
                # Block for "synchronous" mode; errors are reported but
                # deliberately non-fatal (best-effort benchmark).
                try:
                    result.get(timeout=10)
                except KafkaError:
                    print("*** KAFKA ERROR ***")
        if x >= 10000:
            sys.stdout.write('\n')

    def consume(self, num_msg):
        """Drain the consumer; warn if the count differs from *num_msg* (>0).

        Returns the number of messages read.
        """
        count = 0
        for message in self.consumer:
            count += 1
            self.prtProgress(count, 10000)
        sys.stdout.write('\n')
        if num_msg > 0 and count != num_msg:
            print("ERROR: KafkaPythonClient.consume: # of messages not as expected, read: {}, expected: {}".format(count, num_msg))
        return count

    def startProducer(self):
        pass

    def stopProducer(self):
        self.beforeFlushTimer(self.timeDict['producer'])
        if not self.config["kafkaSync"]:
            # Async mode: push out anything still batched in the producer.
            self.producer.flush()

    def stopConsumer(self):
        pass

    def initCount(self):
        # Drain any backlog so a later consume() counts only new messages.
        self.consume(0)

    def finalize(self):
        pass
示例11: Consumer
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def Consumer():
    """Print every record from the 'test' topic, starting from the
    earliest available offset."""
    test_consumer = KafkaConsumer(bootstrap_servers='deepc04.acis.ufl.edu:9092',
                                  auto_offset_reset='earliest')
    test_consumer.subscribe(['test'])
    # This will wait and print messages as they become available.
    for record in test_consumer:
        print(record)
示例12: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Poll 'parking-test-summary' forever, printing each poll result."""
    summary_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
                                     auto_offset_reset='latest')
    summary_consumer.subscribe(['parking-test-summary'])
    while True:
        batch = summary_consumer.poll(10)
        time.sleep(5)
        print(batch)
示例13: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Print every record from 'my-topic' on the 'kafka:9092' broker.

    Fix: corrected the typo in the two progress messages
    ("consummer" -> "consumer").
    """
    print("consumer")
    consumer = KafkaConsumer(bootstrap_servers='kafka:9092',
                             auto_offset_reset='earliest')
    print("consumer ... ok")
    consumer.subscribe(['my-topic'])
    for message in consumer:
        print(message)
示例14: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Forward each message on the 'bro' topic to Elasticsearch,
    printing it along the way."""
    bro_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
                                 auto_offset_reset='earliest')
    bro_consumer.subscribe(['bro'])
    while True:
        for record in bro_consumer:
            print(record.value)
            send_to_es(record.value)
示例15: run
# 需要导入模块: from kafka import KafkaConsumer [as 别名]
# 或者: from kafka.KafkaConsumer import subscribe [as 别名]
def run(self):
    """Print records from 'my-test-topic' until self.stop_event is set."""
    consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
                             auto_offset_reset='earliest',
                             consumer_timeout_ms=1000)
    consumer.subscribe(['my-test-topic'])
    while not self.stop_event.is_set():
        # consumer_timeout_ms ends the inner loop after 1s of silence,
        # so the stop flag is re-checked periodically.
        for record in consumer:
            print(record)
            if self.stop_event.is_set():
                break
    consumer.close()