本文整理汇总了Python中kafka.producer.KeyedProducer.stop方法的典型用法代码示例。如果您正苦于以下问题:Python KeyedProducer.stop方法的具体用法?Python KeyedProducer.stop怎么用?Python KeyedProducer.stop使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类kafka.producer.KeyedProducer的用法示例。
在下文中一共展示了KeyedProducer.stop方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: KafkaLoggingHandler
# 需要导入模块: from kafka.producer import KeyedProducer [as 别名]
# 或者: from kafka.producer.KeyedProducer import stop [as 别名]
class KafkaLoggingHandler(logging.Handler):
    """Logging handler that forwards formatted records to a Kafka topic.

    When no partitioning ``key`` is supplied a SimpleProducer (round-robin
    partitioning) is used; otherwise a KeyedProducer so that every record
    with the same key lands on the same partition.
    """

    def __init__(self, hosts_list, topic, key=None):
        """
        :param hosts_list: Kafka bootstrap host(s), passed to KafkaClient.
        :param topic: destination topic name.
        :param key: optional partitioning key; its presence selects the
            producer implementation.
        """
        logging.Handler.__init__(self)
        self.kafka_client = KafkaClient(hosts_list)
        self.key = key
        self.kafka_topic_name = topic
        if not key:
            self.producer = SimpleProducer(self.kafka_client)
        else:
            self.producer = KeyedProducer(self.kafka_client)

    def emit(self, record):
        """Format *record* and publish it; errors go to handleError()."""
        # drop kafka logging to avoid infinite recursion
        if record.name == 'kafka':
            return
        try:
            # use default formatting
            msg = self.format(record)
            # produce message
            if not self.key:
                self.producer.send_messages(self.kafka_topic_name, msg)
            else:
                self.producer.send(self.kafka_topic_name, self.key, msg)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # was a bare `except:`; narrowed so BaseExceptions such as
            # GeneratorExit are no longer swallowed (matches the other
            # KafkaLoggingHandler in this file that uses `except Exception:`)
            self.handleError(record)

    def close(self):
        """Flush/stop the producer before tearing down the handler."""
        self.producer.stop()
        logging.Handler.close(self)
示例2: KafkaLoggingHandler
# 需要导入模块: from kafka.producer import KeyedProducer [as 别名]
# 或者: from kafka.producer.KeyedProducer import stop [as 别名]
class KafkaLoggingHandler(logging.Handler):
    """Logging handler that publishes records to per-logger Kafka topics.

    Each record is sent synchronously to topic ``topic + record.name`` on a
    fixed partition, acknowledged after the local broker write.
    """

    def __init__(self, hosts="", topic="", partition=0):
        """
        :param hosts: Kafka bootstrap host(s), passed to KafkaClient.
        :param topic: topic-name prefix; the logger name is appended per record.
        :param partition: fixed partition number used for every message.
        """
        logging.Handler.__init__(self)
        self.kafkaClient = KafkaClient(hosts)
        self.topic = topic
        self.partition = partition
        # `async` became a reserved word in Python 3.7, so the keyword is
        # passed via dict-unpacking to keep this module parseable there;
        # runtime semantics are identical to async=False (old kafka-python API).
        self.producer = KeyedProducer(
            self.kafkaClient,
            req_acks=KeyedProducer.ACK_AFTER_LOCAL_WRITE,
            ack_timeout=200,
            **{"async": False}
        )

    def emit(self, record):
        """Format *record* and publish it; print a traceback on failure."""
        # drop kafka logging to avoid infinite recursion
        if record.name == "kafka":
            return
        try:
            # use default formatting
            msg = self.format(record)
            # NOTE(review): self.partition is passed where KeyedProducer's
            # send_messages expects the key argument — confirm against the
            # kafka-python version in use.
            self.producer.send_messages(self.topic + record.name, self.partition, msg)
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; other failures stay best-effort and are printed
            # rather than crashing the logging call
            import traceback
            ei = sys.exc_info()
            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
            del ei

    def close(self):
        """Stop the producer before tearing down the handler."""
        self.producer.stop()
        logging.Handler.close(self)
示例3: offsetCommit
# 需要导入模块: from kafka.producer import KeyedProducer [as 别名]
# 或者: from kafka.producer.KeyedProducer import stop [as 别名]
def offsetCommit():
    """Send an 'offsetCommit' control message to every known partition.

    Relies on module-level globals defined elsewhere in the file:
    ``users``, ``kafkaHost``, ``kafkaTopic``, ``partitions``, ``turtleName``,
    ``checkUserPartitionMapping`` and ``UserProducer``.
    NOTE: Python 2 code (print statement, kafka-python `async=` keyword).
    """
    global users
    # refresh the user→partition mapping before committing
    checkUserPartitionMapping()
    kafkaClient = KafkaClient(kafkaHost, timeout=None)
    # synchronous keyed producer, acked after the local broker write
    # NOTE(review): req_acks is read from UserProducer, not KeyedProducer —
    # presumably UserProducer is a producer subclass defined elsewhere; verify.
    producer = KeyedProducer(kafkaClient, async=False,
                             req_acks=UserProducer.ACK_AFTER_LOCAL_WRITE,
                             ack_timeout=200)
    for partition in partitions:
        # empty 'user' field marks this as a broadcast control message
        encodedMessage = simplejson.dumps({'turtleName':turtleName,
                                           'user':'',
                                           'operation':'offsetCommit'})
        print producer.send(kafkaTopic, partition, encodedMessage)
    # stop with a 1-second drain timeout, then drop the client connection
    producer.stop(1)
    kafkaClient.close()
示例4: Producer
# 需要导入模块: from kafka.producer import KeyedProducer [as 别名]
# 或者: from kafka.producer.KeyedProducer import stop [as 别名]
class Producer(object):
    """Generates synthetic price messages and publishes them to Kafka,
    mirroring each message to a local CSV log file.

    NOTE: Python 2 code (print statement).
    """

    def __init__(self, addr):
        # addr: Kafka bootstrap address(es) for KafkaClient
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client)

    def open_save(self, fileName):
        # NOTE(review): opens in "w" to truncate, then closes — the returned
        # handle is already closed, so callers can only rely on the
        # truncation side effect; confirm this is intentional.
        log_file = open(fileName, "w")
        log_file.close()
        return log_file

    def create_topic(self, topic):
        """Create *topic* (4 partitions, replication 2) via the Kafka CLI."""
        script = "/usr/local/kafka/bin/kafka-topics.sh"
        # NOTE(review): shell command built by string formatting — unsafe if
        # `topic` ever comes from untrusted input.
        os.system("{} --create --zookeeper localhost:2181 --topic {} --partitions {} --replication-factor 2".format(script, topic, "4"))
        return "topic {} created".format(topic)

    def produce_msgs(self, source_symbol, topic):
        """Emit random-walk price messages to *topic* (keyed by
        *source_symbol*) until 200k messages have been sent, appending each
        one to input1/<topic>.csv as CSV: symbol,timestamp,city,price."""
        # create the topic on first use if the broker doesn't know it yet
        server_topics = self.client.topic_partitions
        if topic not in server_topics:
            self.create_topic(topic)
        # random-walk starting price in [800, 1400]
        price_field = random.randint(800,1400)
        cities = ["Barcelona", "Philadelphia", "Honolulu",
                  "Atlanta", "Miami", "Chicago", "SF", "LA", "NYC",
                  "Houston", "Paris", "London", "Tokyo"]
        msg_cnt = 0
        log_file = open("input1/{}.csv".format(topic), "a")
        while True:
            time_field = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
            location_field = random.choice(cities)
            # step the price by a random +/-1.0 increment (tenths)
            price_field += random.randint(-10, 10)/10.0
            str_fmt = "{},{},{},{}"
            message_info = str_fmt.format(source_symbol,
                                          time_field,
                                          location_field,
                                          price_field)
            print message_info
            log_file.write("{}\n".format(message_info))
            self.producer.send_messages(topic, source_symbol, message_info)
            msg_cnt += 1
            # stop after ~200k messages; NOTE(review): log_file leaks if
            # send_messages raises before the break
            if msg_cnt > 200000:
                log_file.close()
                self.producer.stop()
                break
示例5: KafkaLoggingHandler
# 需要导入模块: from kafka.producer import KeyedProducer [as 别名]
# 或者: from kafka.producer.KeyedProducer import stop [as 别名]
class KafkaLoggingHandler(logging.Handler):
    """Forward formatted log records to a Kafka topic.

    A KeyedProducer is used when a ``key`` keyword argument is supplied,
    otherwise a SimpleProducer (round-robin partitioning). All remaining
    keyword arguments are forwarded to the producer constructor.
    """

    def __init__(self, hosts_list, topic, **kwargs):
        logging.Handler.__init__(self)
        self.kafka_client = SimpleClient(hosts_list)
        self.key = kwargs.get("key", None)
        self.kafka_topic_name = topic
        # pick the producer implementation based on whether a key was given
        producer_cls = KeyedProducer if self.key else SimpleProducer
        self.producer = producer_cls(self.kafka_client, **kwargs)

    def emit(self, record):
        """Format *record*, utf-8 encode it if needed, and publish it."""
        # drop kafka logging to avoid infinite recursion
        if record.name == 'kafka':
            return
        try:
            payload = self.format(record)
            if isinstance(payload, unicode):
                payload = payload.encode("utf-8")
            # keyed sends include the partition key between topic and payload
            if self.key:
                self.producer.send_messages(self.kafka_topic_name, self.key,
                                            payload)
            else:
                self.producer.send_messages(self.kafka_topic_name, payload)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)

    def close(self):
        """Stop the producer (if one was created) and close the handler."""
        if self.producer is not None:
            self.producer.stop()
        logging.Handler.close(self)