This article collects typical usage examples of the Python class kafka.producer.SimpleProducer. If you have been wondering what SimpleProducer is for and how to use it, the curated examples below should help.
In total, 15 code examples of the SimpleProducer class are shown, sorted by popularity.
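Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share. The broker address, topic name, and payloads are placeholders, not values taken from any example below:

from kafka import KafkaClient
from kafka.producer import SimpleProducer

client = KafkaClient("localhost:9092")   # assumed broker address
producer = SimpleProducer(client)        # synchronous delivery by default
# send_messages(topic, *messages) publishes one or more messages to a topic
producer.send_messages("my-topic", "hello", "world")
client.close()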
Example 1: output_kafka

def output_kafka(graph_db, registry, kafka_url=None):
    ldict = {"step": MODULEFILE + "/" + inspect.stack()[0][3],
             "hostname": platform.node().split(".")[0]}
    l = logging.LoggerAdapter(common.fetch_lg(), ldict)
    kafka_topic = "cs"
    if kafka_url is None:
        kafka_url = registry.get_config("kafka_url", "localhost:9092")
    else:
        l.info("Updating registry with kafka_url: {}".format(kafka_url))
        registry.put_config("kafka_url", kafka_url)
    (nodes, rels) = out.output_json(graph_db, None, None, as_list=True)
    l.info("Connecting to kafka_url {}".format(kafka_url))
    kafka = KafkaClient(kafka_url)
    # send messages synchronously (the SimpleProducer default)
    producer = SimpleProducer(kafka)
    l.info("Sending nodes to kafka {}/{}".format(kafka_url, kafka_topic))
    for n in nodes:
        producer.send_messages(kafka_topic, n)
    l.info("Sending rels to kafka {}/{}".format(kafka_url, kafka_topic))
    for n in rels:
        producer.send_messages(kafka_topic, n)
    kafka.close()
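Note that SimpleProducer(kafka) delivers synchronously; asynchronous delivery has to be requested explicitly. A one-line sketch of the asynchronous variant in this old kafka-python API, reusing the kafka client from the example:

producer = SimpleProducer(kafka, async=True)  # enqueue and return; a background thread handles delivery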
Example 2: Producer

class Producer():  # was: threading.Thread
    daemon = True

    # Initializes the producer with command-line options
    def __init__(self, options):
        self.host = options.host
        self.port = options.port
        self.topic = options.topic
        self.logfile = options.logfile
        self.metadata = options.metadata
        self.batch_size = options.batch_size
        self.delay = options.delay
        self.pending_messages = []

    # Formats a message to be sent to Kafka; lines that are neither
    # metadata-tagged nor GET requests fall through and return None
    def create_message_data(self, data):
        if self.metadata is not None:
            return "%s::%s" % (self.metadata, data)
        elif re.search("GET", data) is not None:
            data = re.split('[ ,]', data)
            csv = data[0] + ' ' + data[1] + ',' + data[7] + ',' + data[9]
            return csv

    ''' batch not currently working
    def flush_messages(self):
        global pending_messages
        print "flushing %d messages " % len(pending_messages)
        self.producer.send_messages(self.topic, pending_messages)
        pending_messages = []

    def send_to_kafka(self, message_text):
        global pending_messages
        pending_messages.append(message_text)
        if len(pending_messages) == self.batch_size:
            self.flush_messages(self.producer)
    '''

    def log_lines_generator(self, logfile, delay_between_iterations=None):
        global should_stop
        cmd = ['tail', '-n', '0', '-F']
        if delay_between_iterations is not None:
            cmd.append('-s')
            cmd.append(str(delay_between_iterations))  # subprocess args must be strings
        cmd.append(logfile)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=None)
        while not should_stop:
            line = process.stdout.readline().strip()
            yield line

    def run(self):
        self.client = KafkaClient(self.host + ':' + str(self.port))
        self.producer = SimpleProducer(self.client)
        try:
            for line in self.log_lines_generator(self.logfile):
                msg = self.create_message_data(line)
                self.producer.send_messages(self.topic, msg)
        except KeyboardInterrupt:
            pass
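The manual batching that this example leaves disabled can be had from SimpleProducer itself; the same constructor flags appear, commented out, in Example 4 below. A minimal sketch with placeholder broker, topic, thresholds, and payloads:

from kafka import KafkaClient
from kafka.producer import SimpleProducer

client = KafkaClient("localhost:9092")   # assumed broker address
# buffer messages client-side and flush every 20 messages or every 5 seconds
producer = SimpleProducer(client,
                          batch_send=True,
                          batch_send_every_n=20,
                          batch_send_every_t=5)
producer.send_messages("access-logs", "line 1", "line 2")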
Example 3: main

def main():
    happy_log_probs, sad_log_probs = readSentimentList(
        'twitter_sentiment_list.csv')

    consumer = KafkaConsumer("tweets", bootstrap_servers=["localhost:9092"],
                             auto_offset_reset='smallest')
    kafka = KafkaClient("localhost:9092")
    producer = SimpleProducer(kafka)
    topic = 'hashtag_sentiment'

    positive_tags = Counter()
    negative_tags = Counter()

    while True:
        for message in consumer.fetch_messages():
            txt = message.value
            txt = re.sub(r'[^\x00-\x7F]', ' ', txt)
            hashtags, sentiment = classifySentiment(
                txt, happy_log_probs, sad_log_probs)
            for hashtag in hashtags:
                if sentiment > 0:
                    positive_tags[hashtag] += 1
                else:
                    negative_tags[hashtag] += 1

        results = {}
        for key, val in positive_tags.most_common(20):
            results[key] = val
        producer.send_messages(topic, json.dumps(results))
        time.sleep(10)
Example 4: seed_kafka

def seed_kafka(kafka_hosts=None, topic_name=None, num_emails=100000):
    """Seed the local Kafka cluster's "dumpmon" topic with sample email data."""
    topic_name = topic_name or "dumpmon"
    kafka_hosts = kafka_hosts or "127.0.0.1:9092"

    kafka = KafkaClient(kafka_hosts)
    producer = SimpleProducer(kafka)
    # producer = SimpleProducer(kafka, batch_send=True, batch_send_every_n=1000,
    #                           batch_send_every_t=5)
    print("Seeding Kafka ({}) topic '{}' with {:,} fake emails."
          .format(kafka_hosts, topic_name, num_emails))
    emails = random_email_generator()
    for i in range(num_emails):
        email = json.dumps(next(emails)).encode("utf-8", "ignore")
        producer.send_messages(topic_name, email)
    print("Done.")
Example 5: KafkaConnector

class KafkaConnector(object):

    def __init__(self, host_name, host_port):
        self.client = KafkaClient(host_name + ":" + host_port)
        self.producer = SimpleProducer(self.client)

    def create_topic(self, topic_name):
        topic_exists = self.client.has_metadata_for_topic(topic_name)
        if not topic_exists:
            self.client.ensure_topic_exists(topic_name)

    def send_message(self, topic_name, message):
        self.producer.send_messages(topic_name, message)

    def register_consumer(self, callback, parse_json, topic_group, topic_name):
        consumer = SimpleConsumer(self.client, topic_group, topic_name)
        consumer_thread = ConsumerThread(consumer, callback, parse_json)
        consumer_thread.start()

    def blocking_consumer(self, message_consume_function, parse_json, topic_group, topic_name):
        print "starting blocking consumer with topic group %s and topic name %s" % (topic_group, topic_name)
        consumer = SimpleConsumer(self.client, topic_group, topic_name)
        consumer.seek(0, 2)

        for message in consumer:
            message = parse_json(message)
            print "=============" + str(message) + "============"
            message_consume_function(message)
            print "called message consume function"
Example 6: emit

def emit(self, container, event):
    try:
        producer = self.producers[event.evt_type]
    except KeyError:
        # per-event-type producer cache; this matches the older kafka-python
        # API in which SimpleProducer was bound to a single topic at construction
        producer = SimpleProducer(self.client, event.evt_type)
        self.producers[event.evt_type] = producer

    producer.send_messages(json.dumps(event.serialize()))
Example 7: main

def main():
    client = KafkaClient("localhost:9092")
    producer = SimpleProducer(client)
    for i in range(5):
        producer.send_messages('mytopic', "This is my test message, number {0}".format(i))
        time.sleep(1)
Example 8: run

def run(self):
    client = KafkaClient("localhost:9092")
    producer = SimpleProducer(client)

    ctime_start = int(time.mktime(time.strptime("30-12-2010 14:00", "%d-%m-%Y %H:%M")))
    for i in range(1):
        ctime_length = 60
        ctime_interval = 60 * 60
        print range(ctime_start, ctime_start + ctime_length, ctime_interval)

        ctime_starts = [ctime_start]
        for i, photo_id in enumerate(GetPhotoIDs_batch_iter(ctime_starts,
                                                            interval=ctime_interval)):
            print i, ctime_start, photo_id
            producer.send_messages('flickr-photo_id-dist', photo_id)
            # print photo_id
            time.sleep(0.3)
        ctime_start += ctime_interval
        time.sleep(1)
Example 9: Producer

class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = SimpleProducer(self.client)

    def produce_msgs(self):
        msg_cnt = 0
        while True:
            timestamp = rutil.randomDate("2015,10,01,00,00,00", "2015,10,06,23,59,00", random.random())
            phone = fake.phone_number()
            latdep, longdep = rutil.randomSingleLatLon(LATDEP, LONGDEP, RADIUS)
            latarr, longarr = rutil.randomSingleLatLon(LATARR, LONGARR, RADIUS)
            message_info = rutil.create_json_message(phone=phone,
                                                     latdep=latdep,
                                                     longdep=longdep,
                                                     latarr=latarr,
                                                     longarr=longarr,
                                                     timestamp=timestamp,
                                                     dr_flag=random.choice([0, 1]),
                                                     user_id=random.choice(range(NUM_USERS)),
                                                     message_id=msg_cnt)
            print 'message ' + str(msg_cnt) + ' ---------- ' + message_info
            self.producer.send_messages('RideRequests', message_info)
            msg_cnt += 1
Example 10: run

def run():
    db = getdb()
    writer = WriterProcess()
    writer.start()

    client = KafkaClient(config.KAFKA_HOST)
    producer = SimpleProducer(client)

    timings = []
    for i in range(1000, 2000):
        key = random_key()
        start = time.clock()
        message = IWAMessage(key, i)
        # print('Sending: %s' % message.dumps())
        producer.send_messages(config.KAFKA_TOPIC, message.dumps())
        # print('Queued: %s => %s' % (message.key, message.value))
        while True:
            with db.begin() as txn:
                if txn.get(key):
                    timings.append(time.clock() - start)
                    break

    writer.terminate()
    print('Average write time: %s' % (sum(timings) / len(timings)))
    print('Total write time: %s' % (sum(timings)))
Example 11: main

def main():
    global freq_array
    client = KafkaClient('ip-172-31-28-55.ec2.internal:6667')
    producer = SimpleProducer(client)
    fft_size = 1000
    fs = 92
    freq_array = np.array((1 * fs / fft_size))
    for i in range(2, int(fft_size / 2)):
        freq_i = np.array((i * fs / fft_size))
        freq_array = np.vstack((freq_array, freq_i))

    with open('xfourmag.csv', 'rt') as f:
        print('opening csv')
        reader = csv.reader(f)
        row = next(reader)
        # global mags
        mags = np.array(row)
        for row in reader:
            # mags += row
            mags = np.vstack((mags, row))

    # print(mags)
    # print(freq_array)
    json_data = {'time': int(time.time()),
                 'fft': np.hstack((freq_array[0:31], mags[0:31])).tolist(),
                 'sensor_id': '1',
                 'reading_type': '0'}
    print('sending data...')
    producer.send_messages('shm', (json.dumps(json_data)).encode('utf-8'))
    print('data sent! :)')
Example 12: KafkaMetrics

class KafkaMetrics(object):

    def __init__(self, broker_list, kafka_topic):
        logging.basicConfig(level=logging.INFO)
        self.log = logging.getLogger('druid-kafka-metrics')
        self.log.info("Kafka (brokers=%s, topic=%s)" % (broker_list, kafka_topic))
        client = KafkaClient(broker_list)
        self.producer = SimpleProducer(client)
        self.msg_count = 0
        self.kafka_topic = kafka_topic

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.json_in()
    def metrics(self):
        messages = cherrypy.request.json
        for message in messages:
            self.msg_count += 1
            self.log.debug("%s - %s" % (self.msg_count, str(message)))
            self.producer.send_messages(self.kafka_topic, json.dumps(message))
            if self.msg_count % 100 == 0:
                self.log.info("%s messages processed." % (self.msg_count,))
        return "{'code':200}"
Example 13: kafkahighlevelproducer

def kafkahighlevelproducer(kafka_conn, schema, bytes):
    """
    Kafka high-level API
    """
    print "SimpleProducer start."

    writer = cStringIO.StringIO()
    encoder = avro.io.BinaryEncoder(writer)
    # JSONEncoder is not implemented ...
    # encoder = avro.io.JSONEncoder(writer)
    datum_writer = avro.io.DatumWriter(schema)

    producer = SimpleProducer(kafka_conn)
    for topic in ["DUMMY_LOG"]:
        writer.truncate(0)
        datum_writer.write({"id": 100L, "logTime": 20140401L, "muchoStuff": {"test": "test1value"}}, encoder)
        bytes = writer.getvalue()
        producer.send_messages(topic, bytes)

        writer.truncate(0)
        datum_writer.write({"id": 101L, "logTime": 20140402L, "muchoStuff": {"test": "test2value"}}, encoder)
        bytes = writer.getvalue()
        producer.send_messages(topic, bytes)

    writer.close()
    print "SimpleProducer done."
Example 14: run

def run(self):
    # client = KafkaClient("localhost:9092")
    client = KafkaClient("kafka_host:9092")
    producer = SimpleProducer(client)

    while True:
        try:
            messages = []
            for i in xrange(1, 10):
                messageStr = SelfGeneratedMessage().asJson()
                logger.debug('Generated message: %s', messageStr)
                messages.append(messageStr)

            producer.send_messages('test', *messages)
            # producer.send_messages('test', '{"publisher": "publisher-id", "time": "2015-11-03 15:03:30.352", "readings": [ 1, 1,1,1,1,1,1,1,1,1,1,1,4,3,3,3,32,2,1,1,1,1]}')
            time.sleep(1)
        except LeaderNotAvailableError as e:
            logging.exception('LeaderNotAvailableError')
            time.sleep(10)
        except KafkaUnavailableError as e:
            logging.exception('KafkaUnavailableError')
            time.sleep(30)
        except ConnectionError as e:
            logging.exception('ConnectionError')
            time.sleep(60)
        except KafkaError as e:
            logging.exception('KafkaError')
            time.sleep(60)
        except Exception as e:
            logging.exception('Exception')
            time.sleep(60)
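The exception classes caught above lived in kafka.common in the old kafka-python releases this code targets; a sketch of the imports the example presumably relies on (an assumption, since the original omits them):

from kafka.common import (ConnectionError, KafkaError,
                          KafkaUnavailableError, LeaderNotAvailableError)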
Example 15: run

def run(self):
    client = KafkaClient(settings.KAFKA_SERVER)
    producer = SimpleProducer(client)
    while True:
        producer.send_messages('heartbeats',
                               """{"id":"yes-is-a-fake-uuide", "status": 200, "serviceName":"chewit_cam" }""")
        time.sleep(5)