

Python KeyedProducer.send_messages method code examples

This article collects typical usage examples of the kafka.producer.KeyedProducer.send_messages method in Python. If you are wondering what exactly KeyedProducer.send_messages does, how to call it, or how it is used in real code, the curated examples below should help. You can also explore other usage examples of the enclosing class, kafka.producer.KeyedProducer.


The following presents 15 code examples of the KeyedProducer.send_messages method, sorted by popularity by default.
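Before the individual examples, here is a minimal, self-contained sketch of the basic call pattern, written against the legacy kafka-python (pre-1.0) API that these examples target; the broker address, topic, key, and message payloads are placeholder assumptions, not taken from any of the projects below.

# Minimal sketch of KeyedProducer.send_messages (legacy kafka-python API).
# Broker address, topic, key, and messages are assumptions for illustration.
from kafka import KafkaClient
from kafka.producer import KeyedProducer

client = KafkaClient("localhost:9092")   # assumed local broker
producer = KeyedProducer(client)         # partitions messages by key

# send_messages(topic, key, *messages): all messages share one key,
# so they are routed to the same partition.
producer.send_messages("example_topic", "user-42", "first message", "second message")

producer.stop()
client.close()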

Example 1: KafkaLoggingHandler

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class KafkaLoggingHandler(logging.Handler):

    def __init__(self, hosts_list, topic, key=None):
        logging.Handler.__init__(self)
        self.kafka_client = KafkaClient(hosts_list)
        self.key = key
        self.kafka_topic_name = topic
        if not key:
            self.producer = SimpleProducer(self.kafka_client)
        else:
            self.producer = KeyedProducer(self.kafka_client)

    def emit(self, record):
        # drop kafka logging to avoid infinite recursion
        if record.name == 'kafka':
            return
        try:
            # use default formatting
            msg = self.format(record)
            # produce message
            if not self.key:
                self.producer.send_messages(self.kafka_topic_name, msg)
            else:
                self.producer.send(self.kafka_topic_name, self.key, msg)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def close(self):
        self.producer.stop()
        logging.Handler.close(self)
Author: nivm, Project: python-kafka-logging, Lines: 34, Source: KafkaHandler.py
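A short usage sketch for the handler above; the broker address, topic, key, and logger name are illustrative assumptions rather than part of the original project:

# Hypothetical wiring of KafkaLoggingHandler into the standard logging module.
import logging

handler = KafkaLoggingHandler("localhost:9092", "app_logs", key="web-frontend")  # assumed broker/topic/key
handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s"))

logger = logging.getLogger("my_app")        # assumed logger name
logger.setLevel(logging.INFO)
logger.addHandler(handler)

logger.info("service started")              # formatted, then sent with the configured key via KeyedProducer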

Example 2: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client,async=True,\
                                      batch_send_every_n=500,batch_send=False)
        self.min_steps = 1
        self.max_steps = 3
        self.max_users_each_thread = 12000

    def produce_msgs(self, source_symbol):
        msg_cnt = 0
        while True:
            start_uuid = (int(source_symbol) - 1) * self.max_users_each_thread
            stop_uuid =  (int(source_symbol) * self.max_users_each_thread) - 1
            uuid = random.sample(range(start_uuid,stop_uuid), 9)
            for uid in uuid:
                timestamp = datetime.now(timezone('US/Pacific')).\
                                        strftime('%Y-%m-%d %H:%M:%S')
                steps = random.randint(1,10)
                json_msg= {'source':source_symbol,'uuid':uid, 
                           'timestamp':timestamp, 'steps': steps}
                json_encoded = json.dumps(json_msg)
                self.producer.send_messages('steps_data_part4', source_symbol,\
                                             json_encoded)
                print json_encoded
                msg_cnt += 1
Author: bigdata2, Project: rankMySteps, Lines: 29, Source: kafka_producer.py

Example 3: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

    def __init__(self, addr):
        self.client = SimpleClient(addr)
        self.producer = KeyedProducer(self.client)
        self.sess = requests.Session()
        adapter = requests.adapters.HTTPAdapter(max_retries=5)
        self.sess.mount('http://', adapter)
        self.sess.mount('https://', adapter)

    def produce_msgs(self, topic, source_symbol, last_record_set):
        self.record_set = set()
        count = 0
        try:
            for item in self.r["data"]:
                self.record_set.add(item["payment_id"])
                count += 1
                if not item["payment_id"] in last_record_set:
                    message_info = "{}\n".format(json.dumps(item))
                    self.producer.send_messages(topic, source_symbol, message_info)
                    # print message_info
                    # print count
        except:
            k = 1

    def get_venmo(self, limit=300, page="https://venmo.com/api/v5/public?"):
        try:
            self.r = self.sess.get(page + "&limit={}".format(limit)).json()
        except:
            self.r = ""
Author: qingpeng, Project: VenmoPlus, Lines: 33, Source: kafka_producer_venmo_api.py
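A possible driver loop for this class; the broker address, topic, partition key, and polling interval are assumptions for illustration:

# Hypothetical polling loop: fetch the public Venmo feed, forward unseen payments.
import time

p = Producer("localhost:9092")                   # assumed broker address
seen = set()
while True:
    p.get_venmo(limit=300)                       # populates p.r with the latest feed page
    p.produce_msgs("venmo_payments", "A", seen)  # assumed topic and key
    if p.record_set:                             # keep the previous ids if the fetch failed
        seen = p.record_set
    time.sleep(5)                                # arbitrary polling interval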

Example 4: KafkaLoggingHandler

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class KafkaLoggingHandler(logging.Handler):
    def __init__(self, hosts="", topic="", partition=0):
        logging.Handler.__init__(self)
        self.kafkaClient = KafkaClient(hosts)
        self.topic = topic
        self.partition = partition
        self.producer = KeyedProducer(
            self.kafkaClient, async=False, req_acks=KeyedProducer.ACK_AFTER_LOCAL_WRITE, ack_timeout=200
        )

    def emit(self, record):
        # drop kafka logging to avoid infinite recursion
        if record.name == "kafka":
            return
        try:
            # use default formatting
            msg = self.format(record)
            # produce message
            self.producer.send_messages(self.topic + record.name, self.partition, msg)
        except:
            import traceback

            ei = sys.exc_info()
            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
            del ei

    def close(self):
        self.producer.stop()
        logging.Handler.close(self)
Author: TravelGene-ios, Project: travelgene, Lines: 31, Source: kafka_log_handler.py

Example 5: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):
	
	def __init__(self, addr):
		self.client = KafkaClient(addr)
		self.producer = KeyedProducer(self.client)

	def produce_msgs(self, source_symbol):
		#price_field = random.randint(800,1400)
		msg_cnt = 0

		datagenerator = DataGenerator()

		function_options = {
			0:datagenerator.click_event,
			1:datagenerator.view_event,
			2:datagenerator.bid_event,
			3:datagenerator.hover_event,
			4:datagenerator.load_event
		}

		while True:
			#time_field = datetime.now().strftime("%Y%m%d %H%M%S")
			#price_field += random.randint(-10, 10)/10.0
			#volume_field = random.randint(1, 1000)
			#str_fmt = "{};{};{};{}"
			#message_info = str_fmt.format(source_symbol, time_field, price_field, volume_field)
			num = random.randint(0, 4)
			message_info = function_options[num]()

			print json.dumps(message_info)

			self.producer.send_messages('test_adability', source_symbol, message_info)
			msg_cnt += 1
Author: DanisHack, Project: ADability, Lines: 35, Source: kafka_producer.py

Example 6: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

	def __init__(self, addr):
		self.client = KafkaClient(addr)
		self.producer = KeyedProducer(self.client)
		self.artist_id = []
		self.artwork_id = []

	def load_ids(self):
		artwork_path = "/home/ubuntu/Insight/dataset/Artsy/artwork_id.txt"
		artist_path = "/home/ubuntu/Insight/dataset/Artsy/artist_id.txt"
		with open(artwork_path) as f1:
			for line in f1:
				if line != "":
					self.artwork_id.append(line.strip())
			f1.close()
		with open(artist_path) as f2:
			for line in f2:
				if line != "":
					self.artist_id.append(line.strip())
			f2.close()


	def produce_msgs(self, source_symbol):
		msg_cnt = 0
		while True:
			time_field = datetime.now().strftime("%Y%m%d %H%M%S")
			user_field = random.choice(self.artist_id)
			art_field = random.choice(self.artwork_id)
			str_fmt = "{};{};{};{};{}"
			message_info = str_fmt.format(source_symbol,time_field,user_field,"pin",art_field)
			# print message_info
			self.producer.send_messages('pin_activity', source_symbol, message_info)
			msg_cnt += 1
Author: keiraqz, Project: artmosphere, Lines: 36, Source: my_streaming_producer.py

Example 7: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

  def __init__(self, addr):
    self.client = KafkaClient(addr)
    self.producer = KeyedProducer(self.client)
    self.zipcode = []
    self.complaint = []

  def load_ids(self):
    zipcode_path = "/home/ubuntu/repos/project311/kafka/zipcodes.txt"
    complaint_path = "/home/ubuntu/repos/project311/kafka/complaint_type.txt"
    with open(zipcode_path, 'r') as f1:
      for line in f1:
        if line != "":
            self.zipcode.append(line.strip())
    with open(complaint_path) as f2:
      for line in f2:
        if line != "":
          self.complaint.append(line.strip())

  def produce_msgs(self, source_symbol):
    msg_cnt = 0
    while True:
      time_field = datetime.now().strftime("%Y%m%d%H%M%S")
      zipcode_field = random.choice(self.zipcode)
      complaint_field = random.choice(self.complaint)
      str_fmt = "{};{};{};{}"
      message_info = str_fmt.format(source_symbol, time_field, zipcode_field, complaint_field)
      print message_info
      self.producer.send_messages('complaints', source_symbol, message_info)
      msg_cnt += 1
Author: smehta930, Project: project311, Lines: 33, Source: kafka_producer.py

Example 8: KafkaLfProducer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class KafkaLfProducer(object):
    def __init__(self, addr, conf_file, start_house_id, end_house_id, house_status):
        self.parser = SafeConfigParser()
        self.parser.read(conf_file)
        install_dir = self.parser.get('smw_tool', 'INSTALL_DIR')
        zipdb_file = self.parser.get('smw_tool', 'ZIP_DB_FILE') 

        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client, async=True, batch_send_every_n=500,batch_send=True)
        self.meterReader = MeterLfReader(start_house_id,
                                         end_house_id,
                                         house_status,
                                         install_dir + "/data/low_freq/", 
                                         install_dir + "/" + zipdb_file)

    def produce_msgs(self, source_symbol):
        msg_cnt = 0

        while not self.meterReader.houseSentDone():
            (isLf, msg) = self.meterReader.getRecord()

            if msg_cnt % 500000 == 0:
                print "Sent " + str(msg_cnt) + " messages to Kafka"

            if isLf:
                self.producer.send_messages('smw_batch_lf2', source_symbol, msg)
            else:
                self.producer.send_messages('smw_batch_hf2', source_symbol, msg)

            msg_cnt += 1

        print "Sent Total " + str(msg_cnt) + " messages to Kafka"
        self.meterReader.writeHouseStatus()
Author: andrewcbl, Project: SmartMeterWatchdog, Lines: 35, Source: KafkaLfProducer.py

Example 9: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):
    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client)


    def produce_deal_urls(self, api_url=''):
        ''' Constantly produce deal urls for consumers to crawl '''
        # TODO - Find total deals per category
        
        # TODO - Calculate number of pages to crawl
        
        # TODO - Produce categories and page range for consumers
        # {category_slug; start_page; end_page}
        
        

    def produce_msgs(self, source_symbol):
        price_field = random.randint(800,1400)
        msg_cnt = 0
        while True:
            time_field = datetime.now().strftime("%Y%m%d %H%M%S")
            price_field += random.randint(-10, 10)/10.0
            volume_field = random.randint(1, 1000)
            str_fmt = "{};{};{};{}"
            message_info = str_fmt.format(source_symbol,
                                          time_field,
                                          price_field,
                                          volume_field)
            print message_info
            self.producer.send_messages('price_data_part4', source_symbol, message_info)
            msg_cnt += 1
Author: awaemmanuel, Project: exstreamly_cheap, Lines: 34, Source: kafka_producer.py

Example 10: run

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
    def run(self, delay=0.1):
        client = KafkaClient("localhost:9092")
        producer = KeyedProducer(client)

        import numpy as np

        for photoid in TESTPHOTOIDS:
            producer.send_messages('flickr-photoid','%d'%np.random.randint(0,20) ,photoid)
            print "Sending PhotoID: %s"%photoid

            time.sleep(delay)
Author: patrickzheng, Project: InLivingColor, Lines: 13, Source: producer_photoid_test.py

Example 11: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client)

    def produce_msgs(self, source_symbol, file_source):
        hd = open(file_source)
        for line in hd:
            print line
            self.producer.send_messages('datatest', source_symbol, line)
Author: HsiangHung, Project: TIPMAX, Lines: 13, Source: tick_kafkaproducer.py

Example 12: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

	def __init__(self, addr):
		self.client = SimpleClient(addr)
		self.producer = KeyedProducer(self.client)

	def stream_science_posts(self, key):
		r = requests.session()
		header = {"User-Agent": "anisotropix Science"}
		s = r.get('https://www.reddit.com/r/science/new/.json?limit=100', stream=True, headers=header)  # stream=True, timeout=2
		for post in s.iter_lines():
			if post:
				self.producer.send_messages('Science_posts',key,  post)
				print (post)
Author: goaaron, Project: SigSpark, Lines: 16, Source: scienceStream.py

Example 13: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

    def __init__(self, addr):
        self.client = SimpleClient(addr)
        self.producer = KeyedProducer(self.client)

    def produce_msgs(self, source_symbol, file_to_use):
        file_obj = open(file_to_use, 'r')
        msg_cnt = 0
        while True:
            message_info = file_obj.next()
            print message_info
            self.producer.send_messages('venmo2', source_symbol, message_info)
            msg_cnt += 1
Author: qingpeng, Project: VenmoPlus, Lines: 16, Source: kafka_producer.py

Example 14: write

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
def write():
    k_client = KafkaClient(KAFKA_URL)
    p = KeyedProducer(k_client,
                      async=False,
                      req_acks=KeyedProducer.ACK_AFTER_LOCAL_WRITE,
                      ack_timeout=2000)
    messages = []
    for i in xrange(NUM_MESSAGES):
        message = json.dumps({'msg': 'X' * SIZE_MSG})
        messages.append(message)
        if len(messages) >= 500:
            key = int(time.time() * 1000)
            p.send_messages(KAFKA_TOPIC, str(key), *messages)
            messages = []
    key = int(time.time() * 1000)
    p.send_messages(KAFKA_TOPIC, str(key), *messages)
Author: hpcloud-mon, Project: monasca-perf, Lines: 18, Source: kafka_test.py
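Note that send_messages accepts any number of message arguments after the key, which is how the example above flushes a 500-message buffer in one call. A minimal, self-contained illustration of that pattern follows; the broker address, topic, and key are assumptions:

# Batched send: several messages sharing one key go out in a single send_messages call.
from kafka import KafkaClient
from kafka.producer import KeyedProducer

client = KafkaClient("localhost:9092")           # assumed broker
producer = KeyedProducer(client)
batch = ['{"msg": "a"}', '{"msg": "b"}', '{"msg": "c"}']
producer.send_messages("perf_topic", "batch-key", *batch)   # *batch expands into individual messages
producer.stop()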

Example 15: Producer

# Module to import: from kafka.producer import KeyedProducer [as alias]
# Or: from kafka.producer.KeyedProducer import send_messages [as alias]
class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client)

    def produce_msgs(self, source_symbol):
        msg_cnt = 0
        while True:
            artwork_path = "loc.txt"
            with open(artwork_path) as f1:
                for line in f1:
                    if line.strip():
                        print line.strip()
                        self.producer.send_messages('post_geo_activity', source_symbol,line.strip())
                        msg_cnt += 1
Author: keiraqz, Project: artmosphere, Lines: 18, Source: hdfs_producer.py


Note: the kafka.producer.KeyedProducer.send_messages examples in this article were compiled by 纯净天空 from open-source code and documentation hosted on GitHub, MSDocs, and similar platforms. The snippets were selected from open-source projects contributed by their respective developers; copyright in the code remains with the original authors, and any use or redistribution should follow the corresponding project's license. Please do not republish this article without permission.