

Python producer.KeyedProducer Class Code Examples

This article collects typical usage examples of the Python class kafka.producer.KeyedProducer. If you are wondering what the KeyedProducer class does, how to use it, or what working examples look like, the curated class examples below may help.


The following presents 15 code examples of the KeyedProducer class, sorted by popularity by default.
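Before the examples, here is a minimal orientation sketch of how a keyed producer is typically used. It assumes the legacy kafka-python API (kafka.KafkaClient plus kafka.producer.KeyedProducer) that the examples below target; the broker address "localhost:9092", the topic name "example_topic", and the key "key-1" are placeholders rather than values taken from any of the projects.

# Minimal sketch of keyed message production with the legacy kafka-python API.
# Messages sent with the same key are routed to the same partition.
from kafka import KafkaClient
from kafka.producer import KeyedProducer

client = KafkaClient("localhost:9092")   # placeholder broker address
producer = KeyedProducer(client)

# send_messages(topic, key, *messages)
producer.send_messages("example_topic", "key-1", "hello from KeyedProducer")

producer.stop()    # flush any buffered messages and stop the producer
client.close()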

Example 1: Producer

class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client,async=True,\
                                      batch_send_every_n=500,batch_send=False)
        self.min_steps = 1
        self.max_steps = 3
        self.max_users_each_thread = 12000

    def produce_msgs(self, source_symbol):
        msg_cnt = 0
        while True:
            start_uuid = (int(source_symbol) - 1) * self.max_users_each_thread
            stop_uuid =  (int(source_symbol) * self.max_users_each_thread) - 1
            uuid = random.sample(range(start_uuid,stop_uuid), 9)
            for uid in uuid:
                timestamp = datetime.now(timezone('US/Pacific')).\
                                        strftime('%Y-%m-%d %H:%M:%S')
                steps = random.randint(1,10)
                json_msg= {'source':source_symbol,'uuid':uid, 
                           'timestamp':timestamp, 'steps': steps}
                json_encoded = json.dumps(json_msg)
                self.producer.send_messages('steps_data_part4', source_symbol,\
                                             json_encoded)
                print json_encoded
                msg_cnt += 1
Developer: bigdata2, Project: rankMySteps, Lines: 27, Source: kafka_producer.py

Example 2: KafkaLoggingHandler

class KafkaLoggingHandler(logging.Handler):
    def __init__(self, hosts="", topic="", partition=0):
        logging.Handler.__init__(self)
        self.kafkaClient = KafkaClient(hosts)
        self.topic = topic
        self.partition = partition
        self.producer = KeyedProducer(
            self.kafkaClient, async=False, req_acks=KeyedProducer.ACK_AFTER_LOCAL_WRITE, ack_timeout=200
        )

    def emit(self, record):
        # drop kafka logging to avoid infinite recursion
        if record.name == "kafka":
            return
        try:
            # use default formatting
            msg = self.format(record)
            # produce message
            self.producer.send_messages(self.topic + record.name, self.partition, msg)
        except:
            import traceback

            ei = sys.exc_info()
            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
            del ei

    def close(self):
        self.producer.stop()
        logging.Handler.close(self)
Developer: TravelGene-ios, Project: travelgene, Lines: 29, Source: kafka_log_handler.py

Example 3: Producer

class Producer(object):

    def __init__(self, addr):
        self.client = SimpleClient(addr)
        self.producer = KeyedProducer(self.client)
        self.sess = requests.Session()
        adapter = requests.adapters.HTTPAdapter(max_retries=5)
        self.sess.mount('http://', adapter)
        self.sess.mount('https://', adapter)

    def produce_msgs(self, topic, source_symbol, last_record_set):
        self.record_set = set()
        count = 0
        try:
            for item in self.r["data"]:
                self.record_set.add(item["payment_id"])
                count += 1
                if not item["payment_id"] in last_record_set:
                    message_info = "{}\n".format(json.dumps(item))
                    self.producer.send_messages(topic, source_symbol, message_info)
                    # print message_info
                    # print count
        except:
            k = 1

    def get_venmo(self, limit=300, page="https://venmo.com/api/v5/public?"):
        try:
            self.r = self.sess.get(page + "&limit={}".format(limit)).json()
        except:
            self.r = ""
Developer: qingpeng, Project: VenmoPlus, Lines: 31, Source: kafka_producer_venmo_api.py

Example 4: Producer

class Producer(object):

	def __init__(self, addr):
		self.client = KafkaClient(addr)
		self.producer = KeyedProducer(self.client)
		self.artist_id = []
		self.artwork_id = []

	def load_ids(self):
		artwork_path = "/home/ubuntu/Insight/dataset/Artsy/artwork_id.txt"
		artist_path = "/home/ubuntu/Insight/dataset/Artsy/artist_id.txt"
		with open(artwork_path) as f1:
			for line in f1:
				if line != "":
					self.artwork_id.append(line.strip())
			f1.close()
		with open(artist_path) as f2:
			for line in f2:
				if line != "":
					self.artist_id.append(line.strip())
			f2.close()


	def produce_msgs(self, source_symbol):
		msg_cnt = 0
		while True:
			time_field = datetime.now().strftime("%Y%m%d %H%M%S")
			user_field = random.choice(self.artist_id)
			art_field = random.choice(self.artwork_id)
			str_fmt = "{};{};{};{};{}"
			message_info = str_fmt.format(source_symbol,time_field,user_field,"pin",art_field)
			# print message_info
			self.producer.send_messages('pin_activity', source_symbol, message_info)
			msg_cnt += 1
Developer: keiraqz, Project: artmosphere, Lines: 34, Source: my_streaming_producer.py

Example 5: Producer

class Producer(object):
	
	def __init__(self, addr):
		self.client = KafkaClient(addr)
		self.producer = KeyedProducer(self.client)

	def produce_msgs(self, source_symbol):
		#price_field = random.randint(800,1400)
		msg_cnt = 0

		datagenerator = DataGenerator()

		function_options = {
			0:datagenerator.click_event,
			1:datagenerator.view_event,
			2:datagenerator.bid_event,
			3:datagenerator.hover_event,
			4:datagenerator.load_event
		}

		while True:
			#time_field = datetime.now().strftime("%Y%m%d %H%M%S")
			#price_field += random.randint(-10, 10)/10.0
			#volume_field = random.randint(1, 1000)
			#str_fmt = "{};{};{};{}"
			#message_info = str_fmt.format(source_symbol, time_field, price_field, volume_field)
			num = random.randint(0, 4)
			message_info = function_options[num]()

			print json.dumps(message_info)

			self.producer.send_messages('test_adability', source_symbol, message_info)
			msg_cnt += 1
Developer: DanisHack, Project: ADability, Lines: 33, Source: kafka_producer.py

Example 6: Producer

class Producer(object):

  def __init__(self, addr):
    self.client = KafkaClient(addr)
    self.producer = KeyedProducer(self.client)
    self.zipcode = []
    self.complaint = []

  def load_ids(self):
    zipcode_path = "/home/ubuntu/repos/project311/kafka/zipcodes.txt"
    complaint_path = "/home/ubuntu/repos/project311/kafka/complaint_type.txt"
    with open(zipcode_path, 'r') as f1:
      for line in f1:
        if line != "":
          self.zipcode.append(line.strip())
    with open(complaint_path) as f2:
      for line in f2:
        if line != "":
          self.complaint.append(line.strip())

  def produce_msgs(self, source_symbol):
    msg_cnt = 0
    while True:
      time_field = datetime.now().strftime("%Y%m%d%H%M%S")
      zipcode_field = random.choice(self.zipcode)
      complaint_field = random.choice(self.complaint)
      str_fmt = "{};{};{};{}"
      message_info = str_fmt.format(source_symbol, time_field, zipcode_field, complaint_field)
      print message_info
      self.producer.send_messages('complaints', source_symbol, message_info)
      msg_cnt += 1
Developer: smehta930, Project: project311, Lines: 31, Source: kafka_producer.py

Example 7: KafkaLfProducer

class KafkaLfProducer(object):
    def __init__(self, addr, conf_file, start_house_id, end_house_id, house_status):
        self.parser = SafeConfigParser()
        self.parser.read(conf_file)
        install_dir = self.parser.get('smw_tool', 'INSTALL_DIR')
        zipdb_file = self.parser.get('smw_tool', 'ZIP_DB_FILE') 

        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client, async=True, batch_send_every_n=500,batch_send=True)
        self.meterReader = MeterLfReader(start_house_id,
                                         end_house_id,
                                         house_status,
                                         install_dir + "/data/low_freq/", 
                                         install_dir + "/" + zipdb_file)

    def produce_msgs(self, source_symbol):
        msg_cnt = 0

        while not self.meterReader.houseSentDone():
            (isLf, msg) = self.meterReader.getRecord()

            if msg_cnt % 500000 == 0:
                print "Sent " + str(msg_cnt) + " messages to Kafka"

            if isLf:
                self.producer.send_messages('smw_batch_lf2', source_symbol, msg)
            else:
                self.producer.send_messages('smw_batch_hf2', source_symbol, msg)

            msg_cnt += 1

        print "Sent Total " + str(msg_cnt) + " messages to Kafka"
        self.meterReader.writeHouseStatus()
Developer: andrewcbl, Project: SmartMeterWatchdog, Lines: 33, Source: KafkaLfProducer.py

Example 8: Producer

class Producer(object):
    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client)


    def produce_deal_urls(self, api_url=''):
        ''' Constantly produce deal urls for consumers to crawl '''
        # TODO - Find total deals per category
        
        # TODO - Calculate number of pages to crawl
        
        # TODO - Produce categories and page range for consumers
        # {category_slug; start_page; end_page}
        
        

    def produce_msgs(self, source_symbol):
        price_field = random.randint(800,1400)
        msg_cnt = 0
        while True:
            time_field = datetime.now().strftime("%Y%m%d %H%M%S")
            price_field += random.randint(-10, 10)/10.0
            volume_field = random.randint(1, 1000)
            str_fmt = "{};{};{};{}"
            message_info = str_fmt.format(source_symbol,
                                          time_field,
                                          price_field,
                                          volume_field)
            print message_info
            self.producer.send_messages('price_data_part4', source_symbol, message_info)
            msg_cnt += 1
Developer: awaemmanuel, Project: exstreamly_cheap, Lines: 32, Source: kafka_producer.py

Example 9: __init__

    def __init__(self, hosts_list, topic, key=None):
        logging.Handler.__init__(self)
        self.kafka_client = KafkaClient(hosts_list)
        self.key = key
        self.kafka_topic_name = topic
        if not key:
            self.producer = SimpleProducer(self.kafka_client)
        else:
            self.producer = KeyedProducer(self.kafka_client)
Developer: nivm, Project: python-kafka-logging, Lines: 9, Source: KafkaHandler.py

Example 10: __init__

    def __init__(self, hosts_list, topic, **kwargs):
        logging.Handler.__init__(self)

        self.kafka_client = SimpleClient(hosts_list)
        self.key = kwargs.get("key", None)
        self.kafka_topic_name = topic

        if not self.key:
            self.producer = SimpleProducer(self.kafka_client, **kwargs)
        else:
            self.producer = KeyedProducer(self.kafka_client, **kwargs)
Developer: avihoo, Project: python-kafka-logging, Lines: 11, Source: KafkaHandler.py

Example 11: __init__

    def __init__(self, topic, producer_type=ProducerType.SIMPLE,\
            host_port="127.0.0.1:9092", **producer_opts):

        self.topic = topic
        self.host_port = host_port
        if producer_type == ProducerType.SIMPLE:
            self.producer = SimpleProducer(KafkaClient(host_port),\
                    **producer_opts)
        else:
            self.producer = KeyedProducer(KafkaClient(host_port),\
                    **producer_opts)
Developer: hljyunxi, Project: logging_wrapper, Lines: 11, Source: kafka_handler.py

Example 12: Producer

class Producer(object):

    def __init__(self, addr):
        self.client = KafkaClient(addr)
        self.producer = KeyedProducer(self.client)

    def produce_msgs(self, source_symbol, file_source):
        hd = open(file_source)
        for line in hd:
            print line
            self.producer.send_messages('datatest', source_symbol, line)
Developer: HsiangHung, Project: TIPMAX, Lines: 11, Source: tick_kafkaproducer.py

Example 13: __init__

    def __init__(self, hosts_list, topic, timeout_secs=DEFAULT_SOCKET_TIMEOUT_SECONDS, **kwargs):
        logging.Handler.__init__(self)

        self.kafka_client = KafkaClient(hosts_list, timeout=timeout_secs)
        self.key = kwargs.get("key", None)
        self.kafka_topic_name = topic

        if not self.key:
            self.producer = SimpleProducer(self.kafka_client, **kwargs)
        else:
            self.producer = KeyedProducer(self.kafka_client, **kwargs)
Developer: taykey, Project: python-kafka-logging, Lines: 11, Source: KafkaHandler.py

Example 14: run

    def run(self, delay=0.1):
        client = KafkaClient("localhost:9092")
        producer = KeyedProducer(client)

        import numpy as np

        for photoid in TESTPHOTOIDS:
            producer.send_messages('flickr-photoid', '%d' % np.random.randint(0, 20), photoid)
            print "Sending PhotoID: %s" % photoid

            time.sleep(delay)
Developer: patrickzheng, Project: InLivingColor, Lines: 11, Source: producer_photoid_test.py

Example 15: Producer

class Producer(object):

	def __init__(self, addr):
		self.client = SimpleClient(addr)
		self.producer = KeyedProducer(self.client)

	def stream_science_posts(self, key):
		r = requests.session()
		header = {"User-Agent": "anisotropix Science"}
		s = r.get('https://www.reddit.com/r/science/new/.json?limit=100', stream=True, headers=header)  # stream=True, timeout=2
		for post in s.iter_lines():
			if post:
				self.producer.send_messages('Science_posts', key, post)
				print (post)
Developer: goaaron, Project: SigSpark, Lines: 14, Source: scienceStream.py


Note: The kafka.producer.KeyedProducer class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code belongs to the original authors. Please refer to each project's license before distributing or using the code, and do not reproduce this article without permission.