

Python KeyedProducer.send Method Code Examples

This article collects typical usage examples of the kafka.KeyedProducer.send method in Python. If you are wondering what KeyedProducer.send does, how to call it, or what real-world code using it looks like, the curated examples below should help. You can also explore further usage examples of the enclosing kafka.KeyedProducer class.


The following presents 10 code examples of the KeyedProducer.send method, sorted by popularity by default.
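As a quick orientation before the examples, here is a minimal sketch of how KeyedProducer.send is typically called with the legacy kafka-python (pre-1.0) client; the broker address, topic, key, and message are placeholder values, not taken from any of the examples below.

from kafka import KafkaClient, KeyedProducer

client = KafkaClient('localhost:9092')          # placeholder broker address
producer = KeyedProducer(client)                # hash-based partitioning by default
resp = producer.send('demo-topic', b'user-42', b'hello')  # (topic, key, message)
producer.stop()
client.close()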

Example 1: test_hashed_partitioner

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_hashed_partitioner(self):
        partitions = self.client.get_partition_ids_for_topic(self.topic)
        start_offsets = [self.current_offset(self.topic, p) for p in partitions]

        producer = KeyedProducer(self.client, partitioner=HashedPartitioner)
        resp1 = producer.send(self.topic, self.key("1"), self.msg("one"))
        resp2 = producer.send(self.topic, self.key("2"), self.msg("two"))
        resp3 = producer.send(self.topic, self.key("3"), self.msg("three"))
        resp4 = producer.send(self.topic, self.key("3"), self.msg("four"))
        resp5 = producer.send(self.topic, self.key("4"), self.msg("five"))

        offsets = {partitions[0]: start_offsets[0], partitions[1]: start_offsets[1]}
        messages = {partitions[0]: [], partitions[1]: []}

        keys = [self.key(k) for k in ["1", "2", "3", "3", "4"]]
        resps = [resp1, resp2, resp3, resp4, resp5]
        msgs = [self.msg(m) for m in ["one", "two", "three", "four", "five"]]

        for key, resp, msg in zip(keys, resps, msgs):
            k = hash(key) % 2
            partition = partitions[k]
            offset = offsets[partition]
            self.assert_produce_response(resp, offset)
            offsets[partition] += 1
            messages[partition].append(msg)

        self.assert_fetch_offset(partitions[0], start_offsets[0], messages[partitions[0]])
        self.assert_fetch_offset(partitions[1], start_offsets[1], messages[partitions[1]])

        producer.stop()
Author: artexnet, Project: kafka-python, Lines: 32, Source: test_producer_integration.py
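The assertion loop above reproduces the partition choice with hash(key) % 2. That mirrors the legacy HashedPartitioner's selection rule, which reduces the key's hash modulo the number of available partitions; the sketch below illustrates that rule, assuming the default Python hash() as the test does (later kafka-python releases switched to a murmur2-based hash).

def pick_partition(key, partitions):
    # Same idea as the legacy HashedPartitioner: derive an index from the key,
    # so every message with the same key lands on the same partition.
    return partitions[hash(key) % len(partitions)]

partitions = [0, 1]
assert pick_partition(b"3", partitions) == pick_partition(b"3", partitions)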

Example 2: test_hashed_partitioner

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_hashed_partitioner(self):
        start_offset0 = self.current_offset(self.topic, 0)
        start_offset1 = self.current_offset(self.topic, 1)

        producer = KeyedProducer(self.client, partitioner=HashedPartitioner)
        resp1 = producer.send(self.topic, self.key("1"), self.msg("one"))
        resp2 = producer.send(self.topic, self.key("2"), self.msg("two"))
        resp3 = producer.send(self.topic, self.key("3"), self.msg("three"))
        resp4 = producer.send(self.topic, self.key("3"), self.msg("four"))
        resp5 = producer.send(self.topic, self.key("4"), self.msg("five"))

        offsets = {0: start_offset0, 1: start_offset1}
        messages = {0: [], 1: []}

        keys = [self.key(k) for k in ["1", "2", "3", "3", "4"]]
        resps = [resp1, resp2, resp3, resp4, resp5]
        msgs = [self.msg(m) for m in ["one", "two", "three", "four", "five"]]

        for key, resp, msg in zip(keys, resps, msgs):
            k = hash(key) % 2
            offset = offsets[k]
            self.assert_produce_response(resp, offset)
            offsets[k] += 1
            messages[k].append(msg)

        self.assert_fetch_offset(0, start_offset0, messages[0])
        self.assert_fetch_offset(1, start_offset1, messages[1])

        producer.stop()
Author: thrashaholic, Project: kafka-python, Lines: 31, Source: test_producer_integration.py

Example 3: genData

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
def genData(topic):
    producer = KeyedProducer(kafka)
    while True:
        # The with-block opens the file and closes it again on every pass of the loop
        with open(source_file) as f:
            for line in f:
                key = line.split(" ")[0]
                producer.send(topic, key, line.rstrip())
                time.sleep(0.1)  # small delay so the cab locations render properly on the map
Author: VadymBoikov, Project: Insight-MapMyCab, Lines: 12, Source: producer.py

Example 4: NautilusDive

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
class NautilusDive(object):
    def __init__(self, config):
        self.brokers = config['brokers']
        self.topic = config['topic']
        self.kafka = KafkaClient(self.brokers)
        if config['partitioner'] is None:
            self.producer = KeyedProducer(self.kafka, partitioner=RoundRobinPartitioner)
        else:
            self.producer = KeyedProducer(self.kafka, partitioner=config['partitioner'])

    def send(self, key, message):
        self.producer.send(self.topic, key, message)
Author: manusrivastava, Project: nautilus_py, Lines: 14, Source: nautilusdive.py
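A hypothetical way the class above might be wired up; the broker address, topic name, key, and message are made-up values, and the config keys ('brokers', 'topic', 'partitioner') are the ones __init__ reads.

config = {
    'brokers': 'localhost:9092',   # handed straight to KafkaClient
    'topic': 'dives',              # every send() publishes to this topic
    'partitioner': None,           # None falls back to RoundRobinPartitioner
}
dive = NautilusDive(config)
dive.send(b'dive-001', b'{"depth": 120}')   # (key, message)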

Example 5: test_round_robin_partitioner

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_round_robin_partitioner(self):
        start_offset0 = self.current_offset(self.topic, 0)
        start_offset1 = self.current_offset(self.topic, 1)

        producer = KeyedProducer(self.client, partitioner=RoundRobinPartitioner)
        resp1 = producer.send(self.topic, self.key("key1"), self.msg("one"))
        resp2 = producer.send(self.topic, self.key("key2"), self.msg("two"))
        resp3 = producer.send(self.topic, self.key("key3"), self.msg("three"))
        resp4 = producer.send(self.topic, self.key("key4"), self.msg("four"))

        self.assert_produce_response(resp1, start_offset0+0)
        self.assert_produce_response(resp2, start_offset1+0)
        self.assert_produce_response(resp3, start_offset0+1)
        self.assert_produce_response(resp4, start_offset1+1)

        self.assert_fetch_offset(0, start_offset0, [ self.msg("one"), self.msg("three") ])
        self.assert_fetch_offset(1, start_offset1, [ self.msg("two"), self.msg("four")  ])

        producer.stop()
Author: thrashaholic, Project: kafka-python, Lines: 21, Source: test_producer_integration.py
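The expected offsets in this test follow from RoundRobinPartitioner simply cycling through the partition list on every send, ignoring the keys entirely; a minimal illustration of that cycling with a hypothetical helper (not the library's own code):

from itertools import cycle

# With two partitions, successive sends alternate 0, 1, 0, 1, ... regardless of key,
# which is why "one"/"three" end up on partition 0 and "two"/"four" on partition 1.
partition_cycle = cycle([0, 1])
assignments = [next(partition_cycle) for _ in range(4)]
assert assignments == [0, 1, 0, 1]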

Example 6: test_round_robin_partitioner

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_round_robin_partitioner(self):
        partitions = self.client.get_partition_ids_for_topic(self.topic)
        start_offsets = [self.current_offset(self.topic, p) for p in partitions]

        producer = KeyedProducer(self.client, partitioner=RoundRobinPartitioner)
        resp1 = producer.send(self.topic, self.key("key1"), self.msg("one"))
        resp2 = producer.send(self.topic, self.key("key2"), self.msg("two"))
        resp3 = producer.send(self.topic, self.key("key3"), self.msg("three"))
        resp4 = producer.send(self.topic, self.key("key4"), self.msg("four"))

        self.assert_produce_response(resp1, start_offsets[0]+0)
        self.assert_produce_response(resp2, start_offsets[1]+0)
        self.assert_produce_response(resp3, start_offsets[0]+1)
        self.assert_produce_response(resp4, start_offsets[1]+1)

        self.assert_fetch_offset(partitions[0], start_offsets[0], [ self.msg("one"), self.msg("three") ])
        self.assert_fetch_offset(partitions[1], start_offsets[1], [ self.msg("two"), self.msg("four")  ])

        producer.stop()
Author: artexnet, Project: kafka-python, Lines: 21, Source: test_producer_integration.py

Example 7: test_async_keyed_producer

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_async_keyed_producer(self):
        start_offset0 = self.current_offset(self.topic, 0)

        producer = KeyedProducer(self.client, partitioner = RoundRobinPartitioner, async=True)

        resp = producer.send(self.topic, self.key("key1"), self.msg("one"))
        self.assertEqual(len(resp), 0)

        self.assert_fetch_offset(0, start_offset0, [ self.msg("one") ])

        producer.stop()
Author: thrashaholic, Project: kafka-python, Lines: 13, Source: test_producer_integration.py

Example 8: KafkaBolt

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
class KafkaBolt(Bolt):

    def initialize(self, stormconf, ctx):
        self.kafka_client = KafkaClient(config['kafka']['hosts'])
        self.keyed_producer = KeyedProducer(self.kafka_client)
        self.simple_producer = SimpleProducer(self.kafka_client)

    def process(self, tup):
        report_id, record_type, report_data = tup.values
        self.log('Processing: %s' % report_id)
        json_data = str(report_data)
        report_id = str(report_id)
        topic = str("sanitised")
        if record_type == "entry":
            payload = str("e" + json_data)
        elif record_type == "header":
            payload = str("h" + json_data)
        elif record_type == "footer":
            payload = str("f" + json_data)
        self.keyed_producer.send(topic, report_id, payload)
Author: TylerJFisher, Project: ooni-pipeline, Lines: 22, Source: reports.py

Example 9: test_hashed_partitioner

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_hashed_partitioner(self):
        start_offset0 = self.current_offset(self.topic, 0)
        start_offset1 = self.current_offset(self.topic, 1)

        producer = KeyedProducer(self.client, partitioner=HashedPartitioner)
        resp1 = producer.send(self.topic, 1, self.msg("one"))
        resp2 = producer.send(self.topic, 2, self.msg("two"))
        resp3 = producer.send(self.topic, 3, self.msg("three"))
        resp4 = producer.send(self.topic, 3, self.msg("four"))
        resp5 = producer.send(self.topic, 4, self.msg("five"))

        self.assert_produce_response(resp1, start_offset1+0)
        self.assert_produce_response(resp2, start_offset0+0)
        self.assert_produce_response(resp3, start_offset1+1)
        self.assert_produce_response(resp4, start_offset1+2)
        self.assert_produce_response(resp5, start_offset0+1)

        self.assert_fetch_offset(0, start_offset0, [ self.msg("two"), self.msg("five") ])
        self.assert_fetch_offset(1, start_offset1, [ self.msg("one"), self.msg("three"), self.msg("four") ])

        producer.stop()
Author: AaronSense, Project: kafka-python, Lines: 23, Source: test_producer_integration.py

Example 10: test_async_keyed_producer

# Required import: from kafka import KeyedProducer [as alias]
# Or: from kafka.KeyedProducer import send [as alias]
    def test_async_keyed_producer(self):
        partition = self.client.get_partition_ids_for_topic(self.topic)[0]
        start_offset = self.current_offset(self.topic, partition)

        producer = KeyedProducer(self.client, partitioner = RoundRobinPartitioner, async=True)

        resp = producer.send(self.topic, self.key("key1"), self.msg("one"))
        self.assertEqual(len(resp), 0)

        # wait for the server to report a new highwatermark
        while self.current_offset(self.topic, partition) == start_offset:
            time.sleep(0.1)

        self.assert_fetch_offset(partition, start_offset, [ self.msg("one") ])

        producer.stop()
Author: artexnet, Project: kafka-python, Lines: 18, Source: test_producer_integration.py
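In both asynchronous examples (7 and 10), send() returns an empty list because the message is only queued for a background thread rather than produced synchronously, which is why example 10 has to poll the topic's offset before fetching. A small sketch contrasting the two modes with the legacy API (placeholder broker and topic; later kafka-python releases renamed the async keyword argument to async_send once async became a reserved word):

from kafka import KafkaClient, KeyedProducer

client = KafkaClient('localhost:9092')              # placeholder broker
sync_producer = KeyedProducer(client)               # send() blocks and returns responses
async_producer = KeyedProducer(client, async=True)  # send() only enqueues the message

print(sync_producer.send('demo-topic', b'k', b'v'))    # e.g. [ProduceResponse(...)]
print(async_producer.send('demo-topic', b'k', b'v'))   # [] -- delivery happens later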


Note: The kafka.KeyedProducer.send examples in this article were compiled by 纯净天空 from open-source code hosted on platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors, who retain copyright over the source code; please consult each project's License before redistributing or reusing it, and do not republish this article without permission.