

Python kafka.KafkaClient Code Examples

This article collects typical usage examples of the kafka.KafkaClient class from the Python kafka (kafka-python) package. If you are unsure what kafka.KafkaClient does or how to use it, the curated examples below should help. You can also explore further usage examples from the kafka package itself.


The 15 code examples below show kafka.KafkaClient in use, sorted by popularity by default.
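For orientation, all of the examples below use the legacy kafka-python client API, in which a KafkaClient is constructed directly from a broker address. A minimal connection sketch, assuming a legacy (pre-1.0 style) kafka-python install and a placeholder broker address:

from kafka import KafkaClient

# Placeholder broker address; in practice a "host:port" string or list of them.
client = KafkaClient("localhost:9092")
print(client.topic_partitions)  # topic -> partition metadata, as used in Example 1 below
client.close()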

Example 1: get_partition_leaders

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def get_partition_leaders(cluster_config):
    """Return the current leaders of all partitions. Partitions are
    returned as a "topic-partition" string.

    :param cluster_config: the cluster
    :type cluster_config: kafka_utils.utils.config.ClusterConfig
    :returns: leaders for partitions
    :rtype: map of ("topic-partition", broker_id) pairs
    """
    client = KafkaClient(cluster_config.broker_list)
    result = {}
    for topic, topic_data in six.iteritems(client.topic_partitions):
        for partition, p_data in six.iteritems(topic_data):
            topic_partition = topic + "-" + str(partition)
            result[topic_partition] = p_data.leader
    return result 
Author: Yelp | Project: kafka-utils | Lines: 18 | Source file: main.py
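A hedged usage sketch for the helper above. ClusterConfig is the config object named in the docstring; its constructor arguments follow the (type, name, broker_list, zookeeper) pattern visible in Example 12, and the addresses and import path are placeholders, not taken from the snippet:

# Placeholder cluster; ClusterConfig fields mirror the pattern used in Example 12.
cluster_config = ClusterConfig(None, "local-cluster", ["localhost:9092"], "localhost:2181")
leaders = get_partition_leaders(cluster_config)
for topic_partition, broker_id in leaders.items():
    print(topic_partition, broker_id)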

Example 2: test_commit_message_zk

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def test_commit_message_zk(self, config):
        if getattr(KafkaClient, 'send_offset_commit_request_kafka', None) is None:
            return

        with mock_kafka() as (mock_client, mock_consumer):
            config._config['offset_storage'] = 'zookeeper'
            consumer = KafkaSimpleConsumer('test_topic', config)
            consumer.connect()

            actual = consumer.commit_message(
                Message(0, 100, 'mykey', 'myvalue'),
            )

            assert actual is True
            mock_client.return_value.send_offset_commit_request \
                .assert_called_once_with(
                    'test_group'.encode(),
                    [OffsetCommitRequest('test_topic'.encode(), 0, 100, None)],
                ) 
Author: Yelp | Project: yelp_kafka | Lines: 21 | Source file: test_consumer.py

Example 3: test_commit_message_kafka

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def test_commit_message_kafka(self, config):
        if getattr(KafkaClient, 'send_offset_commit_request_kafka', None) is None:
            return

        with mock_kafka() as (mock_client, mock_consumer):
            config._config['offset_storage'] = 'kafka'
            consumer = KafkaSimpleConsumer('test_topic', config)
            consumer.connect()

            actual = consumer.commit_message(
                Message(0, 100, 'mykey', 'myvalue'),
            )

            assert actual is True
            assert not mock_client.return_value.send_offset_commit_request.called
            mock_client.return_value.send_offset_commit_request_kafka \
                .assert_called_once_with(
                    'test_group'.encode(),
                    [OffsetCommitRequest('test_topic'.encode(), 0, 100, None)],
                ) 
Author: Yelp | Project: yelp_kafka | Lines: 22 | Source file: test_consumer.py

Example 4: discover_topics

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def discover_topics(cluster):
    """Get all the topics in a cluster

    :param cluster: config of the cluster to get topics from
    :type cluster: ClusterConfig
    :returns: a dict <topic>: <[partitions]>
    :raises DiscoveryError: upon failure to request topics from kafka
    """
    client = KafkaClient(cluster.broker_list)
    try:
        topics = get_kafka_topics(client)
        return dict([(topic.decode(), partitions) for topic, partitions in six.iteritems(topics)])
    except:
        log.exception(
            "Topics discovery failed for %s",
            cluster.broker_list
        )
        raise DiscoveryError("Failed to get topics information from "
                             "{cluster}".format(cluster=cluster)) 
Author: Yelp | Project: yelp_kafka | Lines: 21 | Source file: discovery.py
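A short usage sketch, reusing the ClusterConfig construction pattern from Example 12 with placeholder addresses; per the docstring, discover_topics returns a plain dict of topic name to partition list:

# Placeholder cluster configuration, built as in Example 12.
cluster = ClusterConfig(None, None, ["localhost:9092"], "localhost:2181")
topics = discover_topics(cluster)
for topic, partitions in topics.items():
    print("{0}: {1} partitions".format(topic, len(partitions)))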

Example 5: get_kafka_connection

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def get_kafka_connection(cluster_type, client_id, **kwargs):
    """Get a kafka connection for the local region kafka cluster at Yelp.

    :param cluster_type: kafka cluster type (ex.'scribe' or 'standard').
    :type cluster_type: string
    :param client_id: client_id to be used to connect to kafka.
    :type client_id: string
    :param kwargs: parameters to pass along when creating the KafkaClient instance.
    :returns: KafkaClient
    :raises DiscoveryError: :py:class:`yelp_kafka.error.DiscoveryError` upon failure connecting to a cluster.
    """
    cluster = get_region_cluster(cluster_type, client_id)
    try:
        return KafkaClient(cluster.broker_list, client_id=client_id, **kwargs)
    except:
        log.exception(
            "Connection to kafka cluster %s using broker list %s failed",
            cluster.name,
            cluster.broker_list
        )
        raise DiscoveryError("Failed to connect to cluster {0}".format(
            cluster.name)) 
Author: Yelp | Project: yelp_kafka | Lines: 24 | Source file: discovery.py
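A hedged usage sketch; 'standard' and the client id are illustrative values taken from the docstring, and closing the client afterwards mirrors what Example 15 does:

# Illustrative cluster type and client id.
client = get_kafka_connection('standard', client_id='my-service')
try:
    print(client.topic_partitions)
finally:
    client.close()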

Example 6: connect

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def connect(self):
        """ Connect to kafka and create a consumer.
        It uses config parameters to create a kafka-python
        KafkaClient and SimpleConsumer.
        """
        # Instantiate a kafka client connected to kafka.
        self.client = KafkaClient(
            self.config.broker_list,
            client_id=self.config.client_id
        )

        # Create a kafka SimpleConsumer.
        self.kafka_consumer = SimpleConsumer(
            client=self.client, topic=self.topic, partitions=self.partitions,
            **self.config.get_simple_consumer_args()
        )
        self.log.debug(
            "Connected to kafka. Topic %s, partitions %s, %s",
            self.topic,
            self.partitions,
            ','.join(['{0} {1}'.format(k, v) for k, v in
                      six.iteritems(self.config.get_simple_consumer_args())])
        )
        self.kafka_consumer.provide_partition_info() 
Author: Yelp | Project: yelp_kafka | Lines: 26 | Source file: consumer.py
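connect() is normally not called in isolation; a hedged sketch of the surrounding consumer lifecycle, assuming a KafkaConsumerConfig built as in Example 12:

# Hypothetical config object, constructed as in Example 12.
consumer = KafkaSimpleConsumer('test_topic', config)
consumer.connect()                # builds the KafkaClient and SimpleConsumer shown above
message = consumer.get_message()  # fetch a single message, as in Example 12
consumer.close()                  # commits (if auto_commit) and closes the client, see Example 15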

Example 7: _get_kafka_client

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def _get_kafka_client(self):
        """
        Create and return a Kafka Client

        Returns:
            KafkaClient: The created Kafka client

        Raises:
            PanoptesContextError: Passes through any exceptions that happen in trying to create the Kafka client
        """
        # The logic of the weird check that follows is this: KafkaClient initialization can fail if there is a problem
        # connecting with even one broker. What we want to do is: succeed if the client was able to connect to even one
        # broker. So, we catch the exception and pass it through - and then check the number of brokers connected to the
        # client in the next statement (if not kafka_client.brokers) and fail if the client is not connected to any
        # broker
        self.__logger.info(u'Attempting to connect Kafka')
        config = self.__config
        kafka_client = None
        try:
            kafka_client = KafkaClient(config.kafka_brokers)
        except ConnectionError:
            pass

        if not kafka_client.brokers:
            raise PanoptesContextError(u'Could not connect to any Kafka broker from this list: %s'
                                       % config.kafka_brokers)
        self.__logger.info(u'Successfully connected to Kafka brokers: %s' % kafka_client.brokers)

        return kafka_client 
Author: yahoo | Project: panoptes | Lines: 31 | Source file: context.py
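Note that if the ConnectionError above actually fires, kafka_client is still None when the broker check runs, so the check itself raises AttributeError rather than PanoptesContextError. A defensive variant of the same check, sketched with the names from the snippet:

if kafka_client is None or not kafka_client.brokers:
    raise PanoptesContextError(u'Could not connect to any Kafka broker from this list: %s'
                               % config.kafka_brokers)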

Example 8: kafka_client

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def kafka_client(self):
        """
        A Kafka client

        Returns:
            KafkaClient

        """
        return self._kafka_client 
Author: yahoo | Project: panoptes | Lines: 11 | Source file: context.py

Example 9: __init__

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def __init__(self, APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET, host, port):
        super(KafkaStrawStreamer, self).__init__(APP_KEY, APP_SECRET,OAUTH_TOKEN, OAUTH_TOKEN_SECRET)

        # connect to Kafka
        print("Connecting to Kafka node {0}:{1}".format(host, port))
        kafka = KafkaClient("{0}:{1}".format(host, port))
        self.producer = BufferedSimpleProducer(kafka, 100) 
Author: rwalk | Project: straw | Lines: 9 | Source file: tweet_sampler.py

Example 10: __init__

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def __init__(self, *args, **kwargs):
        import kafka
        super(KafkaRandomReader, self).__init__(*args, **kwargs)
        brokers = self.read_option('brokers')
        group = self.read_option('group')
        topic = self.read_option('topic')

        client = kafka.KafkaClient(map(bytes, brokers))

        # TODO: Remove this comments when next steps are decided.
        # If resume is set to true, then child should not load initial offsets
        # child_loads_initial_offsets = False if settings.get('RESUME') else True

        # self.consumer = kafka.MultiProcessConsumer(client, group, topic, num_procs=1,
        #                                             child_loads_initial_offsets=child_loads_initial_offsets,
        #                                             auto_commit=False)

        self.consumer = kafka.SimpleConsumer(client, group, topic,
                                             auto_commit=False)

        self.decompress_fun = zlib.decompress
        self.processor = self.create_processor()
        self.partitions = client.get_partition_ids_for_topic(topic)

        self.logger.info(
            'KafkaRandomReader has been initiated. '
            'Topic: {}. Group: {}'.format(self.read_option('topic'), self.read_option('group')))

        self.logger.info('Running random sampling')
        self._reservoir = self.fill_reservoir()
        self.logger.info('Random sampling completed, ready to process batches') 
Author: scrapinghub | Project: exporters | Lines: 33 | Source file: kafka_random_reader.py
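A hedged sketch of how the SimpleConsumer created above can be drained; get_messages is part of the legacy kafka-python SimpleConsumer API, and reader stands in for a hypothetical KafkaRandomReader instance:

# 'reader' is a hypothetical KafkaRandomReader instance.
for offset_and_message in reader.consumer.get_messages(count=10, timeout=1):
    raw = offset_and_message.message.value   # compressed payload bytes
    record = reader.decompress_fun(raw)      # zlib.decompress, as set in __init__ above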

Example 11: get_kafka_client

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def get_kafka_client():
    if not hasattr(flask.g, "kafka_client"):
        flask.g.kafka_client = kafka.KafkaClient(app.config["HOSTS"])
    return flask.g.kafka_client 
Author: travel-intelligence | Project: flasfka | Lines: 6 | Source file: api.py
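A hedged sketch of a Flask view that reuses the per-request client cached above; the route name and plain-text response are illustrative, not part of the original project:

# Hypothetical route; lists the topic names known to the cached client.
@app.route("/topics")
def list_topics():
    client = get_kafka_client()
    return "\n".join(sorted(client.topic_partitions))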

Example 12: test_simple_consumer

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def test_simple_consumer():
    topic = create_random_topic(1, 1)

    messages = [str(i).encode("UTF-8") for i in range(100)]

    cluster_config = ClusterConfig(None, None, [KAFKA_URL], ZOOKEEPER_URL)
    producer = YelpKafkaSimpleProducer(
        cluster_config=cluster_config,
        report_metrics=False,
        client=KafkaClient(KAFKA_URL),
    )
    producer.send_messages(topic, *messages)

    config = KafkaConsumerConfig(
        'test',
        cluster_config,
        auto_offset_reset='smallest',
        auto_commit=False,
        consumer_timeout_ms=1000
    )
    consumer = KafkaSimpleConsumer(topic, config)

    with consumer:
        for expected_offset in range(100):
            message = consumer.get_message()
            assert message.offset == expected_offset
            assert message.partition == 0
            assert message.value == str(expected_offset).encode("UTF-8") 
Author: Yelp | Project: yelp_kafka | Lines: 30 | Source file: test_consumer.py

Example 13: mock_kafka

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def mock_kafka():
    with mock.patch('yelp_kafka.consumer.KafkaClient', autospec=True) as mock_client:
        with mock.patch('yelp_kafka.consumer.SimpleConsumer', autospec=True) as mock_consumer:
            mock_consumer.return_value.auto_commit = True
            yield mock_client, mock_consumer 
Author: Yelp | Project: yelp_kafka | Lines: 7 | Source file: test_consumer.py

Example 14: get_all_kafka_connections

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def get_all_kafka_connections(cluster_type, client_id, **kwargs):
    """Get a kafka connection for each available kafka cluster at Yelp.

    :param cluster_type: kafka cluster type (ex.'scribe' or 'standard').
    :type cluster_type: string
    :param client_id: client_id to be used to connect to kafka.
    :type client_id: string
    :param kwargs: parameters to pass along when creating the KafkaClient instance.
    :returns: list (cluster_name, KafkaClient)
    :raises DiscoveryError: :py:class:`yelp_kafka.error.DiscoveryError` upon failure connecting to a cluster.

    .. note:: This function creates a KafkaClient for each cluster in a region and tries to connect to it. If a cluster is not available it fails and closes all the previous connections.
    """

    clusters = get_all_clusters(cluster_type, client_id)
    connected_clusters = []
    for cluster in clusters:
        try:
            client = KafkaClient(cluster.broker_list, client_id=client_id, **kwargs)
            connected_clusters.append((cluster.name, client))
        except:
            log.exception(
                "Connection to kafka cluster %s using broker list %s failed",
                cluster.name,
                cluster.broker_list
            )
            for _, client in connected_clusters:
                client.close()
            raise DiscoveryError("Failed to connect to cluster {0}".format(
                cluster.name))
    return connected_clusters 
Author: Yelp | Project: yelp_kafka | Lines: 33 | Source file: discovery.py
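A hedged usage sketch; 'standard' and the client id are illustrative, each returned entry is a (cluster_name, KafkaClient) pair, and closing every client afterwards mirrors the cleanup the function itself performs on failure:

# Illustrative cluster type and client id.
connections = get_all_kafka_connections('standard', client_id='my-service')
try:
    for cluster_name, client in connections:
        print(cluster_name, len(client.topic_partitions))
finally:
    for _, client in connections:
        client.close()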

Example 15: close

# Required import: import kafka [as alias]
# Alternatively: from kafka import KafkaClient [as alias]
def close(self):
        """Disconnect from kafka.
        If auto_commit is enabled commit offsets before disconnecting.
        """
        if self.kafka_consumer.auto_commit is True:
            try:
                self.commit()
            except:
                self.log.exception("Commit error. "
                                   "Offsets may not have been committed")
        # Close all the connections to kafka brokers. KafkaClient open
        # connections to all the partition leaders.
        self.client.close() 
Author: Yelp | Project: yelp_kafka | Lines: 15 | Source file: consumer.py


Note: the kafka.KafkaClient examples in this article were collected from open-source projects hosted on GitHub and similar platforms. Copyright of each code snippet remains with its original authors; please refer to the corresponding project's license before redistributing or reusing the code.