

Python confluent_kafka.Consumer Code Examples

This article collects typical usage examples of confluent_kafka.Consumer in Python. If you are wondering how confluent_kafka.Consumer works, how to use it, or what real-world examples of it look like, the curated code examples below may help. You can also explore further usage examples from the confluent_kafka package itself.


Fifteen code examples of confluent_kafka.Consumer are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
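Before the individual examples, here is a minimal, self-contained sketch of the canonical confluent_kafka consume loop that most of the snippets below are variations of. The broker address, topic name, and group id are placeholders for illustration only; they are not taken from any of the projects cited in this article.

from confluent_kafka import Consumer, KafkaError, KafkaException

# Placeholder broker, topic, and group id -- adjust for your own cluster.
consumer = Consumer({
    'bootstrap.servers': 'localhost:9092',
    'group.id': 'example-group',
    'auto.offset.reset': 'earliest',
})
consumer.subscribe(['example-topic'])

try:
    while True:
        msg = consumer.poll(1.0)                  # wait up to 1 second for a message
        if msg is None:
            continue                              # nothing arrived within the timeout
        if msg.error():
            if msg.error().code() == KafkaError._PARTITION_EOF:
                continue                          # reached end of a partition, keep polling
            raise KafkaException(msg.error())     # treat any other error as fatal here
        print(msg.value().decode('utf-8'))
finally:
    consumer.close()                              # commit final offsets (if auto-commit) and leave the group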

Example 1: readMsg

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def readMsg():
  logging.debug("readMsg from kafkaTopic: %s", kafkaTopic)
  msg = kConsumer.poll(5.0)

  if msg is None:
    logging.debug('Received message: None')
    return None
  if msg.error():
    logging.warning("Consumer error: {}".format(msg.error()))
    return None

  logging.debug('Received message: {}'.format(msg.value().decode('utf-8')))
  msgJson = json.loads(msg.value())

  comment = msgJson['payload']['after']['comment_content']
  logging.info("got comment from kafkaTopic: %s", comment)
  return comment 
Author: nutanix, Project: xi-iot, Lines: 19, Source: wc.py

Example 2: prepareConsumer

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def prepareConsumer(self, groupID="pythonconsumers"):
        options = {
                'bootstrap.servers': self.kafka_brokers,
                'group.id': groupID,
                'auto.offset.reset': 'earliest',
                'enable.auto.commit': self.kafka_auto_commit,
        }
        if (self.kafka_env != 'LOCAL'):
            options['security.protocol'] = 'SASL_SSL'
            options['sasl.mechanisms'] = 'PLAIN'
            options['sasl.username'] = 'token'
            options['sasl.password'] = self.kafka_apikey
        if (self.kafka_env == 'OCP'):
            options['ssl.ca.location'] = os.environ['PEM_CERT']
        print("[KafkaConsumer] - This is the configuration for the consumer:")
        print('[KafkaConsumer] - {}'.format(options))
        self.consumer = Consumer(options)
        self.consumer.subscribe([self.topic_name])
    
    # Prints out and returns the decoded events received by the consumer 
Author: ibm-cloud-architecture, Project: refarch-kc, Lines: 22, Source: KcConsumer.py

Example 3: pollNextEvent

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def pollNextEvent(self, keyID, keyname):
        gotIt = False
        anEvent = {}
        while not gotIt:
            msg = self.consumer.poll(timeout=10.0)
            # Continue if we have not received a message yet
            if msg is None:
                continue
            if msg.error():
                print("[KafkaConsumer] - Consumer error: {}".format(msg.error()))
                # Stop reading if we find end of partition in the error message
                if ("PARTITION_EOF" in msg.error()):
                    gotIt= True
                continue
            msgStr = self.traceResponse(msg)
            # Create the json event based on message string formed by traceResponse
            anEvent = json.loads(msgStr)
            # If we've found our event based on keyname and keyID, stop reading messages
            if (anEvent["payload"][keyname] == keyID):
                gotIt = True
        return anEvent

    # Polls for events until it finds an event with same key 
Author: ibm-cloud-architecture, Project: refarch-kc, Lines: 25, Source: KcConsumer.py
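Example 3 calls a traceResponse helper that is not included in the snippet; it lives elsewhere in the same KcConsumer.py class. As a rough, assumption-based sketch only, it plausibly decodes the message payload, prints it, and returns the string so that pollNextEvent can parse it as JSON:

    # Hypothetical reconstruction -- the actual traceResponse in KcConsumer.py may differ.
    def traceResponse(self, msg):
        msgStr = msg.value().decode('utf-8')
        print('[KafkaConsumer] - Received message: {}'.format(msgStr))
        return msgStr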

Example 4: __createConsumer

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def __createConsumer(self):
        if self.__shouldRun():
            config = {'metadata.broker.list': ','.join(self.brokers),
                      'group.id': self.trigger,
                      'default.topic.config': {'auto.offset.reset': 'latest'},
                      'enable.auto.commit': False,
                      'api.version.request': True,
                      'isolation.level': 'read_uncommitted'
                      }

            if self.isMessageHub:
                # append Message Hub specific config
                config.update({'ssl.ca.location': '/etc/ssl/certs/',
                                'sasl.mechanisms': 'PLAIN',
                                'sasl.username': self.username,
                                'sasl.password': self.password,
                                'security.protocol': 'sasl_ssl'
                             })

            consumer = KafkaConsumer(config)
            consumer.subscribe([self.topic], self.__on_assign, self.__on_revoke)
            logging.info("[{}] Now listening in order to fire trigger".format(self.trigger))
            return consumer 
Author: apache, Project: openwhisk-package-kafka, Lines: 25, Source: consumer.py

Example 5: test_error_cb

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def test_error_cb():
    """ Tests error_cb. """

    def error_cb(error_msg):
        global seen_error_cb
        seen_error_cb = True
        acceptable_error_codes = (confluent_kafka.KafkaError._TRANSPORT, confluent_kafka.KafkaError._ALL_BROKERS_DOWN)
        assert error_msg.code() in acceptable_error_codes

    conf = {'bootstrap.servers': 'localhost:65531',  # Purposely cause connection refused error
            'group.id': 'test',
            'socket.timeout.ms': '100',
            'session.timeout.ms': 1000,  # Avoid close() blocking too long
            'error_cb': error_cb
            }

    kc = confluent_kafka.Consumer(**conf)
    kc.subscribe(["test"])
    while not seen_error_cb:
        kc.poll(timeout=1)

    kc.close()


# global variable for the stats_cb callback function
Author: confluentinc, Project: confluent-kafka-python, Lines: 27, Source: test_misc.py

Example 6: test_stats_cb

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def test_stats_cb():
    """ Tests stats_cb. """

    def stats_cb(stats_json_str):
        global seen_stats_cb
        seen_stats_cb = True
        stats_json = json.loads(stats_json_str)
        assert len(stats_json['name']) > 0

    conf = {'group.id': 'test',
            'socket.timeout.ms': '100',
            'session.timeout.ms': 1000,  # Avoid close() blocking too long
            'statistics.interval.ms': 200,
            'stats_cb': stats_cb
            }

    kc = confluent_kafka.Consumer(**conf)

    kc.subscribe(["test"])
    while not seen_stats_cb:
        kc.poll(timeout=1)
    kc.close() 
Author: confluentinc, Project: confluent-kafka-python, Lines: 24, Source: test_misc.py

Example 7: consume_committed

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def consume_committed(conf, topic):
    print("=== Consuming transactional messages from topic {}. ===".format(topic))

    consumer_conf = {'group.id': str(uuid1()),
                     'auto.offset.reset': 'earliest',
                     'enable.auto.commit': False,
                     'enable.partition.eof': True,
                     'error_cb': prefixed_error_cb(called_by()), }

    consumer_conf.update(conf)
    consumer = Consumer(consumer_conf)
    consumer.subscribe([topic])

    msg_cnt = read_all_msgs(consumer)

    consumer.close()

    return msg_cnt 
Author: confluentinc, Project: confluent-kafka-python, Lines: 20, Source: test_transactions.py
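The read_all_msgs and prefixed_error_cb helpers come from the same test file and are not shown here. Since the configuration above sets 'enable.partition.eof': True, a helper like read_all_msgs can use the _PARTITION_EOF event to detect when every assigned partition has been drained. The sketch below is an assumption about its behavior, not the actual confluent-kafka-python implementation:

from confluent_kafka import KafkaError

def read_all_msgs(consumer):
    """Hypothetical sketch: count messages until every assigned partition reports EOF."""
    msg_cnt = 0
    eof = set()
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            if msg.error().code() == KafkaError._PARTITION_EOF:
                eof.add((msg.topic(), msg.partition()))
                # Stop once all currently assigned partitions have reached end of log.
                if len(eof) >= len(consumer.assignment()):
                    break
            continue
        msg_cnt += 1
    return msg_cnt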

Example 8: test_logging_consumer

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def test_logging_consumer():
    """ Tests that logging works """

    logger = logging.getLogger('consumer')
    logger.setLevel(logging.DEBUG)
    f = CountingFilter('consumer')
    logger.addFilter(f)

    kc = confluent_kafka.Consumer({'group.id': 'test',
                                   'debug': 'all'},
                                  logger=logger)
    while f.cnt == 0:
        kc.poll(timeout=0.5)

    print('%s: %d log messages seen' % (f.name, f.cnt))

    kc.close() 
Author: confluentinc, Project: confluent-kafka-python, Lines: 19, Source: test_log.py
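The CountingFilter used above is defined elsewhere in the test suite. A minimal stand-in, assuming it does nothing more than count the log records that pass through it, could look like this:

import logging

class CountingFilter(logging.Filter):
    """Hypothetical stand-in: counts log records without suppressing any of them."""

    def __init__(self, name):
        super().__init__(name)
        self.cnt = 0

    def filter(self, record):
        self.cnt += 1
        return True  # let every record through; we only count them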

Example 9: start

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def start(self):

        consumer = Consumer(**self._kafka_conf)
        consumer.subscribe([self._topic])
        return consumer 
Author: apache, Project: incubator-spot, Lines: 7, Source: kafka_client.py

Example 10: __init__

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def __init__(self, servers, group, topics, json=False, wrap=False, interval=1):
        c = Consumer({
            'bootstrap.servers': servers,
            'group.id': group,
            'default.topic.config': {
                'auto.offset.reset': 'smallest'
            }
        })

        if not isinstance(topics, list):
            topics = [topics]
        c.subscribe(topics)

        async def _listen(consumer=c, json=json, wrap=wrap, interval=interval):
            while True:
                msg = consumer.poll(interval)

                if msg is None:
                    continue
                if msg.error():
                    if msg.error().code() == KafkaError._PARTITION_EOF:
                        continue
                    else:
                        print(msg.error())
                        break

                msg = msg.value().decode('utf-8')

                if not msg:
                    break
                if json:
                    msg = JSON.loads(msg)
                if wrap:
                    msg = [msg]
                yield msg

        super().__init__(foo=_listen)
        self._name = 'Kafka' 
Author: timkpaine, Project: tributary, Lines: 40, Source: kafka.py

Example 11: __init__

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def __init__(self, config):  # pragma: no cover
        """
        Streaming client implementation based on Kafka.

        Configuration keys:
          KAFKA_ADDRESS
          KAFKA_CONSUMER_GROUP
          KAFKA_TOPIC
          TIMEOUT
          EVENT_HUB_KAFKA_CONNECTION_STRING
        """
        self.logger = Logger()

        self.topic = config.get("KAFKA_TOPIC")
        if not self.topic:
            raise ValueError("KAFKA_TOPIC is not set in the config object.")

        if not config.get("KAFKA_ADDRESS"):
            raise ValueError("KAFKA_ADDRESS is not set in the config object.")

        if config.get("TIMEOUT"):
            try:
                self.timeout = int(config.get("TIMEOUT"))
            except ValueError:
                self.timeout = None
        else:
            self.timeout = None

        kafka_config = self.create_kafka_config(config)
        self.admin = admin.AdminClient(kafka_config)

        if config.get("KAFKA_CONSUMER_GROUP") is None:
            self.logger.info('Creating Producer')
            self.producer = Producer(kafka_config)
            self.run = False
        else:
            self.logger.info('Creating Consumer')
            self.consumer = Consumer(kafka_config)
            self.run = True
            signal.signal(signal.SIGTERM, self.exit_gracefully) 
Author: microsoft, Project: agogosml, Lines: 42, Source: kafka_streaming_client.py
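Example 11 delegates the actual librdkafka option mapping to a create_kafka_config helper that is not part of the snippet. The sketch below is a guess at what such a mapping might look like, including the '$ConnectionString' SASL username convention used when targeting an Azure Event Hubs Kafka endpoint; none of these details are taken from the agogosml source:

    # Hypothetical sketch of create_kafka_config -- the real agogosml helper may differ.
    def create_kafka_config(self, config):
        kafka_config = {'bootstrap.servers': config.get('KAFKA_ADDRESS')}
        if config.get('KAFKA_CONSUMER_GROUP') is not None:
            kafka_config['group.id'] = config.get('KAFKA_CONSUMER_GROUP')
        # Event Hubs' Kafka endpoint authenticates with the connection string over SASL/PLAIN.
        conn_str = config.get('EVENT_HUB_KAFKA_CONNECTION_STRING')
        if conn_str:
            kafka_config.update({'security.protocol': 'SASL_SSL',
                                 'sasl.mechanisms': 'PLAIN',
                                 'sasl.username': '$ConnectionString',
                                 'sasl.password': conn_str})
        return kafka_config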

Example 12: consumer

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def consumer(self):
        log.debug('Starting consumer...')
        # TODO: Must set all config values applicable to a consumer
        consumer_args = {'bootstrap.servers': self.config.BOOTSTRAP_SERVERS,
                         'group.id': self.config.APPLICATION_ID,
                         'default.topic.config': {'auto.offset.reset':
                                                  self.config.AUTO_OFFSET_RESET},
                         'enable.auto.commit': self.config.ENABLE_AUTO_COMMIT}

        log.debug('Consumer Arguments: %s', pprint.PrettyPrinter().pformat(consumer_args))

        return kafka.Consumer(consumer_args) 
Author: wintoncode, Project: winton-kafka-streams, Lines: 14, Source: kafka_client_supplier.py

Example 13: get_consumer

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def get_consumer():
    """Create a Kafka consumer."""
    consumer = Consumer(
        {
            "bootstrap.servers": Config.SOURCES_KAFKA_ADDRESS,
            "group.id": "hccm-sources",
            "queued.max.messages.kbytes": 1024,
            "enable.auto.commit": False,
        }
    )
    consumer.subscribe([Config.SOURCES_TOPIC])
    return consumer 
Author: project-koku, Project: koku, Lines: 14, Source: kafka_listener.py

Example 14: listen_for_messages

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def listen_for_messages(msg, consumer, application_source_id):  # noqa: C901
    """
    Listen for Platform-Sources kafka messages.

    Args:
        msg (Message): the Kafka message to process.
        consumer (Consumer): Kafka consumer object
        application_source_id (Integer): Cost Management's current Application Source ID. Used for
            kafka message filtering.

    Returns:
        None

    """
    try:
        try:
            msg = get_sources_msg_data(msg, application_source_id)
            offset = msg.get("offset")
            partition = msg.get("partition")
        except SourcesMessageError:
            return
        if msg:
            LOG.info(f"Processing message offset: {offset} partition: {partition}")
            topic_partition = TopicPartition(topic=Config.SOURCES_TOPIC, partition=partition, offset=offset)
            LOG.info(f"Cost Management Message to process: {str(msg)}")
            try:
                with transaction.atomic():
                    process_message(application_source_id, msg)
                    consumer.commit()
            except (IntegrityError, InterfaceError, OperationalError) as err:
                connection.close()
                LOG.error(f"{type(err).__name__}: {err}")
                rewind_consumer_to_retry(consumer, topic_partition)
            except SourcesHTTPClientError as err:
                LOG.error(err)
                rewind_consumer_to_retry(consumer, topic_partition)
            except SourceNotFoundError:
                LOG.warning(f"Source not found in platform sources. Skipping msg: {msg}")
                consumer.commit()

    except KafkaError as error:
        LOG.error(f"[listen_for_messages] Kafka error encountered: {type(error).__name__}: {error}", exc_info=True)
    except Exception as error:
        LOG.error(f"[listen_for_messages] UNKNOWN error encountered: {type(error).__name__}: {error}", exc_info=True) 
Author: project-koku, Project: koku, Lines: 45, Source: kafka_listener.py
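In the koku project, listen_for_messages is driven by a separate polling loop elsewhere in kafka_listener.py. A simplified, hypothetical driver that wires it to the consumer from Example 13 might look like the following; the real loop contains additional retry and shutdown handling:

def listen_for_messages_loop(application_source_id):  # hypothetical driver, not the actual koku loop
    consumer = get_consumer()
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            LOG.error(f"[listen_for_messages_loop] consumer error: {msg.error()}")
            continue
        listen_for_messages(msg, consumer, application_source_id)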

Example 15: get_consumer

# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import Consumer [as alias]
def get_consumer():  # pragma: no cover
    """Create a Kafka consumer."""
    consumer = Consumer(
        {
            "bootstrap.servers": Config.INSIGHTS_KAFKA_ADDRESS,
            "group.id": "hccm-group",
            "queued.max.messages.kbytes": 1024,
            "enable.auto.commit": False,
            "enable.auto.offset.store": False,
            "max.poll.interval.ms": 1080000,  # 18 minutes
        }
    )
    consumer.subscribe([HCCM_TOPIC])
    return consumer 
Author: project-koku, Project: koku, Lines: 16, Source: kafka_msg_handler.py


Note: The confluent_kafka.Consumer examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors; please consult the corresponding project's License before distributing or using the code. Do not reproduce without permission.