當前位置: 首頁>>代碼示例>>Python>>正文


Python kafka.KafkaProducer方法代碼示例

本文整理匯總了Python中kafka.KafkaProducer方法的典型用法代碼示例。如果您正苦於以下問題:Python kafka.KafkaProducer方法的具體用法?Python kafka.KafkaProducer怎麽用?Python kafka.KafkaProducer使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在kafka的用法示例。


在下文中一共展示了kafka.KafkaProducer方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。

示例1: __init__

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def __init__(self, env, is_external_queue: bool):
    """Set up Kafka publishing if an external host and queue are configured.

    :param env: application environment object holding the parsed config.
    :param is_external_queue: whether this instance publishes to the external queue.

    If either the host or the queue config value is blank, external publishing
    is skipped entirely (the instance stays usable, it just never publishes).
    """
    super().__init__(env, is_external_queue, queue_type='kafka', logger=logger)

    eq_host = env.config.get(ConfigKeys.HOST, domain=self.domain_key, default=None)
    eq_queue = env.config.get(ConfigKeys.QUEUE, domain=self.domain_key, default=None)

    if eq_host is None or len(eq_host) == 0 or (type(eq_host) == str and len(eq_host.strip()) == 0):
        logging.warning('blank external host specified, not setting up external publishing')
        return

    if eq_queue is None or len(eq_queue.strip()) == 0:
        logging.warning('blank external queue specified, not setting up external publishing')
        return

    # a single host may be configured as a plain string; KafkaProducer wants a list
    if type(eq_host) == str:
        eq_host = [eq_host]

    from kafka import KafkaProducer
    import json

    self.queue = eq_queue
    self.queue_connection = KafkaProducer(
        bootstrap_servers=eq_host,
        value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    # fixed log format string: it previously read '"{}: and …' (stray colon
    # instead of the closing quote), producing a malformed log line
    logger.info('setting up pubsub for type "{}" and host(s) "{}"'.format(self.queue_type, ','.join(eq_host)))
開發者ID:thenetcircle,項目名稱:dino,代碼行數:27,代碼來源:kafka.py

示例2: try_publish

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def try_publish(self, message):
    """Publish `message` on the configured Kafka topic.

    The message is first run through the enrichment manager (when one is
    configured), then keyed on the target id — falling back to the actor
    id — so related events land on the same partition.
    """
    enricher = self.env.enrichment_manager
    if enricher is not None:
        message = enricher.handle(message)

    partition_key = None
    # try to get some consistency
    try:
        partition_key = message.get('target', dict()).get('id', None)
        if partition_key is None:
            partition_key = message.get('actor', dict()).get('id', None)

        # kafka publisher can't handle string keys
        partition_key = bytes(str(partition_key), encoding='utf-8')

    except Exception as partition_e:
        logger.exception(traceback.format_exc())
        environ.env.capture_exception(partition_e)

    # for kafka, the queue_connection is the KafkaProducer and queue is the topic name
    self.queue_connection.send(topic=self.queue, value=message, key=partition_key)
開發者ID:thenetcircle,項目名稱:dino,代碼行數:27,代碼來源:kafka.py

示例3: _create_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def _create_producer(self):
        """Tries to establish a Kafka consumer connection"""
        if not self.closed:
            try:
                self.logger.debug("Creating new kafka producer using brokers: " +
                                   str(self.settings['KAFKA_HOSTS']))

                return KafkaProducer(bootstrap_servers=self.settings['KAFKA_HOSTS'],
                                     value_serializer=lambda v: json.dumps(v).encode('utf-8'),
                                     retries=3,
                                     linger_ms=self.settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                                     buffer_memory=self.settings['KAFKA_PRODUCER_BUFFER_BYTES'])
            except KeyError as e:
                self.logger.error('Missing setting named ' + str(e),
                                   {'ex': traceback.format_exc()})
            except:
                self.logger.error("Couldn't initialize kafka producer.",
                                   {'ex': traceback.format_exc()})
                raise 
開發者ID:istresearch,項目名稱:scrapy-cluster,代碼行數:21,代碼來源:rest_service.py

示例4: _create_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def _create_producer(self):
        """Tries to establish a Kafka consumer connection"""
        try:
            brokers = self.settings['KAFKA_HOSTS']
            self.logger.debug("Creating new kafka producer using brokers: " +
                               str(brokers))

            return KafkaProducer(bootstrap_servers=brokers,
                                 value_serializer=lambda m: json.dumps(m),
                                 retries=3,
                                 linger_ms=self.settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                                 buffer_memory=self.settings['KAFKA_PRODUCER_BUFFER_BYTES'])
        except KeyError as e:
            self.logger.error('Missing setting named ' + str(e),
                               {'ex': traceback.format_exc()})
        except:
            self.logger.error("Couldn't initialize kafka producer.",
                               {'ex': traceback.format_exc()})
            raise 
開發者ID:istresearch,項目名稱:scrapy-cluster,代碼行數:21,代碼來源:kafka_monitor.py

示例5: _create_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def _create_producer(self, settings):
        """Tries to establish a Kafka consumer connection"""
        try:
            brokers = settings['KAFKA_HOSTS']
            self.logger.debug("Creating new kafka producer using brokers: " +
                               str(brokers))

            return KafkaProducer(bootstrap_servers=brokers,
                                 value_serializer=lambda m: json.dumps(m),
                                 retries=3,
                                 linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                                 buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES'])
        except KeyError as e:
            self.logger.error('Missing setting named ' + str(e),
                               {'ex': traceback.format_exc()})
        except:
            self.logger.error("Couldn't initialize kafka producer in plugin.",
                               {'ex': traceback.format_exc()})
            raise 
開發者ID:istresearch,項目名稱:scrapy-cluster,代碼行數:21,代碼來源:kafka_base_monitor.py

示例6: _create_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def _create_producer(self):
    """Create ``self.producer`` for the configured security protocol.

    Supports SASL_PLAINTEXT and SSL; for any other protocol value the
    producer is left unset (matching the original behaviour).
    """
    import kafka

    shared_kwargs = dict(
        bootstrap_servers=[self.host],
        value_serializer=lambda v: json.dumps(v).encode('utf-8'),
        security_protocol=self.security_protocol,
    )

    if self.security_protocol == 'SASL_PLAINTEXT':
        self.producer = kafka.KafkaProducer(
            sasl_plain_username=self.sasl_username,
            sasl_plain_password=self.sasl_password,
            sasl_mechanism='PLAIN',
            **shared_kwargs)
    elif self.security_protocol == 'SSL':
        self.producer = kafka.KafkaProducer(
            ssl_cafile=self.ssl_cafile,
            ssl_certfile=self.ssl_certfile,
            ssl_keyfile=self.ssl_keyfile,
            ssl_check_hostname=False,
            **shared_kwargs)
開發者ID:RasaHQ,項目名稱:rasa_core,代碼行數:22,代碼來源:broker.py

示例7: kafka

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def kafka(TOPIC=None):
    """Forward the incoming request body to Kafka topic `TOPIC`.

    The module-level producer is created lazily on first use and reused
    afterwards. Returns a Flask-style (body, status, headers) triple.
    """
    global PRODUCER
    if PRODUCER is None:
        # lazy one-time init of the shared producer
        PRODUCER = KafkaProducer(
            bootstrap_servers=KAFKA_BOOSTRAP_SERVERS,
            sasl_mechanism=KAFKA_SASL_MECHANISM,
            sasl_plain_username=KAFKA_USER,
            sasl_plain_password=KAFKA_PASSWORD)

    try:
        # block (up to 60s) until the broker acknowledges the record
        PRODUCER.send(TOPIC, request.get_data()).get(timeout=60)
    except KafkaTimeoutError:
        return "Internal Server Error", 500, None
    return "OK", 200, None
開發者ID:vmw-loginsight,項目名稱:webhook-shims,代碼行數:18,代碼來源:kafkatopic.py

示例8: main

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def main(n):
    """Stream the video into a Kafka producer in an infinite loop"""
    channel = choose_channel(n)
    reader = imageio.get_reader(DATA + channel + '.mp4', 'ffmpeg')
    frames_per_second = reader.get_meta_data()['fps']

    # large batch/request sizes so whole frames fit in a single request
    producer = KafkaProducer(
        bootstrap_servers='localhost:9092',
        batch_size=15728640,
        linger_ms=1000,
        max_request_size=15728640,
        value_serializer=lambda v: json.dumps(v.tolist()))

    while True:
        video_loop(reader, producer, channel, frames_per_second)
開發者ID:pambot,項目名稱:ozymandias,代碼行數:18,代碼來源:ozy_producer.py

示例9: _create_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def _create_producer(self) -> None:
    """Create ``self.producer`` for the configured security protocol.

    Supports SASL_PLAINTEXT and SSL; any other protocol value leaves the
    producer unset (matching the original behaviour).
    """
    import kafka

    base_kwargs = {
        "bootstrap_servers": [self.host],
        "value_serializer": lambda v: json.dumps(v).encode(DEFAULT_ENCODING),
        "security_protocol": self.security_protocol,
    }

    if self.security_protocol == "SASL_PLAINTEXT":
        self.producer = kafka.KafkaProducer(
            sasl_plain_username=self.sasl_username,
            sasl_plain_password=self.sasl_password,
            sasl_mechanism="PLAIN",
            **base_kwargs,
        )
    elif self.security_protocol == "SSL":
        self.producer = kafka.KafkaProducer(
            ssl_cafile=self.ssl_cafile,
            ssl_certfile=self.ssl_certfile,
            ssl_keyfile=self.ssl_keyfile,
            ssl_check_hostname=False,
            **base_kwargs,
        )
開發者ID:botfront,項目名稱:rasa-for-botfront,代碼行數:24,代碼來源:kafka.py

示例10: init_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def init_producer(self):
    """Initialize ``self.producer`` and ``self.topic_name``.

    Optional connection/security arguments are gathered from the
    configuration helpers; any failure while constructing the producer is
    re-raised as a RuntimeError carrying the arguments that were used.
    """
    print("[{}: log] Initializing producer...".format(self.pp))

    # Gather optional parameters
    producer_kwargs = dict()
    producer_kwargs = self.get_servers(producer_kwargs, 'servers')
    producer_kwargs = self.get_security(producer_kwargs, 'security')

    # Instantiate producer
    try:
        self.producer = KafkaProducer(**producer_kwargs)
    except Exception as inst:
        template = "[{}: ERROR] Could not initialize producer with arguments {}. Error was: {}"
        raise RuntimeError(template.format(self.pp, producer_kwargs, inst))

    self.topic_name = self.get_required_param("topic_name")
開發者ID:ColumbiaDVMM,項目名稱:ColumbiaImageSearch,代碼行數:19,代碼來源:kafka_pusher.py

示例11: _create_producer

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def _create_producer(self):
    """Build a KafkaProducer from ``self.config``, retrying forever on failure."""
    producer = None
    while producer is None:
        try:
            producer = KafkaProducer(
                bootstrap_servers=self.config["bootstrap_uri"],
                security_protocol=self.config["security_protocol"],
                ssl_cafile=self.config["ssl_cafile"],
                ssl_certfile=self.config["ssl_certfile"],
                ssl_keyfile=self.config["ssl_keyfile"],
                api_version=(1, 0, 0),
                metadata_max_age_ms=self.config["metadata_max_age_ms"],
                max_block_ms=2000  # missing topics will block unless we cache cluster metadata and pre-check
            )
        except:  # pylint: disable=bare-except
            self.log.exception("Unable to create producer, retrying")
            time.sleep(1)
    return producer
開發者ID:aiven,項目名稱:karapace,代碼行數:18,代碼來源:karapace.py

示例12: __init__

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def __init__(self):
    """Create the JSON-serializing Kafka producer and an empty tweet buffer."""
    self.producer = KafkaProducer(
        bootstrap_servers='docker:9092',
        value_serializer=lambda v: json.dumps(v))
    self.tweets = []
開發者ID:amir-rahnama,項目名稱:pyspark-twitter-stream-mining,代碼行數:5,代碼來源:twitter_stream.py

示例13: from_settings

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def from_settings(cls, settings):
    """Build a pipeline instance (logger + Kafka producer) from crawler settings.

    Exits the process when Kafka is unreachable, since this pipeline runs in
    a separate thread and is useless without a broker connection.
    """
    logger_kwargs = dict(
        json=settings.get('SC_LOG_JSON', False),
        name=settings.get('SC_LOGGER_NAME', 'sc-logger'),
        stdout=settings.get('SC_LOG_STDOUT', True),
        level=settings.get('SC_LOG_LEVEL', 'INFO'),
        dir=settings.get('SC_LOG_DIR', 'logs'),
        file=settings.get('SC_LOG_FILE', 'main.log'),
        bytes=settings.get('SC_LOG_MAX_BYTES', '10MB'),
        backups=settings.get('SC_LOG_BACKUPS', 5),
    )
    use_appids = settings.get('KAFKA_APPID_TOPICS', False)

    logger = LogFactory.get_instance(**logger_kwargs)

    try:
        producer = KafkaProducer(bootstrap_servers=settings['KAFKA_HOSTS'],
                                 retries=3,
                                 linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                                 buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES'])
    except Exception:
        logger.error("Unable to connect to Kafka in Pipeline"
                     ", raising exit flag.")
        # this is critical so we choose to exit.
        # exiting because this is a different thread from the crawlers
        # and we want to ensure we can connect to Kafka when we boot
        sys.exit(1)

    return cls(producer,
               settings['KAFKA_TOPIC_PREFIX'],
               logger,
               appids=use_appids,
               use_base64=settings['KAFKA_BASE_64_ENCODE'])
開發者ID:istresearch,項目名稱:scrapy-cluster,代碼行數:39,代碼來源:pipelines.py

示例14: __init__

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def __init__(self, kafka_addr, kafka_topic):
    """
    Client for producing location messages to a Kafka broker.

    :param kafka_addr: Address to the Kafka broker.
    :param kafka_topic: Name of the Kafka topic to which messages should be published.
    """
    # Bypass event publishing entirely when no broker address is specified.
    if kafka_addr:
        factory = kafka.KafkaProducer
    else:
        factory = NoopProducer

    self.topic = kafka_topic
    self.producer = factory(
        bootstrap_servers=kafka_addr,
        value_serializer=json.dumps,
    )
開發者ID:LINKIWI,項目名稱:orion-server,代碼行數:17,代碼來源:stream.py

示例15: kafka_connect

# 需要導入模塊: import kafka [as 別名]
# 或者: from kafka import KafkaProducer [as 別名]
def kafka_connect(self):
    """Connect to Kafka and store a JSON-serializing producer on the instance."""
    serialize_json = lambda v: json.dumps(v).encode('utf-8')
    self.producer = KafkaProducer(bootstrap_servers=KAFKA_URL,
                                  value_serializer=serialize_json)
開發者ID:dmfigol,項目名稱:network-programmability-stream,代碼行數:6,代碼來源:nc_dial_in_subscribe.py


注:本文中的kafka.KafkaProducer方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。