本文整理汇总了Python中kafka.KafkaProducer方法的典型用法代码示例。如果您正苦于以下问题:Python kafka.KafkaProducer方法的具体用法?Python kafka.KafkaProducer怎么用?Python kafka.KafkaProducer使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块kafka的用法示例。
在下文中一共展示了kafka.KafkaProducer方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def __init__(self, env, is_external_queue: bool):
    """Set up external publishing to a Kafka topic from the environment config.

    If either the external host or the external queue (topic) is missing or
    blank, external publishing is silently disabled and this returns early.

    :param env: application environment holding the config tree
    :param is_external_queue: whether this publisher targets an external system
    """
    super().__init__(env, is_external_queue, queue_type='kafka', logger=logger)

    eq_host = env.config.get(ConfigKeys.HOST, domain=self.domain_key, default=None)
    eq_queue = env.config.get(ConfigKeys.QUEUE, domain=self.domain_key, default=None)

    # A missing or all-whitespace host disables external publishing entirely.
    # (was `type(eq_host) == str`; isinstance is the idiomatic type check)
    if eq_host is None or len(eq_host) == 0 or (isinstance(eq_host, str) and len(eq_host.strip()) == 0):
        # was logging.warning (root logger); use the module logger consistently
        logger.warning('blank external host specified, not setting up external publishing')
        return

    if eq_queue is None or len(eq_queue.strip()) == 0:
        logger.warning('blank external queue specified, not setting up external publishing')
        return

    # KafkaProducer expects a list of bootstrap servers; normalize a bare string.
    if isinstance(eq_host, str):
        eq_host = [eq_host]

    from kafka import KafkaProducer
    import json

    self.queue = eq_queue
    # Messages are dicts; serialize to UTF-8 JSON bytes on the way out.
    self.queue_connection = KafkaProducer(
        bootstrap_servers=eq_host,
        value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    # fixed mismatched quote in the format string ('"{}: ' -> '"{}" ')
    logger.info('setting up pubsub for type "{}" and host(s) "{}"'.format(self.queue_type, ','.join(eq_host)))
示例2: try_publish
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def try_publish(self, message):
    """Enrich a message (when an enrichment manager exists) and publish it
    to the configured Kafka topic with a per-entity partition key."""
    enricher = self.env.enrichment_manager
    if enricher is not None:
        message = enricher.handle(message)

    # Partition on the target id (falling back to the actor id) so that
    # messages about the same entity stay ordered on one partition.
    partition_key = None
    try:
        partition_key = message.get('target', dict()).get('id', None)
        if partition_key is None:
            partition_key = message.get('actor', dict()).get('id', None)
        # kafka publisher can't handle string keys
        partition_key = bytes(str(partition_key), encoding='utf-8')
    except Exception as partition_e:
        # best-effort: a bad key is logged and reported, publish still proceeds
        logger.exception(traceback.format_exc())
        environ.env.capture_exception(partition_e)

    # for kafka, the queue_connection is the KafkaProducer and queue is the topic name
    self.queue_connection.send(topic=self.queue, value=message, key=partition_key)
示例3: _create_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def _create_producer(self):
    """Tries to establish a Kafka producer connection.

    Returns the new ``KafkaProducer``, or ``None`` when this instance is
    already closed or a required setting is missing.  Any other failure is
    logged and re-raised.
    """
    # docstring fixed: this creates a producer, not a consumer
    if not self.closed:
        try:
            self.logger.debug("Creating new kafka producer using brokers: " +
                              str(self.settings['KAFKA_HOSTS']))

            return KafkaProducer(bootstrap_servers=self.settings['KAFKA_HOSTS'],
                                 value_serializer=lambda v: json.dumps(v).encode('utf-8'),
                                 retries=3,
                                 linger_ms=self.settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                                 buffer_memory=self.settings['KAFKA_PRODUCER_BUFFER_BYTES'])
        except KeyError as e:
            # misconfiguration: report the missing setting; caller receives None
            self.logger.error('Missing setting named ' + str(e),
                              {'ex': traceback.format_exc()})
        except Exception:
            # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate
            self.logger.error("Couldn't initialize kafka producer.",
                              {'ex': traceback.format_exc()})
            raise
示例4: _create_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def _create_producer(self):
    """Tries to establish a Kafka producer connection.

    Returns the new ``KafkaProducer``, or ``None`` when a required setting is
    missing.  Any other failure is logged and re-raised.
    """
    # docstring fixed: this creates a producer, not a consumer
    try:
        brokers = self.settings['KAFKA_HOSTS']
        self.logger.debug("Creating new kafka producer using brokers: " +
                          str(brokers))

        # NOTE(review): this serializer returns str, but kafka-python expects
        # bytes from value_serializer — confirm; sibling examples use
        # json.dumps(v).encode('utf-8').
        return KafkaProducer(bootstrap_servers=brokers,
                             value_serializer=lambda m: json.dumps(m),
                             retries=3,
                             linger_ms=self.settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                             buffer_memory=self.settings['KAFKA_PRODUCER_BUFFER_BYTES'])
    except KeyError as e:
        # misconfiguration: report the missing setting; caller receives None
        self.logger.error('Missing setting named ' + str(e),
                          {'ex': traceback.format_exc()})
    except Exception:
        # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate
        self.logger.error("Couldn't initialize kafka producer.",
                          {'ex': traceback.format_exc()})
        raise
示例5: _create_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def _create_producer(self, settings):
    """Tries to establish a Kafka producer connection.

    :param settings: mapping providing KAFKA_HOSTS and producer tuning keys
    :returns: the new ``KafkaProducer``, or ``None`` when a required setting
        is missing.  Any other failure is logged and re-raised.
    """
    # docstring fixed: this creates a producer, not a consumer
    try:
        brokers = settings['KAFKA_HOSTS']
        self.logger.debug("Creating new kafka producer using brokers: " +
                          str(brokers))

        # NOTE(review): this serializer returns str, but kafka-python expects
        # bytes from value_serializer — confirm; sibling examples use
        # json.dumps(v).encode('utf-8').
        return KafkaProducer(bootstrap_servers=brokers,
                             value_serializer=lambda m: json.dumps(m),
                             retries=3,
                             linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                             buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES'])
    except KeyError as e:
        # misconfiguration: report the missing setting; caller receives None
        self.logger.error('Missing setting named ' + str(e),
                          {'ex': traceback.format_exc()})
    except Exception:
        # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate
        self.logger.error("Couldn't initialize kafka producer in plugin.",
                          {'ex': traceback.format_exc()})
        raise
示例6: _create_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def _create_producer(self):
    """Create self.producer configured for the active security protocol.

    Supports 'SASL_PLAINTEXT' and 'SSL'; any other protocol leaves
    self.producer untouched, matching the original behavior.
    """
    import kafka

    if self.security_protocol not in ('SASL_PLAINTEXT', 'SSL'):
        return

    # Options shared by both protocols.
    kwargs = dict(
        bootstrap_servers=[self.host],
        value_serializer=lambda v: json.dumps(v).encode('utf-8'),
        security_protocol=self.security_protocol,
    )
    if self.security_protocol == 'SASL_PLAINTEXT':
        kwargs.update(
            sasl_plain_username=self.sasl_username,
            sasl_plain_password=self.sasl_password,
            sasl_mechanism='PLAIN',
        )
    else:  # SSL
        kwargs.update(
            ssl_cafile=self.ssl_cafile,
            ssl_certfile=self.ssl_certfile,
            ssl_keyfile=self.ssl_keyfile,
            ssl_check_hostname=False,
        )
    self.producer = kafka.KafkaProducer(**kwargs)
示例7: kafka
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def kafka(TOPIC=None):
    """Forward the raw HTTP request body to Kafka topic TOPIC.

    The module-level producer is created lazily on first use and reused
    afterwards.  Returns a (body, status, headers) triple.
    """
    global PRODUCER
    if PRODUCER is None:
        # Lazy init of the Kafka producer
        PRODUCER = KafkaProducer(
            bootstrap_servers=KAFKA_BOOSTRAP_SERVERS,
            sasl_mechanism=KAFKA_SASL_MECHANISM,
            sasl_plain_username=KAFKA_USER,
            sasl_plain_password=KAFKA_PASSWORD)
    try:
        # Block (up to 60s) until the broker acknowledges the message.
        PRODUCER.send(TOPIC, request.get_data()).get(timeout=60)
    except KafkaTimeoutError:
        return "Internal Server Error", 500, None
    return "OK", 200, None
示例8: main
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def main(n):
    """Stream the video into a Kafka producer in an infinite loop"""
    topic = choose_channel(n)
    reader = imageio.get_reader(DATA + topic + '.mp4', 'ffmpeg')
    # Frame rate drives the pacing inside video_loop.
    fps = reader.get_meta_data()['fps']
    # Batch/request limits are oversized so a whole JSON-encoded frame
    # fits into a single Kafka message.
    producer = KafkaProducer(bootstrap_servers='localhost:9092',
                             batch_size=15728640,
                             linger_ms=1000,
                             max_request_size=15728640,
                             value_serializer=lambda v: json.dumps(v.tolist()))
    while True:
        video_loop(reader, producer, topic, fps)
示例9: _create_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def _create_producer(self) -> None:
    """Instantiate self.producer for the configured security protocol.

    Handles "SASL_PLAINTEXT" and "SSL"; any other value is a no-op,
    matching the original behavior.
    """
    import kafka

    if self.security_protocol not in ("SASL_PLAINTEXT", "SSL"):
        return

    options = {
        "bootstrap_servers": [self.host],
        "value_serializer": lambda v: json.dumps(v).encode(DEFAULT_ENCODING),
        "security_protocol": self.security_protocol,
    }
    if self.security_protocol == "SASL_PLAINTEXT":
        options["sasl_plain_username"] = self.sasl_username
        options["sasl_plain_password"] = self.sasl_password
        options["sasl_mechanism"] = "PLAIN"
    else:  # "SSL"
        options["ssl_cafile"] = self.ssl_cafile
        options["ssl_certfile"] = self.ssl_certfile
        options["ssl_keyfile"] = self.ssl_keyfile
        options["ssl_check_hostname"] = False
    self.producer = kafka.KafkaProducer(**options)
示例10: init_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def init_producer(self):
    """Initialize KafkaProducer from the optional 'servers' and 'security'
    parameter groups, then read the required 'topic_name' parameter."""
    print("[{}: log] Initializing producer...".format(self.pp))

    # Gather optional parameters (alternate keys 'producer_servers' /
    # 'producer_security' were considered but are not used).
    producer_args = self.get_security(self.get_servers(dict(), 'servers'), 'security')

    # Instantiate producer; surface the failing arguments in the error.
    try:
        self.producer = KafkaProducer(**producer_args)
    except Exception as inst:
        msg = "[{}: ERROR] Could not initialize producer with arguments {}. Error was: {}"
        raise RuntimeError(msg.format(self.pp, producer_args, inst))
    self.topic_name = self.get_required_param("topic_name")
示例11: _create_producer
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def _create_producer(self):
    """Retry forever until a KafkaProducer can be created, then return it.

    Every failure is logged and followed by a one-second pause; the bare
    except is deliberate so any connection error triggers a retry.
    """
    while True:
        config = self.config
        try:
            return KafkaProducer(
                bootstrap_servers=config["bootstrap_uri"],
                security_protocol=config["security_protocol"],
                ssl_cafile=config["ssl_cafile"],
                ssl_certfile=config["ssl_certfile"],
                ssl_keyfile=config["ssl_keyfile"],
                api_version=(1, 0, 0),
                metadata_max_age_ms=config["metadata_max_age_ms"],
                max_block_ms=2000  # missing topics will block unless we cache cluster metadata and pre-check
            )
        except:  # pylint: disable=bare-except
            self.log.exception("Unable to create producer, retrying")
            time.sleep(1)
示例12: __init__
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def __init__(self):
    """Create a JSON-serializing Kafka producer and an empty tweet buffer."""
    # NOTE(review): json.dumps returns str while kafka-python expects bytes
    # from value_serializer — confirm against the deployed kafka version.
    self.producer = KafkaProducer(bootstrap_servers='docker:9092',
                                  value_serializer=json.dumps)
    self.tweets = []
示例13: from_settings
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def from_settings(cls, settings):
    """Build an instance from a settings mapping.

    Reads logging options (with defaults), sets up the logger, connects a
    Kafka producer, and constructs the pipeline object.  If Kafka is
    unreachable the process exits with status 1.

    :param settings: settings object with ``get(key, default)`` and ``[]``
    :returns: a new instance of ``cls``
    """
    my_level = settings.get('SC_LOG_LEVEL', 'INFO')
    my_name = settings.get('SC_LOGGER_NAME', 'sc-logger')
    my_output = settings.get('SC_LOG_STDOUT', True)
    my_json = settings.get('SC_LOG_JSON', False)
    my_dir = settings.get('SC_LOG_DIR', 'logs')
    my_bytes = settings.get('SC_LOG_MAX_BYTES', '10MB')
    my_file = settings.get('SC_LOG_FILE', 'main.log')
    my_backups = settings.get('SC_LOG_BACKUPS', 5)
    my_appids = settings.get('KAFKA_APPID_TOPICS', False)

    logger = LogFactory.get_instance(json=my_json,
                                     name=my_name,
                                     stdout=my_output,
                                     level=my_level,
                                     dir=my_dir,
                                     file=my_file,
                                     bytes=my_bytes,
                                     backups=my_backups)
    try:
        producer = KafkaProducer(bootstrap_servers=settings['KAFKA_HOSTS'],
                                 retries=3,
                                 linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
                                 buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES'])
    except Exception:
        # removed unused `as e` binding; log with the traceback attached
        logger.error("Unable to connect to Kafka in Pipeline"
                     ", raising exit flag.")
        # this is critical so we choose to exit.
        # exiting because this is a different thread from the crawlers
        # and we want to ensure we can connect to Kafka when we boot
        sys.exit(1)
    topic_prefix = settings['KAFKA_TOPIC_PREFIX']
    use_base64 = settings['KAFKA_BASE_64_ENCODE']
    return cls(producer, topic_prefix, logger, appids=my_appids,
               use_base64=use_base64)
示例14: __init__
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def __init__(self, kafka_addr, kafka_topic):
    """
    Client for producing location messages to a Kafka broker.
    :param kafka_addr: Address to the Kafka broker.
    :param kafka_topic: Name of the Kafka topic to which messages should be published.
    """
    # Bypass event publishing entirely when no broker address is specified.
    factory = kafka.KafkaProducer if kafka_addr else NoopProducer
    self.topic = kafka_topic
    self.producer = factory(
        bootstrap_servers=kafka_addr,
        value_serializer=json.dumps,
    )
示例15: kafka_connect
# 需要导入模块: import kafka [as 别名]
# 或者: from kafka import KafkaProducer [as 别名]
def kafka_connect(self):
    """Connect to Kafka and store a UTF-8-JSON-serializing producer on self.producer."""
    def serialize(v):
        return json.dumps(v).encode('utf-8')

    self.producer = KafkaProducer(bootstrap_servers=KAFKA_URL,
                                  value_serializer=serialize)