This page collects typical usage examples of Python's confluent_kafka.KafkaException. If you are unsure what confluent_kafka.KafkaException is, how to use it, or what idiomatic calls look like, the curated code samples below may help. You can also explore further usage examples from the confluent_kafka module in which it is defined.
The 15 code examples of confluent_kafka.KafkaException shown below are sorted by popularity by default. Upvoting the examples you like or find useful helps the site recommend better Python code samples.
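As a quick orientation before the examples: KafkaException is the exception type raised by the confluent-kafka clients, and its first argument is normally a KafkaError carrying the error code. The minimal sketch below is my own illustration rather than one of the examples on this page; the broker address, group id and topic name are assumptions.

from confluent_kafka import Consumer, KafkaError, KafkaException

consumer = Consumer({
    'bootstrap.servers': 'localhost:9092',  # assumed broker address
    'group.id': 'example-group',            # assumed group id
    'auto.offset.reset': 'earliest',
})
consumer.subscribe(['example-topic'])       # assumed topic

try:
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            # Wrap the KafkaError in a KafkaException, as most examples below do.
            raise KafkaException(msg.error())
        print(msg.value())
finally:
    consumer.close()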
Example 1: start
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def start(self):
    self._logger.info("Listening topic:{0}".format(self.kafka_consumer.Topic))
    consumer = self.kafka_consumer.start()

    try:
        while True:
            message = consumer.poll(timeout=1.0)
            if message is None:
                continue
            if not message.error():
                self._new_file(message.value().decode('utf-8'))
            elif message.error():
                if message.error().code() == KafkaError._PARTITION_EOF:
                    continue
                elif message.error:
                    raise KafkaException(message.error())

    except KeyboardInterrupt:
        sys.stderr.write('%% Aborted by user\n')

    consumer.close()
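Note that the KafkaError._PARTITION_EOF branch above only fires if partition-EOF events are enabled; in recent confluent-kafka-python releases they are off by default. A hedged configuration sketch (broker address and group id are assumptions):

consumer_conf = {
    'bootstrap.servers': 'localhost:9092',  # assumed
    'group.id': 'example-group',            # assumed
    'enable.partition.eof': True,           # emit _PARTITION_EOF when the end of a partition is reached
}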
Example 2: start
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def start(self):
    self._logger.info("Listening topic:{0}".format(self.kafka_consumer.Topic))
    consumer = self.kafka_consumer.start()

    try:
        while True:
            message = consumer.poll(timeout=1.0)
            if message is None:
                continue
            if not message.error():
                self._new_file(message.value().decode('utf-8'))
            elif message.error():
                if message.error().code() == KafkaError._PARTITION_EOF:
                    continue
                elif message.error:
                    raise KafkaException(message.error())

    except KeyboardInterrupt:
        sys.stderr.write('%% Aborted by user\n')

    consumer.close()
Example 3: produce2
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def produce2(self, topic, value, on_delivery):
    """
    A produce method in which delivery notifications are made available
    via both the returned future and on_delivery callback (if specified).
    """
    result = self._loop.create_future()

    def ack(err, msg):
        if err:
            self._loop.call_soon_threadsafe(
                result.set_exception, KafkaException(err))
        else:
            self._loop.call_soon_threadsafe(
                result.set_result, msg)
        if on_delivery:
            self._loop.call_soon_threadsafe(
                on_delivery, err, msg)
    self._producer.produce(topic, value, on_delivery=ack)
    return result
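A possible way to call produce2 from a coroutine running on the same event loop, assuming an AIOProducer-style wrapper instance like the one this method belongs to (the names aio_producer, on_done and send are illustrative, not from the original page):

def on_done(err, msg):
    # Runs on the event loop thread because produce2 schedules it
    # via call_soon_threadsafe.
    if err is not None:
        print("Delivery failed:", err)
    else:
        print("Delivered to", msg.topic(), msg.partition())

async def send(aio_producer):
    # Awaiting the returned future raises KafkaException on delivery failure.
    msg = await aio_producer.produce2("items", b"hello", on_delivery=on_done)
    return msg.offset()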
Example 4: example_describe_configs
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def example_describe_configs(a, args):
    """ describe configs """

    resources = [ConfigResource(restype, resname) for
                 restype, resname in zip(args[0::2], args[1::2])]

    fs = a.describe_configs(resources)

    # Wait for operation to finish.
    for res, f in fs.items():
        try:
            configs = f.result()
            for config in iter(configs.values()):
                print_config(config, 1)

        except KafkaException as e:
            print("Failed to describe {}: {}".format(res, e))

        except Exception:
            raise
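For context, a minimal sketch of how this helper might be driven, assuming print_config is defined alongside it in the same script and that the broker address is correct for your environment; ConfigResource accepts a resource type string such as "topic" or "broker" followed by the resource name:

from confluent_kafka.admin import AdminClient

a = AdminClient({'bootstrap.servers': 'localhost:9092'})  # assumed broker
# Describe the configuration of one topic and one broker (broker id "1").
example_describe_configs(a, ["topic", "my-topic", "broker", "1"])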
Example 5: test_protobuf_serializer_type_mismatch
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def test_protobuf_serializer_type_mismatch(kafka_cluster):
    """
    Ensures an Exception is raised when serializing an unexpected type.
    """
    pb2_1 = TestProto_pb2.TestMessage
    pb2_2 = NestedTestProto_pb2.NestedMessage

    sr = kafka_cluster.schema_registry({'url': 'http://localhost:8081'})
    topic = kafka_cluster.create_topic("serialization-proto-refs")
    serializer = ProtobufSerializer(pb2_1, sr)
    producer = kafka_cluster.producer(key_serializer=serializer)

    with pytest.raises(KafkaException,
                       match=r"message must be of type <class"
                             r" 'TestProto_pb2.TestMessage'\> not \<class"
                             r" 'NestedTestProto_pb2.NestedMessage'\>"):
        producer.produce(topic, key=pb2_2())
Example 6: test_fatal
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def test_fatal():
    """ Test fatal exceptions """
    # Configure an invalid broker and make sure the ALL_BROKERS_DOWN
    # error is seen in the error callback.
    p = Producer({'error_cb': error_cb})

    with pytest.raises(KafkaException) as exc:
        raise KafkaException(KafkaError(KafkaError.MEMBER_ID_REQUIRED,
                                        fatal=True))
    err = exc.value.args[0]
    assert isinstance(err, KafkaError)
    assert err.fatal()
    assert not err.retriable()
    assert not err.txn_requires_abort()

    p.poll(0)  # Need some p use to avoid flake8 unused warning
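The error_cb referenced above is not shown on this page. A plausible sketch of a callback that distinguishes fatal errors follows; its exact behaviour is an assumption, not the original test's callback:

def error_cb(err):
    # Called by the client from poll()/flush() for client-level errors.
    if err.fatal():
        # A fatal error means this client instance is unusable and must be recreated.
        raise KafkaException(err)
    print("Non-fatal client error:", err.str())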
Example 7: handle_kafka_error
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def handle_kafka_error(self, msg):  # pragma: no cover
    """Handle an error in kafka."""
    if msg.error().code() == KafkaError._PARTITION_EOF:
        # End of partition event
        self.logger.info('%% %s [%d] reached end at offset %d\n',
                         msg.topic(), msg.partition(), msg.offset())
    else:
        # Error
        raise KafkaException(msg.error())
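One way such a handler could be wired into a poll loop; this is a hypothetical sketch, and self.consumer and self.process are assumptions rather than attributes shown on this page:

def run(self):
    # Hypothetical poll loop around handle_kafka_error().
    while True:
        msg = self.consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            # Either logs the end-of-partition event or raises KafkaException.
            self.handle_kafka_error(msg)
        else:
            self.process(msg)  # hypothetical message handler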
Example 8: consume
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def consume(self):
    # type: () -> Any
    """
    Consume messages from a given list of topics
    :return:
    """
    records = []
    start = datetime.now()
    try:
        while True:
            msg = self.consumer.poll(timeout=self.consumer_poll_timeout)
            end = datetime.now()
            # The consumer exceeds consume timeout
            if (end - start) > timedelta(seconds=self.consumer_total_timeout):
                # Exceed the consume timeout
                break

            if msg is None:
                continue

            if msg.error():
                # Hit the EOF of partition
                if msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    raise KafkaException(msg.error())
            else:
                records.append(msg.value())
    except Exception as e:
        LOGGER.exception(e)
    finally:
        return records
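The attributes consume() relies on (self.consumer, self.consumer_poll_timeout, self.consumer_total_timeout) are set up elsewhere in the class. A minimal constructor sketch follows; the class name, parameters and defaults are assumptions:

from confluent_kafka import Consumer

class KafkaMessageSource:
    def __init__(self, brokers, group_id, topics,
                 poll_timeout=1.0, total_timeout=10.0):
        # Create and subscribe the consumer used by consume().
        self.consumer = Consumer({
            'bootstrap.servers': brokers,
            'group.id': group_id,
            'auto.offset.reset': 'earliest',
        })
        self.consumer.subscribe(topics)
        self.consumer_poll_timeout = poll_timeout
        self.consumer_total_timeout = total_timeout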
Example 9: produce
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def produce(self, topic, value):
    """
    An awaitable produce method.
    """
    result = self._loop.create_future()

    def ack(err, msg):
        if err:
            self._loop.call_soon_threadsafe(result.set_exception, KafkaException(err))
        else:
            self._loop.call_soon_threadsafe(result.set_result, msg)
    self._producer.produce(topic, value, on_delivery=ack)
    return result
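Delivery callbacks such as ack() above only fire when the underlying Producer is polled, so a wrapper like this typically runs a small polling loop on a background thread. A sketch of that piece; the function name and the way it is started are assumptions:

import threading

def _poll_loop(producer, cancelled):
    # Serve delivery callbacks (ack above) until asked to stop; the wrapper
    # would start this on a threading.Thread with its self._producer.
    while not cancelled.is_set():
        producer.poll(0.1)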
Example 10: create_item1
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
async def create_item1(item: Item):
    try:
        result = await aio_producer.produce("items", item.name)
        return {"timestamp": result.timestamp()}
    except KafkaException as ex:
        raise HTTPException(status_code=500, detail=ex.args[0].str())
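Examples 10 to 12 assume a FastAPI application with an Item request model, a shared ack callback, and producer objects created at startup (aio_producer would be an instance of the asyncio wrapper shown in Examples 3 and 9). A minimal sketch of those supporting definitions; the details are assumptions, not from the original page:

from time import time

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from confluent_kafka import Producer

app = FastAPI()

class Item(BaseModel):
    name: str

producer = Producer({'bootstrap.servers': 'localhost:9092'})  # assumed broker

def ack(err, msg):
    # Shared delivery callback used by create_item2/create_item3.
    if err is not None:
        print("Delivery failed:", err)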
Example 11: create_item2
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def create_item2(item: Item):
    try:
        aio_producer.produce2("items", item.name, on_delivery=ack)
        return {"timestamp": time()}
    except KafkaException as ex:
        raise HTTPException(status_code=500, detail=ex.args[0].str())
Example 12: create_item3
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def create_item3(item: Item):
    try:
        producer.produce("items", item.name, on_delivery=ack)
        return {"timestamp": time()}
    except KafkaException as ex:
        raise HTTPException(status_code=500, detail=ex.args[0].str())
Example 13: create_topic
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def create_topic(self, topic, conf):
    """ Create the topic if it doesn't already exist """
    admin = AdminClient(conf)
    fs = admin.create_topics([NewTopic(topic, num_partitions=2, replication_factor=3)])
    f = fs[topic]
    try:
        res = f.result()  # noqa unused variable
    except KafkaException as ex:
        if ex.args[0].code() == KafkaError.TOPIC_ALREADY_EXISTS:
            self.logger.info("Topic {} already exists: good".format(topic))
        else:
            raise
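One caveat worth noting: replication_factor=3 requires at least three brokers, so against a single-node development cluster this call would fail with a different KafkaError rather than TOPIC_ALREADY_EXISTS. A hedged usage sketch; the instance name, topic and broker address are assumptions:

conf = {'bootstrap.servers': 'localhost:9092'}  # assumed broker address
service.create_topic("orders", conf)            # 'service' is whatever object owns create_topic()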
Example 14: test_basic_api
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def test_basic_api():
    """ Basic API tests, these won't really do anything since there is no
        broker configured. """

    with pytest.raises(TypeError):
        a = AdminClient()

    a = AdminClient({"socket.timeout.ms": 10})

    a.poll(0.001)

    try:
        a.list_topics(timeout=0.2)
    except KafkaException as e:
        assert e.args[0].code() in (KafkaError._TIMED_OUT, KafkaError._TRANSPORT)
Example 15: test_delete_topics_api
# Required module: import confluent_kafka [as alias]
# Or: from confluent_kafka import KafkaException [as alias]
def test_delete_topics_api():
    """ delete_topics() tests, these won't really do anything since there is no
        broker configured. """

    a = AdminClient({"socket.timeout.ms": 10})
    fs = a.delete_topics(["mytopic"])
    # ignore the result

    with pytest.raises(Exception):
        a.delete_topics(None)

    with pytest.raises(Exception):
        a.delete_topics("mytopic")

    with pytest.raises(Exception):
        a.delete_topics([])

    with pytest.raises(ValueError):
        a.delete_topics([None, "mytopic"])

    fs = a.delete_topics(["mytopic", "other"])
    with pytest.raises(KafkaException):
        for f in concurrent.futures.as_completed(iter(fs.values())):
            f.result(timeout=1)

    fs = a.delete_topics(["mytopic", "othertopic", "third"],
                         request_timeout=0.5,
                         operation_timeout=300.1)
    for f in concurrent.futures.as_completed(iter(fs.values())):
        e = f.exception(timeout=1)
        assert isinstance(e, KafkaException)
        assert e.args[0].code() == KafkaError._TIMED_OUT

    with pytest.raises(TypeError):
        a.delete_topics(["mytopic"],
                        validate_only="maybe")