This page collects typical usage examples of the Python method aiokafka.client.AIOKafkaClient.set_topics. If you are unsure what AIOKafkaClient.set_topics does, or how and when to use it, the curated code examples below may help. You can also look further into the containing class, aiokafka.client.AIOKafkaClient.
Three code examples of AIOKafkaClient.set_topics are shown below, sorted by popularity by default.
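For orientation, here is a minimal, hedged sketch of a typical set_topics call. It follows the older asyncio style (explicit loop argument, yield from) used by the examples below; the broker address, topic names, and the function name limit_metadata are placeholders for illustration, not taken from the examples.

import asyncio
from aiokafka.client import AIOKafkaClient


@asyncio.coroutine
def limit_metadata(loop):
    # Placeholder broker address; replace with a reachable Kafka broker.
    client = AIOKafkaClient(loop=loop, bootstrap_servers='localhost:9092')
    yield from client.bootstrap()
    try:
        # Restrict the client's metadata tracking to these topics.
        # In the aiokafka version exercised by Example 2, set_topics is awaitable
        # and resolves once the metadata refresh (if any) has completed.
        yield from client.set_topics(['topic1', 'topic2'])
    finally:
        yield from client.close()


loop = asyncio.get_event_loop()
loop.run_until_complete(limit_metadata(loop))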
Example 1: test_bootstrap
# Required import: from aiokafka.client import AIOKafkaClient [as alias]
# Or: from aiokafka.client.AIOKafkaClient import set_topics [as alias]
def test_bootstrap(self):
    client = AIOKafkaClient(loop=self.loop,
                            bootstrap_servers='0.42.42.42:444')
    with self.assertRaises(ConnectionError):
        yield from client.bootstrap()

    client = AIOKafkaClient(loop=self.loop, bootstrap_servers=self.hosts)
    yield from client.bootstrap()
    yield from self.wait_topic(client, 'test_topic')

    metadata = yield from client.fetch_all_metadata()
    self.assertTrue('test_topic' in metadata.topics())

    client.set_topics(['t2', 't3'])
    client.set_topics(['t2', 't3'])  # should be ignored
    client.add_topic('t2')  # should be ignored
    # bootstrap again -- no error expected
    yield from client.bootstrap()
    yield from client.close()
Example 2: test_set_topics_trigger_metadata_update
# Required import: from aiokafka.client import AIOKafkaClient [as alias]
# Or: from aiokafka.client.AIOKafkaClient import set_topics [as alias]
def test_set_topics_trigger_metadata_update(self):
    client = AIOKafkaClient(
        loop=self.loop,
        bootstrap_servers=self.hosts,
        metadata_max_age_ms=10000)
    yield from client.bootstrap()
    self.add_cleanup(client.close)

    orig = client._metadata_update
    with mock.patch.object(client, '_metadata_update') as mocked:
        @asyncio.coroutine
        def new(*args, **kw):
            yield from asyncio.sleep(0.01, loop=self.loop)
            return (yield from orig(*args, **kw))
        mocked.side_effect = new

        yield from client.set_topics(["topic1"])
        self.assertEqual(
            len(client._metadata_update.mock_calls), 1)
        # The same topics list should not trigger an update
        yield from client.set_topics(["topic1"])
        self.assertEqual(
            len(client._metadata_update.mock_calls), 1)

        yield from client.set_topics(["topic1", "topic2"])
        self.assertEqual(
            len(client._metadata_update.mock_calls), 2)
        # Fewer topics should not trigger an update either
        yield from client.set_topics(["topic2"])
        self.assertEqual(
            len(client._metadata_update.mock_calls), 2)

        # Setting [] should force an update, as it means "all topics"
        yield from client.set_topics([])
        self.assertEqual(
            len(client._metadata_update.mock_calls), 3)

        # Changing topics during a refresh should trigger 2 refreshes
        client.set_topics(["topic3"])
        yield from asyncio.sleep(0.001, loop=self.loop)
        self.assertEqual(
            len(client._metadata_update.mock_calls), 4)
        yield from client.set_topics(["topic3", "topics4"])
        self.assertEqual(
            len(client._metadata_update.mock_calls), 5)
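As an observation drawn from the test above (not from official documentation): set_topics only forces a metadata refresh when the requested set grows beyond the topics already tracked, or when an empty list is passed, which switches the client back to tracking all topics. Below is a hedged sketch with placeholder topic names, assuming an already-bootstrapped client in the same yield-from style.

import asyncio


@asyncio.coroutine
def adjust_topics(client):
    # `client` is assumed to be a bootstrapped AIOKafkaClient.
    yield from client.set_topics(['orders'])              # new topic -> refresh
    yield from client.set_topics(['orders'])              # same set -> no refresh
    yield from client.set_topics(['orders', 'payments'])  # superset -> refresh
    yield from client.set_topics(['payments'])            # subset -> no refresh
    yield from client.set_topics([])                      # empty list -> refresh, track all topics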
Example 3: AIOKafkaConsumer
# Required import: from aiokafka.client import AIOKafkaClient [as alias]
# Or: from aiokafka.client.AIOKafkaClient import set_topics [as alias]
#......... part of this code omitted here .........
                 heartbeat_interval_ms=3000,
                 session_timeout_ms=30000,
                 consumer_timeout_ms=100,
                 api_version='auto'):
        if api_version not in ('auto', '0.9'):
            raise ValueError("Unsupported Kafka API version")
        self._client = AIOKafkaClient(
            loop=loop, bootstrap_servers=bootstrap_servers,
            client_id=client_id, metadata_max_age_ms=metadata_max_age_ms,
            request_timeout_ms=request_timeout_ms)
        self._api_version = api_version
        self._group_id = group_id
        self._heartbeat_interval_ms = heartbeat_interval_ms
        self._retry_backoff_ms = retry_backoff_ms
        self._enable_auto_commit = enable_auto_commit
        self._auto_commit_interval_ms = auto_commit_interval_ms
        self._partition_assignment_strategy = partition_assignment_strategy
        self._key_deserializer = key_deserializer
        self._value_deserializer = value_deserializer
        self._fetch_min_bytes = fetch_min_bytes
        self._fetch_max_wait_ms = fetch_max_wait_ms
        self._max_partition_fetch_bytes = max_partition_fetch_bytes
        self._consumer_timeout = consumer_timeout_ms / 1000
        self._check_crcs = check_crcs
        self._subscription = SubscriptionState(auto_offset_reset)
        self._fetcher = None
        self._coordinator = None
        self._closed = False
        self._loop = loop
        self._topics = topics

        if topics:
            self._client.set_topics(topics)
            self._subscription.subscribe(topics=topics)

    @asyncio.coroutine
    def start(self):
        yield from self._client.bootstrap()

        # Check Broker Version if not set explicitly
        if self._api_version == 'auto':
            self._api_version = yield from self._client.check_version()
        # Convert api_version config to tuple for easy comparisons
        self._api_version = tuple(
            map(int, self._api_version.split('.')))
        if self._api_version < (0, 9):
            raise ValueError(
                "Unsupported Kafka version: {}".format(self._api_version))

        self._fetcher = Fetcher(
            self._client, self._subscription, loop=self._loop,
            key_deserializer=self._key_deserializer,
            value_deserializer=self._value_deserializer,
            fetch_min_bytes=self._fetch_min_bytes,
            fetch_max_wait_ms=self._fetch_max_wait_ms,
            max_partition_fetch_bytes=self._max_partition_fetch_bytes,
            check_crcs=self._check_crcs,
            fetcher_timeout=self._consumer_timeout)

        if self._group_id is not None:
            # using group coordinator for automatic partitions assignment
            self._coordinator = GroupCoordinator(
                self._client, self._subscription, loop=self._loop,
                group_id=self._group_id,