This article collects typical usage examples of the Python class awscli.customizations.s3.utils.StablePriorityQueue. If you are wondering what the StablePriorityQueue class is for, or how it is used in practice, the selected examples below should help.
The following presents 10 code examples of the StablePriorityQueue class, roughly ordered by popularity.
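Before the examples, here is a minimal, self-contained sketch of the behaviour the tests below rely on: items carry a numeric PRIORITY attribute, lower values are dequeued first, and items with equal priority come out in FIFO (insertion) order. Only the import path and the constructor arguments are taken from the examples; the Task class and the specific values are illustrative assumptions, and the sketch assumes an aws-cli version that still ships StablePriorityQueue.

# A minimal usage sketch; the Task class below is hypothetical.
from awscli.customizations.s3.utils import StablePriorityQueue


class Task(object):
    # Lower PRIORITY values are dequeued first.
    PRIORITY = 5

    def __init__(self, name):
        self.name = name


q = StablePriorityQueue(maxsize=10, max_priority=20)
q.put(Task('first'))
q.put(Task('second'))

# Equal priorities preserve insertion order, so 'first' is returned first.
print(q.get().name)  # first
print(q.get().name)  # second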
Example 1: test_insert_max_priority_capped
def test_insert_max_priority_capped(self):
    # A PRIORITY greater than max_priority is capped rather than
    # rejected; the item is still enqueued and can be retrieved.
    q = StablePriorityQueue(maxsize=10, max_priority=20)
    a = mock.Mock()
    a.PRIORITY = 100
    q.put(a)
    self.assertIs(q.get(), a)
Example 2: test_priority_attr_is_missing
def test_priority_attr_is_missing(self):
    # If the priority attr is missing, the item is assigned the
    # lowest priority (the largest value), so it comes out last.
    q = StablePriorityQueue(maxsize=10, max_priority=20)
    a = object()
    b = mock.Mock()
    b.PRIORITY = 5
    q.put(a)
    q.put(b)
    self.assertIs(q.get(), b)
    self.assertIs(q.get(), a)
Example 3: _put
def _put(self, item):
    if isinstance(item, CompleteMultipartUploadTask):
        # Raising this exception will trigger the
        # "error" case shutdown in the executor.
        raise RuntimeError(
            "Forced error on enqueue of complete task.")
    return StablePriorityQueue._put(self, item)
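Example 3's _put is shown out of its class. A hedged reconstruction of the kind of test helper it likely belongs to is sketched below; the subclass name and the import path for CompleteMultipartUploadTask are assumptions for illustration, not taken from the snippet itself.

# Hypothetical subclass context for the _put override shown above.
# The class name and the tasks import path are assumptions.
from awscli.customizations.s3.utils import StablePriorityQueue
from awscli.customizations.s3.tasks import CompleteMultipartUploadTask


class ErrorInjectingQueue(StablePriorityQueue):
    def _put(self, item):
        if isinstance(item, CompleteMultipartUploadTask):
            # Raising here exercises the executor's "error" shutdown path.
            raise RuntimeError(
                "Forced error on enqueue of complete task.")
        return StablePriorityQueue._put(self, item)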
Example 4: test_queue_length
def test_queue_length(self):
    a = mock.Mock()
    a.PRIORITY = 5
    q = StablePriorityQueue(maxsize=10, max_priority=20)
    self.assertEqual(q.qsize(), 0)
    q.put(a)
    self.assertEqual(q.qsize(), 1)
    q.get()
    self.assertEqual(q.qsize(), 0)
Example 5: __init__
def __init__(self, num_threads, result_queue,
             quiet, max_queue_size, write_queue):
    self._max_queue_size = max_queue_size
    self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                     max_priority=20)
    self.num_threads = num_threads
    self.result_queue = result_queue
    self.quiet = quiet
    self.threads_list = []
    self.write_queue = write_queue
    self.print_thread = PrintThread(self.result_queue,
                                    self.quiet)
    self.print_thread.daemon = True
    self.io_thread = IOWriterThread(self.write_queue)
Example 6: __init__
def __init__(self, num_threads, result_queue, quiet,
             only_show_errors, max_queue_size, write_queue):
    self._max_queue_size = max_queue_size
    LOGGER.debug("Using max queue size for s3 tasks of: %s",
                 self._max_queue_size)
    self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                     max_priority=20)
    self.num_threads = num_threads
    self.result_queue = result_queue
    self.quiet = quiet
    self.only_show_errors = only_show_errors
    self.threads_list = []
    self.write_queue = write_queue
    self.print_thread = PrintThread(self.result_queue, self.quiet,
                                    self.only_show_errors)
    self.print_thread.daemon = True
    self.io_thread = IOWriterThread(self.write_queue)
Example 7: test_fifo_order_of_same_priorities
def test_fifo_order_of_same_priorities(self):
    a = mock.Mock()
    a.PRIORITY = 5
    b = mock.Mock()
    b.PRIORITY = 5
    c = mock.Mock()
    c.PRIORITY = 1
    q = StablePriorityQueue(maxsize=10, max_priority=20)
    q.put(a)
    q.put(b)
    q.put(c)
    # First we should get c because it's the lowest priority.
    # We're using assertIs because we want the *exact* object.
    self.assertIs(q.get(), c)
    # Then a and b are the same priority, but we should get
    # a first because it was inserted first.
    self.assertIs(q.get(), a)
    self.assertIs(q.get(), b)
Example 8: setUp
def setUp(self):
    self.q = StablePriorityQueue(maxsize=10, max_priority=20)
Example 9: TestTaskOrdering
class TestTaskOrdering(unittest.TestCase):
    def setUp(self):
        self.q = StablePriorityQueue(maxsize=10, max_priority=20)

    def create_task(self):
        # We don't actually care about the arguments, we just want to test
        # the ordering of the tasks.
        return CreateLocalFileTask(None, None)

    def complete_task(self):
        return CompleteDownloadTask(None, None, None, None, None)

    def download_task(self):
        return DownloadPartTask(None, None, None, None, mock.Mock(),
                                None, None)

    def shutdown_task(self, priority=None):
        return ShutdownThreadRequest(priority)

    def test_order_unchanged_in_same_priority(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()
        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)
        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)

    def test_multiple_tasks(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()
        create2 = self.create_task()
        download2 = self.download_task()
        complete2 = self.complete_task()
        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)
        self.q.put(create2)
        self.q.put(download2)
        self.q.put(complete2)
        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)
        self.assertIs(self.q.get(), create2)
        self.assertIs(self.q.get(), download2)
        self.assertIs(self.q.get(), complete2)

    def test_shutdown_tasks_are_last(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()
        shutdown = self.shutdown_task(priority=11)
        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)
        self.q.put(shutdown)
        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)
        self.assertIs(self.q.get(), shutdown)
Example 10: Executor
class Executor(object):
    """
    This class is in charge of all of the threads. It starts up the threads
    and cleans up the threads when finished. The two types of threads the
    ``Executor`` runs are a worker thread and a print thread.
    """
    STANDARD_PRIORITY = 11
    IMMEDIATE_PRIORITY = 1

    def __init__(self, num_threads, result_queue, quiet,
                 only_show_errors, max_queue_size, write_queue):
        self._max_queue_size = max_queue_size
        LOGGER.debug("Using max queue size for s3 tasks of: %s",
                     self._max_queue_size)
        self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                         max_priority=20)
        self.num_threads = num_threads
        self.result_queue = result_queue
        self.quiet = quiet
        self.only_show_errors = only_show_errors
        self.threads_list = []
        self.write_queue = write_queue
        self.print_thread = PrintThread(self.result_queue, self.quiet,
                                        self.only_show_errors)
        self.print_thread.daemon = True
        self.io_thread = IOWriterThread(self.write_queue)

    @property
    def num_tasks_failed(self):
        tasks_failed = 0
        if self.print_thread is not None:
            tasks_failed = self.print_thread.num_errors_seen
        return tasks_failed

    @property
    def num_tasks_warned(self):
        tasks_warned = 0
        if self.print_thread is not None:
            tasks_warned = self.print_thread.num_warnings_seen
        return tasks_warned

    def start(self):
        self.io_thread.start()
        # Note that we're *not* adding the IO thread to the threads_list.
        # There's a specific shutdown order we need and we're going to be
        # explicit about it rather than relying on the threads_list order.
        # See .join() for more info.
        self.print_thread.start()
        LOGGER.debug("Using a threadpool size of: %s", self.num_threads)
        for i in range(self.num_threads):
            worker = Worker(queue=self.queue)
            worker.setDaemon(True)
            self.threads_list.append(worker)
            worker.start()

    def submit(self, task):
        """
        This is the function used to submit a task to the ``Executor``.
        """
        LOGGER.debug("Submitting task: %s", task)
        self.queue.put(task)

    def initiate_shutdown(self, priority=STANDARD_PRIORITY):
        """Instruct all threads to shutdown.

        This is a graceful shutdown. It will wait until all
        currently queued tasks have been completed before the threads
        shutdown. If the task queue is completely full, it may
        take a while for the threads to shutdown.

        This method does not block. Once ``initiate_shutdown`` has
        been called, you can call ``wait_until_shutdown`` to block
        until the Executor has been shutdown.
        """
        # Implementation detail: we only queue the worker threads
        # to shutdown. The print/io threads are shutdown in the
        # ``wait_until_shutdown`` method.
        for i in range(self.num_threads):
            LOGGER.debug(
                "Queueing end sentinel for worker thread (priority: %s)",
                priority)
            self.queue.put(ShutdownThreadRequest(priority))

    def wait_until_shutdown(self):
        """Block until the Executor is fully shutdown.

        This will wait until all worker threads are shutdown, along
        with any additional helper threads used by the executor.
        """
        for thread in self.threads_list:
            LOGGER.debug("Waiting for thread to shutdown: %s", thread)
            while True:
                thread.join(timeout=1)
                if not thread.is_alive():
                    break
            LOGGER.debug("Thread has been shutdown: %s", thread)
        LOGGER.debug("Queueing end sentinel for result thread.")
        # ... (rest of the code omitted) ...
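To round off Example 10, the sketch below shows one plausible way to drive an Executor end to end using only the methods visible above (start, submit, initiate_shutdown, wait_until_shutdown). The queue objects and argument values are assumptions for illustration; in the real AWS CLI these are wired up internally by the s3 command machinery.

# A hedged driving sketch for the Executor above; argument values are assumed.
import queue

result_queue = queue.Queue()
write_queue = queue.Queue()

executor = Executor(num_threads=10, result_queue=result_queue,
                    quiet=False, only_show_errors=False,
                    max_queue_size=1000, write_queue=write_queue)
executor.start()

# Tasks go through the StablePriorityQueue created in __init__; anything
# with a PRIORITY attribute is ordered accordingly (FIFO within a priority).
# executor.submit(some_task)

# Queue shutdown sentinels for the workers, then block until they exit.
executor.initiate_shutdown()
executor.wait_until_shutdown()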