This article collects typical usage examples of the Python method w3af.core.controllers.threads.threadpool.Pool.close. If you are unsure what Pool.close does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also look at further usage examples of the containing class, w3af.core.controllers.threads.threadpool.Pool.
A total of 4 code examples of the Pool.close method are shown below, sorted by popularity by default.
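Before the individual examples, here is a minimal lifecycle sketch that they all share. It assumes the w3af Pool keeps the familiar multiprocessing.pool-style apply_async/close/join interface that the snippets below exercise; the process count and worker name are arbitrary placeholders.
# Minimal lifecycle sketch, assuming the multiprocessing.pool-style interface
# shown in the examples below; 'ExampleWorker' is an arbitrary name.
import time

from w3af.core.controllers.threads.threadpool import Pool

pool = Pool(processes=2, worker_names='ExampleWorker')

# Submit work without blocking the caller
pool.apply_async(func=time.sleep, args=(0.1,))

# close() stops accepting new tasks, join() waits for queued work to finish
pool.close()
pool.join()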
Example 1: test_max_queued_tasks
# Required import: from w3af.core.controllers.threads.threadpool import Pool [as alias]
# Alternatively: from w3af.core.controllers.threads.threadpool.Pool import close [as alias]
def test_max_queued_tasks(self):
    worker_pool = Pool(processes=1, max_queued_tasks=2)

    # These tasks should be queued very fast
    worker_pool.apply_async(func=time.sleep, args=(2,))
    worker_pool.apply_async(func=time.sleep, args=(2,))
    worker_pool.apply_async(func=time.sleep, args=(2,))
    worker_pool.apply_async(func=time.sleep, args=(2,))

    # Now the pool is full and we need to wait in the main
    # thread to get the task queued
    start = time.time()
    worker_pool.apply_async(func=time.sleep, args=(2,))
    spent = time.time() - start

    worker_pool.close()
    worker_pool.join()

    self.assertLess(spent, 2.1)
    self.assertGreater(spent, 1.9)
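The two timing assertions confirm that the fifth apply_async call blocked for roughly one task duration, which is exactly the backpressure that max_queued_tasks is meant to provide. As a purely illustrative sketch (fetch_url and URLS are made-up placeholders, not w3af APIs), a producer loop can lean on that blocking behaviour to avoid flooding the pool:
# Hypothetical producer that relies on max_queued_tasks for backpressure;
# fetch_url and URLS are illustrative placeholders, not part of w3af.
import time

from w3af.core.controllers.threads.threadpool import Pool

URLS = ['http://example.com/page/%d' % i for i in range(100)]

def fetch_url(url):
    # Placeholder for a slow, I/O-bound request
    time.sleep(0.1)

pool = Pool(processes=4, max_queued_tasks=8)

for url in URLS:
    # Blocks once 8 tasks are already queued, throttling this producer loop
    pool.apply_async(func=fetch_url, args=(url,))

pool.close()
pool.join()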
Example 2: BaseConsumer
# Required import: from w3af.core.controllers.threads.threadpool import Pool [as alias]
# Alternatively: from w3af.core.controllers.threads.threadpool.Pool import close [as alias]
class BaseConsumer(Process):
    """
    Consumer thread that takes fuzzable requests from a Queue that's populated
    by the crawl plugins, and identifies vulnerabilities by performing various
    requests.
    """
    def __init__(self, consumer_plugins, w3af_core, thread_name,
                 create_pool=True):
        """
        :param base_consumer_plugins: Instances of base_consumer plugins in a list
        :param w3af_core: The w3af core that we'll use for status reporting
        :param thread_name: How to name the current thread
        :param create_pool: True to create a worker pool for this consumer
        """
        super(BaseConsumer, self).__init__(name='%sController' % thread_name)

        self.in_queue = QueueSpeed()
        self._out_queue = Queue.Queue()

        self._consumer_plugins = consumer_plugins
        self._w3af_core = w3af_core
        self._tasks_in_progress = {}

        self._threadpool = None
        if create_pool:
            self._threadpool = Pool(10, worker_names='%sWorker' % thread_name)

    def run(self):
        """
        Consume the queue items, sending them to the plugins which are then
        going to find vulnerabilities, new URLs, etc.
        """
        while True:
            work_unit = self.in_queue.get()

            if work_unit == POISON_PILL:
                # Close the pool and wait for everyone to finish
                self._threadpool.close()
                self._threadpool.join()
                del self._threadpool

                self._teardown()

                # Finish this consumer and everyone consuming the output
                self._out_queue.put(POISON_PILL)
                self.in_queue.task_done()
                break
            else:
                # pylint: disable=E1120
                self._consume_wrapper(work_unit)
                self.in_queue.task_done()

    def _teardown(self):
        raise NotImplementedError

    def _consume(self, work_unit):
        raise NotImplementedError

    @task_decorator
    def _consume_wrapper(self, function_id, work_unit):
        """
        Just makes sure that all _consume methods are decorated as tasks.
        """
        return self._consume(work_unit)

    def _task_done(self, function_id):
        """
        The task_in_progress_counter is needed because we want to know if the
        consumer is processing something, and let it finish. It is mainly used
        in has_pending_work().

        For example:
            * You can have pending work if there are items in the input_queue.

            * You can have pending work if there are still items to be read
              from the output_queue by one of the consumers that reads our
              output.

            * You can have pending work when there are no items in input_queue
              and no items in output_queue, but the threadpool inside the
              consumer is processing something. This situation is handled by
              the self._tasks_in_progress attribute and the _add_task and
              _task_done methods.

        So, for each _add_task() there has to be a _task_done(), even if the
        task ends in an error or exception.

        Recommendation: do NOT set the apply_async callback to call _task_done;
        the Python 2.7 pool implementation won't call it if the function raised
        an exception, and you'll end up with tasks in progress that actually
        finished with an exception.
        """
        try:
            # ......... (remaining code omitted) .........
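One way to honour the recommendation in that docstring is to do the accounting with try/finally inside the callable that gets submitted to the pool, instead of relying on the apply_async callback. The standalone sketch below only illustrates the pattern; run_tracked and tasks_in_progress are made-up names, not the actual task_decorator implementation.
# Illustrative, standalone sketch of the try/finally accounting pattern
# recommended above; run_tracked and tasks_in_progress are made-up names.
from w3af.core.controllers.threads.threadpool import Pool

tasks_in_progress = {}

def run_tracked(function_id, func, *args):
    tasks_in_progress[function_id] = True
    try:
        return func(*args)
    finally:
        # Always runs, even when func raises, so the counter never leaks
        del tasks_in_progress[function_id]

pool = Pool(2, worker_names='DemoWorker')
pool.apply_async(func=run_tracked, args=(1, sum, [1, 2, 3]))
pool.close()
pool.join()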
Example 3: test_close_terminate
# Required import: from w3af.core.controllers.threads.threadpool import Pool [as alias]
# Alternatively: from w3af.core.controllers.threads.threadpool.Pool import close [as alias]
def test_close_terminate(self):
    worker_pool = Pool(1, worker_names='WorkerThread')
    worker_pool.close()
    worker_pool.terminate()
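Assuming the w3af Pool keeps the standard multiprocessing.pool semantics that its interface mirrors, close() stops accepting new tasks and lets queued work drain, while terminate() stops the workers without waiting; the test above only checks that calling both in sequence is safe. A small sketch of the two shutdown paths, with an arbitrary worker name:
# Sketch of graceful vs. forced shutdown, assuming multiprocessing.pool-like
# semantics for close()/terminate(); 'ShutdownDemo' is an arbitrary name.
import time

from w3af.core.controllers.threads.threadpool import Pool

pool = Pool(2, worker_names='ShutdownDemo')
pool.apply_async(func=time.sleep, args=(1,))

graceful = True
if graceful:
    pool.close()      # stop accepting tasks, let queued ones run
    pool.join()       # wait until they finish
else:
    pool.terminate()  # stop the workers without draining the queue
    pool.join()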
Example 4: BaseConsumer
# Required import: from w3af.core.controllers.threads.threadpool import Pool [as alias]
# Alternatively: from w3af.core.controllers.threads.threadpool.Pool import close [as alias]
# ......... (some code omitted here) .........
        self._threadpool = None
        if create_pool:
            self._threadpool = Pool(thread_pool_size or self.THREAD_POOL_SIZE,
                                    worker_names='%sWorker' % thread_name,
                                    max_queued_tasks=max_pool_queued_tasks)

    def get_pool(self):
        return self._threadpool

    def run(self):
        """
        Consume the queue items, sending them to the plugins which are then
        going to find vulnerabilities, new URLs, etc.
        """
        while True:
            try:
                work_unit = self.in_queue.get()
            except KeyboardInterrupt:
                # https://github.com/andresriancho/w3af/issues/9587
                #
                # If we don't do this, the thread will die and will never
                # process the POISON_PILL, which will end up in an endless
                # wait for .join()
                continue

            if work_unit == POISON_PILL:
                try:
                    # Close the pool and wait for everyone to finish
                    if self._threadpool is not None:
                        self._threadpool.close()
                        self._threadpool.join()
                        self._threadpool = None

                    self._teardown()
                finally:
                    # Finish this consumer and everyone consuming the output
                    self._out_queue.put(POISON_PILL)
                    self.in_queue.task_done()

                break
            else:
                # pylint: disable=E1120
                try:
                    self._consume_wrapper(work_unit)
                finally:
                    self.in_queue.task_done()

    def _teardown(self):
        raise NotImplementedError

    def _consume(self, work_unit):
        raise NotImplementedError

    @task_decorator
    def _consume_wrapper(self, function_id, work_unit):
        """
        Just makes sure that all _consume methods are decorated as tasks.
        """
        return self._consume(work_unit)

    def _task_done(self, function_id):
        """