This article collects typical usage examples of the multiprocessing.JoinableQueue.cancel_join_thread method in Python. If you are wondering what JoinableQueue.cancel_join_thread does, how to use it, or what it looks like in real code, the curated examples below may help. You can also read further about the class this method belongs to, multiprocessing.JoinableQueue.
One code example of the JoinableQueue.cancel_join_thread method is shown below; examples are sorted by popularity by default. You can upvote the examples you like or find useful, and your feedback helps the site recommend better Python code examples.
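Before the full example, a quick note on what the method actually does: by default, a process that has put items on a multiprocessing.Queue (JoinableQueue is a subclass) waits at exit for the queue's background feeder thread to flush all buffered items to the underlying pipe. Calling cancel_join_thread() disables that wait, so the process can exit immediately at the risk of losing items that have not been flushed yet. A minimal, self-contained sketch (not taken from the example below):

from multiprocessing import JoinableQueue

# A work queue whose feeder thread will NOT be joined when this process exits:
# exit is never blocked on flushing buffered items, at the cost of possibly
# losing items that have not reached the pipe yet.
work_queue = JoinableQueue()
work_queue.cancel_join_thread()

work_queue.put("task-1")
print(work_queue.get())    # -> task-1
work_queue.task_done()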
Example 1: ParallelAnalyzer
# Required import: from multiprocessing import JoinableQueue [as an alias]
# Or: from multiprocessing.JoinableQueue import cancel_join_thread [as an alias]
class ParallelAnalyzer(BaseAnalyzer):
    ''' Parallel analyzer which uses the `multiprocessing` module. '''

    def __init__(self,
                 storage, script_list, script_hashes, min_script_needs, apks_or_paths,
                 concurrency=None):
        '''
        See :py:meth:`.BaseAnalyzer.__init__` for details on the first attributes.

        Other Parameters
        ----------------
        concurrency : int, optional (default is number of cpu cores)
            Number of workers to spawn.
        '''
        super(ParallelAnalyzer, self).__init__(storage, script_list, script_hashes,
                                               min_script_needs, apks_or_paths)

        # parallelization parameters
        if concurrency is None:
            concurrency = cpu_count()
        self.__concurrency = concurrency

        log.info("concurrency: %s", self.concurrency)
        log.info("Using processes")

        # parallel stuff, concerning processes
        self.__work_queue = Queue()
        # do not block this process at exit while the queue's feeder thread
        # flushes buffered items
        self.__work_queue.cancel_join_thread()
        self.__workers = []
        self.__analyzed_apks = Queue()

    def get_analyzed_apks(self):
        return self.__analyzed_apks

    def set_analyzed_apks(self, value):
        self.__analyzed_apks = value

    def del_analyzed_apks(self):
        del self.__analyzed_apks

    def get_work_queue(self):
        return self.__work_queue

    def get_concurrency(self):
        return self.__concurrency

    def get_workers(self):
        return self.__workers

    def set_workers(self, value):
        self.__workers = value

    def del_workers(self):
        del self.__workers

    analyzed_apks = property(get_analyzed_apks, set_analyzed_apks, del_analyzed_apks,
                             "Queue<FastAPK> : Yet analyzed APKs")
    concurrency = property(get_concurrency, None, None, "int : Number of workers to spawn.")
    workers = property(get_workers, set_workers, del_workers, "list<Worker> : List of workers.")
    work_queue = property(get_work_queue, None, None,
                          "Queue<str> : Queue with paths to apks which shall be analyzed.")

    def _analyze(self):
        ''' See doc of :py:meth:`.BaseAnalyzer.analyze`. '''
        try:
            work_queue = self.work_queue

            # create worker pool
            log.debug("starting %s workers ...", self.concurrency)
            for _ in range(self.concurrency):
                p = Worker(self.script_list, self.script_hashes, self.min_script_needs,
                           work_queue, self.storage,
                           self.cnt_analyzed_apks, self.analyzed_apks, self.storage_results)
                self.workers.append(p)
                p.daemon = True

            # start workers
            for p in self.workers:
                p.start()

            # the queue has a size limit -> start the workers first, then enqueue items
            log.info("Loading apk paths into work queue ...")
            for apk_stuff in AnalyzeUtil.apk_gen(self.apks_or_paths):
                # a task is an apk together with all scripts
                work_queue.put(apk_stuff)

            for _ in range(self.concurrency):
                # signal end-of-work
                work_queue.put(STOP_SENTINEL)

            # progress view for the cli
            av = AnalysisStatsView(self.cnt_analyzed_apks, self._cnt_apks, self.analyzed_apks)
            av.daemon = True
            av.start()

            # block until the workers have finished
            work_queue.join()
            av.terminate()
            log.debug("joined on work queue ...")

            return self.cnt_analyzed_apks.value
#.........the rest of the code is omitted here.........
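The Worker class used above is not part of the example, so the following is only a hedged sketch of the consumer-side protocol that work_queue.join() relies on (the class name WorkerSketch, the print placeholder for the analysis, and the use of None as the sentinel are assumptions, not the original code): every item put on the JoinableQueue, including the STOP_SENTINEL values, must be acknowledged with task_done(), otherwise join() never returns.

from multiprocessing import JoinableQueue, Process

STOP = None    # stands in for the example's STOP_SENTINEL (assumption)

class WorkerSketch(Process):
    ''' Minimal stand-in for the Worker class, whose body the example does not show. '''

    def __init__(self, work_queue):
        super().__init__()
        self.work_queue = work_queue

    def run(self):
        while True:
            task = self.work_queue.get()
            try:
                if task is STOP:
                    break                        # end-of-work signal for this worker
                print("analyzing", task)         # placeholder for the real analysis
            finally:
                # acknowledge every item, including the sentinel,
                # otherwise work_queue.join() would block forever
                self.work_queue.task_done()

if __name__ == '__main__':
    work_queue = JoinableQueue()
    workers = [WorkerSketch(work_queue) for _ in range(2)]
    for w in workers:
        w.daemon = True
        w.start()
    for task in ["a.apk", "b.apk", "c.apk"]:
        work_queue.put(task)
    for _ in workers:
        work_queue.put(STOP)    # one sentinel per worker, as in the example
    work_queue.join()           # returns once task_done() has matched every put()

Putting one sentinel per worker, as the example does, guarantees that every worker sees exactly one end-of-work marker and shuts down cleanly before join() returns.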