This article collects typical usage examples of the multiprocessing.JoinableQueue.put_nowait method in Python. If you have been wondering what exactly JoinableQueue.put_nowait does, how to call it, or what it looks like in real code, the hand-picked examples below may help. You can also look further into usage examples of the containing class, multiprocessing.JoinableQueue.
The following shows 3 code examples of the JoinableQueue.put_nowait method, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
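Before the project examples, here is a minimal, self-contained sketch (the worker function and queue names are hypothetical, not taken from the examples below) of the basic contract behind JoinableQueue.put_nowait: put_nowait(item) enqueues without blocking, every get() must be paired with task_done(), and join() blocks until all items have been marked done.
from multiprocessing import JoinableQueue, Process


def worker(q):
    # Hypothetical consumer: drain the queue until the None sentinel arrives.
    while True:
        item = q.get()
        try:
            if item is None:
                break
            print("processing {0}".format(item))
        finally:
            q.task_done()  # one task_done() per get() is what lets q.join() return


if __name__ == '__main__':
    q = JoinableQueue()
    p = Process(target=worker, args=(q,))
    p.start()
    for i in range(5):
        # put_nowait(x) is put(x, block=False); on a bounded queue it raises Queue.Full
        q.put_nowait(i)
    q.put_nowait(None)  # sentinel telling the worker to exit
    q.join()            # blocks until task_done() has been called for every item
    p.join()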
Example 1: CapturePlugin
# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import put_nowait [as alias]
import cPickle  # Python 2 pickle; on Python 3 use "import pickle"
from multiprocessing import JoinableQueue, Pipe

# BaseDronePlugin, FilterProcess and SnifferProcess come from the surrounding project.
class CapturePlugin(BaseDronePlugin):

    def __init__(self, interfaces, channel, drone):
        BaseDronePlugin.__init__(self, interfaces, channel, drone, "CapturePlugin.{0}".format(channel))
        self.logutil.log("Initializing")
        # Select interface
        try:
            self.kb = self.interfaces[0]
            self.kb.set_channel(self.channel)
            self.kb.active = True
        except Exception as e:
            self.logutil.log("Failed to use interface: {0}".format(e))
            self.status = False
        # Pipe from the tasker to the filter module, used to send pickled tasking dictionaries (simple DictManager)
        recv_pconn, recv_cconn = Pipe()
        task_pconn, self.task_cconn = Pipe()
        self.task_queue = JoinableQueue()
        # Start the filter up
        self.p_filt = FilterProcess(
            recv_pconn, self.task_queue, self.done_event, self.task_update_event, self.drone, self.name
        )
        self.p_filt.start()
        self.logutil.log("Launched FilterProcess ({0})".format(self.p_filt.pid))
        self.childprocesses.append(self.p_filt)
        # Start the receiver up
        self.p_recv = SnifferProcess(recv_cconn, self.kb, self.done_event, self.drone, self.name)
        self.p_recv.start()
        self.logutil.log("Launched SnifferProcess ({0})".format(self.p_recv.pid))
        self.childprocesses.append(self.p_recv)

    def task(self, uuid, data):
        self.logutil.log("Adding Task: {0}".format(uuid))
        if uuid in self.tasks:
            return False
        self.tasks[uuid] = data
        self.__update_filter_tasking()
        return True

    def detask(self, uuid):
        res = None
        if uuid in self.tasks:
            res = self.tasks.get(uuid)
            del self.tasks[uuid]
        else:
            return False
        if len(self.tasks) == 0:
            # No tasks left, so shut the whole plugin down
            self.logutil.log("No remaining tasks, shutting down plugin")
            self.shutdown()
            # TODO: also return something to indicate a total shutdown
        else:
            # Tasking changed, so push the update to the filter
            self.__update_filter_tasking()
        # return res
        return True

    def __update_filter_tasking(self):
        self.logutil.log("Sending Task Updates to FilterProcess")
        self.task_queue.put_nowait(cPickle.dumps(self.tasks))
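The FilterProcess consumer that reads from self.task_queue is not part of this excerpt. As a rough, hypothetical sketch (assumed behaviour, not the project's actual implementation), the receiving side could drain the pickled task updates like this:
import cPickle
from Queue import Empty  # Python 2; on Python 3 use "from queue import Empty"


def drain_task_updates(task_queue, current_tasks):
    # Hypothetical helper: pull every pending pickled task dict pushed by __update_filter_tasking().
    while True:
        try:
            raw = task_queue.get_nowait()
        except Empty:
            return current_tasks
        try:
            current_tasks = cPickle.loads(raw)  # each update carries the full task dict, so the latest wins
        finally:
            task_queue.task_done()  # balance the JoinableQueue accounting for each get_nowait()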
Example 2: main
# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import put_nowait [as alias]
import datetime
from multiprocessing import JoinableQueue, Process, cpu_count
from Queue import Empty  # Python 2; on Python 3 use "from queue import Empty"


def main(factor=2):
    # e.g. if the machine has 2 cores, 2 * factor processes are spawned
    files_to_download = JoinableQueue()
    result_queue = JoinableQueue()
    time_taken = JoinableQueue()
    time_taken_to_read_from_queue = JoinableQueue()
    with open('downloads.txt', 'r') as f:
        for to_download in f:
            files_to_download.put_nowait(to_download.split('\n')[0])
    files_to_download_size = files_to_download.qsize()
    cores = cpu_count()
    no_of_processes = cores * factor
    # One None sentinel per worker so every process knows when to stop.
    for i in xrange(no_of_processes):
        files_to_download.put_nowait(None)
    jobs = []
    start = datetime.datetime.now()
    for name in xrange(no_of_processes):
        p = Process(target=download, args=(files_to_download, result_queue,
                                           time_taken, time_taken_to_read_from_queue, name))
        p.start()
        jobs.append(p)
    for job in jobs:
        job.join()
    print(result_queue.qsize())
    total_downloaded_urls = 0
    try:
        while 1:
            r = result_queue.get_nowait()
            total_downloaded_urls += r
    except Empty:
        pass
    try:
        while 1:
            # The queue holds datetime/timedelta objects, so total_time cannot be
            # initialised to 0; the first get_nowait() seeds it instead, and
            # locals() is used to check whether it has been seeded yet.
            if 'total_time' in locals():
                total_time += time_taken.get_nowait()
            else:
                total_time = time_taken.get_nowait()
    except Empty:
        print("{0} processes on {1} core machine took {2} to download {3} urls".format(
            no_of_processes, cores, total_time, total_downloaded_urls))
    try:
        while 1:
            if 'queue_reading_time' in locals():
                queue_reading_time += time_taken_to_read_from_queue.get_nowait()
            else:
                queue_reading_time = time_taken_to_read_from_queue.get_nowait()
    except Empty:
        print("{0} processes on {1} core machine took {2} to read {3} urls from the queue".format(
            no_of_processes, cores, queue_reading_time, files_to_download_size))
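The download worker passed to Process above is not included in the excerpt. A plausible sketch of a worker that matches the None-sentinel protocol and the four queues (assumed names and behaviour, not the original implementation):
import datetime
import urllib2  # Python 2; on Python 3 use urllib.request


def download(files_to_download, result_queue, time_taken,
             time_taken_to_read_from_queue, name):
    # Hypothetical worker: fetch URLs until the None sentinel arrives.
    downloaded = 0
    started = datetime.datetime.now()
    while True:
        read_start = datetime.datetime.now()
        url = files_to_download.get()  # blocks until an item (or the None sentinel) arrives
        time_taken_to_read_from_queue.put_nowait(datetime.datetime.now() - read_start)
        try:
            if url is None:  # sentinel: no more work for this process
                break
            urllib2.urlopen(url).read()
            downloaded += 1
        except Exception:
            pass  # a failed download still counts as a handled queue item
        finally:
            files_to_download.task_done()
    result_queue.put_nowait(downloaded)
    time_taken.put_nowait(datetime.datetime.now() - started)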
Example 3: JoinableQueue
# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import put_nowait [as alias]
import datetime
from multiprocessing import JoinableQueue, Process

# Excerpt from a larger script: the argparse parser, the wos_parser and pjk_writer
# functions, and several options referenced below (wos_only, sample_rate, must_cite,
# outfile) are defined elsewhere and are not shown here.
parser.add_argument('-n', '--num-processes', help="Number of subprocesses to start", default=4, type=int)
parser.add_argument('-b', '--batch-size', help="Number of entries to batch prior to transmission", default=100, type=int)
parser.add_argument('-a', '--after', help="Only include nodes published on or after this year")
parser.add_argument('-bf', '--benchmark_freq', help="How often to emit benchmark info", type=int, default=1000000)
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()

file_queue = JoinableQueue()
result_queue = JoinableQueue()

date_after = None
if arguments.after:
    date_after = datetime.datetime.strptime(arguments.after, "%Y")

for file in arguments.infile:
    file_queue.put_nowait(file)
# One 'STOP' sentinel per parser process so each one knows when to exit.
for i in range(arguments.num_processes):
    file_queue.put_nowait('STOP')

for i in range(arguments.num_processes):
    Process(target=wos_parser, args=(file_queue,
                                     result_queue,
                                     arguments.wos_only,
                                     arguments.sample_rate,
                                     arguments.must_cite,
                                     arguments.batch_size,
                                     date_after)).start()
Process(target=pjk_writer, args=(result_queue, arguments.outfile, arguments.benchmark_freq)).start()