This article collects typical usage examples of Python's multiprocessing.Lock. If you are wondering what exactly multiprocessing.Lock does, how to call it, or what real code using it looks like, the curated examples below may help. You can also explore further usage examples from the containing module, multiprocessing.
A total of 15 code examples of multiprocessing.Lock are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
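Before the project-specific examples, here is a minimal self-contained sketch of the most common pattern (an illustration using only the standard library, not code from any of the projects below): a Lock is created in the parent process, handed to each worker, and acquired around writes to a shared resource such as stdout.

import multiprocessing

def worker(lock, worker_id):
    # Acquire the lock so that output from different processes does not interleave.
    with lock:
        print('worker %d writing to stdout' % worker_id)

if __name__ == '__main__':
    lock = multiprocessing.Lock()
    processes = [multiprocessing.Process(target=worker, args=(lock, i)) for i in range(4)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()

Passing the lock through args works because child processes inherit it at start time; the examples below show the same idea in larger code bases, as well as the mp.Manager().Lock() variant needed when a lock must be shared through a Pool or a Manager.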
Example 1: add_cmd_tasks
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def add_cmd_tasks(cmd_task_list, identifier=None, stdin_error_lock=mp.Manager().Lock()):
    """
    Run several command line commands in parallel.
    @attention: use the Manager to get the lock as in this function definition !!!
    @type cmd_task_list: list of TaskCmd
    @param stdin_error_lock: acquiring the lock enables writing to the stdout and stderr
    @return: list of failed commands, dictionary (cmd, task process)
    """
    assert isinstance(cmd_task_list, list)
    thread_task_list = []
    for cmdTask in cmd_task_list:
        assert isinstance(cmdTask, TaskCmd)
        thread_task_list.append(TaskThread(_runCmd, (cmdTask, stdin_error_lock)))
    return AsyncParallel.add_tasks(thread_task_list, identifier)
Example 2: _testCmd
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def _testCmd(parallel=True):
    print('Start: Test: runCmdParallel')
    inDir = '/Users/ivan/Documents/nobackup/hsim01/562/a'
    outDir = '/Users/ivan/Documents/nobackup/hsim01/562/b'
    MUSCLE_BINARY = '/Users/ivan/Documents/work/tools/muscle/muscle3.8.31_i86darwin64'
    assert os.path.isfile(MUSCLE_BINARY), 'Binary file does not exist: %s' % MUSCLE_BINARY
    cmdListA = []
    for fileName in os.listdir(inDir):
        cmd = '%s -in %s -out %s' % (MUSCLE_BINARY, os.path.join(inDir, fileName), os.path.join(outDir, fileName))
        # print cmd
        cmdListA.append(TaskCmd(cmd, outDir))
        # break
    if parallel:
        failList = runCmdParallel(cmdListA)
    else:
        lock = mp.Lock()
        failList = runCmdSerial(cmdListA, stdInErrLock=lock)
    reportFailedCmd(failList)
    print('Stop: Test: runCmdParallel')
Example 3: pytest_configure
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def pytest_configure(config):
    config.addinivalue_line('markers',
                            "mp_group('GroupName', strategy): test (suite) is in named "
                            "grouped w/ desired strategy: 'free' (default), 'serial', "
                            "'isolated_free', or 'isolated_serial'.")
    standard_reporter = config.pluginmanager.get_plugin('terminalreporter')
    if standard_reporter:
        from pytest_mp.terminal import MPTerminalReporter
        mp_reporter = MPTerminalReporter(standard_reporter, manager)
        config.pluginmanager.unregister(standard_reporter)
        config.pluginmanager.register(mp_reporter, 'terminalreporter')
    if config.option.use_mp is None:
        if not config.getini('mp'):
            return
    if config.option.xmlpath is not None:
        from pytest_mp.junitxml import MPLogXML
        synchronization['node_reporters'] = manager.list()
        synchronization['node_reporters_lock'] = manager.Lock()
        xmlpath = config.option.xmlpath
        config.pluginmanager.unregister(config._xml)
        config._xml = MPLogXML(xmlpath, config.option.junitprefix, config.getini("junit_suite_name"), manager)
        config.pluginmanager.register(config._xml, 'mpjunitxml')
Example 4: test
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def test():
    print("Test mode!")
    import logSetup
    import WebMirror.rules
    import WebMirror.Engine
    import multiprocessing
    logSetup.initLogging()
    c_lok = cookie_lock = multiprocessing.Lock()
    engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)
    job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=0')
    engine.dispatchRequest(job)
    job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=1490')
    engine.dispatchRequest(job)
    job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=1500')
    engine.dispatchRequest(job)
    job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=1550')
    engine.dispatchRequest(job)
Example 5: exposed_load_urls_from_file
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def exposed_load_urls_from_file(file_path):
    '''
    Load a file of URLs, and feed them through the URL filtering system.
    '''
    with open(file_path, "r") as fp:
        content = fp.readlines()
    content = [tmp.strip() for tmp in content]
    print(content)
    with common.database.session_context() as sess:
        c_lok = cookie_lock = multiprocessing.Lock()
        engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok, new_job_queue=None, db_interface=sess)
        job = testJobFromUrl("https://www.webnovel.com/feed/")
        engine.upsertResponseLinks(job, plain=content, debug=True)
        print(engine)
Example 6: test
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def test():
    print("Test mode!")
    import logSetup
    import WebMirror.rules
    import WebMirror.Engine
    import multiprocessing
    logSetup.initLogging()
    c_lok = cookie_lock = multiprocessing.Lock()
    engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)
    engine.dispatchRequest(testJobFromUrl('http://japtem.com/fanfic.php'))
    # import WebRequest as webfunc
    # wg = webfunc.WebGetRobust()
    # proc = JapTemSeriesPageFilter(pageUrl="urlllllll", pgContent="watttt", type='lolertype', dosuper=False)
    # urls = [
    #     'http://japtem.com/fanfic.php',
    # ]
    # for url in urls:
    #     ctnt = wg.getpage(url)
    #     proc.content = ctnt
    #     proc.processPage(ctnt)
Example 7: test
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def test():
    print("Test mode!")
    import logSetup
    import WebMirror.rules
    import WebMirror.Engine
    import multiprocessing
    logSetup.initLogging()
    c_lok = cookie_lock = multiprocessing.Lock()
    engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)
    engine.dispatchRequest(testJobFromUrl('https://www.foxteller.com/releases'))
    # import WebRequest as webfunc
    # wg = webfunc.WebGetRobust()
    # proc = FoxTellerSeriesPageFilter(pageUrl="urlllllll", pgContent="watttt", type='lolertype', dosuper=False)
    # urls = [
    #     'https://www.foxteller.com/releases',
    # ]
    # for url in urls:
    #     ctnt = wg.getpage(url)
    #     proc.content = ctnt
    #     proc.processPage(ctnt)
Example 8: test
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def test():
    print("Test mode!")
    import logSetup
    import WebMirror.rules
    import WebMirror.Engine
    import multiprocessing
    logSetup.initLogging()
    c_lok = cookie_lock = multiprocessing.Lock()
    engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)
    # engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fiction/3021'))
    # engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/latest-updates/'))
    # engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/best-rated/'))
    engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/latest-updates/'))
    engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/active-top-50/'))
    engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/weekly-views-top-50/'))
    engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/newest/'))
Example 9: __init__
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def __init__(self):
    self.led_queue = mp.Queue()
    self.demo_mode = mp.Lock()
    self.led_process = mp.Process(target=led_control, args=(self.led_queue, self.demo_mode,))
    self.shutting_down = False
    self.last_button_release = 0
    self.show_end_of_lines = False
    # The button has multiple functions:
    # Turn the device on when off, single press to show the end of long lines on the display,
    # double press to start demo mode, single press to stay at one animation in demo mode,
    # long press to shut down
    self.button = Button(3, hold_time=2, bounce_time=0.05)
    self.button.when_held = self.shutdown
    self.button.when_released = self.button_pressed
    self.tft = SattrackerTFT()
    self.tle_updated_time = None
    self.tracker = None  # load in start because it takes quite a long time
    self.led_array = led_array_from_constants()
Example 10: removeProducer
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def removeProducer(self, noLock=False):
    if self.num_producers > 0:
        # Lock internal
        if not noLock: self.__internalLock__.acquire()
        # Remove last worker from worker pool
        (worker_num, producer, extra_arg_list) = self.producer_pool.pop()
        logger.debug("Removing Producer-%d" % worker_num)
        # Remove last worker's exitFlag
        producer_exitEvent = self.producer_pool_exitEvent.pop()
        # Set the worker's exit event
        if not producer_exitEvent.is_set():
            logger.debug("Producer-%d exitEvent SET" % worker_num)
            producer_exitEvent.set()
        # Update producer count
        self.num_producers -= 1
        # Release internal
        if not noLock: self.__internalLock__.release()
    else:
        logger.error("Attempted to remove producer from empty pool.")
Example 11: createLock
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def createLock(self):
    from threading import Lock

    class TLock(LockBase):
        def __init__(self, lock):
            self.lock = lock
        def __enter__(self):
            self.lock.acquire()
        def __exit__(self, t, v, tb):
            self.lock.release()
        def acquire(self):
            self.lock.acquire()
        def release(self):
            self.lock.release()

    return TLock(Lock())
Example 12: synchronized
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def synchronized(fn):
    '''
    A decorator which acquires a lock before attempting to execute its wrapped
    function. Releases the lock in a finally clause.
    :param fn: The function to wrap.
    '''
    lock = threading.Lock()

    @functools.wraps(fn)
    def decorated(*args, **kwargs):
        lock.acquire()
        try:
            return fn(*args, **kwargs)
        finally:
            lock.release()

    return decorated
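A short usage sketch of the decorator above (the counter function and variable are illustrative, not part of the original project): every call to the decorated function is serialized through the single module-level lock created when the decorator is applied.

_counter = 0

@synchronized
def increment():
    # Only one thread at a time executes this body.
    global _counter
    _counter += 1

Note that this decorator uses threading.Lock, so it only serializes threads within one process; to serialize work across processes you would pass a multiprocessing.Lock (or a Manager lock) to the workers instead, as the other examples on this page do.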
Example 13: runCmdParallel
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def runCmdParallel(cmdTaskList, maxProc=mp.cpu_count(), stdInErrLock=mp.Manager().Lock()):
    """
    Run several command line commands in parallel.
    @attention: use the Manager to get the lock as in this function definition !!!
    @param cmdTaskList: list of command line tasks
    @type cmdTaskList: list of TaskCmd
    @param maxProc: maximum number of tasks that will be run in parallel at the same time
    @param stdInErrLock: acquiring the lock enables writing to the stdout and stderr
    @return: list of failed commands, dictionary (cmd, task process)
    """
    assert isinstance(cmdTaskList, list)
    assert isinstance(maxProc, int)
    threadTaskList = []
    for cmdTask in cmdTaskList:
        assert isinstance(cmdTask, TaskCmd)
        threadTaskList.append(TaskThread(_runCmd, (cmdTask, stdInErrLock)))
    returnValueList = runThreadParallel(threadTaskList, maxProc)
    failList = []
    for process, task in returnValueList:
        if process.returncode != 0:
            failList.append(dict(process=process, task=task))
    if len(failList) > 0:
        return failList
    else:
        return None
Example 14: runCmdSerial
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def runCmdSerial(cmdTaskList, verbose=False, stopWhenError=True, stdInErrLock=None):
    """
    Run several command line commands one by one.
    @attention: Use the Manager to get the lock (mp.Manager().Lock()) if the lock is shared among multiple processes!
    @param cmdTaskList: list of command line tasks
    @type cmdTaskList: list of TaskCmd
    @param stdInErrLock: acquiring the lock enables writing to the stdout and stderr
    @type stdInErrLock: multiprocessing.Lock()
    """
    assert isinstance(cmdTaskList, list)
    counter = 0
    failList = []
    for task in cmdTaskList:
        counter += 1
        if verbose:
            msg = 'Starting "#%s" cmd: %s\n' % (counter, task.cmd)
            if stdInErrLock is not None:
                stdInErrLock.acquire()
            sys.stdout.write(msg)
            sys.stdout.flush()
            if stdInErrLock is not None:
                stdInErrLock.release()
        # run command
        process, taskCmd = _runCmd(task, stdInErrLock)
        if process.returncode != 0:
            failList.append(dict(process=process, task=task))
            if stopWhenError:
                break
    if len(failList) > 0:
        return failList
    else:
        return None
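The @attention notes in examples 1, 13, and 14 recommend mp.Manager().Lock() rather than a bare mp.Lock(). A minimal sketch of why (a standard-library illustration, independent of the project above): a plain multiprocessing.Lock can only be shared with children by inheritance at process start, whereas a manager lock is a picklable proxy that can also travel through a Pool's task queue.

import multiprocessing as mp

def _shout(args):
    text, lock = args
    # The manager lock is a proxy object, so it survives pickling into the Pool workers.
    with lock:
        print(text)

if __name__ == '__main__':
    lock = mp.Manager().Lock()
    with mp.Pool(processes=4) as pool:
        pool.map(_shout, [('task %d' % i, lock) for i in range(8)])

Passing a bare mp.Lock() through pool.map this way raises a RuntimeError at pickling time, which is presumably why these helpers default their stdInErrLock argument to a manager lock.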
Example 15: refactor
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Lock [as alias]
def refactor(self, items, write=False, doctests_only=False,
             num_processes=1):
    if num_processes == 1:
        return super(MultiprocessRefactoringTool, self).refactor(
            items, write, doctests_only)
    try:
        import multiprocessing
    except ImportError:
        raise MultiprocessingUnsupported
    if self.queue is not None:
        raise RuntimeError("already doing multiple processes")
    self.queue = multiprocessing.JoinableQueue()
    self.output_lock = multiprocessing.Lock()
    processes = [multiprocessing.Process(target=self._child)
                 for i in range(num_processes)]
    try:
        for p in processes:
            p.start()
        super(MultiprocessRefactoringTool, self).refactor(items, write,
                                                          doctests_only)
    finally:
        self.queue.join()
        for i in range(num_processes):
            self.queue.put(None)
        for p in processes:
            if p.is_alive():
                p.join()
        self.queue = None
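For context, the parent above pairs a JoinableQueue with an output_lock and launches workers running self._child, which is not shown here. A generic sketch of what such a worker loop typically looks like (an assumption for illustration, not the actual lib2to3 _child implementation):

def worker_loop(queue, output_lock):
    while True:
        task = queue.get()
        if task is None:
            # Sentinel put by the parent in its finally block: acknowledge it and exit.
            queue.task_done()
            return
        try:
            with output_lock:
                # Serialize any console output across worker processes.
                print('processing %r' % (task,))
        finally:
            queue.task_done()

In the parent's finally block, queue.join() waits until every real task has been acknowledged, then one None sentinel per worker is enqueued so each process sees a shutdown signal before it is joined.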