This article collects typical usage examples of the Python method multiprocessing.Value.value. If you have been wondering what exactly Value.value does and how to use it, the curated code examples below may help. You can also read further about the class this method belongs to, multiprocessing.Value.
Below, 15 code examples of the Value.value method are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
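Before the collected examples, here is a minimal, self-contained sketch of how a shared multiprocessing.Value is typically read and written through its .value attribute; the worker function, counter name, and loop counts are illustrative only and are not taken from any of the snippets below.

from multiprocessing import Process, Value

def worker(counter):
    # Each write to .value goes into shared memory visible to the parent process.
    for _ in range(1000):
        with counter.get_lock():  # the synchronized wrapper carries its own lock
            counter.value += 1

if __name__ == "__main__":
    counter = Value('i', 0)  # 'i' = signed C int, initialized to 0
    procs = [Process(target=worker, args=(counter,)) for _ in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(counter.value)  # 4000 when every increment is lock-protected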
Example 1: execute_task
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def execute_task(self, website: Website, busy: Value, post_id: str, comment_id: str):
    busy.value = 1
    if os.path.exists("data.json"):
        os.remove("data.json")
    print("Started crawling task")
    process = CrawlerProcess(get_project_settings())
    process.crawl("od_links", base_url=website.url)
    process.start()
    print("Done crawling")
    self.db.import_json("data.json", website)
    os.remove("data.json")
    print("Imported in SQLite3")
    if post_id:
        # Reply to post
        stats = self.db.get_website_stats(website.id)
        comment = self.reddit_bot.get_comment({"": stats}, website.id)
        print(comment)
        if "total_size" in stats and stats["total_size"] > 10000000:
            post = self.reddit_bot.reddit.submission(post_id)
            self.reddit_bot.reply(post, comment)
            pass
        else:
            self.reddit_bot.log_crawl(post_id)
    elif comment_id:
        # Reply to comment
        stats = self.db.get_website_stats(website.id)
        comment = self.reddit_bot.get_comment({"There you go!": stats}, website.id)
        print(comment)
        reddit_comment = self.reddit_bot.reddit.comment(comment_id)
        self.reddit_bot.reply(reddit_comment, comment)
    busy.value = 0
    print("Done crawling task")
Example 2: call
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def call(args, stdout=None, stderr=None, stdin=None, daemonize=False,
         preexec_fn=None, shell=False, cwd=None, env=None):
    """
    Run an external command in a separate process and detach it from the current process. Except for
    `stdout`, `stderr`, and `stdin`, all file descriptors are closed after forking. If `daemonize`
    is True then the parent process exits. All stdio is redirected to `os.devnull` unless
    specified. The `preexec_fn`, `shell`, `cwd`, and `env` parameters are the same as their `Popen`
    counterparts. Return the PID of the child process if not daemonized.
    """
    stream = lambda s, m: s is None and os.open(os.devnull, m) or s
    stdout = stream(stdout, os.O_WRONLY)
    stderr = stream(stderr, os.O_WRONLY)
    stdin = stream(stdin, os.O_RDONLY)
    shared_pid = Value('i', 0)
    pid = os.fork()
    if pid > 0:
        os.waitpid(pid, 0)
        child_pid = shared_pid.value
        del shared_pid
        if daemonize:
            sys.exit(0)
        return child_pid
    else:
        os.setsid()
        proc = subprocess.Popen(args, stdout=stdout, stderr=stderr, stdin=stdin, close_fds=True,
                                preexec_fn=preexec_fn, shell=shell, cwd=cwd, env=env)
        shared_pid.value = proc.pid
        os._exit(0)
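A hypothetical invocation of the call() helper above; the command line and the daemonize flag shown here are illustrative only and not taken from the original project.

# Detach a long-running command from the current process and keep its PID.
child_pid = call(["sleep", "60"], daemonize=False)
print("detached child PID:", child_pid)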
Example 3: main
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def main():
    # Begin Init
    settings.setup()
    from common.safeprint import safeprint
    try:
        import miniupnpc
    except:
        safeprint("Dependency miniupnpc is not installed. Running in outbound only mode")
        settings.config['outbound'] = True
    safeprint("settings are:")
    safeprint(settings.config)
    queue = Queue()
    live = Value('b', True)
    ear = listener(settings.config['port'], settings.config['outbound'], queue, live, settings.config['server'])
    ear.daemon = True
    ear.items = sync()
    ear.start()
    mouth = propagator(settings.config['port'] + 1, live)
    mouth.daemon = True
    mouth.items = ear.items
    mouth.start()
    feedback = []
    stamp = time()
    while queue.empty():
        if time() - 5 > stamp:
            break  # pragma: no cover
    try:
        feedback = queue.get(False)
    except:  # pragma: no cover
        safeprint("No feedback received from listener")
    ext_ip = ""    # Does this affect peers?
    ext_port = -1  # Does this affect peers?
    if feedback != []:
        settings.outbound = feedback[0]
        if settings.outbound is not True:
            ext_ip = feedback[1]
            ext_port = feedback[2]
    initializePeerConnections(settings.config['port'], ext_ip, ext_port)
    # End Init
    # Begin main loop
    if settings.config.get('seed'):
        safeprint("Seed mode activated")
        try:
            while True and not settings.config.get('test'):
                sleep(0.1)
        except KeyboardInterrupt:
            safeprint("Keyboard Interrupt")
    elif settings.config.get('server'):
        safeprint("Server mode activated")
    else:
        safeprint("Client mode activated")
    # End main loop
    # Begin shutdown
    safeprint("Beginning exit process")
    live.value = False
    settings.saveSettings()
    saveToFile()
    bounty.saveToFile()
Example 4: setup_heartbeat
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def setup_heartbeat(self, client_controller):
    cond = multiprocessing.Condition()
    s_init_finish = Value('i', 0)
    do_sample = Value('i', 0)
    do_sample_lock = Lock()
    server_process = multiprocessing.Process(
        target=self.server_heart_beat,
        args=(cond, s_init_finish, do_sample, do_sample_lock))
    server_process.daemon = False
    server_process.start()
    logger.info("Waiting for server init ...")
    cond.acquire()
    while (s_init_finish.value == 0):
        cond.wait()
    if s_init_finish.value == 5:
        logger.error("Waiting for server init ... FAIL")
        raise RuntimeError("server init failed.")
    cond.release()
    logger.info("Waiting for server init ... Done")
    # let all clients start running the benchmark
    client_controller.client_run(do_sample, do_sample_lock)
    cond.acquire()
    s_init_finish.value = 0
    cond.release()
    return server_process
Example 5: main
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def main():
    settings.setup()
    try:
        import miniupnpc
    except:
        safeprint("Dependency miniupnpc is not installed. Running in outbound only mode")
        settings.config['outbound'] = True
    safeprint("settings are:")
    safeprint(settings.config)
    queue = Queue()
    live = Value('b', True)
    ear = listener(settings.config['port'], settings.config['outbound'], queue, live, settings.config['server'])
    ear.daemon = True
    ear.start()
    feedback = []
    stamp = time()
    while queue.empty():
        if time() - 5 > stamp:
            break  # pragma: no cover
    try:
        feedback = queue.get(False)
    except:  # pragma: no cover
        safeprint("No feedback received from listener")
    ext_ip = ""    # Does this affect peers?
    ext_port = -1  # Does this affect peers?
    if feedback != []:
        settings.outbound = feedback[0]
        if settings.outbound is not True:
            ext_ip = feedback[1]
            ext_port = feedback[2]
    initializePeerConnections(settings.config['port'], ext_ip, ext_port)
    live.value = False
Example 6: hanging_get
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def hanging_get(my_id, messages, initial_peer_id):
    remote_peer_id = Value("i", initial_peer_id)
    remote_sender = Process(target=to_remote_server, args=(5550, my_id, remote_peer_id))
    remote_sender.start()  # Process.start() returns None, so keep the Process object rather than its return value
    while True:
        r = requests.get('http://localhost:8888/wait?peer_id=' + str(my_id))
        if r.status_code == 200:
            if int(r.headers['pragma']) == my_id:
                connected = r.text.split("\n")
                for l in connected:
                    info = l.strip().split(",")
                    if len(info) == 3 and info[0] == "receiver" and info[2] == '1':
                        remote_peer_id.value = int(info[1])
                    if len(info) == 3 and info[0] == "receiver" and info[2] == '0':
                        remote_peer_id.value = -1
            else:
                messages.put(Message(int(r.headers['pragma']), r.text))
Example 7: main
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def main():
    running = Value(c_int, 1)
    readQueue = Queue()
    reader = Process(target=Reader("/dev/ttyUSB0", 9600), args=(running, readQueue))
    worker = Process(target=Worker(), args=(running, readQueue))
    reader.start()
    worker.start()
    time.sleep(5)
    running.value = 0
    reader.join()
    worker.join()
Example 8: camstream
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def camstream():
    print "CAMera STREAMer (OpenCV " + cv2.__version__ + ")"
    print "main(): OS: {}".format(os.name)
    # * Start CameraStreamer process
    print "main(): Starting CameraStreamer process..."
    if os.name == 'nt':  # [Windows]
        # ** Create shared objects (NOTE only necessary on Windows since it uses a different multiprocessing implementation)
        print "main(): [Windows] Creating shared objects..."
        # *** Stay alive flag
        stayAliveObj = Value(c_bool, True)
        # *** Frame counter
        frameCountObj = Value('i', 0)
        # *** Image array
        image = np.zeros((camera_frame_height, camera_frame_width, camera_frame_depth), dtype=np.uint8)
        imageShape = image.shape
        imageSize = image.size
        image.shape = imageSize  # flatten numpy array
        imageObj = Array(c_ubyte, image)  # create a synchronized shared array
        # *** Image shape
        imageShapeObj = Array('i', imageShape)
        cameraStreamerProcess = CameraStreamer(stayAliveObj, frameCountObj, imageObj, imageShapeObj)
    else:  # [POSIX]
        cameraStreamerProcess = CameraStreamer()
        # ** Grab generated shared objects to share with other child processes
        print "main(): [POSIX] Getting shared objects from CameraStreamer..."
        stayAliveObj = cameraStreamerProcess.stayAliveObj
        frameCountObj = cameraStreamerProcess.frameCountObj
        imageObj = cameraStreamerProcess.imageObj
        imageShapeObj = cameraStreamerProcess.imageShapeObj
    cameraStreamerProcess.start()
    # * Start StreamViewer process
    print "main(): Starting StreamViewer process..."
    streamViewerProcess = StreamViewer(stayAliveObj, frameCountObj, imageObj, imageShapeObj)
    streamViewerProcess.start()
    # * Wait for child processes to finish
    print "main(): Waiting for child processes to finish..."
    try:
        streamViewerProcess.join()
        cameraStreamerProcess.join()
    except KeyboardInterrupt:
        stayAliveObj.value = False
        streamViewerProcess.join()
        cameraStreamerProcess.join()
    print "main(): Done."
Example 9: start
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def start(self, reload_from=None):
    """Start this server process.
    :param int reload_from: Optional, the PID of a running game server
        process that this process should reload from
    :returns None:
    """
    assert not self._process, "server instance already started"
    pid = Value("i")
    self._process = Process(target=self._start,
                            args=(pid, socket_queue),
                            kwargs={"reload_from": reload_from})
    self._process.start()
    pid.value = self._process.pid
Example 10: send_mldquery_regularly
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def send_mldquery_regularly(self):
    self.logger.debug("")
    requraly_query_type = self.config[const.REGURALY_QUERY_TYPE]
    reguraly_query_interval = self.config[const.REGURALY_QUERY_INTERVAL]
    mc_query_interval = self.config[const.MC_QUERY_INTERVAL]
    # Before the first send, wait 1/4 of the periodic query interval
    time.sleep(reguraly_query_interval / 4)
    # General Query
    if requraly_query_type == self.GENERAL_QUERY:
        self.logger.debug("create general query")
        mc_info = {const.MC_TAG_MC_ADDR: const.DELIMIT_DOUBLE_COLON,
                   const.MC_TAG_SERV_IP: None}
        while self.SEND_LOOP:
            self.send_mldquery([mc_info])
            # Timeout check
            self.check_user_timeout()
            time.sleep(reguraly_query_interval - self.QUERY_QRV)
    # Specific Query
    elif requraly_query_type == self.SPECIFIC_QUERY:
        self.logger.debug("create specific query")
        next_interval = Value(ctypes.c_bool, False)
        while self.SEND_LOOP:
            query_proc = Process(
                target=self.wait_query_interval,
                args=(next_interval, reguraly_query_interval))
            query_proc.daemon = True
            query_proc.start()
            self.logger.debug(
                "next_interval : %s", str(next_interval.value))
            self.send_mldquery(
                self.mc_info_list, mc_query_interval, next_interval)
            # Timeout check
            self.check_user_timeout()
            # If the periodic query interval has not yet elapsed, wait for it
            if not next_interval.value:
                self.logger.debug(
                    "waiting query interval(%d sec)...",
                    reguraly_query_interval)
                query_proc.join()
            next_interval.value = False
            query_proc.terminate()
Example 11: main
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-f', dest='logfile', required=True)
    arg_parser.add_argument('-t', dest='sleep', type=int, default=1)
    args = arg_parser.parse_args()
    count = Value('i', 0)
    p = Process(target=log_counter, args=(args.logfile, count,))
    p.start()
    while True:
        time.sleep(args.sleep)
        print('{0} events/{1}s'.format(count.value, args.sleep))
        count.value = 0
    p.join()
Example 12: main
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def main():
    guardian = Heimdall()
    # zoeh = Zoehmacarena()
    # guardian.add_bot(zoeh)
    # guardian.main_loop()
    cont = True
    ispaused = False
    # bobby = Bob()
    state = Value("i", 0)
    l = Lock()
    # proc = Process(target=bobby.main, args=(l, state,))
    print("starting")
    # proc.start()
    while cont:
        if ispaused:
            command = raw_input("Heimdall[paused]: ")
        else:
            command = raw_input("Heimdall[running]: ")
        if command == "quit":
            cont = False
            state.value = -1
        elif command == "pause":
            l.acquire()
            print("paused")
            ispaused = True
        elif command == "start":
            print("unpausing")
            l.release()
            ispaused = False
        else:
            print("unknown command")
    # proc.join()
    print("finished")
    return
Example 13: run_test
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def run_test(*args):
    # logins = get_bm_config('ADMINS')
    # logins.extend([get_bm_config('API_USER_PREFIX') % i
    #                for i in xrange(args['proc'])])
    # msg_count = get_bm_config('MSG_COUNT')
    # host = args['host']
    # port = args['port']
    # visit_args = [(login, host, port, msg_count) for login in logins]
    visit_args = [[i] for i in [1, 2]]
    term = Value(c_int, 0)
    queue = Queue()
    logging.debug('users pool')
    pool = Pool(initializer=init,
                initargs=(term, queue),
                processes=10,
                maxtasksperchild=1)
    try:
        swarm = pool.map(visit, [i for i in range(10)])
        logging.debug('finishing...')
    except KeyboardInterrupt:
        logging.info('terminating...')
        term.value = 1
        pool.terminate()
Example 14: server
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def server(backlog=5):
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_socket.bind(('', PORT))
    server_socket.listen(backlog)
    nclients = Value('i', 0)
    processes = []
    schedule.every(1).seconds.do(update_active_clients, nclients)
    try:
        while 1:
            conn, addr = server_socket.accept()
            print "new client " + str(addr)
            proc = Process(target=client_process, args=(conn, server_socket, nclients))
            processes.append(proc)
            proc.start()
            conn.close()
            nclients.value = len(multiprocessing.active_children())
    finally:
        # Ctrl-C
        for proc in processes:
            proc.terminate()
        server_socket.close()
Example 15: main
# Required import: from multiprocessing import Value [as alias]
# Or: from multiprocessing.Value import value [as alias]
def main():
    with open("/media/extradikke/UbuntuData/wikipedia_data/data_dump/enwiki-20150112-pages-articles.xml",
              mode='r') as big_file:
        fsize = os.stat(
            "/media/extradikke/UbuntuData/wikipedia_data/data_dump/enwiki-20150112-pages-articles.xml").st_size
        stream = pulldom.parse(big_file)
        # sys.setrecursionlimit(10000)
        redirect_queue = Queue(100)
        pages_queue = Queue(100)
        rawnodes_queue = Queue(1000)
        counter = 1
        redirect_counter = 0
        running = Value('b', True)
        link_extractor1 = Process(target=link_extractor, args=(rawnodes_queue, redirect_queue, pages_queue, running, ))
        link_extractor2 = Process(target=link_extractor, args=(rawnodes_queue, redirect_queue, pages_queue, running, ))
        # link_extractor3 = Process(target=link_extractor, args=(rawnodes_queue, redirect_queue, pages_queue, running, ))
        # link_extractor4 = Process(target=link_extractor, args=(rawnodes_queue, redirect_queue, pages_queue, running, ))
        xml_openeger = Process(target=opener, args=(rawnodes_queue, pages_queue, redirect_queue))
        redirects = Process(target=redirect_saver, args=(redirect_queue, running,))
        page_saver = Process(target=save_pages, args=(pages_queue, running,))
        #
        xml_openeger.start()
        processes = [link_extractor1, link_extractor2, redirects, page_saver]
        for process in processes:
            process.start()
        xml_openeger.join()
        while True:
            print("here???")
            if redirect_queue.empty() and pages_queue.empty() and rawnodes_queue.empty():
                running.value = False
                for process in processes:
                    process.join()
                break