

Python Array.raw Method Code Examples

This article collects typical usage examples of the Python multiprocessing.Array.raw method. If you are wondering what Array.raw does, how to use it, or what it looks like in real code, the curated examples below may help. You can also explore other usage examples of multiprocessing.Array.


Two code examples of the Array.raw method are shown below, sorted by popularity by default.
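Before diving into the examples, here is a minimal sketch (written for this article, not taken from the projects below) of the pattern both examples rely on: a multiprocessing.Array created with type code 'c' exposes a raw property, which lets a whole byte buffer be written to or read from shared memory in one step, for instance to hand a pickled object to child processes.

import pickle
from multiprocessing import Array, Process

def child(buf):
    # unpickle the payload directly from the shared byte buffer
    data = pickle.loads(buf.raw)
    print("child received:", data)

if __name__ == '__main__':
    blob = pickle.dumps({"step": 1}, pickle.HIGHEST_PROTOCOL)
    buf = Array('c', len(blob))   # char array sized exactly to the payload
    buf.raw = blob                # write all bytes with a single assignment
    p = Process(target=child, args=(buf,))
    p.start()
    p.join()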

Example 1: run

# Required import: from multiprocessing import Array [as alias]
# Or: from multiprocessing.Array import raw [as alias]
def run(args):
    # create dummy environment to be able to create model
    env = gym.make(args.environment)
    assert isinstance(env.observation_space, Box)
    assert isinstance(env.action_space, Discrete)
    print("Observation space:", env.observation_space)
    print("Action space:", env.action_space)

    # create main model
    model = create_model(env, args)
    model.summary()
    env.close()

    # for better compatibility with Theano and Tensorflow
    multiprocessing.set_start_method('spawn')

    # create shared buffer for sharing weights
    blob = pickle.dumps(model.get_weights(), pickle.HIGHEST_PROTOCOL)
    shared_buffer = Array('c', len(blob))
    shared_buffer.raw = blob

    # force runner processes to use cpu
    os.environ["CUDA_VISIBLE_DEVICES"] = ""

    # create fifos and threads for all runners
    fifos = []
    for i in range(args.num_runners):
        fifo = Queue(args.queue_length)
        fifos.append(fifo)
        process = Process(target=runner, args=(shared_buffer, fifo, args))
        process.start()

    # start trainer in main thread
    trainer(model, fifos, shared_buffer, args)
Author: tambetm | Project: gymexperiments | Lines: 36 | Source: a2c.py
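The runner function itself is not part of this excerpt; presumably each runner process periodically restores the latest weights from shared_buffer. A hypothetical sketch of that read side (the function name and its use inside runner are assumptions, not code from gymexperiments):

def load_shared_weights(model, shared_buffer):
    # read the full shared byte buffer and unpickle the weight list
    weights = pickle.loads(shared_buffer.raw)
    model.set_weights(weights)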

Example 2: main

# Required import: from multiprocessing import Array [as alias]
# Or: from multiprocessing.Array import raw [as alias]
def main():
	global WORDLIST, HASHFILE, words, result, curr, total, num_words, curr_words

	#
	# process files
	#

	print("[*] reading hashes...")
	hashes = open(HASHFILE, 'r')
	hashlist = []
	for line in hashes:
		data = line.split(":")
		if len(data) > 1:
			hashv = data[0].strip()
			salt = data[1].strip()
			hashlist.append((hashv, salt))
	hashes.close() 


	print("[*] parsing wordlist...")
	words = Array('c', SHARED_MEM_SIZE, lock=False)		# allocate shared memory segment
	# get line count
	wordlist_file = open(WORDLIST, 'r')
	lines = 0
	for line in wordlist_file:
		lines += 1
	
	total = lines * len(hashlist)
	curr = Value('i', 0)
	curr_words = Value('i', 0)
	wordlist_file.seek(0)	# get back to beginning



	#
	# crack
	#
	print("[*] beginning cracking")
	pool = Pool(processes=NUM_PROCESSES)
	results = []

	current_char_count = 0
	words_raw = ""
	for line in wordlist_file:
		length = len(line)
		if length + current_char_count < SHARED_MEM_SIZE:
			words_raw += line
			current_char_count += length
		else:
			print("[*] next round")
			curr_words.value = len(words_raw.split("\n"))
			words.raw = words_raw + (SHARED_MEM_SIZE - len(words_raw)) * '0'	# clear space
			words_raw = line
			current_char_count = length

			# let workers do work!
			results.extend(pool.map(entry, hashlist))

			# remove cracked hashes
			# TODO

	print("[*] final round")
	curr_words.value = len(words_raw.split("\n"))
	words.raw = words_raw + (SHARED_MEM_SIZE - len(words_raw)) * '0'
	results.extend(pool.map(entry, hashlist))

	print("[*] done")

	for result in results:
		if result is not None:
			print("%s:%s" % (result))
Author: gdemo1 | Project: hash-tools | Lines: 73 | Source: cracker.py
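Note that Example 2 assigns a str to words.raw, which matches Python 2 semantics; under Python 3 a 'c'-typed Array holds bytes, so the buffer would need to be encoded and padded with bytes instead. A sketch of the Python 3 equivalent of that assignment (variable names as in the example above):

payload = words_raw.encode('utf-8')
words.raw = payload + b'0' * (SHARED_MEM_SIZE - len(payload))    # pad to the full segment size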


Note: the multiprocessing.Array.raw examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs; the snippets were selected from projects contributed by various open-source developers. Copyright of the source code remains with the original authors; refer to each project's license before redistributing or using it. Do not reproduce without permission.