当前位置: 首页>>代码示例>>Python>>正文


Python Pool.append方法代码示例

本文整理汇总了Python中multiprocessing.Pool.append方法的典型用法代码示例。需要注意的是,multiprocessing.Pool 类本身并没有定义 append 方法——下列示例中调用 append 的 `pool`、`t` 等对象实际上是普通的 Python 列表(list)。如果您正苦于以下问题:Python Pool.append方法的具体用法?Python Pool.append怎么用?Python Pool.append使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在multiprocessing.Pool的用法示例。


在下文中一共展示了Pool.append方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: Pool

# 需要导入模块: from multiprocessing import Pool [as 别名]
# 或者: from multiprocessing.Pool import append [as 别名]
    # NOTE(review): fragment of a larger script (fric_vel.py) -- `f`, `resamp`,
    # `above`, `siglay`, `bfd`, `Pool`, `np` and `plt` are defined/imported
    # outside this view.  Python 2 code (uses xrange).

    # Reindex the date index to a datetime object. This is slow.
    #f = Pool().map(time_index,f)

    # Resample the raw data to mean data.
    # A fresh multiprocessing Pool is spawned for each map call; presumably
    # `f` is a list of pandas objects (they support .sub/.mean below) -- TODO confirm.
    f = Pool().map(resamp,f)

    # Cut speeds only above 1.0 m/s
    # `f1` keeps the unfiltered (resampled) data before `above` is applied.
    f1 = f
    f = Pool().map(above,f)

    # Find the differences between velocities at each sigma layer in the
    # datasets.
    # NOTE(review): the loop reads `f1` (unfiltered data), so the filtered
    # `f` produced by above() is never used here -- verify this is intended
    # (the commented-out line suggests `f` was used at some point).
    t = []
    for i in xrange(len(f)-1):
        #t.append(np.abs(f[0].sub(f[i+1])))
        t.append(f1[0].sub(f1[i+1]))

    # Plot the mean profile differences. Here we should be careful, no where
    # has the difference between ebb and flood tide been taken into account.
    # That should be added to the code.
    plt.figure()
    plt.rc('font',size='22')
    for i in xrange(len(t)):
        # One curve per dataset pair: mean velocity-difference profile against
        # sigma layer, labelled by `bfd[i]` -- presumably a bottom-friction
        # value; confirm against the rest of the script.
        plt.plot(t[i].mean(),siglay,label='BF = '+str(bfd[i]))
    plt.ylabel('Sigma Layer')
    plt.xlabel('Velocity Difference (m/s)')
    plt.grid()
    plt.legend(fontsize='18')
    plt.show()

开发者ID:Aidan-Bharath,项目名称:code_and_stuffs,代码行数:31,代码来源:fric_vel.py

示例2: len

# 需要导入模块: from multiprocessing import Pool [as 别名]
# 或者: from multiprocessing.Pool import append [as 别名]
        # NOTE(review): fragment -- `targz` belongs to code above this view.
        targz.close()

if __name__ == "__main__":
    # Command-line entry point.  Usage: pack.py input_path output_path
    # Python 2 code (xrange, the Queue module).
    if len(sys.argv) != 3:
        print("Usage: " + sys.argv[0] + " input_path output_path")
        exit(1)

    path = sys.argv[1]
    output_path = sys.argv[2]

    # Create the output directory if it does not exist yet.
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    # NOTE(review): this multiprocessing Pool is never used -- the name
    # `pool` is immediately shadowed by the thread list `pool = []` below.
    # It only burns 15 worker processes; consider removing it.
    pool = Pool(processes=15)
    # NOTE(review): `files_path` is read here before any assignment visible
    # in this fragment; presumably it is populated earlier in the file
    # (a list of lists being flattened here) -- verify.
    files_path = [item for sublist in files_path for item in sublist]

    # Work queue of file paths for the packing threads to consume.
    file_q = Queue.Queue()

    for filepath in files_path:
        file_q.put(path + '/' + filepath)

    # Fan out THREADS PackingThread workers, start them all, then wait for
    # all of them to drain the queue.
    pool = []
    for i in xrange(0, THREADS):
        pool.append(PackingThread(name=i, file_q=file_q, output_path=output_path))
    for thread in pool:
        thread.start()
    for thread in pool:
        thread.join()
    

开发者ID:anukat2015,项目名称:CERMINE,代码行数:30,代码来源:pack.py

示例3: build_project

# 需要导入模块: from multiprocessing import Pool [as 别名]
# 或者: from multiprocessing.Pool import append [as 别名]
def build_project(files, output_file, compile_args = ['-O2', '-g', '-mtune=native', '-fopenmp'], link_args = None, build_dir = 'build', force_rebuild = False, compiler = 'g++', linker = None, include_paths = [], library_paths = [], concurrency = cpu_count(), execute = False, libraries = []):
	""" Build a buncha files at once with concurrency, linking at the end. Uses build_file in parallel. """
	build_start = time()

	# Make lists of source and header files. They are treated differently. Headers are optional!
	header_files = []
	src_files = []
	for f in files:
		if f.endswith('.h') or f.endswith('.hpp'):
			header_files.append(f)
		elif f.endswith('.c') or f.endswith('.cpp'):
			src_files.append(f)
		else:
			print 'Unknown file type:', f

	# At present we don't build headers-only.
	if not src_files:
		print 'No source files found. Nothing to do.'
		return False

	needs_linking = False

	# Compile headers first, if any
	if header_files:
		return_vals = []
		build_args = [(f, compile_args, build_dir, force_rebuild, compiler, include_paths, library_paths, libraries) for f in header_files]
		if concurrency == 1:
			for args in build_args:
				return_vals.append(_build_file_tuple(args))
		else:
			return_vals = Pool(concurrency).map(_build_file_tuple, build_args)

		for r in return_vals:
			if not r:
				print 'Project build failed at headers :('
				return False
			if r[1]:
				# If any files actually were built, we need to link again
				needs_linking = True

	# Compile source files. Uses Pool.map for concurrency
	return_vals = []
	build_args = [(f, compile_args + ['-H',], build_dir, force_rebuild, compiler, include_paths, library_paths, libraries) for f in src_files]
	if concurrency == 1:
		for args in build_args:
			return_vals.append(_build_file_tuple(args))
	else:
		return_vals = Pool(concurrency).map(_build_file_tuple, build_args)

	for r in return_vals:
		if not r:
			print 'Project build failed :('
			return False
		if r[1]:
			# If any files actually were built, we need to link again
			needs_linking = True

	if not needs_linking:
		print 'Nothing modified. No build required.'
		if not execute:
			return True
	else:
		if not linker:
			linker = compiler
		if not link_args:
			link_args = compile_args

		# Filenames that need linking. These were returned by the compiler. Need to link all, not just those that were recompiled!
		link_files = [a[0] for a in return_vals]

		# Execute the linker
		link_result = run_cmd(linker, link_args + ['-L' + p for p in library_paths] + ['-l' + l for l in libraries] + ['-o', output_file] + link_files)

		if link_result[1] != 0:
			print '\033[1;31mLinking Failed\033[0m (exit code:', str(link_result[1]) + '):'
			print link_result[0]
			return False
		print '\033[1;32mLinking Succeeded\033[0m, built in', round(time() - build_start, 1), 'seconds'

	if execute:
		# If binary doesn't have a path, prefix with ./ so it runs
		if '/' not in output_file:
			output_file = './' + output_file

		# Execute the app, printing output as it comes
		run_result = run_cmd(output_file, print_output = True)
		if run_result[1] != 0:
			return False
	return True
开发者ID:decultured,项目名称:builder,代码行数:91,代码来源:api.py


注:本文中的multiprocessing.Pool.append方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。