This article collects typical usage examples of the Python method frogsBiom.BiomIO.write_count_table. If you are wondering how BiomIO.write_count_table is used, what it does, or what a real call looks like, the curated example below should help. You can also browse further usage examples of the class this method belongs to, frogsBiom.BiomIO.
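As a quick orientation, here is a minimal sketch distilled from the example below (the file paths are placeholders): a BIOM file is parsed with BiomIO.from_json and then dumped as a tab-separated count table, one row per observation and one column per sample.

from frogsBiom import BiomIO

# Minimal sketch based on the example below; "abundance.biom" and "count.tsv" are placeholder paths.
biom = BiomIO.from_json( "abundance.biom" )    # parse the BIOM (JSON) abundance file
BiomIO.write_count_table( "count.tsv", biom )  # write the counts as a TSV count table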
The section below presents 1 code example of the BiomIO.write_count_table method.
Example 1: main_process
# Required import: from frogsBiom import BiomIO [as alias]
# Or: from frogsBiom.BiomIO import write_count_table [as alias]
import os
import sys
import threading
import multiprocessing
from multiprocessing import Queue

from frogsBiom import BiomIO
# TmpFiles, chimera, remove_chimera_biom, remove_chimera_count, remove_chimera_fasta,
# get_obs_from_biom, get_obs_from_count and write_summary are defined elsewhere in the
# original FROGS script.

def main_process(args):
    tmp_files = TmpFiles(os.path.split(args.non_chimera)[0])
    try:
        # Default output abundance file
        if args.out_abundance is None:
            args.out_abundance = "count.tsv"
            if args.biom is not None:
                args.out_abundance = "abundance.biom"
        # Convert the BIOM abundance file into a temporary count table if needed
        count_table = args.count
        if args.biom is not None:
            count_table = tmp_files.add("tmp_count.tsv")
            biom = BiomIO.from_json( args.biom )
            BiomIO.write_count_table( count_table, biom )
            del biom
        # Get samples
        samples = dict()
        in_count_fh = open( count_table )
        header_line = in_count_fh.readline().strip()
        for sample_name in header_line.split()[1:]:
            samples[sample_name] = { 'fasta_path': tmp_files.add(sample_name + ".fasta"),
                                     'chimera_path': tmp_files.add(sample_name + ".chimera") }
        in_count_fh.close()
        # Find chimera
        log_detection = Queue()
        nb_processses_used = min( len(samples.keys()), args.nb_cpus )
        processes = [{'process':None, 'in_file':[], 'out_file':[], 'sample_name':[]} for idx in range(nb_processses_used)]
        # Set processes
        for idx, sample_name in enumerate(samples.keys()):
            process_idx = idx % nb_processses_used
            processes[process_idx]['sample_name'].append( sample_name )
            processes[process_idx]['in_file'].append( samples[sample_name]['fasta_path'] )
            processes[process_idx]['out_file'].append( samples[sample_name]['chimera_path'] )
        # Launch processes
        for idx, current_process in enumerate(processes):
            if idx == 0: # First process is threaded with parent job
                current_process['process'] = threading.Thread( target=chimera,
                                                               args=(current_process['sample_name'], args.sequences, count_table, current_process['in_file'], current_process['out_file'], log_detection, args.size_separator) )
            else: # Other processes run on different CPUs
                current_process['process'] = multiprocessing.Process( target=chimera,
                                                                      args=(current_process['sample_name'], args.sequences, count_table, current_process['in_file'], current_process['out_file'], log_detection, args.size_separator) )
            current_process['process'].start()
        # Wait for processes to end
        for current_process in processes:
            current_process['process'].join()
        # Check processes status
        for current_process in processes:
            if issubclass(current_process['process'].__class__, multiprocessing.Process) and current_process['process'].exitcode != 0:
                sys.exit(1)
        # Remove chimera
        log_remove_global = { 'nb_kept': 0,
                              'abundance_kept': 0,
                              'nb_removed': 0,
                              'abundance_removed': 0,
                              'nb_ambiguous': 0,
                              'abundance_ambiguous': 0 }
        log_remove_spl = {}
        if args.biom is not None:
            remove_chimera_biom( samples, args.biom, args.out_abundance, args.lenient_filter, log_remove_global, log_remove_spl )
            remove_chimera_fasta( args.sequences, args.non_chimera, get_obs_from_biom(args.out_abundance), args.size_separator )
        else:
            remove_chimera_count( samples, args.count, args.out_abundance, args.lenient_filter, log_remove_global, log_remove_spl )
            remove_chimera_fasta( args.sequences, args.non_chimera, get_obs_from_count(args.out_abundance), args.size_separator )
        # Summary
        write_summary( samples.keys(), log_detection, log_remove_global, log_remove_spl, args.summary )
    finally:
        if not args.debug:
            tmp_files.deleteAll()
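A note on the parallelisation pattern used above: the first batch of samples is handled by a threading.Thread inside the parent job, the remaining batches by separate multiprocessing.Process workers, and all workers report through a shared multiprocessing Queue. The self-contained sketch below illustrates only this pattern; worker(), the sample batches and the log entries are hypothetical stand-ins for the real chimera() function and its arguments.

import multiprocessing
import threading

def worker(sample_names, log_queue):
    # Hypothetical stand-in for the real chimera() worker:
    # handle each sample of the batch and push one log entry per sample.
    for name in sample_names:
        log_queue.put({'sample': name, 'status': 'done'})

if __name__ == "__main__":
    log_queue = multiprocessing.Queue()
    batches = [["splA"], ["splB"], ["splC"]]   # one batch of sample names per worker
    workers = []
    for idx, batch in enumerate(batches):
        if idx == 0:   # first batch runs as a thread inside the parent job
            workers.append(threading.Thread(target=worker, args=(batch, log_queue)))
        else:          # remaining batches run in their own processes
            workers.append(multiprocessing.Process(target=worker, args=(batch, log_queue)))
        workers[-1].start()
    results = [log_queue.get() for batch in batches for _ in batch]   # one log entry per sample
    for w in workers:
        w.join()
    print(results)

Running the first batch in the parent process saves one extra process while the other batches still get their own CPU; the exit-code check in the example only applies to the multiprocessing workers because a thread has no exit code.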