本文整理汇总了Python中qiime.util.load_qiime_config函数的典型用法代码示例。如果您正苦于以下问题:Python load_qiime_config函数的具体用法?Python load_qiime_config怎么用?Python load_qiime_config使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了load_qiime_config函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: setUp
def setUp(self):
    """Set up per-test fixtures: temp dirs, a workflow output dir, QIIME
    parameters, and a watchdog alarm that times the test out.

    Relies on module-level names: load_qiime_config, get_tmp_filename,
    parse_qiime_parameters, qiime_parameters_f, timeout, and
    allowed_seconds_per_test.
    """
    self.qiime_config = load_qiime_config()
    self.dirs_to_remove = []
    self.files_to_remove = []
    # this is specific to the web-apps only
    test_dir = abspath(dirname(__file__))
    self.fna_original_fp = os.path.join(test_dir, 'support_files', \
        'test.fna')
    # fall back to /tmp/ when qiime_config has no temp_dir entry
    tmp_dir = self.qiime_config['temp_dir'] or '/tmp/'
    if not exists(tmp_dir):
        makedirs(tmp_dir)
        # if test creates the temp dir, also remove it
        self.dirs_to_remove.append(tmp_dir)
    # unique output dir name for this test's workflow results
    self.wf_out = get_tmp_filename(tmp_dir=tmp_dir,
        prefix='qiime_wf_out',suffix='',result_constructor=str)
    if not exists(self.wf_out):
        makedirs(self.wf_out)
    self.dirs_to_remove.append(self.wf_out)
    #print self.wf_out
    # fall back to ./ when qiime_config has no working_dir entry
    working_dir = self.qiime_config['working_dir'] or './'
    jobs_dir = join(working_dir,'jobs')
    if not exists(jobs_dir):
        # only clean up the jobs dir if it doesn't already exist
        self.dirs_to_remove.append(jobs_dir)
    self.params = parse_qiime_parameters(qiime_parameters_f.split('\n'))
    # arm SIGALRM so a hung test fails instead of blocking the suite
    signal.signal(signal.SIGALRM, timeout)
    # set the 'alarm' to go off in allowed_seconds seconds
    signal.alarm(allowed_seconds_per_test)
示例2: setUp
def setUp(self):
    """Build square and non-square heatmap fixtures for the plot tests."""
    self.qiime_config = load_qiime_config()
    # Default to /tmp/ when qiime_config has no temp_dir entry
    self.tmp_dir = self.qiime_config['temp_dir'] or '/tmp/'

    sample_ids = ["Sample1", "Sample2", "Sample3", "Sample4"]

    # Square-matrix fixture: 4 samples on both axes
    self.plot_name = "example_name"
    self.headers = {HEADERS_HOR: list(sample_ids),
                    HEADERS_VER: list(sample_ids)}
    self.matrix = matrix
    # (low, high) value interval -> (bin index, legend label)
    self.trans_values = {(None, None): (0, ""),
                         (0.0, 0.25): (1, "(0-25%)"),
                         (0.25, 0.5): (2, "(25-50%)"),
                         (0.5, 0.75): (3, "(50-75%)"),
                         (0.75, 1.0): (4, "(75-100%)")}
    self.output_dir = path.join(self.tmp_dir, self.plot_name)

    # Non-square fixture: 4 columns but only 3 rows
    self.plot_name_ns = "not_a_square_matrix"
    self.headers_ns = {HEADERS_HOR: list(sample_ids),
                       HEADERS_VER: sample_ids[:3]}
    self.matrix_ns = not_a_square_matrix
    self.output_dir_ns = path.join(self.tmp_dir, self.plot_name_ns)

    self._paths_to_clean_up = []
    self._dirs_to_clean_up = []
示例3: setUp
def setUp(self):
    """Prepare a temp input filename and the support-file line fixtures."""
    self._paths_to_clean_up = []
    self.support_lines = support_lines.splitlines()
    config = load_qiime_config()
    self.qiime_config = config
    # Use the configured temp dir, defaulting to /tmp/ when unset
    self.tmp_dir = config["temp_dir"] or "/tmp/"
    self.input_file = get_tmp_filename(tmp_dir=self.tmp_dir)
示例4: format_jnlp_file_lines
def format_jnlp_file_lines(web_flag, url, tep_fp):
    """Build the lines of a jnlp launcher file for TopiaryExplorer.

    web_flag: if True, point the jnlp at the hosted TopiaryExplorer app;
        otherwise use topiaryexplorer_project_dir from qiime_config.
    url: optional URL of the tep file; when falsy, the local tep path is
        embedded instead.
    tep_fp: filepath of the tep file (used when url is not provided).

    Returns a list of text chunks that, concatenated, form the jnlp file.
    """
    # write the jnlp header
    lines = [jnlp_top_block]
    # write the location of the TopiaryExplorer codebase
    if web_flag:
        lines += ['http://topiaryexplorer.sourceforge.net/app/']
    else:
        topiaryexplorer_project_dir = \
            load_qiime_config()['topiaryexplorer_project_dir']
        if topiaryexplorer_project_dir:
            lines += ['file:' + topiaryexplorer_project_dir]
        else:
            # py2/py3-compatible print call (was a py2 print statement);
            # also fixes the 'sucessfully' typo in the message
            print("WARNING: Couldn't create jnlp file - "
                  "topiaryexplorer_project_dir is not defined in your "
                  "qiime_config. tep file was created successfully.")
    # write the jnlp body text: the tep data source (remote or local)
    lines += [jnlp_middle_block]
    if url:
        lines += [url]
    else:
        lines += [abspath(tep_fp)]
    # write the jnlp footer
    lines += [jnlp_bottom_block]
    return lines
示例5: summarize_otus
def summarize_otus(processed_dir):
    """Summarize per-library OTU counts for a processed study.

    processed_dir: directory holding the gg_97_otus results.

    Generates gg_97_otus/per_library_stats.txt by shelling out to QIIME's
    per_library_stats.py if the file does not already exist, then parses it.

    Returns (header_lines, otu_summary_dict) as produced by parse_log_file.
    """
    per_library_stats_file = join(processed_dir,
                                  'gg_97_otus/per_library_stats.txt')
    # Generate the per_library_stats_file if it doesn't already exist
    if not exists(per_library_stats_file):
        qiime_config = load_qiime_config()
        biom_file = join(processed_dir,
                         'gg_97_otus/exact_uclust_ref_otu_table.biom')
        python_exe_fp = qiime_config['python_exe_fp']
        script_dir = get_qiime_scripts_dir()
        per_library_stats_script = join(script_dir, 'per_library_stats.py')
        command = '{0} {1} -i {2}'.format(python_exe_fp,
                                          per_library_stats_script,
                                          biom_file)
        # Run the script; stderr is folded into stdout.  communicate()
        # both waits and drains the pipe, avoiding the deadlock that
        # wait()-then-read() risks when output exceeds the pipe buffer.
        proc = Popen(command, shell=True, universal_newlines=True,
                     stdout=PIPE, stderr=STDOUT)
        stats_output = proc.communicate()[0]
        # NOTE: the exit status is intentionally ignored (best effort),
        # matching the original behavior.
        # Context manager guarantees the stats file is closed on error.
        with open(per_library_stats_file, 'w') as f:
            f.write(stats_output)
    # File exists, parse out details
    start_lines = ['Seqs/sample detail:']
    header_lines, otu_summary_dict = parse_log_file(per_library_stats_file,
                                                    start_lines)
    return header_lines, otu_summary_dict
示例6: setUp
def setUp(self):
    """Create scratch output space and configure quick parallel runs."""
    self.files_to_remove = []
    self.dirs_to_remove = []

    # Throw-away output directory under QIIME's temp dir
    scratch_parent = get_qiime_temp_dir()
    self.test_out = mkdtemp(dir=scratch_parent,
                            prefix='core_qiime_analyses_test_',
                            suffix='')
    self.dirs_to_remove.append(self.test_out)

    # Input data plus a qiime_config tweaked for fast parallel tests
    self.test_data = get_test_data_fps()
    self.qiime_config = load_qiime_config()
    self.qiime_config['jobs_to_start'] = 2
    self.qiime_config['seconds_to_sleep'] = 1

    # Silence stderr for the duration of the test: one of the system
    # calls made by the workflow emits a warning that
    # warnings.filterwarnings can't intercept, because it originates in
    # the spawned process.  Trick from:
    # http://stackoverflow.com/questions/9949633/suppressing-print-as-stdout-python
    self.saved_stderr = sys.stderr
    sys.stderr = StringIO()

    # Abort any test still running after ten minutes
    initiate_timeout(600)
示例7: adjust_workers
def adjust_workers(num_flows, num_cpus, worker_sockets, log_fh=None):
    """Stop workers no longer needed.

    num_flows: number of flowgrams remaining
    num_cpus: number of CPUs currently used
    worker_sockets: list of connected sockets; mutated in place — released
        workers are popped off the end and their sockets closed
    log_fh: open filehandle to a log file, or None to disable logging

    Returns the new number of CPUs.

    Raises ValueError if the CPU/socket bookkeeping becomes inconsistent.
    """
    qiime_config = load_qiime_config()
    min_per_core = int(qiime_config['denoiser_min_per_core'])
    # Only shed workers when even one fewer CPU could handle the load
    if num_flows < (num_cpus - 1) * min_per_core:
        if log_fh:
            log_fh.write("Adjusting number of workers:\n")
            log_fh.write("flows: %d cpus:%d\n" % (num_flows, num_cpus))
        # TODO: make sure this works with __future__ division
        per_core = max(min_per_core, (num_flows / num_cpus) + 1)
        for i in range(num_cpus):
            # Any core whose slice starts beyond the remaining flows is
            # surplus: release one worker per such core.
            if i * per_core > num_flows:
                worker_sock = worker_sockets.pop()
                worker_sock.close()
                num_cpus = num_cpus - 1
                if log_fh:
                    log_fh.write("released worker %d\n" % i)
    if log_fh:
        log_fh.write("New number of cpus:%d\n" % num_cpus)
    # Sanity check: at least one CPU, and exactly one socket per CPU.
    # raise-as-call is valid in both Python 2 and 3 (the original
    # 'raise ValueError, "..."' form is py2-only).
    if num_cpus == 0 or num_cpus != len(worker_sockets):
        raise ValueError("Adjust_workers screwed up!")
    return num_cpus
示例8: setUp
def setUp(self):
    """Build fixtures for the core-analyses workflow tests."""
    self.test_data = get_test_data_fps()
    self.files_to_remove = []
    self.dirs_to_remove = []

    # Scratch output directory inside QIIME's temp dir
    base_tmp = get_qiime_temp_dir()
    self.test_out = mkdtemp(dir=base_tmp,
                            prefix='core_qiime_analyses_test_',
                            suffix='')
    self.dirs_to_remove.append(self.test_out)

    self.qiime_config = load_qiime_config()
    self.params = parse_qiime_parameters(params_f1)

    # Redirect stderr into a buffer: a system call inside the workflow
    # prints a warning that warnings.filterwarnings cannot suppress
    # because it comes from the spawned process.  See:
    # http://stackoverflow.com/questions/9949633/suppressing-print-as-stdout-python
    self.saved_stderr = sys.stderr
    sys.stderr = StringIO()

    # Fail any test still running after three minutes
    initiate_timeout(180)
示例9: setUp
def setUp(self):
    """Set up tree, support, and mapping fixtures for the
    bootstrap-support tree-visualization tests.
    """
    # Plain three-taxon tree plus a copy scaled so the deepest tip is 100
    self.newick = "((s1:0.2,s2:0.2):0.6,s3:0.8);"
    self.tree = parse_newick(self.newick, PhyloNode)
    self.newick_scaled = "((s1:25,s2:25):75,s3:100);"
    self.tree_scaled = parse_newick(self.newick_scaled, PhyloNode)
    self.tree_scaled.scaleBranchLengths(max_length=100, ultrametric=True)
    self.num_trees_considered = 10
    # (low, high) support interval -> (hex color, legend label);
    # None bounds are open-ended
    self.trans_values = {(None, None) : ("#FFFFFF", ""),
                         (None, 0.5): ("#dddddd", "< 50%"),
                         (0.5, 0.7): ("#99CCFF", "50-70%"),
                         (0.7, 0.9): ("#82FF8B", "70-90%"),
                         (0.9, 0.999): ("#F8FE83", "90-99.9%"),
                         (0.999, None): ("#FF8582", "> 99.9%")}
    # Same trees but with jackknife support values on internal nodes
    self.jack_newick = "((s1:0.2,s2:0.2)0.8:0.6,s3:0.8)1.0;"
    self.jack_tree = parse_newick(self.jack_newick, PhyloNode)
    self.jack_newick_scaled = "((s1:25,s2:25)0.8:75,s3:100)1.0;"
    self.jack_tree_scaled = parse_newick(self.jack_newick_scaled, PhyloNode)
    self.jack_tree_scaled.scaleBranchLengths(max_length=100,
                                             ultrametric=True)
    # Support summary in the shape produced by the tree-comparison code
    self.support = { 'trees_considered': 10,
                     'support_dict': {"node0":1.0,
                                      "node1":0.8}}
    self.qiime_config = load_qiime_config()
    # fall back to /tmp/ when no temp_dir is configured
    self.tmp_dir = self.qiime_config['temp_dir'] or '/tmp/'
    self.output_file = get_tmp_filename(tmp_dir = self.tmp_dir)
    # Minimal QIIME mapping data for the three samples above
    dict_mapping_data = {}
    dict_mapping_data["s1"] = {
        'Description':'s1 test description',
        'NumIndividuals':'100',
        'BarcodeSequence':'AAAAAAAAAACT',
        'LinkerPrimerSequence':'AAAAAAAAAAAAAAAAAAAAA',
        'ExampleHeader1':'Value1',
        'ExampleHeader2':'Val2'}
    dict_mapping_data["s2"] = {
        'Description':'s2 test description',
        'NumIndividuals':'200',
        'BarcodeSequence':'CAAAAAAAAACT',
        'LinkerPrimerSequence':'AAAAAAAAAAAAAAAAAAAAA',
        'ExampleHeader1':'Value2',
        'ExampleHeader2':'Val1'}
    dict_mapping_data["s3"] = {
        'Description':'s3 test description',
        'NumIndividuals':'300',
        'BarcodeSequence':'GAAAAAAAAACT',
        'LinkerPrimerSequence':'AAAAAAAAAAAAAAAAAAAAA',
        'ExampleHeader1':'Value2',
        'ExampleHeader2':'Val3'}
    # [mapping dict, comment string] as consumed by the code under test
    self.mapping_data = [dict_mapping_data,
                         "Example comment string for test"]
    self._paths_to_clean_up = []
示例10: test_adjust_workers
def test_adjust_workers(self):
    """adjust_workers stops clients"""
    workers, client_sockets = self._setup_server_and_clients()
    final_sock = client_sockets[-1]
    qiime_config = load_qiime_config()
    min_per_core = int(qiime_config['denoiser_min_per_core'])

    # Just above the shedding threshold: no sockets get stopped
    remaining_flows = 4 * min_per_core - 1
    self.assertEqual(adjust_workers(remaining_flows, 4, client_sockets), 4)
    # if we can send something the socket is still alive
    self.assertEqual(final_sock.send("Hello"), 5)

    # Below the threshold for four workers: one client must be killed
    remaining_flows = 3 * min_per_core - 1
    self.assertEqual(adjust_workers(remaining_flows, 4, client_sockets), 3)
    # the released socket should now be closed
    self.assertRaises(error, final_sock.send, "Hello")
示例11: setUp
def setUp(self):
    """Fetch QIIME's configuration and initialize cleanup bookkeeping."""
    self._paths_to_clean_up = []
    self._dirs_to_clean_up = []
    # Resolve the temp dir from qiime_config, defaulting to /tmp/
    config = load_qiime_config()
    self.qiime_config = config
    self.tmp_dir = config['temp_dir'] or '/tmp/'
示例12: main
def main():
    """Entry point: build and run join-paired-ends commands for every
    fastq file found under the input directory.
    """
    option_parser, opts, args = \
        parse_command_line_parameters(suppress_verbose=True, **script_info)

    input_dir = opts.input_dir
    parameter_fp = opts.parameter_fp
    read1_indicator = opts.read1_indicator
    read2_indicator = opts.read2_indicator
    match_barcodes = opts.match_barcodes
    barcode_indicator = opts.barcode_indicator
    leading_text = opts.leading_text
    trailing_text = opts.trailing_text
    include_input_dir_path = opts.include_input_dir_path
    output_dir = abspath(opts.output_dir)
    remove_filepath_in_name = opts.remove_filepath_in_name
    print_only = opts.print_only

    # --remove_filepath_in_name only makes sense when the input dir path
    # is included in the output names
    if remove_filepath_in_name and not include_input_dir_path:
        option_parser.error("If --remove_filepath_in_name is enabled, "
                            "--include_input_dir_path must also be enabled.")

    # use the already-bound local rather than re-reading opts
    if parameter_fp:
        with open(parameter_fp, 'U') as parameter_f:
            params_dict = parse_qiime_parameters(parameter_f)
        params_str = get_params_str(params_dict['join_paired_ends'])
    else:
        params_dict = {}
        params_str = ""

    create_dir(output_dir)

    # Collect every (optionally gzipped) fastq under input_dir.  The
    # suffixes are mutually exclusive, so endswith-with-a-tuple matches
    # each file at most once, same as the original per-suffix loop.
    extensions = ('.fastq.gz', '.fastq', '.fq.gz', '.fq')
    all_files = []
    for root, dirs, fps in walk(input_dir):  # 'dirs' avoids shadowing dir()
        for fp in fps:
            if fp.endswith(extensions):
                all_files.append(abspath(join(root, fp)))

    pairs, bc_pairs = get_pairs(all_files, read1_indicator, read2_indicator,
                                match_barcodes, barcode_indicator)
    commands = create_commands_jpe(pairs, output_dir,
                                   params_str, leading_text, trailing_text,
                                   include_input_dir_path,
                                   remove_filepath_in_name, match_barcodes,
                                   bc_pairs)

    qiime_config = load_qiime_config()
    if print_only:
        command_handler = print_commands
    else:
        command_handler = call_commands_serially
    logger = WorkflowLogger(generate_log_fp(output_dir),
                            params=params_dict,
                            qiime_config=qiime_config)
    # Call the command handler on the list of commands
    command_handler(commands,
                    status_update_callback=no_status_updates,
                    logger=logger,
                    close_logger_on_success=True)
示例13: setUp
def setUp(self):
    """Placeholder fixture: deliberately fails to mark this test case as
    not yet implemented.
    """
    # Get QIIME's temp dir
    self.qiime_config = load_qiime_config()
    self.tmp_dir = self.qiime_config['temp_dir'] or '/tmp/'
    self._paths_to_clean_up = []
    self._dirs_to_clean_up = []
    # raise-as-call works in both Python 2 and 3; the original
    # 'raise ValueError, "..."' form is Python-2-only syntax.
    raise ValueError("Test not implemented!!!")
示例14: setUp
def setUp(self):
    """Pie-chart fixture: taxa fractions, labels, and temp-dir setup."""
    # Get QIIME's temp dir, defaulting to /tmp/ when unset
    config = load_qiime_config()
    self.qiime_config = config
    self.tmp_dir = config['temp_dir'] or '/tmp/'
    # Three fractions and their matching taxon labels
    self.labels = ['o__Bacteroidales', 'o__Clostridiales', 'not_shared']
    self.data = [0.1336206897, 0.2740524781, 0.5923268322]
    self._paths_to_clean_up = []
示例15: setUp
def setUp(self):
    """Copy input data into $HOME-based scratch dirs and build the
    fixtures used by the web-apps processing-workflow tests.
    """
    self.qiime_config = load_qiime_config()
    self.dirs_to_remove = []
    self.files_to_remove = []
    #this is specific to the web-apps only
    test_dir = abspath(dirname(__file__))
    # Copy the subset SFF into $HOME so the workflow can consume it
    sff_original_fp = os.path.join(test_dir, 'support_files', \
        'Fasting_subset.sff')
    self.sff_fp = os.path.join('/%s/' % environ['HOME'],
                               'Fasting_subset.sff')
    self.files_to_remove.append(self.sff_fp)
    copy(sff_original_fp, self.sff_fp)
    # Paired-end Illumina reads (100 records each) and their mapping file
    self.illumina_fps = [os.path.join(test_dir, 'support_files', \
                             's_8_1_sequence_100_records.txt'),
                         os.path.join(test_dir, 'support_files', \
                             's_8_2_sequence_100_records.txt')]
    self.illumina_map_fp = os.path.join(test_dir, 'support_files', \
        's8_map_incomplete.txt')
    # Fasta inputs and their mapping file
    self.fasta_fps=[os.path.join(test_dir,'support_files',
        'test_split_lib_seqs.fasta')]
    self.fasta_map_fp = os.path.join(test_dir, 'support_files', \
        'fasta_mapping_file.txt')
    # Scratch dir in $HOME rather than qiime_config's temp_dir
    tmp_dir = "/%s/test_wf" % environ['HOME']
    self.dirs_to_remove.append(tmp_dir)
    #self.qiime_config['temp_dir'] or '/tmp/'
    if not exists(tmp_dir):
        makedirs(tmp_dir)
    # if test creates the temp dir, also remove it
    #self.dirs_to_remove.append(tmp_dir)
    # Workflow output dir, also under $HOME
    self.wf_out="/%s/test_processed_data" % environ['HOME']
    #print self.wf_out
    self.dirs_to_remove.append(self.wf_out)
    self.gg_out=os.path.join(self.wf_out,'gg_97_otus')
    if not exists(self.gg_out):
        makedirs(self.gg_out)
    #self.dirs_to_remove.append(self.gg_out)
    # Write the Fasting mapping data to a uniquely named file
    self.fasting_mapping_fp = get_tmp_filename(tmp_dir=tmp_dir,
        prefix='qiime_wf_mapping',suffix='.txt')
    fasting_mapping_f = open(self.fasting_mapping_fp,'w')
    fasting_mapping_f.write(fasting_map)
    fasting_mapping_f.close()
    self.files_to_remove.append(self.fasting_mapping_fp)
    self.params = parse_qiime_parameters(qiime_parameters_f)
    # arm SIGALRM so a hung test fails instead of blocking the suite
    signal.signal(signal.SIGALRM, timeout)
    # set the 'alarm' to go off in allowed_seconds seconds
    signal.alarm(allowed_seconds_per_test)