This page collects typical usage examples of the Python support.error function (as used in the SPAdes assembler pipeline scripts). If you are wondering how support.error is used in practice, the curated examples below may help.
A total of 15 code examples of the error function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
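From the calls in the examples, support.error takes a message, an optional logger, and an optional dipspades flag, logs the message, and aborts the run. The snippet below is only a minimal, hypothetical sketch of such a helper for orientation; the real SPAdes support.error differs in details (message formatting, cleanup, exit handling).

import sys

def error(err_str, log=None, dipspades=False):
    """Hypothetical sketch: report a fatal error and stop the pipeline."""
    tool = "dipSPAdes" if dipspades else "SPAdes"   # assumed naming, for illustration only
    message = "\n== Error == %s (%s)\n" % (err_str, tool)
    if log:
        log.info(message)        # route errors through the shared logger when one is available
    else:
        sys.stderr.write(message)
        sys.stderr.flush()
    sys.exit(1)                  # callers below rely on error() not returning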
Example 1: get_options_from_params
def get_options_from_params(params_filename, spades_py_name=None):
    if not os.path.isfile(params_filename):
        return None, None
    params = open(params_filename, 'r')
    cmd_line = params.readline().strip()
    spades_prev_version = None
    for line in params:
        if line.find('rnaSPAdes version:') != -1:
            spades_prev_version = line.split('rnaSPAdes version:')[1]
            break
    params.close()
    if spades_prev_version is None:
        support.error("failed to parse rnaSPAdes version of the previous run! "
                      "Please restart from the beginning or specify another output directory.")
    if spades_prev_version.strip() != spades_version.strip():
        support.error("rnaSPAdes version of the previous run (%s) is not equal to the current version of rnaSPAdes (%s)! "
                      "Please restart from the beginning or specify another output directory."
                      % (spades_prev_version.strip(), spades_version.strip()))
    if spades_py_name is None or cmd_line.find(os.path.basename(spades_py_name)) == -1:
        spades_py_name = 'spades.py'  # try default name
    else:
        spades_py_name = os.path.basename(spades_py_name)
    spades_py_pos = cmd_line.find(spades_py_name)
    if spades_py_pos == -1:
        return None, None
    return cmd_line, cmd_line[spades_py_pos + len(spades_py_name):].split()
Example 2: parse_arguments
def parse_arguments(argv, log):
    try:
        options, not_options = getopt.gnu_getopt(argv, DS_Args_List.short_options, DS_Args_List.long_options)
    except getopt.GetoptError:
        _, exc, _ = sys.exc_info()
        sys.stderr.write(str(exc) + "\n")
        sys.stderr.flush()
        options_storage.usage("", dipspades=True)
        sys.exit(1)

    ds_args = DS_Args()
    for opt, arg in options:
        if opt == '-o':
            ds_args.output_dir = os.path.abspath(arg)
        elif opt == '--expect-gaps':
            ds_args.allow_gaps = True
        elif opt == '--expect-rearrangements':
            ds_args.weak_align = True
        elif opt == '--hap':
            ds_args.haplocontigs_fnames.append(support.check_file_existence(arg, 'haplocontigs', log, dipspades=True))
        elif opt == '-t' or opt == "--threads":
            ds_args.max_threads = int(arg)
        elif opt == '-m' or opt == "--memory":
            ds_args.max_memory = int(arg)
        elif opt == '--tmp-dir':
            ds_args.tmp_dir = os.path.abspath(arg)
    ds_args.haplocontigs = os.path.join(ds_args.output_dir, "haplocontigs")

    if not ds_args.output_dir:
        support.error("the output_dir is not set! It is a mandatory parameter (-o output_dir).", log, dipspades=True)
    if not ds_args.haplocontigs_fnames:
        support.error("cannot start dipSPAdes without at least one haplocontigs file!", log, dipspades=True)
    if not ds_args.tmp_dir:
        ds_args.tmp_dir = os.path.join(ds_args.output_dir, options_storage.TMP_DIR)
    return ds_args
Example 3: run_scaffold_correction
def run_scaffold_correction(configs_dir, execution_home, cfg, log, K):
    data_dir = os.path.join(cfg.output_dir, "SCC")
    saves_dir = os.path.join(data_dir, 'saves')
    dst_configs = os.path.join(data_dir, "configs")
    cfg_file_name = os.path.join(dst_configs, "config.info")

    if os.path.exists(data_dir):
        shutil.rmtree(data_dir)
    os.makedirs(data_dir)

    dir_util.copy_tree(os.path.join(configs_dir, "debruijn"), dst_configs, preserve_times=False)
    # removing template configs
    for root, dirs, files in os.walk(dst_configs):
        for cfg_file in files:
            cfg_file = os.path.join(root, cfg_file)
            if cfg_file.endswith('.info.template'):
                if os.path.isfile(cfg_file.split('.template')[0]):
                    os.remove(cfg_file)
                else:
                    os.rename(cfg_file, cfg_file.split('.template')[0])

    log.info("\n== Running scaffold correction \n")
    latest = os.path.join(cfg.output_dir, "K%d" % K)
    scaffolds_file = os.path.join(latest, "scaffolds.fasta")
    if not os.path.isfile(scaffolds_file):
        support.error("Scaffolds were not found in " + scaffolds_file, log)
    if "read_buffer_size" in cfg.__dict__:
        construction_cfg_file_name = os.path.join(dst_configs, "construction.info")
        process_cfg.substitute_params(construction_cfg_file_name, {"read_buffer_size": cfg.read_buffer_size}, log)
    prepare_config_scaffold_correction(cfg_file_name, cfg, log, saves_dir, scaffolds_file)
    command = [os.path.join(execution_home, "scaffold_correction"), cfg_file_name]
    log.info(str(command))
    support.sys_call(command, log)
Example 4: run_scaffold_correction
def run_scaffold_correction(configs_dir, execution_home, cfg, log, latest, K):
    data_dir = os.path.join(cfg.output_dir, "SCC", "K%d" % K)
    saves_dir = os.path.join(data_dir, 'saves')
    dst_configs = os.path.join(data_dir, "configs")
    cfg_file_name = os.path.join(dst_configs, "config.info")

    if os.path.exists(data_dir):
        shutil.rmtree(data_dir)
    os.makedirs(data_dir)

    dir_util.copy_tree(os.path.join(configs_dir, "debruijn"), dst_configs, preserve_times=False)

    log.info("\n== Running scaffold correction \n")
    scaffolds_file = os.path.join(latest, "scaffolds.fasta")
    if not os.path.isfile(scaffolds_file):
        support.error("Scaffolds were not found in " + scaffolds_file, log)
    if "read_buffer_size" in cfg.__dict__:
        construction_cfg_file_name = os.path.join(dst_configs, "construction.info")
        process_cfg.substitute_params(construction_cfg_file_name, {"read_buffer_size": cfg.read_buffer_size}, log)
    process_cfg.substitute_params(os.path.join(dst_configs, "moleculo_mode.info"), {"scaffolds_file": scaffolds_file}, log)
    prepare_config_scaffold_correction(cfg_file_name, cfg, log, saves_dir, K)
    command = [os.path.join(execution_home, "scaffold_correction"), cfg_file_name]
    add_configs(command, dst_configs)
    log.info(str(command))
    support.sys_call(command, log)
Example 5: compress_dataset_files
def compress_dataset_files(dataset_data, ext_python_modules_home, max_threads, log):
    log.info("\n== Compressing corrected reads (with gzip)")
    to_compress = []
    for reads_library in dataset_data:
        for key, value in reads_library.items():
            if key.endswith('reads'):
                compressed_reads_filenames = []
                for reads_file in value:
                    if not os.path.isfile(reads_file):
                        support.error('something went wrong and file with corrected reads (' + reads_file + ') is missing!', log)
                    to_compress.append(reads_file)
                    compressed_reads_filenames.append(reads_file + ".gz")
                reads_library[key] = compressed_reads_filenames

    if len(to_compress):
        pigz_path = support.which('pigz')
        if pigz_path:
            for reads_file in to_compress:
                support.sys_call([pigz_path, '-f', '-7', '-p', str(max_threads), reads_file], log)
        else:
            addsitedir(ext_python_modules_home)
            if sys.version.startswith('2.'):
                from joblib2 import Parallel, delayed
            elif sys.version.startswith('3.'):
                from joblib3 import Parallel, delayed
            n_jobs = min(len(to_compress), max_threads)
            outputs = Parallel(n_jobs=n_jobs)(delayed(support.sys_call)(['gzip', '-f', '-7', reads_file]) for reads_file in to_compress)
            for output in outputs:
                if output:
                    log.info(output)
Example 6: run_corrector
def run_corrector(configs_dir, execution_home, cfg,
                  ext_python_modules_home, log, to_correct, result):
    addsitedir(ext_python_modules_home)
    if sys.version.startswith('2.'):
        import pyyaml2 as pyyaml
    elif sys.version.startswith('3.'):
        import pyyaml3 as pyyaml

    dst_configs = os.path.join(cfg.output_dir, "configs")
    if os.path.exists(dst_configs):
        shutil.rmtree(dst_configs)
    dir_util.copy_tree(os.path.join(configs_dir, "corrector"), dst_configs, preserve_times=False)
    cfg_file_name = os.path.join(dst_configs, "corrector.info")

    cfg.tmp_dir = support.get_tmp_dir(prefix="corrector_")

    prepare_config_corr(cfg_file_name, cfg, ext_python_modules_home)
    binary_name = "corrector"

    command = [os.path.join(execution_home, binary_name),
               os.path.abspath(cfg_file_name), os.path.abspath(to_correct)]

    log.info("\n== Running contig polishing tool: " + ' '.join(command) + "\n")
    log.info("\n== Dataset description file was created: " + cfg_file_name + "\n")
    support.sys_call(command, log)

    if not os.path.isfile(result):
        support.error("Mismatch correction finished abnormally: " + result + " not found!")
    if os.path.isdir(cfg.tmp_dir):
        shutil.rmtree(cfg.tmp_dir)
Example 7: run_iteration
def run_iteration(configs_dir, execution_home, cfg, log, K, prev_K, last_one):
    data_dir = os.path.join(cfg.output_dir, "K%d" % K)
    stage = BASE_STAGE
    saves_dir = os.path.join(data_dir, 'saves')
    dst_configs = os.path.join(data_dir, "configs")
    cfg_file_name = os.path.join(dst_configs, "config.info")

    if options_storage.continue_mode:
        if os.path.isfile(os.path.join(data_dir, "final_contigs.fasta")) and not (options_storage.restart_from and
            (options_storage.restart_from == ("k%d" % K) or options_storage.restart_from.startswith("k%d:" % K))):
            log.info("\n== Skipping assembler: " + ("K%d" % K) + " (already processed)")
            return
        if options_storage.restart_from and options_storage.restart_from.find(":") != -1:
            stage = options_storage.restart_from[options_storage.restart_from.find(":") + 1:]
        support.continue_from_here(log)

    if stage != BASE_STAGE:
        if not os.path.isdir(saves_dir):
            support.error("Cannot restart from stage %s: saves were not found (%s)!" % (stage, saves_dir))
    else:
        if os.path.exists(data_dir):
            shutil.rmtree(data_dir)
        os.makedirs(data_dir)

        shutil.copytree(os.path.join(configs_dir, "debruijn"), dst_configs)
        # removing template configs
        for root, dirs, files in os.walk(dst_configs):
            for cfg_file in files:
                cfg_file = os.path.join(root, cfg_file)
                if cfg_file.endswith('.info.template'):
                    if os.path.isfile(cfg_file.split('.template')[0]):
                        os.remove(cfg_file)
                    else:
                        os.rename(cfg_file, cfg_file.split('.template')[0])

    log.info("\n== Running assembler: " + ("K%d" % K) + "\n")
    if prev_K:
        additional_contigs_fname = os.path.join(cfg.output_dir, "K%d" % prev_K, "simplified_contigs.fasta")
        if not os.path.isfile(additional_contigs_fname):
            support.warning("additional contigs for K=%d were not found (%s)!" % (K, additional_contigs_fname), log)
            additional_contigs_fname = None
    else:
        additional_contigs_fname = None
    if "read_buffer_size" in cfg.__dict__:
        construction_cfg_file_name = os.path.join(dst_configs, "construction.info")
        process_cfg.substitute_params(construction_cfg_file_name, {"read_buffer_size": cfg.read_buffer_size}, log)
    prepare_config_spades(cfg_file_name, cfg, log, additional_contigs_fname, K, stage, saves_dir, last_one)
    command = [os.path.join(execution_home, "spades"), cfg_file_name]

    ## this code makes sense for src/debruijn/simplification.cpp: corrected_and_save_reads() function which is not used now
    # bin_reads_dir = os.path.join(cfg.output_dir, ".bin_reads")
    # if os.path.isdir(bin_reads_dir):
    #     if glob.glob(os.path.join(bin_reads_dir, "*_cor*")):
    #         for cor_filename in glob.glob(os.path.join(bin_reads_dir, "*_cor*")):
    #             cor_index = cor_filename.rfind("_cor")
    #             new_bin_filename = cor_filename[:cor_index] + cor_filename[cor_index + 4:]
    #             shutil.move(cor_filename, new_bin_filename)
    support.sys_call(command, log)
Example 8: check_binaries
def check_binaries(binary_dir, log):
    for binary in ["hammer", "spades", "bwa-spades"]:
        binary_path = os.path.join(binary_dir, binary)
        if not os.path.isfile(binary_path):
            support.error("SPAdes binaries not found: " + binary_path +
                          "\nYou can obtain SPAdes binaries in one of two ways:" +
                          "\n1. Download them from http://spades.bioinf.spbau.ru/release" +
                          str(spades_version).strip() + "/SPAdes-" + str(spades_version).strip() + "-Linux.tar.gz" +
                          "\n2. Build source code with ./spades_compile.sh script", log)
Example 9: run_hammer
def run_hammer(corrected_dataset_yaml_filename, configs_dir, execution_home, cfg,
               not_used_dataset_data, ext_python_modules_home, log):
    addsitedir(ext_python_modules_home)
    if sys.version.startswith('2.'):
        import pyyaml2 as pyyaml
    elif sys.version.startswith('3.'):
        import pyyaml3 as pyyaml

    dst_configs = os.path.join(cfg.output_dir, "configs")
    if os.path.exists(dst_configs):
        shutil.rmtree(dst_configs)
    if cfg.iontorrent:
        dir_util.copy_tree(os.path.join(configs_dir, "ionhammer"), dst_configs, preserve_times=False)
        cfg_file_name = os.path.join(dst_configs, "ionhammer.cfg")
    else:
        dir_util.copy_tree(os.path.join(configs_dir, "hammer"), dst_configs, preserve_times=False)
        cfg_file_name = os.path.join(dst_configs, "config.info")
    # removing template configs
    for root, dirs, files in os.walk(dst_configs):
        for cfg_file in files:
            cfg_file = os.path.join(root, cfg_file)
            if cfg_file.endswith('.template'):
                if os.path.isfile(cfg_file.split('.template')[0]):
                    os.remove(cfg_file)
                else:
                    os.rename(cfg_file, cfg_file.split('.template')[0])

    cfg.tmp_dir = support.get_tmp_dir(prefix="hammer_")
    if cfg.iontorrent:
        prepare_config_ih(cfg_file_name, cfg, ext_python_modules_home)
        binary_name = "ionhammer"
    else:
        prepare_config_bh(cfg_file_name, cfg, log)
        binary_name = "hammer"

    command = [os.path.join(execution_home, binary_name),
               os.path.abspath(cfg_file_name)]

    log.info("\n== Running read error correction tool: " + ' '.join(command) + "\n")
    support.sys_call(command, log)
    if not os.path.isfile(corrected_dataset_yaml_filename):
        support.error("read error correction finished abnormally: " + corrected_dataset_yaml_filename + " not found!")

    corrected_dataset_data = pyyaml.load(open(corrected_dataset_yaml_filename, 'r'))
    remove_not_corrected_reads(cfg.output_dir)
    is_changed = False
    if cfg.gzip_output:
        is_changed = True
        compress_dataset_files(corrected_dataset_data, ext_python_modules_home, cfg.max_threads, log)
    if not_used_dataset_data:
        is_changed = True
        corrected_dataset_data += not_used_dataset_data
    if is_changed:
        pyyaml.dump(corrected_dataset_data, open(corrected_dataset_yaml_filename, 'w'))
    log.info("\n== Dataset description file was created: " + corrected_dataset_yaml_filename + "\n")

    if os.path.isdir(cfg.tmp_dir):
        shutil.rmtree(cfg.tmp_dir)
Example 10: run_iteration
def run_iteration(configs_dir, execution_home, cfg, log, K, prev_K, last_one):
    data_dir = os.path.join(cfg.output_dir, "K%d" % K)
    stage = BASE_STAGE
    saves_dir = os.path.join(data_dir, 'saves')
    dst_configs = os.path.join(data_dir, "configs")

    if options_storage.continue_mode:
        if os.path.isfile(os.path.join(data_dir, "final_contigs.fasta")) and not (options_storage.restart_from and
            (options_storage.restart_from == ("k%d" % K) or options_storage.restart_from.startswith("k%d:" % K))):
            log.info("\n== Skipping assembler: " + ("K%d" % K) + " (already processed)")
            return
        if options_storage.restart_from and options_storage.restart_from.find(":") != -1 \
                and options_storage.restart_from.startswith("k%d:" % K):
            stage = options_storage.restart_from[options_storage.restart_from.find(":") + 1:]
        support.continue_from_here(log)

    if stage != BASE_STAGE:
        if not os.path.isdir(saves_dir):
            support.error("Cannot restart from stage %s: saves were not found (%s)!" % (stage, saves_dir))
    else:
        if os.path.exists(data_dir):
            shutil.rmtree(data_dir)
        os.makedirs(data_dir)

        dir_util._path_created = {}  # see http://stackoverflow.com/questions/9160227/dir-util-copy-tree-fails-after-shutil-rmtree
        dir_util.copy_tree(os.path.join(configs_dir, "debruijn"), dst_configs, preserve_times=False)

    log.info("\n== Running assembler: " + ("K%d" % K) + "\n")
    if prev_K:
        additional_contigs_fname = os.path.join(cfg.output_dir, "K%d" % prev_K, "simplified_contigs.fasta")
        if not os.path.isfile(additional_contigs_fname):
            support.warning("additional contigs for K=%d were not found (%s)!" % (K, additional_contigs_fname), log)
            additional_contigs_fname = None
    else:
        additional_contigs_fname = None
    if "read_buffer_size" in cfg.__dict__:
        # FIXME why here???
        process_cfg.substitute_params(os.path.join(dst_configs, "construction.info"), {"read_buffer_size": cfg.read_buffer_size}, log)
    if "scaffolding_mode" in cfg.__dict__:
        # FIXME why here???
        process_cfg.substitute_params(os.path.join(dst_configs, "pe_params.info"), {"scaffolding_mode": cfg.scaffolding_mode}, log)

    prepare_config_rnaspades(os.path.join(dst_configs, "rna_mode.info"), log)
    prepare_config_construction(os.path.join(dst_configs, "construction.info"), log)
    cfg_fn = os.path.join(dst_configs, "config.info")
    prepare_config_spades(cfg_fn, cfg, log, additional_contigs_fname, K, stage, saves_dir, last_one, execution_home)

    command = [os.path.join(execution_home, "spades-core"), cfg_fn]
    add_configs(command, dst_configs)
    # print("Calling: " + " ".join(command))
    support.sys_call(command, log)
Example 11: substitute_params
def substitute_params(filename, var_dict, log):
    lines = file_lines(filename)
    vars_in_file = vars_from_lines(lines)

    for var, value in var_dict.items():
        if var not in vars_in_file:
            support.error("Couldn't find " + var + " in " + filename, log)
        meta = vars_in_file[var]
        lines[meta.line_num] = meta.indent + str(var) + " " + str(value) + "\n"

    file = open(filename, "w")
    file.writelines(lines)
Example 12: check_cfg_for_restart_from
def check_cfg_for_restart_from(cfg):
    if options_storage.restart_from == 'ec' and ("error_correction" not in cfg):
        support.error("failed to restart from read error correction because this stage was not specified!")
    if options_storage.restart_from == 'mc' and ("mismatch_corrector" not in cfg):
        support.error("failed to restart from mismatch correction because this stage was not specified!")
    if options_storage.restart_from == 'as' or options_storage.restart_from.startswith('k'):
        if "assembly" not in cfg:
            support.error("failed to restart from assembling because this stage was not specified!")
        if options_storage.restart_from.startswith('k'):
            correct_k = False
            k_to_check = options_storage.k_mers
            if not k_to_check:
                if options_storage.auto_K_allowed():
                    k_to_check = list(set(options_storage.K_MERS_SHORT + options_storage.K_MERS_150 + options_storage.K_MERS_250))
                else:
                    k_to_check = options_storage.K_MERS_SHORT
            for k in k_to_check:
                if options_storage.restart_from == ("k%d" % k) or options_storage.restart_from.startswith("k%d:" % k):
                    correct_k = True
                    break
            if not correct_k:
                k_str = options_storage.restart_from[1:]
                if k_str.find(":") != -1:
                    k_str = k_str[:k_str.find(":")]
                support.error("failed to restart from K=%s because this K was not specified!" % k_str)
Example 13: get_read_length
def get_read_length(output_dir, K, ext_python_modules_home, log):
    est_params_filename = os.path.join(output_dir, "K%d" % K, "final.lib_data")
    max_read_length = 0
    if os.path.isfile(est_params_filename):
        addsitedir(ext_python_modules_home)
        if sys.version.startswith('2.'):
            import pyyaml2 as pyyaml
        elif sys.version.startswith('3.'):
            import pyyaml3 as pyyaml
        est_params_data = pyyaml.load(open(est_params_filename, 'r'))
        max_read_length = int(est_params_data['nomerge max read length'])
    log.info("Max read length detected as %d" % max_read_length)
    if max_read_length == 0:
        support.error("Failed to estimate maximum read length! File with estimated params: " + est_params_filename, log)
    return max_read_length
Example 14: get_read_length
def get_read_length(output_dir, K, ext_python_modules_home, log):
    est_params_filename = os.path.join(output_dir, "K%d" % K, "final.lib_data")
    max_read_length = 0
    if os.path.isfile(est_params_filename):
        addsitedir(ext_python_modules_home)
        if sys.version.startswith('2.'):
            import pyyaml2 as pyyaml
        elif sys.version.startswith('3.'):
            import pyyaml3 as pyyaml
        est_params_data = pyyaml.load(open(est_params_filename, 'r'))
        for reads_library in est_params_data:
            if reads_library['type'] in READS_TYPES_USED_IN_CONSTRUCTION:
                if int(reads_library["read length"]) > max_read_length:
                    max_read_length = int(reads_library["read length"])
    if max_read_length == 0:
        support.error("Failed to estimate maximum read length! File with estimated params: " + est_params_filename, log)
    return max_read_length
Example 15: run_bh
def run_bh(result_filename, configs_dir, execution_home, cfg, ext_python_modules_home, log):
    addsitedir(ext_python_modules_home)
    if sys.version.startswith('2.'):
        import pyyaml2 as pyyaml
    elif sys.version.startswith('3.'):
        import pyyaml3 as pyyaml

    dst_configs = os.path.join(cfg.output_dir, "configs")
    if os.path.exists(dst_configs):
        shutil.rmtree(dst_configs)
    shutil.copytree(os.path.join(configs_dir, "hammer"), dst_configs)
    cfg_file_name = os.path.join(dst_configs, "config.info")
    # removing template configs
    for root, dirs, files in os.walk(dst_configs):
        for cfg_file in files:
            cfg_file = os.path.join(root, cfg_file)
            if cfg_file.endswith('.info.template'):
                if os.path.isfile(cfg_file.split('.template')[0]):
                    os.remove(cfg_file)
                else:
                    os.rename(cfg_file, cfg_file.split('.template')[0])

    prepare_config_bh(cfg_file_name, cfg, log)

    command = [os.path.join(execution_home, "hammer"),
               os.path.abspath(cfg_file_name)]

    log.info("\n== Running read error correction tool: " + ' '.join(command) + "\n")
    support.sys_call(command, log)
    corrected_dataset_yaml_filename = os.path.join(cfg.tmp_dir, "corrected.yaml")
    if not os.path.isfile(corrected_dataset_yaml_filename):
        support.error("read error correction finished abnormally: " + corrected_dataset_yaml_filename + " not found!")

    corrected_dataset_data = pyyaml.load(open(corrected_dataset_yaml_filename, 'r'))

    if cfg.gzip_output:
        log.info("\n== Compressing corrected reads (with gzip)")
    move_dataset_files(corrected_dataset_data, cfg.output_dir, ext_python_modules_home, cfg.max_threads, log, cfg.gzip_output)
    corrected_dataset_yaml_filename = result_filename
    pyyaml.dump(corrected_dataset_data, open(corrected_dataset_yaml_filename, 'w'))
    log.info("\n== Dataset description file created: " + corrected_dataset_yaml_filename + "\n")

    shutil.rmtree(cfg.tmp_dir)