This article collects typical usage examples of the reroot_path function from the Python module pypeline.common.fileutils. If you are unsure what reroot_path does or how to call it, the curated examples below should help.
The 15 code examples below all demonstrate reroot_path; by default they are ordered by popularity.
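All of the examples share one idiom: a node writes its output into a temporary directory and only moves the finished file into place afterwards, with reroot_path producing the temporary path. As a rough mental model (this sketch is an assumption inferred from how the function is used below, not code copied from pypeline), reroot_path keeps only the basename of a path and joins it onto a new root directory:

import os

def reroot_path(root, filename):
    # Assumed behaviour: drop the directory part and re-root the basename.
    return os.path.join(root, os.path.basename(filename))

# Hypothetical usage mirroring the _run() methods below: write into a
# temporary directory first, then move the result into place.
temp = "/tmp/node-scratch"                        # hypothetical temp directory
final_output = "results/sample1/coverage.table"   # hypothetical output path
temp_output = reroot_path(temp, final_output)
# temp_output == "/tmp/node-scratch/coverage.table"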
Example 1: _run
def _run(self, _config, temp):
    table = {}
    for filename in self.input_files:
        coverage.read_table(table, filename)

    coverage.write_table(table, reroot_path(temp, self._output_file))
    move_file(reroot_path(temp, self._output_file), self._output_file)
Example 2: _run
def _run(self, config, temp):
    try:
        CommandNode._run(self, config, temp)
    except NodeError, error:
        # Allow failures due to low coverage
        with open(fileutils.reroot_path(temp, "template.stdout")) as handle:
            codeml = handle.read()
        if "sequences do not have any resolved nucleotides. Giving up." not in codeml:
            raise error

        with open(fileutils.reroot_path(temp, self._output_prefix + ".codeml"), "a") as handle:
            handle.write("\nWARNING: No resolved nucleotides found, could not process gene.\n")

        import sys
        sys.stderr.write("WARNING: No resolved nucleotides in " + self._output_prefix + "\n")
Example 3: _teardown
def _teardown(self, config, temp):
    for postfix in ("ALIGNMENT", "PARTITION"):
        filenames = [self._kwargs["TEMP_IN_" + postfix],
                     self._kwargs["TEMP_IN_" + postfix] + ".reduced",
                     self._kwargs["OUT_" + postfix]]

        for (source, destination) in zip(filenames, filenames[1:]):
            source = fileutils.reroot_path(temp, source)
            destination = fileutils.reroot_path(temp, destination)

            if not os.path.exists(destination):
                fileutils.copy_file(source, destination)
            os.remove(source)

    CommandNode._teardown(self, config, temp)
Example 4: _check_output_files
def _check_output_files(cls, output_files):
    """Checks dict of output files to nodes for cases where
    multiple nodes create the same output file.

    The directory components of paths are realized in order to
    detect cases where nodes create the same file, but via
    different paths (e.g. due to relative/absolute paths, or
    due to use of symbolic links). Since output files are
    replaced, not modified in place, it is not necessary to
    compare the files themselves."""
    dirpath_cache, real_output_files = {}, {}
    for (filename, nodes) in output_files.iteritems():
        dirpath = os.path.dirname(filename)
        if dirpath not in dirpath_cache:
            dirpath_cache[dirpath] = os.path.realpath(dirpath)

        real_output_file = reroot_path(dirpath_cache[dirpath], filename)
        real_output_files.setdefault(real_output_file, []).extend(nodes)

    for (filename, nodes) in real_output_files.iteritems():
        if len(nodes) > 1:
            nodes = _summarize_nodes(nodes)
            yield "Multiple nodes create the same (clobber) output-file:" \
                  "\n\tFilename: %s\n\tNodes: %s" \
                  % (filename, "\n\t ".join(nodes))
Example 5: _teardown
def _teardown(self, config, temp):
    # Validate output from MAFFT
    output_file = reroot_path(temp, self._output_file)
    try:
        MSA.from_file(output_file)
    except MSAError, error:
        raise NodeError("Invalid MSA produced by MAFFT:\n%s" % (error,))
Example 6: _setup
def _setup(self, config, temp):
    """See CommandNode._setup."""
    infile = os.path.abspath(self._infile)
    outfile = reroot_path(temp, self._infile)
    os.symlink(infile, outfile)

    CommandNode._setup(self, config, temp)
Example 7: _teardown
def _teardown(self, config, temp):
    os.remove(os.path.join(temp, "RAxML_info.output"))

    source = os.path.join(temp, "RAxML_parsimonyTree.output.0")
    destination = fileutils.reroot_path(temp, self._output_tree)
    fileutils.move_file(source, destination)

    CommandNode._teardown(self, config, temp)
Example 8: _run
def _run(self, _config, temp):
    alignment = MSA.from_file(self._input_file)
    for (to_filter, groups) in self._filter_by.iteritems():
        alignment = alignment.filter_singletons(to_filter, groups)

    temp_filename = fileutils.reroot_path(temp, self._output_file)
    with open(temp_filename, "w") as handle:
        alignment.to_file(handle)
    fileutils.move_file(temp_filename, self._output_file)
Example 9: _run
def _run(self, _config, temp):
    msas = []
    for filename in sorted(self._infiles):
        split_by = self._infiles[filename].get("partition_by", self._part_by)
        for (key, msa) in sorted(split_msa(read_msa(filename), split_by).items()):
            for excluded_group in self._excluded:
                msa.pop(excluded_group)
            msas.append(("%s_%s" % (self._infiles[filename]["name"], key), msa))

    msa = join_msa(*(msa for (_, msa) in msas))
    with open(reroot_path(temp, self._out_prefix + ".phy"), "w") as output:
        output.write(interleaved_phy(msa, add_flag = self._add_flag))

    with open(reroot_path(temp, self._out_prefix + ".partitions"), "w") as output:
        end = 0
        for (name, msa) in msas:
            length = len(msa.itervalues().next())
            output.write("DNA, %s = %i-%i\n" % (name, end + 1, end + length))
            end += length
Example 10: _run
def _run(self, config, temp):
    try:
        CommandNode._run(self, config, temp)
    except NodeError, error:
        if self._command.join() == [1, None]:
            with open(fileutils.reroot_path(temp, "template.stdout")) as handle:
                lines = handle.readlines()
            if lines and ("Giving up." in lines[-1]):
                error = NodeError("%s\n\n%s" % (error, lines[-1]))
        raise error
Example 11: _run
def _run(self, config, temp):
    region_names = self._create_tables(config, temp)

    table = {}
    for (key, (filename, handle)) in self._tables.iteritems():
        handle.close()
        self._read_table(key, table, filename)

    temp_filename = reroot_path(temp, self._output_file)
    self._write_table(table, temp_filename, region_names)
Example 12: _run
def _run(self, _config, temp):
    # Read and check that MSAs share groups
    msas = [MSA.from_file(filename) for filename in sorted(self.input_files)]
    MSA.validate(*msas)

    blocks = []
    for msa in msas:
        blocks.append(sequential_phy(msa, add_flag = self._add_flag))

    with open(reroot_path(temp, self._out_phy), "w") as output:
        output.write("\n\n".join(blocks))
Example 13: __init__
def __init__(self, description, destination, source_nodes):
    source_nodes = safe_coerce_to_tuple(source_nodes)

    input_files = []
    for source_node in source_nodes:
        input_files.extend(source_node.output_files)

    output_files = [reroot_path(destination, fpath) for fpath in input_files]
    self._files = zip(input_files, output_files)

    Node.__init__(self,
                  description = "<Copy %s output to %r>" % (description, destination),
                  input_files = input_files,
                  output_files = output_files,
                  dependencies = source_nodes)
Example 14: __init__
def __init__(self, input_files, destination, filter_by, dependencies=()):
    subnodes = []
    filter_by = dict(filter_by)
    for (filename, node) in input_files.iteritems():
        output_filename = fileutils.reroot_path(destination, filename)
        subnodes.append(FilterSingletonsNode(input_file=filename,
                                             output_file=output_filename,
                                             filter_by=filter_by,
                                             dependencies=node))

    MetaNode.__init__(self,
                      description="<FilterSingleton: %i files -> '%s'>"
                                  % (len(subnodes), destination),
                      subnodes=subnodes,
                      dependencies=dependencies)
Example 15: _run
def _run(self, _config, temp):
    alignment = msa.read_msa(self._input_file)

    for (to_filter, groups) in self._filter_by.iteritems():
        sequences = [alignment[group] for group in groups]
        sequence = list(alignment[to_filter])
        for (index, nts) in enumerate(zip(*sequences)):
            nt = sequence[index]
            if (nt not in "Nn-") and (nts.count(nt) == 1):
                sequence[index] = 'n'
        alignment[to_filter] = "".join(sequence)

    temp_filename = fileutils.reroot_path(temp, self._output_file)
    msa.write_msa(alignment, temp_filename)
    fileutils.move_file(temp_filename, self._output_file)