This article collects typical usage examples of the Python method pipeline.Pipeline.start. If you have been wondering what Pipeline.start does, how it is called, and what it looks like in real code, the curated examples below may help. You can also explore further usage of the containing class, pipeline.Pipeline.
The following shows 4 code examples of Pipeline.start, sorted by popularity by default.
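Before the examples, here is a minimal sketch of the pattern most of them share: construct a Pipeline, queue work with put(), call start() to launch the worker threads, and join() to wait for them. The run_pipeline wrapper and the constructor arguments below are illustrative assumptions modeled on Example 1, not a documented API of the pipeline module.
# Minimal usage sketch (assumed interface, modeled on Example 1 below)
from pipeline import Pipeline

def run_pipeline(items, recipe, path):
    # 'Pipeline1' is just an instance name; recipe and path mirror Example 1
    pl = Pipeline('Pipeline1', recipe, path=path)
    for item in items:
        pl.put(item)   # queue all work before starting the workers
    pl.start()         # launch the processing threads
    pl.join()          # block until the queued work is finished
    return items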
Example 1: add_pics
# Required import: from pipeline import Pipeline [as alias]
# Or alternatively: from pipeline.Pipeline import start [as alias]
def add_pics(rep, paths, process, recipe=None):
    """
    Add pictures to repository.

    Arguments:
    rep     -- Add pictures to this repository.
    paths   -- Paths of the pictures to be added (check if path exists).
    process -- Boolean flag if added pictures should be processed.
    recipe  -- Recipe to use for picture processing.
    """
    for path in paths:
        if not os.path.exists(path):
            log.warning("File not found: '%s'. Skipping it." % path)

    pics = [Picture(path) for path in paths if os.path.exists(path)]
    rep.index.add(pics)

    if process:
        log.info("Processing pictures.")
        if not recipe:  # set up pipeline with the repository's default recipe
            process_recipe = \
                Recipe.fromString(rep.config['recipes.default'])
        else:           # otherwise use the recipe supplied by the caller
            process_recipe = recipe
        pl = Pipeline('Pipeline1', process_recipe,
                      path=rep.connector.url.path)
        for pic in pics:
            pl.put(pic)
        pl.start()  # start processing threads
        pl.join()   # wait until threads exit

    log.info("Saving index to file.")
    with rep.connector.connected():
        rep.save_index_to_disk()
    return rep
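Note the ordering in this example: every picture is queued with put() before start() launches the worker threads, and join() then blocks until the queue has been drained, so the index is only written back to disk once processing has finished.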
Example 2: bas
# Required import: from pipeline import Pipeline [as alias]
# Or alternatively: from pipeline.Pipeline import start [as alias]
def bas(target, fluxcal, phasecal, fringefinder, bpcal, catlist, FLAG_INT,
        FLAG_BATCH, FLAG_DEBUG, FLAG_SILENT, FLAG_LOG, converts,
        StageSelection, confpath, conffile, inpath, inext, outpath, outprefix,
        logpath, logfile, comment, timeformat, prompt):
    # Get all those parameters from above ^
    settings = locals()

    # Inform the CASA logger of what's happening.
    casalog.origin("bas")
    casalog.post("Output from this task is not appended to the CASA log.",
                 priority="INFO")

    # Make sure we can see all the files we need.
    obit_path = "/home/rowell/src/obit/python"
    parseltongue_path = "/usr/local/share/parseltongue/python"
    sys.path.append(obit_path)
    sys.path.append(parseltongue_path)

    # Do what we came here to do.
    p = Pipeline()
    p.set(settings)
    p.start()

    # Return the settings.
    s = p.settings
    return s
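In this example Pipeline is constructed without arguments; the task parameters are captured with locals(), handed over via set(settings), and start() kicks off the processing, after which the settings are read back from p.settings and returned to the caller.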
Example 3: App
# Required import: from pipeline import Pipeline [as alias]
# Or alternatively: from pipeline.Pipeline import start [as alias]
class App(object):
    exit_cond = threading.Event()

    def __init__(self, configfile):
        logger.info('App: starting ...')
        self.configfile = configfile
        self.config = get_env(self.configfile)
        self.pipe = Pipeline(config=self.config)
        # let's do init according to our purpose: IN next OUT
        self.finput = self.pipe.create_module('file_input')
        self.pipe.append(self.finput)
        self.output = self.pipe.create_module('file_output')
        self.pipe.append(self.output)
        self.pipe.start()
        self.core_processor = ThreadLoop(loop=self.processing_queue_loop)
        self.core_processor.start()

    def main(self):
        rc = 0
        try:
            while not self.exit_cond.wait(.1):
                time.sleep(.1)
        except KeyboardInterrupt as e:
            logger.warn('App: KeyboardInterrupt')
            rc = -1
        self.on_exit()
        logger.info('App: exited with {}'.format(rc))
        return rc

    def processing_queue_loop(self):
        try:
            self.pipe.processing_loop()
        except EOS as e:
            logger.info('App: EOS')
            self.core_processor.running = False
            self.quit()
        except Exception as e:
            exc_type, exc_value, exc_tb = sys.exc_info()
            logger.error('App: Fatal exception in processing_queue_loop')
            logger.error('App: ' + ''.join(
                traceback.format_exception(exc_type, exc_value, exc_tb)))
            self.core_processor.running = False
            self.quit()

    def quit(self):
        self.exit_cond.set()

    def on_exit(self):
        logger.info('App: exit')
        self.pipe.stop()
        self.core_processor.stop()
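Here start() is called while the App is being initialised, after the file_input and file_output modules have been appended; the actual work is driven by pipe.processing_loop() running on a separate ThreadLoop, and an EOS exception marks the end of the stream, at which point on_exit() stops both the pipeline and the thread loop.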
Example 4: prog_pipeline
# Required import: from pipeline import Pipeline [as alias]
# Or alternatively: from pipeline.Pipeline import start [as alias]
#......... part of the code omitted here .........
    if not path.isfile(cmd_align):
        raise ValueError(\
                "Executable to align sequences (\"%s\") does not exist.\n\
Please use \"rtcr Config\" to see if the Aligner is properly configured "%
                cmd_align)

    init_logging()
    if args.debug:
        logging.root.setLevel(logging.DEBUG)
        logging.root.debug("log level set to DEBUG")
    if args.verbose:
        logging.root.addHandler(logging.StreamHandler(stdout,))
        logging.root.info("Writing log statements to stdout")

    # Note, delaying import of modules that have a logger until after logging
    # has been initialised.
    from fileio import read_reference, zopen
    from pipeline import Pipeline

    ref = read_reference(ref_fn).get_slice(species = species, gene = gene)
    for s in species:
        if not s in ref.species:
            logger.error("species \"%s\" does not exist in reference"%s)
            sys.exit(1)
    for g in gene:
        if not g in ref.genes:
            logger.error("gene \"%s\" does not exist in reference"%g)
            sys.exit(1)

    version = __version__
    preamble = '\nRTCR version: %(version)s\n'%locals()
    preamble += '\n[Command line arguments]\n' + \
            '\n'.join(['%s : %s'%(i,v) for i,v in enumerate(sys.argv)])
    preamble += '\n\
[Files]\n\
Reference: %(ref_fn)s\n\
Reads: %(reads_fn)s\n\
Output: %(output_fn)s\n\
\n\
[Settings]\n\
PHRED encoding: %(phred_encoding)s\n\
Species: %(species)s\n\
Gene: %(gene)s\n\
confidence: %(confidence)s\n\
\n\
[Immune receptor reference]\n'%locals()
    for species in sorted(ref.species):
        for gene in sorted(ref.genes):
            for region in sorted(ref.regions):
                alleles = ref.get_alleles(species = species, gene = gene,
                        region = region)
                n = len(alleles)
                if n > 0:
                    preamble += "%s,%s,%s: %s alleles\n"%(
                            species, gene, region, n)
                    s = ""
                    for allele in alleles:
                        s += "%s, %s\n"%(allele.species, allele.name)
                    logger.debug("species, allele\n" + s)
    preamble += "\n[Pipeline run]"
    logger.info(preamble)

    # Make sure exceptions are logged, even when not caught
    sys.excepthook = handle_uncaught_exception

    pipeline = Pipeline(
            ref = ref,
            reads = zopen(reads_fn, 'r'),
            phred_encoding = phred_encoding,
            cmd_build_index = cmd_build_index,
            args_build_index = args_build_index,
            cmd_align = cmd_align,
            args_align_v = args_align_v,
            args_align_j = args_align_j,
            alignments_fn = alignments_fn,
            alignment_stats_fn = alignment_stats_fn,
            Q_mm_stats_fn = Q_mm_stats_fn,
            Q_mm_stats_plot_fn = Q_mm_stats_plot_fn,
            output_fn = output_fn,
            output_hdr = output_hdr,
            output_fmt = output_fmt,
            clone_classname = clone_classname,
            confidence = confidence,
            min_seqlen = min_seqlen,
            include_cysphe = include_cysphe,
            min_phred_threshold = min_phred_threshold,
            n_threads = n_threads,
            update_interval = update_interval,
            listener = Listener())
    pipeline.daemon = True
    pipeline.name = 'Pipeline'

    try:
        pipeline.start()
        while pipeline.is_alive():
            pipeline.join(1)
    except KeyboardInterrupt:
        logger.error('Caught keyboard interrupt. Shutting down.')
        pipeline.stop()
        pipeline.join(1)
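In this example the Pipeline behaves like a threading.Thread: it is marked as a daemon, given a name, and launched with start(), while the main thread joins it in one-second steps so that a KeyboardInterrupt can still be caught and turned into an orderly stop() and final join().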