This article collects typical usage examples of the Python method Pipeline.varsPipeline. If you are wondering what Pipeline.varsPipeline does, how to call it, or what it looks like in real code, the selected examples here may help. You can also explore further usage examples of its containing class, Pipeline.
Four code examples of Pipeline.varsPipeline are shown below, sorted by popularity by default.
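Most of the examples share one pattern: instantiate Pipeline.varsPipeline(), assign the attributes the downstream module expects (RefAligner binary, arguments file, thread count, log paths), and call parseArguments() to read the optArguments file; Example 2 instead shows the pipelineCL.py entry point, which calls prerunChecks() before handing varsP to DNPipeline. The following minimal sketch, with placeholder paths and an assumed arguments file name, is only an orientation for reading the examples and is not taken from any single one of them.

import os
import Pipeline

varsP = Pipeline.varsPipeline()
varsP.RefAlignerBin = "/path/to/RefAligner"            # placeholder path to the RefAligner binary
varsP.optArgumentsFileIn = "optArguments_example.xml"  # placeholder arguments file
varsP.nThreads = 4                                     # the examples note jobs will not start without this
varsP.pipeReportFile = os.path.join("output", "pipeReport.txt")  # placeholder report location
varsP.parseArguments()                                 # parses optArgumentsFileIn, as in the examples below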
Example 1: runCharacterize
# Required import: import Pipeline [as alias]
# Or: from Pipeline import varsPipeline [as alias]
def runCharacterize(cwd, rabin, refcmap, contigdir, contigbase, runaligns, xmappath, optargs, nthreads):
    '''Load Pipeline files from first arg; configure CharacterizeModule; run alignments if runaligns;
    report on those alignments or the xmap provided as xmappath.
    '''
    printargs = True

    if not os.path.isfile(os.path.join(cwd,"utilities.py")):
        print "utilities.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    import utilities as util

    if not util.checkFile(os.path.join(cwd,"Pipeline.py")):
        print "Pipeline.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    import Pipeline

    if not util.checkFile(os.path.join(cwd,"CharacterizeModule.py")):
        print "CharacterizeModule.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    import CharacterizeModule as cm

    #if not util.checkFile(os.path.join(cwd,"MapClassesRev.py")):
    #    print "MapClassesRev.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
    #    sys.exit(1)
    #import MapClassesRev

    #use Pipeline objects
    varsP = Pipeline.varsPipeline()
    varsP.optArgumentsFileIn = optargs
    varsP.RefAlignerBin = rabin
    varsP.latestMergedCmap = os.path.join(contigdir, contigbase+".cmap") #file suffix required to be .cmap
    varsP.contigFolder = contigdir
    varsP.nThreads = nthreads #necessary otherwise job won't start
    varsP.ref = refcmap
    varsP.stdoutlog = True #enable -stdout -stderr args to RefAligner
    varsP.curCharacterizeCmaps = [varsP.latestMergedCmap]

    if runaligns :
        varsP.contigAlignTarget = contigdir+"/alignref" #this is output dir
        varsP.runSV = False
        varsP.groupContigs = False
        varsP.stageComplete = contigbase
        varsP.outputContigFolder = contigdir
        varsP.memoryLogpath = os.path.join(contigdir, "memory_log.txt")
        varsP.pipeReportFile = os.path.join(contigdir, "pipeReport.txt")
        varsP.parseArguments() #parses optArgumentsFile
        if printargs :
            print "\nRunning Characterization with arguments:\n" + " ".join(varsP.argsListed('characterizeDefault')) + '\n'
        if hasattr(util, "InitStatus") : #if old version, skip
            util.InitStatus(os.path.join(contigdir, "status.xml")) #needed otherwise call to status_log fails
        charmod = cm.Characterize(varsP) #create Characterize object from CharacterizeModule -- this also calls generateJobList
        xmappath = charmod.xmapTarget #set in Characterize.generateJobList
        charmod.runJobs()
    else :
        #varsP.contigAlignTarget = contigdir #this is dir in which _q and _r cmaps must be located -- contigdir is from cmap; this should be from xmap
        varsP.contigAlignTarget = os.path.split(xmappath)[0]
        print "Loading alignments from\n" + xmappath + "\n"
        #no longer using this in Pipeline
        #print MapClassesRev.TopLevelCharacterization(varsP, [os.path.join(varsP.contigAlignTarget, contigbase)])
        print cm.characterizeContigs(varsP, xmappath)
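For context, a call to runCharacterize as defined above might look like the sketch below; every path, file name, and value is a hypothetical placeholder standing in for what the surrounding script would normally collect from its command-line arguments.

# Hypothetical invocation of runCharacterize; all values are placeholders.
runCharacterize(cwd="/home/user/Solve/Pipeline",            # dir containing Pipeline.py, utilities.py, CharacterizeModule.py
                rabin="/home/user/tools/RefAligner",        # RefAligner binary
                refcmap="/data/reference/hg19_BspQI.cmap",  # reference cmap
                contigdir="/data/assembly/contigs",         # dir holding <contigbase>.cmap
                contigbase="exp_refineFinal1",              # contig file prefix
                runaligns=True,                             # run alignments rather than load an existing xmap
                xmappath=None,                              # only used when runaligns is False
                optargs="/data/optArguments_human.xml",     # optArguments file
                nthreads=8)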
Example 2: on_exit
# Required import: import Pipeline [as alias]
# Or: from Pipeline import varsPipeline [as alias]
Usage: python pipelineCL.py -h
"""

#this is moved here from utilities/Pipeline.py because those files are used outside of the pipeline
@atexit.register
def on_exit():
    try:
        util.LogStatus("pipeline", "exit", "%d" % os.getpid())
    except:
        pass

if __name__ == "__main__":
    import Pipeline

    varsP = Pipeline.varsPipeline()
    varsP.prerunChecks()
    print(' Prerun Tests:\n\t%d ERRORS\n\t%d WARNINGS\n' % (varsP.error, varsP.warning))
    if varsP.error or varsP.warning:
        #print(varsP.message)
        varsP.printMessage()
    if varsP.error:
        print(' EXITING: See errors')
        sys.exit()

    dnpipeline = Pipeline.DNPipeline()
    dnpipeline.run(varsP)
Example 3: runAlignMol
# Required import: import Pipeline [as alias]
# Or: from Pipeline import varsPipeline [as alias]
#......... part of the code is omitted here .........
    sys.exit(1)

#.err file
errfile = result.errFile
if errfile and errbinfile :
    print "Warning: .err and .errbin arguments supplied; ignoring .err file"
    errfile = ""
elif errfile :
    errfile = os.path.realpath(result.errFile)
    if not util.checkFile(errfile, ".err") :
        print "err file supplied but not found or incorrect suffix:", errfile
        sys.exit(1)

if errfile and not util.checkFile(os.path.join(cwd,"SampleCharModule.py")):
    print "SampleCharModule.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
    sys.exit(1)
elif errfile :
    import SampleCharModule as scm

doref = result.ref

#DONE checking arguments

print "Using output dir", outdir
if runaligns :
    print "Aligning", bnxfile, "\nTo", qrypath, "\n"
else :
    print "Merging", qrypath, "\n"

startTime = time.time() #time since Epoch
memory_log = os.path.join(outdir, "memory_log.txt")
util.initMemoryLog(memory_log)

varsP = Pipeline.varsPipeline()
varsP.RefAlignerBin = rabin
varsP.contigFolder = "" #not used but needs to be an attr
varsP.outputContigFolder = "" #not used but needs to be a string attr
varsP.pipeReportFile = os.path.join(outdir, "alignmol_jobs_log.txt")
varsP.infoReportFile = os.path.join(outdir, "alignmol_log.txt")
util.InitStatus( os.path.join(outdir, "status.xml") )

if runaligns :
    varsP.optArgumentsFileIn = optargs
    varsP.latestMergedCmap = qrypath #if !doref, need this one
    varsP.ref = qrypath #and if doref, need this one
    varsP.nThreads = nthreads #necessary otherwise job won't start -- max threads per node
    varsP.maxthreads = maxthreads #threads per job
    p = os.path.split(qrypath)[1]
    varsP.outputContigPrefix = p[:p.rfind(".")] #filename prefix
    varsP.stdoutlog = True #use -stdout -stderr
    varsP.memoryLogpath = os.path.join(outdir, "memory_log.txt")
    varsP.parseArguments() #parses optArgumentsFile
    varsP.replaceParam("alignmol", "-T", pvalue)
    varsP.checkDependencies()
    varsP.RefAlignerBinOrig = rabin
    varsP.prerunLog() #general information in log -- needed for refaligner_version

    noisep = {}
    if errbinfile :
        noisep = {"readparameters": errbinfile}
        #print "Using noise parameters from "+errbinfile+"\n" #move below
    elif errfile :
        noisep = scm.readNoiseParameters(errfile.replace(".err",""))
        if noisep.has_key('readparameters') : #remove this because it's redundant, and it can cause problems with RefAligner compatibility
            del noisep['readparameters']
Example 4: runSV
# Required import: import Pipeline [as alias]
# Or: from Pipeline import varsPipeline [as alias]
def runSV(cwd, rabin, refcmap, contigdir, contigbase, runaligns, xmappath, optargs, nthreads, maxthreads, bedfile, errfile, outdir, errbinfile, clustargs, groupsv):
    '''Load Pipeline files from first arg; configure CharacterizeModule; run alignments if runaligns;
    report on those alignments or the xmap provided as xmappath.
    '''
    printargs = True

    if not os.path.isfile(os.path.join(cwd,"utilities.py")):
        print "utilities.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    import utilities as util

    if not util.checkFile(os.path.join(cwd,"Pipeline.py")):
        print "Pipeline.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    import Pipeline

    if not util.checkFile(os.path.join(cwd,"SVModule.py")):
        print "SVModule.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    import SVModule as svm

    if errfile and not util.checkFile(os.path.join(cwd,"SampleCharModule.py")):
        print "SampleCharModule.py missing in dir", cwd, "check -p argument, or run this script in Pipeline dir"
        sys.exit(1)
    elif errfile :
        import SampleCharModule as scm

    #use Pipeline objects
    varsP = Pipeline.varsPipeline()
    varsP.optArgumentsFileIn = optargs
    varsP.RefAlignerBin = rabin
    varsP.latestMergedCmap = os.path.join(contigdir, contigbase+".cmap") #file suffix required to be .cmap
    varsP.contigFolder = os.path.split(contigdir)[0]
    varsP.nThreads = nthreads #necessary otherwise job won't start -- max threads per node
    varsP.maxthreads = maxthreads #threads per job
    varsP.ref = refcmap
    varsP.stdoutlog = True #enable -stdout -stderr args to RefAligner
    varsP.curCharacterizeCmaps = [varsP.latestMergedCmap]
    varsP.contigSubDirectories = True #needed for prepareContigIO
    varsP.doAlignMolvRef = False #do not look for copy number
    varsP.groupSV = groupsv #mimic Pipeline behavior: group or not

    if runaligns :
        #varsP.contigAlignTarget = outdir
        varsP.runSV = False
        varsP.groupContigs = False
        varsP.stdoutlog = True #use -stdout -stderr
        varsP.stageComplete = contigbase
        varsP.outputContigPrefix = getContigPrefix(util, contigdir) #if outdir is not supplied, this is used as dir prefix; also used as file pref for -o arg
        varsP.outputContigFolder = contigdir #cmaps are copied from here

        if not outdir :
            outdir = contigdir+"_sv" #this will be outdir of sv jobs
        if os.path.isdir(outdir) :
            if not util.checkDir(outdir) : #check writeable
                print "\nERROR: Output dir is not writeable:\n", outdir, "\n"
                sys.exit(1)
            elif outdir == contigdir :
                print "\nERROR: Output dir cannot be same as input dir:\n", outdir, "\n"
                sys.exit(1)
            print "\nWARNING: Output dir already exists, results will be overwritten:\n", outdir, "\n"
        elif not util.checkDir(outdir) : #does not exist, make, if False, can't make or not writeable
            print "\nERROR: Output dir cannot be created or is not writeable:\n", outdir, "\n"
            sys.exit(1)

        if clustargs :
            os.putenv('SGE_ROOT', '/var/lib/gridengine') #do I want this???
            varsP.onCluster = True
            varsP.clusterLogDir = os.path.join(outdir, 'ClusterLogs')
            util.checkDir(varsP.clusterLogDir) #make it
            varsP.checkCluster()
            varsP.clusterArgumentsFileIn = clustargs #required for parseArguments
            varsP.parseArguments(readingClusterFile=True)
            if varsP.error :
                print varsP.message
                sys.exit(1)
            varsP.RefAlignerBin += "${BINARY_SUFFIX:=}" #copy from varsPipeline, handled by external script on phi host

        varsP.pipeReportFile = os.path.join(outdir, "sv_jobs_log.txt")
        varsP.infoReportFile = os.path.join(outdir, "sv_log.txt")
        varsP.memoryLogpath = os.path.join(outdir, "memory_log.txt")
        if bedfile :
            varsP.bedFile = bedfile
        util.InitStatus( os.path.join(outdir, "status.xml") )
        varsP.parseArguments() #parses optArgumentsFile
        varsP.checkDependencies()
        varsP.RefAlignerBinOrig = rabin
        varsP.prerunLog() #general information in log -- needed for refaligner_version
        if printargs :
            print "\nRunning SV detection with arguments ("+os.path.split(optargs)[1]+"):\n" + " ".join(varsP.argsListed('svdetect')) + '\n'

        noisep = {}
        if errbinfile :
            noisep = {"readparameters": errbinfile}
            print "Using noise parameters from "+errbinfile+"\n"
        elif errfile :
            noisep = scm.readNoiseParameters(errfile.replace(".err",""))
#......... part of the code is omitted here .........
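As with Example 1, a call to runSV might look like the following sketch; all paths and values are hypothetical placeholders, and the trailing arguments are simply passed through to the varsPipeline attributes shown above.

# Hypothetical invocation of runSV; all values are placeholders.
runSV(cwd="/home/user/Solve/Pipeline",               # dir containing Pipeline.py, SVModule.py, etc.
      rabin="/home/user/tools/RefAligner",           # RefAligner binary
      refcmap="/data/reference/hg19_BspQI.cmap",     # reference cmap
      contigdir="/data/assembly/contigs",            # dir holding <contigbase>.cmap
      contigbase="exp_refineFinal1",                 # contig file prefix
      runaligns=True,                                # run SV alignments
      xmappath=None,                                 # only used when runaligns is False
      optargs="/data/optArguments_human.xml",        # optArguments file
      nthreads=16,                                   # max threads per node
      maxthreads=8,                                  # threads per job
      bedfile="",                                    # optional bed file (sets varsP.bedFile when non-empty)
      errfile="",                                    # optional .err file with noise parameters
      outdir="",                                     # empty means <contigdir>_sv is used
      errbinfile="",                                 # optional .errbin file with noise parameters
      clustargs="",                                  # cluster arguments file; empty means run locally
      groupsv=1)                                     # passed through to varsP.groupSV (group or not)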