

Python Stack.addToJobFile Method Code Examples

This article collects typical usage examples of the Python method jobTree.scriptTree.stack.Stack.addToJobFile. If you are wondering what Stack.addToJobFile does, how to call it, or what real usage looks like, the curated examples below should help. You can also browse further usage examples of the enclosing class, jobTree.scriptTree.stack.Stack.


Ten code examples of Stack.addToJobFile are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
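
All of the examples below follow the same pattern: build a Stack around a root target, then either attach it as a child of an existing job file via addToJobFile(jobFile) or launch a new jobTree run via startJobTree(options). The following minimal sketch distills that pattern and is not taken from any of the projects above: ExampleTarget is a hypothetical placeholder for the project-specific root jobs (e.g. ExpectationIteration, prepareParadigm), and the Target import path is assumed from standard jobTree scriptTree usage.

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Minimal, hypothetical sketch of the shared pattern; ExampleTarget stands in for a real root job.
from optparse import OptionParser

from jobTree.scriptTree.target import Target  # assumed standard jobTree Target base class
from jobTree.scriptTree.stack import Stack

class ExampleTarget(Target):
    """Placeholder root job; real scripts subclass Target with project-specific work."""
    def run(self):
        pass  # project-specific work goes here

def main():
    parser = OptionParser()
    Stack.addJobTreeOptions(parser)  # register the standard jobTree command-line options
    parser.add_option("--jobFile", help="Add as a child of jobFile rather than making a new jobTree")
    options, args = parser.parse_args()

    s = Stack(ExampleTarget())
    if options.jobFile:
        # attach this stack as a child of an already-running jobTree's job file
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree is None:
            options.jobTree = "./.jobTree"
        failed = s.startJobTree(options)  # start a fresh jobTree run
        if failed:
            print "%d jobs failed" % failed

if __name__ == "__main__":
    main()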

Example 1: main

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def main():
    ## Make sure we're in the right type of directory
    assert os.path.exists("jobs.list")
    assert os.path.exists("jobsEM.list")
    assert os.path.exists("config.txt")
    assert os.path.exists("configEM.txt")
    assert os.path.exists("params0.txt")

    assert commandAvailable(collectParamsExec)
    assert commandAvailable(mergeSwarm)
    assert commandAvailable(mergeMerge)

    ##
    ## Parse options
    ##
    parser = OptionParser()
    Stack.addJobTreeOptions(parser) # so that the stack will work
    parser.add_option("--jobFile", help="Add as a child of jobFile rather " +
                      "than making a new jobTree")
    options, args = parser.parse_args()
    print "Using Batch System '" + options.batchSystem + "'"
    assert len(args) == 0 or len(args) == 1

    tolerance = 0.001
    if len(args) == 1:
        tolerance = float(args[0])

    logger.info("options: " + str(options))

    ##
    ## Run
    ##
    logger.info("starting first EM iteration")
    s = Stack(ExpectationIteration(0, tolerance, os.getcwd()))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            logger.info("Run complete!")
Author: sng87, Project: paradigm-scripts, Lines: 47, Source: jtParadigm.py

Example 2: wrapParadigm

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def wrapParadigm():
    ## parse arguments
    parser = OptionParser(usage = "%prog [options] attachment file:path [attachment file:path ...]")
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile", help = "Add as a child of jobFile rather " +
                      "than making a new jobTree")
    parser.add_option("-w", "--workdir", dest="workdir", help="Common Work directory", default="./")
    parser.add_option("-n", "--nulls", dest="nulls", help="Number of Null Samples", default="5")
    parser.add_option("-d", "--dogma", dest="dogmazip", help="Path to PARADIGM Dogma Specification", default=basedogma)
    parser.add_option("-p", "--pathway", dest="pathwayzip", help="Path to PARADIGM Pathway Specification", default=basepathway)
    parser.add_option("-b", "--boundaries", dest="disc", help="Data Discretization Bounds", default="0.33;0.67")
    parser.add_option("-t", "--storedparam", dest="param", help="Initial Parameter Starting Point", default=None)
    parser.add_option("-s", "--skipem", action="store_false", dest="em", help="Skip Running EM", default=True)
    
    parser.add_option("--fr", "--filter-real", dest="filtered_real", help="Filtered Output", default=None)
    parser.add_option("--fa", "--filter-all", dest="filtered_all", help="Filtered Output", default=None)
    parser.add_option("--ur", "--unfilter-real", dest="unfiltered_real", help="Filtered Output", default=None)
    parser.add_option("--ua", "--unfilter-all", dest="unfiltered_all", help="Filtered Output", default=None)
    
    options, args = parser.parse_args()
    logger.info("options: " + str(options))
    print "Using Batch System '" + options.batchSystem + "'"
    
    evidList = []
    for i, element in enumerate(args):
        if i % 2 == 1:
            (fileType, filePath) = args[i].split(":")
            evidList.append("%s:%s" % (fileType, os.path.abspath(filePath)))
        else:
            evidList.append(args[i])
    
    if (len(evidList) % 2 == 1) | (len(evidList) == 0):
        sys.stderr.write("ERROR: incorrect number of arguments\n")
        sys.exit(1)
    
 
    workdir = os.path.abspath(options.workdir)
    nullBatches = int(options.nulls)
    dogmaZip=os.path.abspath(options.dogmazip)
    pathwayZip=os.path.abspath(options.pathwayzip)
    disc=options.disc
    paramFile=os.path.abspath(options.param) if options.param is not None else None
    runEM = options.em
    
    dogmaLib = os.path.join(workdir, "dogma")
    pathwayLib = os.path.join(workdir, "pathway")
    system("unzip %s -d %s" % (dogmaZip, dogmaLib))
    system("unzip %s -d %s" % (pathwayZip, pathwayLib))

    ## run
    logger.info("starting prepare")
    s = Stack(prepareParadigm(" ".join(evidList), disc, paramFile, nullBatches, paradigmExec, inferSpec, dogmaLib, pathwayLib, runEM, workdir))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            logger.info("Run complete!")
            system("rm -rf .lastjobTree")
            system("mv .jobTree .lastjobTree")
Author: sng87, Project: paradigm-scripts, Lines: 67, Source: jtgalaxyParadigm.py

Example 3: main

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]

#.........part of the code omitted here.........
            for m in range(mFolds):
                foldMap[r+1][m+1] = []
            for col in mapSamples:
                foldMap[r+1][int(mapData[col][row])].append(col)
            if (r+1) == rRepeats:
                break
    else:
        for r in range(1, rRepeats+1):
            foldMap[r] = {}
            for m in range(1, mFolds+1):
                foldMap[r][m] = None
    
    ## check files
    pathwayFile = None
    cnvFile = None
    expFile = None
    dataMap = {}
    assert os.path.exists("%s/clusterFiles" % (paradigmDir))
    for file in os.listdir("%s/clusterFiles" % (paradigmDir)):
        if file.endswith("pathway.tab"):
            pathwayFile = "%s/clusterFiles/%s" % (paradigmDir, file)
        elif file.endswith("CNV.tab"):
            cnvFile = "%s/clusterFiles/%s" % (paradigmDir, file)
            dataMap["cnv"] = cnvFile
        elif (file.endswith("Expression.tab") | file.endswith("Expression.vCohort.tab") | 
              file.endswith("Expression.vNormal.tab")):
            expFile = "%s/clusterFiles/%s" % (paradigmDir, file)
            dataMap["exp"] = expFile
    assert (pathwayFile != None)
    assert (cnvFile != None)
    assert (expFile != None)
    paradigmFile = None
    if os.path.exists("%s/merge_merged_unfiltered.tab" % (paradigmDir)):
        paradigmFile = "%s/merge_merged_unfiltered.tab" % (paradigmDir)
    elif os.path.exists("%s/merge_merged.tab" % (paradigmDir)):
        paradigmFile = "%s/merge_merged.tab" % (paradigmDir)
	
    ## store feature, sample and pathway information
    dataFeatures = list(set(retColumns(cnvFile)) & set(retColumns(expFile)))
    includeFeatures = None
    if len(featureFile) != 0:
        includeFeatures = rList(featureFile)
    
    dataSamples = list(set(retRows(cnvFile)) & set(retRows(expFile)))
    if len(sampleFile) != 0:
        dataSamples = list(set(dataSamples) & set(rList(sampleFile)))

    (gNodes, gInteractions) = rPathway(pathwayFile)
    gfPathway = flattenPathway(Pathway(gNodes, gInteractions))
    if not useFlattened:
        gPathway = Pathway(gNodes, gInteractions)
    else:
        gPathway = gfPathway
	
    mutationOrder = []
    mutationMap = {}
    f = open(mutFile, "r")
    for line in f:
        if line.isspace():
            continue
        pline = re.split("\t", line.rstrip("\r\n"))
        mutatedGene = pline[0]
        mutatedSamples = list(set(re.split(",", pline[2])) & set(dataSamples))
        if mutatedGene in gPathway.nodes:
            if len(mutatedSamples) >= mutationThreshold:
                mutationMap[mutatedGene] = deepcopy(mutatedSamples)
                if includeFeatures is None:
                    mutationOrder.append(mutatedGene)
    f.close()
    if includeFeatures is not None:
        for mutatedGene in includeFeatures:
            if mutatedGene in mutationMap:
                mutationOrder.append(mutatedGene)
     
    submitMap = {}
    for mutatedGene in mutationOrder:
        submitMap[mutatedGene] = deepcopy(mutationMap[mutatedGene])
        if len(submitMap.keys()) >= maxFeatures:
            break
    
    ## run
    logger.info("options: " + str(options))
    logger.info("starting make")
    writeScripts()
    
    s = Stack(branchGenes(dataSamples, dataFeatures, dataMap, submitMap, gPathway, paradigmDir, 
              paramMap, foldMap, os.getcwd()))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            logger.info("Run complete!")
            system("rm -rf .lastjobTree")
            system("mv .jobTree .lastjobTree")
Author: BioinformaticsArchive, Project: paradigmshift, Lines: 104, Source: paradigmSHIFT.py

Example 4: gp_main

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def gp_main():
    ## check for fresh run
    if os.path.exists(".jobTree"):
        logging.warning("WARNING: '.jobTree' directory found, remove it first to start a fresh run\n")
    
    ## parse arguments
    parser = OptionParser(usage = "%prog [options] attachment file:path [attachment file:path ...]")
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile",
                      help = "Add as a child of jobFile rather than making a new jobTree")
    parser.add_option("-w", "--workdir", dest = "work_dir", default = "./",
                      help = "Directory to perform work in")
    parser.add_option("-n", "--nulls", dest = "null_size", default="5",
                      help = "Number of null samples to be generated per real sample")
    parser.add_option("-d", "--dogma", dest = "dogma_lib", default = standard_dogma,
                      help = "Directory of PARADIGM Dogma specification")
    parser.add_option("-t", "--param", dest = "param_file", default = None,
                      help = "Path to initial PARADIGM Parameters")
    parser.add_option("-p", "--pathway", dest = "pathway_lib", default=standard_pathway,
                      help = "Path to PARADIGM Pathway (directory/file/zip)")
    parser.add_option("-b", "--boundaries", dest = "disc", default="0.33;0.67",
                      help = "Data discretization boundaries")
    parser.add_option("-s", "--skipem", action = "store_false", dest="run_em", default=True,
                      help = "Skip EM steps")
    parser.add_option("-y", dest = "paradigm_public", action = "store_true", default = False,
                      help = "This flag must be enabled when using the publically available version of paradigm")
    
    parser.add_option("-o", "--output-ipls", dest = "output_ipls", default = "unfiltered.all.tab",
                      help = "Unfiltered Output")
    parser.add_option("--op", "--output-params", dest = "output_params", default = None,
                      help = "Parameter Output")
    parser.add_option("--oc", "--output-config", dest = "output_config", default = None,
                      help = "Config Output")
    parser.add_option("--of", "--output-files", dest = "output_files", default = None,
                      help = "Output Files")
    
    options, args = parser.parse_args()
    logging.info("options: %s" % (str(options)))
    print "Using Batch System '%s'" % (options.batchSystem)
    
    evidence_list = []
    for i, element in enumerate(args):
        if i % 2 == 1:
            (file_type, file_path) = args[i].split(":")
            evidence_list.append("%s:%s" % (file_type, os.path.abspath(file_path)))
        else:
            evidence_list.append(args[i])
    
    if (len(evidence_list) % 2 == 1) | (len(evidence_list) == 0):
        logging.error("ERROR: incorrect number of arguments\n")
        sys.exit(1)
    
    work_dir = os.path.abspath(options.work_dir)
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)
    null_size = int(options.null_size)
    disc = options.disc
    if options.param_file is not None:
        param_file = os.path.abspath(options.param_file)
    else:
        param_file  = None
    run_em = options.run_em
    
    ## import dogma and pathway libraries
    if zipfile.is_zipfile(options.dogma_lib):
        zf = zipfile.ZipFile(options.dogma_lib, "r")
        dogma_lib = os.path.join(work_dir, "dogma")
        zf.extractall(dogma_lib)
        zf.close()
    elif os.path.isdir(options.dogma_lib):
        dogma_lib = os.path.abspath(options.dogma_lib)
    else:
        logging.error("ERROR: dogma cannot be a regular file\n")
        sys.exit(1)
    if zipfile.is_zipfile(options.pathway_lib):
        zf = zipfile.ZipFile(options.pathway_lib, "r")
        pathway_lib = os.path.join(work_dir, "pathway")
        zf.extractall(pathway_lib)
        zf.close()
    elif os.path.isdir(options.pathway_lib):
        pathway_lib = os.path.abspath(options.pathway_lib)
    else:
        logging.error("ERROR: pathway cannot be a regular file\n")
        sys.exit(1)
    
    ## initialize the stack and run
    logging.info("starting prepare")
    s = Stack(PrepareParadigm(evidence_spec=" ".join(evidence_list),
                              disc = disc,
                              param_file = param_file,
                              null_size = null_size,
                              paradigm_exec = paradigm_exec,
                              inference_spec = standard_inference,
                              dogma_lib = dogma_lib,
                              pathway_lib = pathway_lib,
                              run_em = run_em,
                              directory = work_dir,
                              paradigm_public = options.paradigm_public))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
#.........part of the code omitted here.........
Author: QuanXue, Project: paradigm-scripts, Lines: 103, Source: galaxyParadigm.py

Example 5: main

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def main():
    ## parse arguments
    parser = OptionParser(usage = "%prog [options] network IPL-matrix features")
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile", help="Add as a child of jobFile rather " +
                      "than making a new jobTree")
    parser.add_option("-w", "--workdir", dest="workdir", help="Common Work directory", default="./")
    parser.add_option("-i", "--ipl", dest="iplFile", default = None)
    parser.add_option("-p", "--pathway", dest="pathwayZip", default=None)
    parser.add_option("-c", "--phenotype", dest="phenotypeFile", default=None)
    parser.add_option("-o", "--oz", dest="outputZip", default=None)
    parser.add_option("-s", "--score", dest="scoreFile", default=None)
    parser.add_option("-f", "--filter", dest="filterParams", default="0.0;0.0")
    parser.add_option("-b", "--background", dest="nBackground", default="0")
    options, args = parser.parse_args()
    print "Using Batch System '" + options.batchSystem + "'"
    
    ## clean
    if len(args) == 1:
        if args[0] == "clean":
            print "rm -rf real* null* OCCAM__* LAYOUT background.R .jobTree"
            system("rm -rf real* null* OCCAM__* LAYOUT background.R .jobTree")
            sys.exit(0)
    
    ## parse arguments
    assert ((len(args) == 0) or (len(args) == 2) or (len(args) == 3))
    if len(args) == 0:
        pathwayZip = options.pathwayZip if options.pathwayZip is not None else basepathway
        pathwayLib = os.path.join(options.workdir, "pathway")
        system("unzip %s -d %s" % (pathwayZip, pathwayLib))
        paradigmPathway = None
        for file in os.listdir(pathwayLib):
            if file.endswith("_pathway.tab"):
                paradigmPathway = "%s/%s" % (pathwayLib, file)
                break
        scoreFile = None
        phenotypeFile = options.phenotypeFile
        dataFile = options.iplFile
        sampleList = []
        for sample in retColumns(dataFile):
            if not sample.startswith("na_iter"):
                sampleList.append(sample)
        filterParams = options.filterParams
        nNulls = int(options.nBackground)
        outputZip = options.outputZip
        assert(os.path.exists(paradigmPathway))
        assert(os.path.exists(phenotypeFile))
        assert(os.path.exists(dataFile))
    elif len(args) == 2:
        paradigmPathway = args[0] 
        scoreFile = args[1]
        phenotypeFile = None
        dataFile = None
        sampleList = None
        filterParams = options.filterParams
        nNulls = 0
        outputZip = options.outputZip
        assert(os.path.exists(paradigmPathway))
        assert(os.path.exists(scoreFile))
    elif len(args) == 3:
        paradigmPathway = args[0]
        scoreFile = None
        phenotypeFile = args[2]
        dataFile = args[1]
        sampleList = []
        for sample in retColumns(dataFile):
            if not sample.startswith("na_iter"):
                sampleList.append(sample)
        filterParams = options.filterParams
        nNulls = int(options.nBackground)
        outputZip = options.outputZip
        assert(os.path.exists(paradigmPathway))
        assert(os.path.exists(phenotypeFile))
        assert(os.path.exists(dataFile))
    
    ## run
    logger.info("options: " + str(options))
    logger.info("starting make")
    writeScripts()
    s = Stack(prepareOCCAM(paradigmPathway, scoreFile, phenotypeFile, None, dataFile, sampleList, filterParams, nNulls, outputZip, os.getcwd()))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            logger.info("Run complete!")
            system("rm -rf .lastjobTree")
            system("mv .jobTree .lastjobTree")
Author: sng87, Project: pathmark-scripts, Lines: 95, Source: jtPATHMARK.py

Example 6: wrapParadigm

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def wrapParadigm():
    ## parse arguments
    parser = OptionParser(usage = "%prog [options] attachment file:path [attachment file:path ...]")
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile", help = "Add as a child of jobFile rather " +
                      "than making a new jobTree")
    parser.add_option("-w", "--workdir", dest="workdir", help="Common Work directory", default="./")
    parser.add_option("-n", "--nulls", dest="nulls", help="Number of Null Samples", default="5")
    parser.add_option("-d", "--dogma", dest="dogma", help="Path to PARADIGM Dogma Specification", default=basedogma)
    parser.add_option("-i", "--imap", dest="imap", help="Path to PARADIGM Interaction Map Specification", default=baseimap)
    parser.add_option("-t", "--storedparam", dest="param", help="Initial Parameter Starting Point", default=baseparams)
    
    parser.add_option("--xgmml", dest="xgmml", help="Path to PARADIGM Pathway XGMML file", default=None)
    parser.add_option("-p", "--pathway", dest="pathway", help="Path to PARADIGM Pathway Specification", default=basepathway)
    parser.add_option("-b", "--boundaries", dest="disc", help="Data Discretization Bounds", default="0.33;0.67")
    parser.add_option("-s", "--skipem", action="store_false", dest="em", help="Skip Running EM", default=True)
    parser.add_option("--lb-max", dest="lb_max", help="Loopy Belief Max iterations", default=10000)
    
    parser.add_option("-o", "--output", dest="output_paradigm", help="Unfiltered Output", default="paradigm.output")
    parser.add_option("--op", "--output-params", dest="output_params", help="Parameter Output", default=None)
    parser.add_option("--oc", "--output-config", dest="output_config", help="Config Output", default=None)
    parser.add_option("--of", "--output-files", dest="output_files", help="Output Files", default=None)

    parser.add_option("-z", dest="private_paradigm", help="This is such bullshit", action="store_true", default=False)

    
    options, args = parser.parse_args()
    logging.info("options: " + str(options))
    print "Using Batch System '" + options.batchSystem + "'"
    
    evidList = []
    for i, element in enumerate(args):
        if i % 2 == 1:
            (fileType, filePath) = args[i].split(":")
            evidList.append("%s:%s" % (fileType, os.path.abspath(filePath)))
        else:
            evidList.append(args[i])
    
    if (len(evidList) % 2 == 1) | (len(evidList) == 0):
        sys.stderr.write("ERROR: incorrect number of arguments\n")
        sys.exit(1)
    
 
    workdir = os.path.abspath(options.workdir)
    if not os.path.exists(workdir):
        os.makedirs(workdir)
    nullBatches = int(options.nulls)
    dogma = os.path.abspath(options.dogma)
    imap = os.path.abspath(options.imap)
    params = os.path.abspath(options.param)
    disc=options.disc
    paramFile=os.path.abspath(options.param) 
    runEM = options.em
    
    if not os.path.exists(workdir):
        os.makedirs(workdir)
    dogmaLib = os.path.join(workdir, "dogma")
    pathwayLib = os.path.join(workdir, "pathway")
    os.makedirs(dogmaLib)
    os.makedirs(pathwayLib)
    shutil.copy(dogma, dogmaLib)
    shutil.copy(imap, dogmaLib)

    if options.xgmml:
        pathway = os.path.join(pathwayLib, "pid_tmp_pathway.tab")
        system("%s %s %s %s" % (sys.executable, xgmmlExec, options.xgmml, pathway))
    else:
        pathway = os.path.abspath(options.pathway)
        shutil.copy(pathway, os.path.join(pathwayLib, "pid_tmp_pathway.tab"))


    ## run
    logging.info("starting prepare")
    argSpec = inferSpec % (options.lb_max)
    s = Stack(prepareParadigm(evidSpec=" ".join(evidList), disc=disc, 
        paramFile=paramFile, nullBatches=nullBatches, 
        paradigmExec=paradigmExec, inferSpec=argSpec, 
        dogmaLib=dogmaLib, pathwayLib=pathwayLib, em=runEM, directory=workdir,
        private_paradigm=options.private_paradigm
        ))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            shutil.copy( os.path.join(options.workdir, "merge_merged_unfiltered.all.tab"), options.output_paradigm)
            if options.output_params is not None:
                shutil.copy( os.path.join(options.workdir, "params.txt"), options.output_params)
            if options.output_config is not None:
                shutil.copy( os.path.join(options.workdir, "config.txt"), options.output_config)
            if options.output_files is not None:
                system("zip -r outputFiles.zip outputFiles")
                shutil.copy( os.path.join(options.workdir, "outputFiles.zip"), options.output_files)
                
            logging.info("Run complete!")
#.........part of the code omitted here.........
Author: Yeung678, Project: paradigm-scripts, Lines: 103, Source: jtgalaxyParadigm.py

Example 7: wrapParadigm

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def wrapParadigm():
    ## parse arguments
    parser = OptionParser(usage = "%prog [options] attachment file:path [attachment file:path ...]")
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile", help = "Add as a child of jobFile rather " +
                      "than making a new jobTree")
    parser.add_option("-d", "--dogma", dest="dogmaPath", help="Path to PARADIGM Dogma Specification", default="")
    parser.add_option("-p", "--pathway", dest="pathwayPath", help="Path to PARADIGM Pathway Specification", default="")
    parser.add_option("-b", "--boundaries", dest="discBound", help="Data Discretization Bounds", default="")
    parser.add_option("-n", "--nulls", dest="nullBatches", help="Number of Null Samples", default="5")
    parser.add_option("-t", "--storedparam", dest="paramFile", help="Initial Parameter Starting Point", default="")
    parser.add_option("-s", "--skipem", action="store_false", dest="runEM", help="Skip Running EM", default=True)
    options, args = parser.parse_args()
    print "Using Batch System '" + options.batchSystem + "'"
   
    evidList = []
    for element in args:
        if element.startswith("rankAllFile"):
             evidList.append(re.sub("rankAllFile", "file", element))
        else:
             evidList.append(element)
    
    if (len(evidList) % 2 == 1) | (len(evidList) == 0):
        sys.stderr.write("ERROR: incorrect number of arguments\n")
        sys.exit(1)
     
    if len(options.discBound) == 0:
        disc = "0.3333;0.6667"
    else:
        disc = options.discBound
    if len(options.dogmaPath) == 0:
        dogma = "%s/%s" % (dogmaDir, dogmaDefault)
    else:
        dogma = options.dogmaPath
    if len(options.pathwayPath) == 0:
        pathway = "%s/%s" % (pathwayDir, pathwayDefault)
    else:
        pathway = options.pathwayPath
    nullBatches = int(options.nullBatches)
    if len(options.paramFile) == 0:
        paramFile = None
    else:
        paramFile = options.paramFile
    runEM = options.runEM
    logger.info("options: " + str(options))
    
    ## run
    logger.info("starting prepare")
    s = Stack(prepareParadigm(" ".join(evidList), disc, paramFile, nullBatches, paradigmExec, inferSpec, dogma, pathway, runEM, os.getcwd()))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            logger.info("Run complete!")
            system("rm -rf .lastjobTree")
            system("mv .jobTree .lastjobTree")
Author: sng87, Project: paradigm-scripts, Lines: 64, Source: wrapParadigm.py

Example 8: main

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def main():
    ## configure logger
    formatter = logging.Formatter('%(levelname)s %(asctime)s %(message)s')
    file_handler = logging.FileHandler('signature.log')
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.INFO)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(logging.ERROR)
    logger.addHandler(file_handler)
    logger.addHandler(stream_handler)
    
    
    ## check for fresh run
    if os.path.exists(".jobTree"):
        logger.warning("WARNING: '.jobTree' directory found, remove it first to start a fresh run\n")
    
    ## parse arguments
    parser = OptionParser(usage = "%prog [options] data_matrix phenotype_matrix")
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile",
                      help = "Add as child of jobFile rather than new jobTree")
    parser.add_option("-b", "--bootstrap", dest = "bootstrap_size", default = 0,
                      help = "Number of bootstrap samples to estimate subnetwork robustness")
    parser.add_option("-n", "--null", dest = "null_size", default = 0,
                      help = "Number of null samples to estimate subnetwork significance")
    parser.add_option("-p", "--permute", dest = "null_permute", default = "paradigm",
                      help = "Permutation method for generation of null samples")
    parser.add_option("-m", "--method", dest = "signature_method", default = "sam",
                      help = "Differential method for computing signatures")
    parser.add_option("-z", "--seed", dest = "seed", default = None,
                      help = "Random seed used for bootstrap and null generation")
    options, args = parser.parse_args()
    logger.info("options: %s" % (str(options)))
    print "Using Batch System '%s'" % (options.batchSystem)
    
    if len(args) != 2:
        logger.error("ERROR: incorrect number of arguments\n")
        sys.exit(1)
    data_file = os.path.abspath(args[0])
    phenotype_file = os.path.abspath(args[1])
    
    ## set parameters
    parameters = Parameters(signature_method = options.signature_method,
                            bootstrap_size = int(options.bootstrap_size),
                            null_size = int(options.null_size),
                            null_method = options.null_permute,
                            random_seed = options.seed)
    
    ## run
    s = Stack(branchSignatures(data_file,
                               phenotype_file,
                               parameters,
                               os.getcwd().rstrip("/")))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        jobtree_dir = options.jobTree.rstrip("/")
        lasttree_dir = jobtree_dir + "_previous"
        
        failed = s.startJobTree(options)
        if failed:
            logger.warning("WARNING: %d jobs failed" % (failed))
        else:
            logger.info("Run complete!")
            if os.path.exists(lasttree_dir):
                shutil.rmtree(lasttree_dir)
            if os.path.exists(jobtree_dir):
                shutil.move(jobtree_dir, lasttree_dir)
Author: ucscCancer, Project: pathmark-scripts, Lines: 74, Source: signature.py

Example 9: as_main

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def as_main():
    ## check for fresh run
    if os.path.exists('.jobTree'):
        logger('WARNING: .jobTree directory found, remove it first to start a fresh run\n')
    
    ## parse arguments
    parser = OptionParser(usage = '%prog [options] paradigm_directory model_directory')
    Stack.addJobTreeOptions(parser)
    parser.add_option('--jobFile', help='Add as child of jobFile rather than new jobTree')
    parser.add_option('-n', '--nulls', dest='nulls', default=0)
    parser.add_option('-b', '--batchsize', dest='batch_size', default=50)
    parser.add_option('-y', '--public', action='store_true', dest='paradigm_public', default=False)
    parser.add_option('-g', '--galaxy', dest='galaxy_run', action='store_true', default=False)
    options, args = parser.parse_args()
    logger('Using Batch System : %s\n' % (options.batchSystem))
    
    if len(args) == 1:
        if args[0] == 'clean':
            command = 'rm -rf .jobTree analysis'
            logger(command)
            os.system(command)
            sys.exit(0)
    
    assert(len(args) == 2)
    paradigm_directory = os.path.abspath(args[0])
    model_directory = os.path.abspath(args[1])
    
    ## set Galaxy Executables
    global paradigm_executable, circleplot_executable
    if options.galaxy_run:
        paradigm_executable = os.path.join(base_directory, 'paradigm')
    
    ## set Paradigm files
    paradigm_setup = ParadigmSetup(paradigm_directory,
                                   nulls = int(options.nulls),
                                   batch_size = int(options.batch_size),
                                   public = options.paradigm_public)
    
    ## gather models
    model_list = []
    for file in os.listdir(model_directory):
        if os.path.exists('%s/%s/upstream_pathway.tab' % (model_directory, file)) and os.path.exists('%s/%s/downstream_pathway.tab' % (model_directory, file)):
            model = Model('%s/%s' % (model_directory, file))
            model.setSeed()
            model_list.append(model)
    
    ## run
    s = Stack(queueModels(model_list,
                          paradigm_setup,
                          os.getcwd().rstrip('/')))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = './.jobTree'
        
        failed = s.startJobTree(options)
        #failed = ''
        #queueModels(model_list, paradigm_setup, os.getcwd().rstrip('/')).run()
        if failed:
            logger('%d jobs failed\n' % failed)
        else:
            os.system('rm -rf .lastjobTree')
            os.system('mv .jobTree .lastjobTree')
Author: jeizenga, Project: paradigmshift, Lines: 66, Source: applySHIFT.py

Example 10: jtFitness

# Required import: from jobTree.scriptTree.stack import Stack [as alias]
# Or: from jobTree.scriptTree.stack.Stack import addToJobFile [as alias]
def jtFitness():
    ## parse arguments
    parser = OptionParser()
    Stack.addJobTreeOptions(parser)
    parser.add_option("--jobFile", help = "Add as a child of jobFile rather " +
                      "than making a new jobTree")
    parser.add_option("-d", "--dogma", dest="dogmaPath", default="")
    parser.add_option("-p", "--pathway", dest="pathwayPath", default="")
    parser.add_option("-b", "--boundaries", dest="discBound", default="")
    parser.add_option("-s", "--shuffle", dest="shuffleNode", default="")
    parser.add_option("-n", "--nulls", dest="nNulls", default="10")
    parser.add_option("-t", "--storedparam", dest="paramFile", default="")
    
    options, args = parser.parse_args()
    print "Using Batch System '" + options.batchSystem + "'"
   
    evidList = args 
    if (len(evidList) % 2 == 1) | (len(evidList) == 0):
        sys.stderr.write("ERROR: incorrect number of arguments\n")
        sys.exit(1)
    
    if len(options.discBound) == 0:
        disc = "0.3333;0.6667"
    else:
        disc = options.discBound
    if len(options.dogmaPath) == 0:
        dogma = "%s/%s" % (dogmaDir, dogmaDefault)
    else:
        dogma = options.dogmaPath
        if not dogma.startswith("/"):
            dogma = "%s/%s" % (os.getcwd(), dogma)        
    if len(options.pathwayPath) == 0:
        pathway = "%s/%s" % (pathwayDir, pathwayDefault)
    else:
        pathway = options.pathwayPath
        if not pathway.startswith("/"):
            pathway = "%s/%s" % (os.getcwd(), pathway)
    if len(options.shuffleNode) == 0:
        shuffleNode = "NULL"
    else:
        shuffleNode = options.shuffleNode
    nShuffle = int(options.nNulls)
    if len(options.paramFile) == 0:
        paramFile = None
    else:
        paramFile = options.paramFile

    ## clean
    if len(args) == 1:
        if args[0] == "clean":
            print "rm -rf .jobTree fold*"
            os.system("rm -rf .jobTree fold*")
            sys.exit(0)
    
    ## run
    logger.info("options: " + str(options))
    s = Stack(branchFolds(" ".join(evidList), disc, paramFile, paradigmExec, inferSpec, dogma, pathway, shuffleNode, nShuffle, mFolds, os.getcwd()))
    if options.jobFile:
        s.addToJobFile(options.jobFile)
    else:
        if options.jobTree == None:
            options.jobTree = "./.jobTree"
        
        failed = s.startJobTree(options)
        if failed:
            print ("%d jobs failed" % failed)
        else:
            logger.info("Run complete!")
            system("rm -rf .lastjobTree")
            system("mv .jobTree .lastjobTree")
Author: Yeung678, Project: paradigm-scripts, Lines: 72, Source: jtFitness.py


Note: The jobTree.scriptTree.stack.Stack.addToJobFile examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. For distribution and use, please refer to the License of the corresponding project; do not reproduce without permission.