

Java Ontology Class Code Examples

This article collects typical usage examples of the Java class ontologizer.go.Ontology. If you are wondering what the Ontology class does, how to use it, or what real code that uses it looks like, the curated examples below should help.


The Ontology class belongs to the ontologizer.go package. A total of 8 code examples of the class are shown below, sorted by popularity by default.
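
As a quick orientation before the examples: the construction pattern that recurs below is to parse an OBO file with OBOParser, wrap the parsed terms in a TermContainer, and build the Ontology from it. The following is a minimal sketch of that pattern; the file path "hp.obo" is a placeholder, and the import locations assume the classic ontologizer.go package layout used by these projects.

import java.io.IOException;

import ontologizer.go.OBOParser;
import ontologizer.go.OBOParserException;
import ontologizer.go.Ontology;
import ontologizer.go.TermContainer;

public class OntologyQuickStart
{
    public static void main(String[] args) throws IOException, OBOParserException
    {
        // Parse an OBO file (the path is a placeholder) into a term map.
        OBOParser parser = new OBOParser("hp.obo");
        parser.doParse();

        // Wrap the parsed terms and build the ontology graph.
        TermContainer tc = new TermContainer(parser.getTermMap(), parser.getFormatVersion(), parser.getDate());
        Ontology ontology = new Ontology(tc);

        System.out.println("Terms in ontology: " + ontology.getNumberOfTerms());
    }
}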

Example 1: mostSpecificTerms

import ontologizer.go.Ontology; // import the required package/class
/**
 * Returns a minimal-length array of terms whose induced graph is the same as the graph induced by the given
 * terms. These are the leaf terms of the induced graph.
 *
 * @param terms indices (into the slim graph) of the query terms
 * @return indices of the most specific (leaf) terms
 */
public int[] mostSpecificTerms(int[] terms)
{
    ArrayList<TermID> termList = new ArrayList<TermID>(terms.length);
    for (int term : terms) {
        termList.add(this.slimGraph.getVertex(term).getID());
    }

    Ontology termGraph = this.graph.getInducedGraph(termList);

    ArrayList<Term> leafTermList = termGraph.getLeafTerms();

    int[] specificTerms = new int[leafTermList.size()];
    int i = 0;

    for (Term t : leafTermList) {
        specificTerms[i++] = this.slimGraph.getVertexIndex(t);
    }

    return specificTerms;
}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 28, Source file: BOQA.java
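
A hypothetical usage sketch of the method above (not part of the project): `boqa` is assumed to be an already set-up BOQA instance (see Example 6), and the term indices refer to vertices of its slim graph. When a query contains a term together with one of its ancestors, mostSpecificTerms reduces the query to its leaf terms.

// Hypothetical helper: prune a query down to its most specific terms before scoring.
static int[] pruneToMostSpecific(BOQA boqa, int[] queryTermIndices)
{
    int[] specific = boqa.mostSpecificTerms(queryTermIndices);
    System.out.println("Reduced " + queryTermIndices.length + " query terms to " + specific.length);
    return specific;
}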

Example 2: testWordnetParser

import ontologizer.go.Ontology; // import the required package/class
@Test
public void testWordnetParser() throws IOException
{
    TermContainer tc = WordNetParser.parserWordnet("WordNet-3.0/dict/data.noun");
    Ontology ontology = new Ontology(tc);

    Set<TermID> ts = new HashSet<TermID>();
    // ts.addAll(ontology.getTermsOfInducedGraph(null, ontology.getTerm("WNO:09571693").getID())); /* Orion */
    // ts.addAll(ontology.getTermsOfInducedGraph(null, ontology.getTerm("WNO:09380117").getID())); /* Orion */
    ts.addAll(ontology.getTermsOfInducedGraph(null, ontology.getTerm("WNO:09917593").getID())); /* Child */
    ts.addAll(ontology.getTermsOfInducedGraph(null, ontology.getTerm("WNO:05560787").getID())); /* Leg */

    ontology.getGraph().writeDOT(new FileOutputStream(new File("test.dot")),
        ontology.getSetOfTermsFromSetOfTermIds(ts), new DotAttributesProvider<Term>()
        {
            @Override
            public String getDotNodeAttributes(Term vt)
            {
                return "label=\"" + vt.getName() + "\"";
            }
        });
}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 23, Source file: WordNetParserTest.java

Example 3: testLargeNumberOfItems

import ontologizer.go.Ontology; // import the required package/class
@Test
@Ignore("Getting [The ontology graph contains cycles] error...")
public void testLargeNumberOfItems() throws IOException
{
    Random rnd = new Random(2);

    TermContainer tc = WordNetParser.parserWordnet("WordNet-3.0/dict/data.noun");
    Ontology ontology = new Ontology(tc);
    SlimDirectedGraphView<Term> slim = ontology.getSlimGraphView();

    AssociationContainer assocs = new AssociationContainer();

    for (int i = 0; i < 100000; i++) {
        ByteString item = new ByteString("item" + i);

        for (int j = 0; j < rnd.nextInt(16) + 2; j++) {
            Term t;
            do {
                t = slim.getVertex(rnd.nextInt(slim.getNumberOfVertices()));
            } while (t.isObsolete());

            Association a = new Association(item, t.getIDAsString());
            assocs.addAssociation(a);
        }
    }

    this.logger.info("Constructed data set");
    final BOQA boqa = new BOQA();
    boqa.setup(ontology, assocs);
    this.logger.info("Setted up ontology and associations");

}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 33, Source file: WordNetParserTest.java

Example 4: getOntology

import ontologizer.go.Ontology; // import the required package/class
public Ontology getOntology() {
	return ontology;
}
 
Developer ID: Phenomics, Project: annotation-simulation, Lines of code: 4, Source file: AnnotationSimulator.java

Example 5: getOntology

import ontologizer.go.Ontology; // import the required package/class
public Ontology getOntology()
{
    return this.graph;
}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 5, Source file: ReducedBoqa.java

Example 6: testLargeNumberOfItems

import ontologizer.go.Ontology; // import the required package/class
@Test
public void testLargeNumberOfItems() throws IOException, OBOParserException, URISyntaxException
{
    Random rnd = new Random(2); // fixed seed for reproducibility

    final BOQA boqa = new BOQA();

    OBOParser hpoParser = new OBOParser(
        new File(ClassLoader.getSystemResource("human-phenotype-ontology.obo.gz").toURI()).getCanonicalPath());
    hpoParser.doParse();

    // Collect all HPO terms into a term container and build the ontology.
    TermContainer tc = new TermContainer(hpoParser.getTermMap(), hpoParser.getFormatVersion(), hpoParser.getDate());
    Ontology ontology = new Ontology(tc);
    SlimDirectedGraphView<Term> slim = ontology.getSlimGraphView();

    AssociationContainer assocs = new AssociationContainer();

    for (int i = 0; i < 10000; i++) {
        ByteString item = new ByteString("item" + i);

        for (int j = 0; j < rnd.nextInt(16) + 2; j++) {
            Term t;
            do {
                // Randomly select a vertex until a non-obsolete one is found.
                t = slim.getVertex(rnd.nextInt(slim.getNumberOfVertices()));
            } while (t.isObsolete());
            Association a = new Association(item, t.getIDAsString());
            assocs.addAssociation(a); // the associations are consumed later by boqa.setup()
        }
    }

    this.logger.info("Constructed data set");
    // Configure BOQA before setup.
    boqa.setConsiderFrequenciesOnly(false);
    boqa.setPrecalculateScoreDistribution(false);
    boqa.setCacheScoreDistribution(false);
    boqa.setPrecalculateItemMaxs(false);
    boqa.setup(ontology, assocs);
    this.logger.info("Set up ontology and associations");

    // One observation flag per ontology term, all initialized to false; this vector is
    // where false positives/negatives would be introduced.
    Observations o = new Observations();
    o.observations = new boolean[boqa.getOntology().getNumberOfTerms()];

    System.out.println("This many terms: " + boqa.getOntology().getNumberOfTerms());
    System.out.println("This many items: " + boqa.getNumberOfItems());
    // The slim graph may have fewer vertices than the ontology has terms, since some terms
    // are excluded from the slim graph.
    System.out.println("This many nodes: " + boqa.slimGraph.getNumberOfVertices());
    System.out.println("This many allitems: " + boqa.allItemList.size());

    long start = System.nanoTime();
    this.logger.info("Calculating");
    Result res = boqa.assignMarginals(o, false, 1);
    long end = System.nanoTime();

    this.logger.info(((end - start) / 1000 / 1000) + "ms");
}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 69, Source file: BOQATest.java
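
Example 7 below relies on a generateAnnotations(25, slim) helper whose source is not shown on this page. A hypothetical sketch of such a helper, mirroring the random-association loop from Examples 3 and 6 (the name and signature are assumptions; it uses the same classes as the examples above), could look like this:

// Hypothetical helper: attach a few random, non-obsolete terms to each of `itemCount`
// synthetic items, in the spirit of the loops in Examples 3 and 6.
static AssociationContainer generateAnnotations(int itemCount, SlimDirectedGraphView<Term> slim)
{
    Random rnd = new Random(2); // fixed seed for reproducibility
    AssociationContainer assocs = new AssociationContainer();

    for (int i = 0; i < itemCount; i++) {
        ByteString item = new ByteString("item" + i);
        for (int j = 0; j < rnd.nextInt(16) + 2; j++) {
            Term t;
            do {
                t = slim.getVertex(rnd.nextInt(slim.getNumberOfVertices()));
            } while (t.isObsolete());
            assocs.addAssociation(new Association(item, t.getIDAsString()));
        }
    }
    return assocs;
}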

Example 7: vanillaTestLargeNumberOfItems

import ontologizer.go.Ontology; // import the required package/class
public void vanillaTestLargeNumberOfItems() throws IOException, OBOParserException, URISyntaxException {

    final BOQA boqa = new BOQA();
    AssociationContainer assocs;

    // Locate the gzipped HPO on the classpath and resolve its canonical path.
    URL resource = ClassLoader.getSystemResource("hp.obo.gz");
    if (resource == null) {
        throw new NullPointerException("Couldn't find it!");
    }
    URI resourceURI = resource.toURI();
    File hpo_file = new File(resourceURI);
    String final_path = hpo_file.getCanonicalPath();

    OBOParser hpoParser = new OBOParser(final_path);
    hpoParser.doParse();

    // Collect all HPO terms into a term container and build the ontology.
    TermContainer tc = new TermContainer(hpoParser.getTermMap(), hpoParser.getFormatVersion(), hpoParser.getDate());
    Ontology ontology = new Ontology(tc);
    SlimDirectedGraphView<Term> slim = ontology.getSlimGraphView();
    assocs = generateAnnotations(25, slim);

    // Overall flow:
    //   boqa.setup
    //   boqa.assignMarginals (get the best score)
    //   inference step: sample to see which observation might be best (as in Monte Carlo tree search)

    // Run BOQA once to get the initial guesses.
    ArrayList<String> initial_guesses = null;
    boqa.setConsiderFrequenciesOnly(false);
    boqa.setup(ontology, assocs);

    Observations o = new Observations();
    o.observations = new boolean[boqa.getOntology().getNumberOfTerms()];

    long start = System.nanoTime();
    this.logger.info("Calculating");
    BOQA.Result res = boqa.assignMarginals(o, false, 1);
    System.out.println(getTopDiseases(res));
    // To report the top 10 scores one could use an online selection (insert the first 10,
    // then replace the current minimum whenever a larger score arrives), quickselect for the
    // 10th largest value, or simply sort indices by score and take the top N while keeping a
    // parallel index array to map positions back to items. A minimal sketch of such a top-N
    // selection follows this example.

    long end = System.nanoTime();

    this.logger.info(((end - start) / 1000 / 1000) + "ms");
}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 67, Source file: ReducedBOQATest.java
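
The comments in Example 7 discuss how to report the top 10 marginal scores (online selection, quickselect, or sorting with a parallel index array). Below is a minimal sketch of the sorting variant; it is not the project's getTopDiseases helper, and it assumes the marginals have already been copied into a plain double[] array with one entry per item.

import java.util.Arrays;     // JDK only
import java.util.Comparator;

/** Indices of the n highest-scoring entries, best first (assumed helper, not part of BOQA). */
static int[] topN(double[] marginals, int n)
{
    Integer[] idx = new Integer[marginals.length];
    for (int i = 0; i < idx.length; i++) {
        idx[i] = i;
    }
    // Sort indices by descending score; quickselect (mentioned in the comments above)
    // would be O(n) on average, but sorting keeps the index bookkeeping trivial.
    Arrays.sort(idx, Comparator.comparingDouble((Integer i) -> marginals[i]).reversed());

    int[] top = new int[Math.min(n, idx.length)];
    for (int i = 0; i < top.length; i++) {
        top[i] = idx[i];
    }
    return top;
}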

Example 8: getOntology

import ontologizer.go.Ontology; // import the required package/class
/**
 * Returns the ontology.
 *
 * @return the ontology graph
 */
public Ontology getOntology()
{
    return this.graph;
}
 
Developer ID: johntiger1, Project: boqa, Lines of code: 10, Source file: BOQA.java


Note: The ontologizer.go.Ontology class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from projects contributed by open-source developers; the source code copyright belongs to the original authors, and redistribution and use must follow the license of the corresponding project. Do not reproduce without permission.