This article collects typical usage examples of the org.openrdf.model.vocabulary.RDFS.NAMESPACE constant in Java. If you are wondering what RDFS.NAMESPACE is for, or how and where to use it, the curated code examples below may help. For more background, see the containing class, org.openrdf.model.vocabulary.RDFS.
The following 11 code examples of RDFS.NAMESPACE are shown, ordered by popularity by default.
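Before the examples, here is a minimal sketch (not taken from the examples below) of the two ways RDFS.NAMESPACE is typically used in them: splicing the namespace into a SPARQL PREFIX header and constructing URIs inside the rdfs namespace. The ValueFactoryImpl usage is standard Sesame 2.x API; the query string and class name are illustrative only.

import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.RDFS;

public class RdfsNamespaceSketch {
    public static void main(String[] args) {
        // RDFS.NAMESPACE is the constant "http://www.w3.org/2000/01/rdf-schema#".
        // Usage 1: splice it into a SPARQL PREFIX declaration, as the examples below do.
        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
                + "select * where {?s rdfs:subClassOf ?o.}";
        System.out.println(query);

        // Usage 2: create a URI in the rdfs namespace with a ValueFactory.
        ValueFactory vf = ValueFactoryImpl.getInstance();
        URI label = vf.createURI(RDFS.NAMESPACE, "label");
        System.out.println(label); // http://www.w3.org/2000/01/rdf-schema#label
    }
}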
Example 1: removeManagedURI
/**
* Tests remove() method with managed URI.
*
* @throws Exception never, otherwise the test fails.
*/
@Test
public void removeManagedURI() throws Exception {
final String[] managedNamespaces = {
FOAF.NAMESPACE,
RDFS.NAMESPACE,
OWL.NAMESPACE };
for (final String managedNamespace : managedNamespaces) {
assertTrue(_cut.contains(managedNamespace));
final Value uri = buildResource(managedNamespace + randomString());
final String n3 = NTriplesUtil.toNTriplesString(uri);
_cut.removeValue(uri, _isPredicate);
verify(_dummyIndex).remove(n3);
reset(_dummyIndex);
}
}
Example 2: testEquivPropOf
public void testEquivPropOf() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, vf.createURI(litdupsNS, "ugradDegreeFrom")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "ugradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradC"), vf.createURI(litdupsNS, "ugraduateDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:ugradDegreeFrom lit:Harvard.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(2, tupleHandler.getCount());
conn.close();
}
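This example, and most of the inference tests below, pass a CountTupleHandler into TupleQuery.evaluate(...) and then assert on getCount(). That helper class is not shown on this page; the following is a minimal sketch of what such a handler might look like, assuming the standard Sesame TupleQueryResultHandlerBase. The real class in the test suite may differ.

import org.openrdf.query.BindingSet;
import org.openrdf.query.TupleQueryResultHandlerBase;

/**
 * Minimal sketch of a handler that simply counts query solutions.
 * Hypothetical reconstruction; the CountTupleHandler used by these
 * tests may carry extra behavior.
 */
public class CountTupleHandler extends TupleQueryResultHandlerBase {

    private int count = 0;

    @Override
    public void handleSolution(BindingSet bindingSet) {
        count++; // one solution per binding set delivered by evaluate()
    }

    public int getCount() {
        return count;
    }
}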
Example 3: testInverseOf
public void testInverseOf() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createURI(litdupsNS, "hasAlumnus")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "Harvard"), vf.createURI(litdupsNS, "hasAlumnus"), vf.createURI(litdupsNS, "AlumC")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {lit:Harvard lit:hasAlumnus ?s.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:degreeFrom lit:Harvard.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount());
conn.close();
}
Example 4: getIdWithManagedURI
/**
* A request is received for a URI with a namespace that belongs to managed domains.
*
* @throws Exception never, otherwise the test fails.
*/
@Test
public void getIdWithManagedURI() throws Exception {
final String[] managedNamespaces = {
FOAF.NAMESPACE,
RDFS.NAMESPACE,
OWL.NAMESPACE
};
for (final String managedNamespace : managedNamespaces) {
assertTrue(_cut.contains(managedNamespace));
final Value uri = buildResource(managedNamespace + randomString());
final String n3 = NTriplesUtil.toNTriplesString(uri);
// Make sure the mock index returns "Sorry, we don't have such value".
when(_dummyIndex.get(n3)).thenReturn(ValueDictionaryBase.NOT_SET);
// 1. ask for uri.
byte[] id = _cut.getID(uri, _isPredicate);
// 2. make sure the identifier is well-formed.
assertEquals(KnownURIsDictionary.ID_LENGTH, id.length);
assertEquals(KnownURIsDictionary.KNOWN_URI_MARKER, id[0]);
assertEquals(ValueDictionaryBase.RESOURCE_BYTE_FLAG, id[1]);
// 3. make sure the decoratee wasn't involved in identifier creation.
verify(_decoratee, times(0)).getID(uri, _isPredicate);
verify(_dummyIndex).putQuick(n3, id);
reset(_decoratee, _dummyIndex);
}
}
Example 5: testSubPropertyOf
public void testSubPropertyOf() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "memberOf")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "associatedWith")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "gradDegreeFrom"), vf.createURI(litdupsNS, "Yale")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "ProfessorC"), vf.createURI(litdupsNS, "memberOf"), vf.createURI(litdupsNS, "Harvard")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:degreeFrom lit:Harvard.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:memberOf lit:Harvard.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(2, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:associatedWith ?o.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:gradDegreeFrom lit:Yale.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
conn.close();
}
Example 6: testSymmPropOf
public void testSymmPropOf() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "Bob"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "James"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:friendOf lit:Bob.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:friendOf lit:James.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:friendOf lit:Jeff.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(2, tupleHandler.getCount());
conn.close();
}
Example 7: testTransitiveProp
public void testTransitiveProp() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "Queens"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NYC")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "NYC"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NY")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "NY"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "US")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "US"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NorthAmerica")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "NorthAmerica"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "World")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:subRegionOf lit:NorthAmerica.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(4, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s lit:subRegionOf lit:NY.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(2, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {lit:Queens lit:subRegionOf ?s.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(5, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {lit:NY lit:subRegionOf ?s.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount());
conn.close();
}
Example 8: testSubClassOf
public void testSubClassOf() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Student")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Person")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), RDF.TYPE, vf.createURI(litdupsNS, "UndergraduateStudent")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB"), RDF.TYPE, vf.createURI(litdupsNS, "Student")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "PersonC"), RDF.TYPE, vf.createURI(litdupsNS, "Person")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
//simple api first
RepositoryResult<Statement> person = conn.getStatements(null, RDF.TYPE, vf.createURI(litdupsNS, "Person"), true);
int count = 0;
while (person.hasNext()) {
count++;
person.next();
}
person.close();
assertEquals(3, count);
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s rdf:type lit:Person.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s rdf:type lit:Student.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(2, tupleHandler.getCount());
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select * where {?s rdf:type lit:UndergraduateStudent.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
conn.close();
}
Example 9: testSameAs
public void testSameAs() throws Exception {
if(internalInferenceEngine == null)
{
return; //infer not supported;
}
RepositoryConnection conn = repository.getConnection();
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA2")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA3")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB2")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB3")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), vf.createURI(litdupsNS, "pred1"), vf.createURI(litdupsNS, "StudentB3")));
conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), vf.createURI(litdupsNS, "pred2"), vf.createURI(litdupsNS, "StudentA3")));
conn.commit();
conn.close();
internalInferenceEngine.refreshGraph();
conn = repository.getConnection();
// query where finds sameAs for obj, pred specified
String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select ?s where {?s lit:pred1 lit:StudentB2.}";
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
CountTupleHandler tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
// query where finds sameAs for obj only specified
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select ?s where {?s ?p lit:StudentB2.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount()); // including sameAs assertions
// query where finds sameAs for subj, pred specified
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select ?s where {lit:StudentB2 lit:pred2 ?s.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount()); // including sameAs assertions
// query where finds sameAs for subj only specified
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select ?s where {lit:StudentB2 ?p ?s.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(3, tupleHandler.getCount()); // including sameAs assertions
// query where finds sameAs for subj, obj specified
query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
"PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
"PREFIX lit: <" + litdupsNS + ">\n" +
"select ?s where {lit:StudentB2 ?s lit:StudentA2.}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleHandler = new CountTupleHandler();
tupleQuery.evaluate(tupleHandler);
assertEquals(1, tupleHandler.getCount());
conn.close();
}
Example 10: testSomeValuesFromInference
public static void testSomeValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
final String lubm = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
log.info("Adding Data");
String insert = "PREFIX lubm: <" + lubm + ">\n"
+ "INSERT DATA { GRAPH <http://updated/test> {\n"
+ " <urn:Department0> a lubm:Department; lubm:subOrganizationOf <urn:University0> .\n"
+ " <urn:ResearchGroup0> a lubm:ResearchGroup; lubm:subOrganizationOf <urn:Department0> .\n"
+ " <urn:Alice> lubm:headOf <urn:Department0> .\n"
+ " <urn:Bob> lubm:headOf <urn:ResearchGroup0> .\n"
+ " <urn:Carol> lubm:worksFor <urn:Department0> .\n"
+ "}}";
Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
update.execute();
final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <" + lubm + "Chair> }}";
final String explicitQuery = "prefix lubm: <" + lubm + ">\n"
+ "select distinct ?x { GRAPH <http://updated/test> {\n"
+ " { ?x a lubm:Chair }\n"
+ " UNION\n"
+ " { ?x lubm:headOf [ a lubm:Department ] }\n"
+ "}}";
log.info("Running Explicit Query");
final CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 1);
log.info("Running Inference-dependent Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 0);
log.info("Adding owl:someValuesFrom Schema");
insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
+ "PREFIX owl: <" + OWL.NAMESPACE + ">\n"
+ "PREFIX lubm: <" + lubm + ">\n"
+ "INSERT DATA\n"
+ "{ GRAPH <http://updated/test> {\n"
+ " lubm:Chair owl:equivalentClass [ owl:onProperty lubm:headOf ; owl:someValuesFrom lubm:Department ] ."
+ "}}";
update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
update.execute();
log.info("Refreshing InferenceEngine");
((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
log.info("Re-running Inference-dependent Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 1);
}
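Examples 10 and 11 use a CountingResultHandler with a resetCount() method so the same handler can be reused across queries. The class is not shown here either; below is a hedged sketch of how it might look, again assuming Sesame's TupleQueryResultHandlerBase. The actual implementation in the source project may differ in details.

import org.openrdf.query.BindingSet;
import org.openrdf.query.TupleQueryResultHandlerBase;

/**
 * Hypothetical sketch of the CountingResultHandler used in examples 10 and 11:
 * like CountTupleHandler above, but resettable between queries.
 */
public class CountingResultHandler extends TupleQueryResultHandlerBase {

    private long count = 0;

    @Override
    public void handleSolution(BindingSet bindingSet) {
        count++;
    }

    public long getCount() {
        return count;
    }

    public void resetCount() {
        count = 0;
    }
}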
Example 11: testAllValuesFromInference
public static void testAllValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
log.info("Adding Data");
String insert = "INSERT DATA\n"
+ "{ GRAPH <http://updated/test> {\n"
+ " <urn:Alice> a <urn:Person> .\n"
+ " <urn:Alice> <urn:hasParent> <urn:Bob> .\n"
+ " <urn:Carol> <urn:hasParent> <urn:Dan> .\n"
+ "}}";
Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
update.execute();
final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Person> }}";
final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n"
+ " { ?x a <urn:Person> }\n"
+ " UNION {\n"
+ " ?y a <urn:Person> .\n"
+ " ?y <urn:hasParent> ?x .\n"
+ " }\n"
+ "}}";
log.info("Running Explicit Query");
final CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 2);
log.info("Running Inference-dependent Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 1);
log.info("Adding owl:allValuesFrom Schema");
insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
+ "PREFIX owl: <" + OWL.NAMESPACE + ">\n"
+ "INSERT DATA\n"
+ "{ GRAPH <http://updated/test> {\n"
+ " <urn:Person> rdfs:subClassOf [ owl:onProperty <urn:hasParent> ; owl:allValuesFrom <urn:Person> ] ."
+ "}}";
update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
update.execute();
log.info("Refreshing InferenceEngine");
((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
log.info("Re-running Inference-dependent Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 2);
}