本文整理汇总了Java中org.openrdf.query.parser.sparql.SPARQLParser类的典型用法代码示例。如果您正苦于以下问题:Java SPARQLParser类的具体用法?Java SPARQLParser怎么用?Java SPARQLParser使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
SPARQLParser类属于org.openrdf.query.parser.sparql包,在下文中一共展示了SPARQLParser类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: run
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
public void run() throws MalformedQueryException, QueryEvaluationException, NotEnoughResultsException, SailException {
    // Parse the SPARQL text into a query algebra tree.
    final ParsedQuery parsed = new SPARQLParser().parseQuery(sparql, null);
    CloseableIteration<? extends BindingSet, QueryEvaluationException> results = null;
    try {
        // Evaluate against the Sail connection (no dataset, no bindings, no inferencing).
        results = sailConn.evaluate(parsed.getTupleExpr(), null, null, false);
        // Consume either a fixed number of results or everything that is produced.
        if (numReads.isPresent()) {
            read(results, numReads.get());
        } else {
            readAll(results);
        }
    } finally {
        // Always release the iteration, even when reading fails part way through.
        if (results != null) {
            results.close();
        }
    }
}
示例2: testStatementPatternContext
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
public void testStatementPatternContext() throws Exception {
    // A single statement pattern scoped to the named graph ub:g1.
    String sparql = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
            " PREFIX ub: <urn:lubm:rdfts#>\n" +
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
            " SELECT * WHERE\n" +
            " {\n" +
            " GRAPH ub:g1 {\n" +
            "\t?x rdf:type ub:UndergraduateStudent\n" +
            " }\n" +
            " }\n" +
            "";
    QueryParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(sparql, null);
    // Translate the query algebra into a Pig script using the configured connection details.
    SparqlToPigTransformVisitor pigVisitor = new SparqlToPigTransformVisitor();
    pigVisitor.setTablePrefix(tablePrefix);
    pigVisitor.setInstance(instance);
    pigVisitor.setZk(zk);
    pigVisitor.setUser(user);
    pigVisitor.setPassword(password);
    pigVisitor.meet(new QueryRoot(parsed.getTupleExpr()));
}
示例3: testJoin
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
public void testJoin() throws Exception {
    // Two statement patterns sharing ?subj force a join on the subject.
    String sparql = "select * where {\n" +
            "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
            "?subj <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
            "}";
    QueryParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(sparql, null);
    // Translate the query algebra into a Pig script using the configured connection details.
    SparqlToPigTransformVisitor pigVisitor = new SparqlToPigTransformVisitor();
    pigVisitor.setTablePrefix(tablePrefix);
    pigVisitor.setInstance(instance);
    pigVisitor.setZk(zk);
    pigVisitor.setUser(user);
    pigVisitor.setPassword(password);
    pigVisitor.meet(new QueryRoot(parsed.getTupleExpr()));
}
示例4: testMutliReturnJoin
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
/**
 * Joins two statement patterns on {@code ?subj} where the second pattern
 * projects an additional variable ({@code ?suborg}) instead of matching a
 * constant object.
 */
public void testMutliReturnJoin() throws Exception {
    String query = "select * where {\n" +
            "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
            "?subj <urn:lubm:rdfts#subOrganizationOf> ?suborg.\n" +
            "}";
    QueryParser parser = new SPARQLParser();
    ParsedQuery parsedQuery = parser.parseQuery(query, null);
    // Translate the query algebra into a Pig script using the configured connection details.
    SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
    visitor.setTablePrefix(tablePrefix);
    visitor.setInstance(instance);
    visitor.setZk(zk);
    visitor.setUser(user);
    visitor.setPassword(password);
    visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
}
示例5: testMutlipleJoins
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
public void testMutlipleJoins() throws Exception {
    // Three statement patterns all sharing ?subj produce a chain of joins.
    String sparql = "select * where {\n" +
            "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
            "?subj <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
            "?subj <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:lubm:rdfts#Department>.\n" +
            "}";
    QueryParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(sparql, null);
    // Translate the query algebra into a Pig script using the configured connection details.
    SparqlToPigTransformVisitor pigVisitor = new SparqlToPigTransformVisitor();
    pigVisitor.setTablePrefix(tablePrefix);
    pigVisitor.setInstance(instance);
    pigVisitor.setZk(zk);
    pigVisitor.setUser(user);
    pigVisitor.setPassword(password);
    pigVisitor.meet(new QueryRoot(parsed.getTupleExpr()));
}
示例6: createSingleMongoMetadataNode
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
@Test
public void createSingleMongoMetadataNode() throws MalformedQueryException {
    // Register a single metadata property for the provider to match against.
    MongoDBRdfConfiguration conf = (MongoDBRdfConfiguration) getConf(true);
    Set<RyaURI> metadataProps = new HashSet<>();
    metadataProps.add(new RyaURI("http://createdBy"));
    conf.setStatementMetadataProperties(metadataProps);
    StatementMetadataExternalSetProvider provider = new StatementMetadataExternalSetProvider(conf);

    // Parse the query and gather its statement patterns.
    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(query, null);
    Set<StatementPattern> metadataPatterns = StatementMetadataTestUtils.getMetadataStatementPatterns(parsed.getTupleExpr(), metadataProps);
    List<QueryModelNode> nodes = new ArrayList<>();
    nodes.addAll(StatementPatternCollector.process(parsed.getTupleExpr()));

    // The provider should produce exactly one metadata node covering those patterns.
    JoinSegment<StatementMetadataNode<?>> segment = new JoinSegment<>(
            new HashSet<>(nodes), nodes, new HashMap<ValueExpr, Filter>());
    List<StatementMetadataNode<?>> actual = provider.getExternalSets(segment);
    List<StatementMetadataNode<?>> expected = new ArrayList<>();
    expected.add(new StatementMetadataNode<>(metadataPatterns, conf));
    Assert.assertEquals(expected, actual);
}
示例7: testReOrderedBasicOptional
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
@Test
public void testReOrderedBasicOptional() throws MalformedQueryException {
    // An OPTIONAL sits between two required patterns that share its variables.
    String query = ""//
            + "SELECT ?e ?c ?l" //
            + "{" //
            + " ?e a ?c . "//
            + " OPTIONAL{?e <uri:talksTo> ?l } . "//
            + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
            + "}";//
    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(query, null);
    System.out.println(parsed.getTupleExpr());
    List<TupleExpr> joinArgs = getJoinArgs(parsed.getTupleExpr(), new ArrayList<TupleExpr>());
    // First join arg is the flattened optional; the statement patterns follow.
    FlattenedOptional flattened = (FlattenedOptional) joinArgs.get(0);
    TupleExpr firstPattern = joinArgs.get(1);
    TupleExpr secondPattern = joinArgs.get(2);
    // Neither removing the first pattern nor adding the second is permitted.
    Assert.assertEquals(false, flattened.canRemoveTuple(firstPattern));
    Assert.assertEquals(false, flattened.canAddTuple(secondPattern));
}
示例8: testPeriodicNodePlacement
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
@Test
public void testPeriodicNodePlacement() throws MalformedQueryException {
    // Query containing a periodic filter plus an ordinary filter.
    String query = "prefix function: <http://org.apache.rya/function#> " //n
            + "prefix time: <http://www.w3.org/2006/time#> " //n
            + "prefix fn: <http://www.w3.org/2006/fn#> " //n
            + "select ?obs ?time ?lat where {" //n
            + "Filter(function:periodic(?time, 12.0, 6.0,time:hours)) " //n
            + "Filter(fn:test(?lat, 25)) " //n
            + "?obs <uri:hasTime> ?time. " //n
            + "?obs <uri:hasLattitude> ?lat }"; //n
    // Rewrite the periodic filter into a PeriodicQueryNode, then collect it back out.
    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(query, null);
    TupleExpr tupleExpr = parsed.getTupleExpr();
    tupleExpr.visit(new PeriodicQueryNodeVisitor());
    PeriodicNodeCollector collector = new PeriodicNodeCollector();
    tupleExpr.visit(collector);
    // Expected node: 12 hour window, 6 hour period, both expressed in milliseconds.
    PeriodicQueryNode expected = new PeriodicQueryNode(12*60*60*1000L, 6*3600*1000L, TimeUnit.MILLISECONDS, "time", new Join());
    Assert.assertEquals(true, periodicNodesEqualIgnoreArg(expected, collector.getPeriodicQueryNode()));
}
示例9: testConstructGraph
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
@Test
public void testConstructGraph() throws MalformedQueryException, UnsupportedEncodingException {
    // Two statement patterns; the binding set supplies values for every variable.
    String query = "select ?x where { ?x <uri:talksTo> <uri:Bob>. ?y <uri:worksAt> ?z }";
    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(query, null);
    List<StatementPattern> patterns = StatementPatternCollector.process(parsed.getTupleExpr());
    ConstructGraph graph = new ConstructGraph(patterns);

    // Bind all three variables and tag the bindings with a visibility label.
    QueryBindingSet bindings = new QueryBindingSet();
    bindings.addBinding("x", vf.createURI("uri:Joe"));
    bindings.addBinding("y", vf.createURI("uri:Bob"));
    bindings.addBinding("z", vf.createURI("uri:BurgerShack"));
    VisibilityBindingSet visBindings = new VisibilityBindingSet(bindings, "FOUO");
    Set<RyaStatement> produced = graph.createGraphFromBindingSet(visBindings);

    // One statement per pattern is expected, each carrying the visibility label.
    RyaStatement first = new RyaStatement(new RyaURI("uri:Joe"), new RyaURI("uri:talksTo"), new RyaURI("uri:Bob"));
    RyaStatement second = new RyaStatement(new RyaURI("uri:Bob"), new RyaURI("uri:worksAt"), new RyaURI("uri:BurgerShack"));
    Set<RyaStatement> expected = Sets.newHashSet(Arrays.asList(first, second));
    expected.forEach(s -> s.setColumnVisibility("FOUO".getBytes()));
    ConstructGraphTestUtils.ryaStatementSetsEqualIgnoresTimestamp(expected, produced);
}
示例10: makeUnchainedQuery
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
/**
 * Builds the TupleExpr for an "unchained" benchmark query: a series of
 * statement patterns that do NOT share variables with one another.
 * Sizing is driven by {@code params.getQuerySPCount()}.
 */
private static TupleExpr makeUnchainedQuery(final BenchmarkParams params) throws MalformedQueryException {
final Queue<String> varQueue= Lists.newLinkedList(variables);
final SPARQLParser parser = new SPARQLParser();
final List<String> queryVars = new ArrayList<>();
// The first statement pattern has two variables.
queryVars.add( varQueue.remove() );
queryVars.add( varQueue.remove() );
// The patterns are unchained (they share no variables), so every additional
// statement pattern also needs two fresh variables of its own.
for(int i = 1; i < params.getQuerySPCount(); i++) {
queryVars.add( varQueue.remove() );
queryVars.add( varQueue.remove() );
}
// Render the variables into SPARQL text and parse it into query algebra.
final String sparql = buildUnchainedSPARQL(queryVars);
return parser.parseQuery(sparql, null).getTupleExpr();
}
示例11: AccumuloIndexSet
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
/**
 * Constructs an index set backed by an existing PCJ table, loading the
 * table's SPARQL query and variable orders from its PCJ metadata.
 *
 * @param conf
 *            - configuration used to obtain the Accumulo connector and
 *            scan authorizations
 * @param tablename
 *            - name of an existing PCJ table
 * @throws MalformedQueryException
 * @throws SailException
 * @throws QueryEvaluationException
 * @throws TableNotFoundException
 * @throws AccumuloSecurityException
 * @throws AccumuloException
 * @throws PCJStorageException
 */
public AccumuloIndexSet(final Configuration conf, final String tablename)
throws MalformedQueryException, SailException,
QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException, PCJStorageException {
this.tablename = tablename;
this.accCon = ConfigUtils.getConnector(conf);
this.auths = getAuthorizations(conf);
// NOTE(review): relies on the `pcj` field being initialized before this
// constructor runs (e.g. at field declaration) — verify.
PcjMetadata meta = pcj.getPcjMetadata(accCon, tablename);
// Re-parse the SPARQL stored in the PCJ metadata; the PCJ's query is
// expected to be a tuple (SELECT) query with a projection at its root.
final SPARQLParser sp = new SPARQLParser();
final ParsedTupleQuery pq = (ParsedTupleQuery) sp.parseQuery(meta.getSparql(), null);
setProjectionExpr((Projection) pq.getTupleExpr());
// Capture the table's supported variable orders as strings.
final Set<VariableOrder> orders = meta.getVarOrders();
varOrder = Lists.newArrayList();
for (final VariableOrder var : orders) {
varOrder.add(var.toString());
}
setLocalityGroups(tablename, accCon, varOrder);
this.setSupportedVariableOrderMap(varOrder);
}
示例12: MongoPcjQueryNode
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
/**
 * Creates a new {@link MongoPcjQueryNode}.
 *
 * @param sparql - sparql query whose results will be stored in PCJ document. (not empty or null)
 * @param pcjId - name of an existing PCJ. (not empty or null)
 * @param pcjDocs - {@link MongoPcjDocuments} used to maintain PCJs in mongo. (not null)
 *
 * @throws MalformedQueryException - The SPARQL query needs to contain a projection.
 */
public MongoPcjQueryNode(final String sparql, final String pcjId, final MongoPcjDocuments pcjDocs) throws MalformedQueryException {
checkArgument(!Strings.isNullOrEmpty(sparql));
checkArgument(!Strings.isNullOrEmpty(pcjId));
this.pcjDocs = checkNotNull(pcjDocs);
this.pcjId = pcjId;
// Parse the SPARQL; a PCJ query is expected to be a tuple (SELECT) query.
final SPARQLParser sp = new SPARQLParser();
final ParsedTupleQuery pq = (ParsedTupleQuery) sp.parseQuery(sparql, null);
final TupleExpr te = pq.getTupleExpr();
Preconditions.checkArgument(PCJOptimizerUtilities.isPCJValid(te), "TupleExpr is an invalid PCJ.");
// The query must have a projection; it becomes this node's expression.
final Optional<Projection> projection = new ParsedQueryUtil().findProjection(pq);
if (!projection.isPresent()) {
throw new MalformedQueryException("SPARQL query '" + sparql + "' does not contain a Projection.");
}
setProjectionExpr(projection.get());
}
示例13: tupleReArrangeTest3
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
@Test
public void tupleReArrangeTest3() throws MalformedQueryException {
    // UNION of two join groups plus filters; count the distinct join orderings.
    String queryString = ""//
            + "SELECT ?a ?b ?c ?d ?e ?x ?y" //
            + "{" //
            + " Filter(?c = <uri:label2>)" //
            + " Filter(?x = <uri:somethingFunny>) "//
            + " ?e <uri:laughsAt> ?x ." //
            + " ?e <uri:livesIn> ?y . "//
            + "{ ?a a ?b . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?c }"//
            + " UNION { ?a <uri:talksTo> ?d . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?e }"//
            + "}";//
    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(queryString, null);
    List<TupleExpr> orderings = TupleReArranger.getTupleReOrderings(parsed.getTupleExpr());
    // This query shape is expected to admit exactly 24 re-orderings.
    Assert.assertEquals(24, orderings.size());
}
示例14: tupleReArrangeTest4
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
/**
 * Same query shape as {@link #tupleReArrangeTest3} with one extra filter;
 * after running the {@link FilterOptimizer} the number of re-orderings is
 * expected to be unchanged (24).
 */
@Test
public void tupleReArrangeTest4() throws MalformedQueryException {
    String queryString = ""//
            + "SELECT ?a ?b ?c ?d ?e ?x ?y" //
            + "{" //
            + " Filter(?c = <uri:label2>)" //
            + " Filter(?x = <uri:somethingFunny>) "//
            + " Filter(?d = <uri:Fred> ) " //
            + " ?e <uri:laughsAt> ?x ." //
            + " ?e <uri:livesIn> ?y . "//
            + "{ ?a a ?b . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?c }"//
            + " UNION { ?a <uri:talksTo> ?d . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?e }"//
            + "}";//
    SPARQLParser sp = new SPARQLParser();
    ParsedQuery pq = sp.parseQuery(queryString, null);
    TupleExpr te = pq.getTupleExpr();
    // Push the filters down before computing the re-orderings.
    new FilterOptimizer().optimize(te, null, null);
    List<TupleExpr> tuples = TupleReArranger.getTupleReOrderings(te);
    Assert.assertEquals(24, tuples.size());
}
示例15: testCost1
import org.openrdf.query.parser.sparql.SPARQLParser; //导入依赖的package包/类
@Test
public void testCost1() throws Exception {
    // Seven-pattern query used to exercise the threshold plan cost estimate.
    String q1 = ""//
            + "SELECT ?f ?m ?d ?h ?i " //
            + "{" //
            + " ?f a ?m ."//
            + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + " ?d <uri:talksTo> ?f . "//
            + " ?d <uri:hangOutWith> ?f ." //
            + " ?f <uri:hangOutWith> ?h ." //
            + " ?f <uri:associatesWith> ?i ." //
            + " ?i <uri:associatesWith> ?h ." //
            + "}";//
    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsed = sparqlParser.parseQuery(q1, null);
    ThreshholdPlanSelector selector = new ThreshholdPlanSelector(parsed.getTupleExpr());
    // With weights .6/.4 and threshold 0 this plan should cost .7.
    double cost = selector.getCost(parsed.getTupleExpr(), .6, .4, 0);
    Assert.assertEquals(.7, cost, .01);
}