本文整理汇总了Java中com.hp.hpl.jena.sparql.core.BasicPattern.add方法的典型用法代码示例。如果您正苦于以下问题:Java BasicPattern.add方法的具体用法?Java BasicPattern.add怎么用?Java BasicPattern.add使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类com.hp.hpl.jena.sparql.core.BasicPattern
的用法示例。
在下文中一共展示了BasicPattern.add方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: prepareBindings
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Prepares a binding query plan for the given graph query: records the variable
 * order, compiles the query's triple pattern into a BGP, and runs it through the
 * D2RQ query engine.
 *
 * @param q         the input graph query whose pattern is executed
 * @param variables the variables whose bindings the caller wants, in answer order
 * @return a plan whose {@code executeBindings()} yields one Domain per solution
 */
public BindingQueryPlan prepareBindings(GraphQuery q, Node[] variables) {
    this.variables = variables;
    this.indexes = new HashMap<Node,Integer>();
    for (int i = 0; i < variables.length; i++) {
        // Integer.valueOf uses the cache; new Integer(int) is deprecated
        indexes.put(variables[i], Integer.valueOf(i));
    }
    BasicPattern pattern = new BasicPattern();
    for (Triple t : q.getPattern()) {
        pattern.add(t);
    }
    Plan plan = QueryEngineD2RQ.getFactory().create(new OpBGP(pattern), dataset, null, null);
    // adapt engine Bindings to the legacy Domain-based iterator API
    final ExtendedIterator<Domain> queryIterator =
            new Map1Iterator<Binding,Domain>(new BindingToDomain(), plan.iterator());
    return new BindingQueryPlan() {
        public ExtendedIterator<Domain> executeBindings() {
            return queryIterator;
        }
    };
}
示例2: prepareBindings
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Prepares a binding query plan for the given query: records the variable
 * order, compiles the query's triple pattern into a BGP, and runs it through
 * the D2RQ query engine.
 *
 * @param q         the input query whose pattern is executed
 * @param variables the variables whose bindings the caller wants, in answer order
 * @return a plan whose {@code executeBindings()} yields one Domain per solution
 */
public BindingQueryPlan prepareBindings(Query q, Node[] variables) {
    this.variables = variables;
    this.indexes = new HashMap<Node,Integer>();
    for (int i = 0; i < variables.length; i++) {
        // Integer.valueOf uses the cache; new Integer(int) is deprecated
        indexes.put(variables[i], Integer.valueOf(i));
    }
    BasicPattern pattern = new BasicPattern();
    for (Triple t : q.getPattern()) {
        pattern.add(t);
    }
    Plan plan = QueryEngineD2RQ.getFactory().create(new OpBGP(pattern), dataset, null, null);
    // adapt engine Bindings to the legacy Domain-based iterator API
    final ExtendedIterator<Domain> queryIterator =
            new Map1Iterator<Binding,Domain>(new BindingToDomain(), plan.iterator());
    return new BindingQueryPlan() {
        public ExtendedIterator<Domain> executeBindings() {
            return queryIterator;
        }
    };
}
示例3: prepareBindings
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * <p>Method that prepares the bindings for a query plan.</p>
 *
 * @param q - the input query
 * @param variables - the variables in the given query
 * @return a binding query plan
 *
 * @see de.fuberlin.wiwiss.d2rq.D2RQQueryHandler#prepareBindings(Query, Node[])
 */
public BindingQueryPlan prepareBindings(Query q, Node[] variables)
{
    this.variables = variables;
    // typed map instead of raw HashMap; Integer.valueOf avoids the deprecated ctor
    this.indexes = new HashMap<Node,Integer>();
    for (int i = 0; i < variables.length; i++) {
        indexes.put(variables[i], Integer.valueOf(i));
    }
    BasicPattern pattern = new BasicPattern();
    for (Triple t : q.getPattern()) {
        pattern.add(t);
    }
    Plan plan = QueryEngineD2RQ.getFactory().create(new OpBGP(pattern), dataset, null, null);
    // adapt engine Bindings to the legacy Domain-based iterator API
    final ExtendedIterator<Domain> queryIterator =
            new Map1Iterator<Binding,Domain>(new BindingToDomain(), plan.iterator());
    return new BindingQueryPlan()
    {
        public ExtendedIterator<Domain> executeBindings() { return queryIterator; }
    };
}
示例4: transformPathToBasicPattern
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Recursively converts a string-encoded property path between two nodes into a
 * BasicPattern, introducing a fresh variable to join the two halves whenever an
 * operator splits the path.
 *
 * @param subject the node at the start of the path
 * @param sPath   the textual path expression (possibly inverse-marked or scoped)
 * @param object  the node at the end of the path
 * @return the pattern of triples equivalent to the path
 */
public static BasicPattern transformPathToBasicPattern(Node subject, String sPath, Node object){
    BasicPattern result = new BasicPattern();
    if (checkInverse(sPath)) {
        // strip the inverse marker and swap the two endpoints
        sPath = sPath.substring(1);
        Node swap = subject;
        subject = object;
        object = swap;
    }
    sPath = TransformerHelper.removeScopes(sPath);
    int operatorPos = TransformerHelper.getPosOfNextOperator(sPath);
    if (operatorPos < 0) {
        // base case: a single <uri> segment — drop the surrounding angle brackets
        Node predicate = NodeFactory.createURI(sPath.substring(1, sPath.length() - 1));
        result.add(new Triple(subject, predicate, object));
        return result;
    }
    // recursive case: split at the operator and chain the halves via a fresh variable
    Node joinVariable = NodeFactory.createVariable(getNextVaribleName());
    result.addAll(transformPathToBasicPattern(subject, sPath.substring(0, operatorPos), joinVariable));
    result.addAll(transformPathToBasicPattern(joinVariable, sPath.substring(operatorPos + 1), object));
    return result;
}
示例5: read
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Deserializes a Template: reads a triple count, then that many triples,
 * collecting them into a fresh basic pattern.
 */
@Override
public Template read(Kryo kryo, Input input, Class<Template> type) {
    final int count = input.readInt();
    final BasicPattern pattern = new BasicPattern();
    int remaining = count;
    while (remaining-- > 0) {
        pattern.add((Triple) kryo.readClassAndObject(input));
    }
    return new Template(pattern);
}
示例6: getCacheQueryPattern
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Builds the BGP used to look this facet up in the cache: a ?facet typed as a
 * Reneviz Facet, matched on facet type, topic and property sequence, with its
 * clusters, their reason and two members (?source, ?target).
 *
 * @return the cache-lookup basic pattern
 */
protected BasicPattern getCacheQueryPattern() {
    Var facet = Var.alloc("facet");
    Var cluster = Var.alloc("cluster");
    Var reason = Var.alloc("reason");
    Var source = Var.alloc("source");
    Var target = Var.alloc("target");
    Node facetTypeLit = NodeFactory.createLiteral(this.getClass().getName());
    Node propertySeqLit = NodeFactory.createLiteral(StringUtils.join(this.getPropertyPath(), ";"));
    // clauses in SPARQL order:
    // ?facet rdf:type reneviz:Facet ; reneviz:facetType <class> ;
    //        reneviz:hasTopic <topic> ; reneviz:propertySequence <seq> ;
    //        reneviz:hasCluster ?cluster .
    // ?cluster reneviz:hasReason ?reason ; reneviz:hasMember ?source, ?target .
    Triple[] clauses = {
        Triple.create(facet, RDF.type.asNode(), Reneviz.Facet.asNode()),
        Triple.create(facet, Reneviz.facetType.asNode(), facetTypeLit),
        Triple.create(facet, Reneviz.hasTopic.asNode(), this.getTopic().asNode()),
        Triple.create(facet, Reneviz.propertySequence.asNode(), propertySeqLit),
        Triple.create(facet, Reneviz.hasCluster.asNode(), cluster),
        Triple.create(cluster, Reneviz.hasReason.asNode(), reason),
        Triple.create(cluster, Reneviz.hasMember.asNode(), source),
        Triple.create(cluster, Reneviz.hasMember.asNode(), target),
    };
    BasicPattern bp = new BasicPattern();
    for (Triple clause : clauses) {
        bp.add(clause);
    }
    return bp;
}
示例7: getInnerQueryOp
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Builds the inner-query algebra for this facet: a BGP typing ?member with the
 * topic and walking the configured property path from ?reason to ?member.
 * NOTE(review): here the path runs TOWARDS the member (?reason is the path's
 * subject), the opposite direction of the sibling implementation — presumably
 * the inverse-direction variant; confirm against callers before changing.
 *
 * @return the BGP op for the inner query
 */
protected Op getInnerQueryOp() {
    int rank = this.getPropertyPath().size() - 1;
    Var memberVar = Var.alloc("member");
    Var reasonVar = Var.alloc("reason");
    // intermediate join variables for multi-step paths
    // (removed the unused intermediateExprsSource list from the original)
    List<Var> intermediateVarsSource = new ArrayList<Var>(rank);
    for (int i = 0; i < rank; i++) {
        intermediateVarsSource.add(Var.alloc("intermediatevarsource" + i));
    }
    BasicPattern bp = new BasicPattern();
    // ?member rdf:type <topic>.
    bp.add(Triple.create(memberVar, RDF.type.asNode(), this.getTopic().asNode()));
    if (rank == 0) {
        // single-step path: ?reason <prop0> ?member.
        bp.add(Triple.create(reasonVar, this.getPropertyPath().get(0).asNode(), memberVar));
    } else {
        // ?reason <prop0> ?ivs0
        bp.add(Triple.create(reasonVar, this.getPropertyPath().get(0).asNode(),
                intermediateVarsSource.get(0)));
        for (int i = 0; i < (rank - 1); i++) {
            // ?ivs(i) <prop(i+1)> ?ivs(i+1)
            bp.add(Triple.create(intermediateVarsSource.get(i),
                    this.getPropertyPath().get(i + 1).asNode(),
                    intermediateVarsSource.get(i + 1)));
        }
        // ?ivs(rank-1) <prop(rank)> ?member.
        bp.add(Triple.create(intermediateVarsSource.get(rank - 1),
                this.getPropertyPath().get(rank).asNode(), memberVar));
    }
    return new OpBGP(bp);
}
示例8: getInnerQueryOp
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Builds the inner-query algebra for this facet: a BGP typing ?member with the
 * topic and walking the configured property path from ?member to ?reason.
 *
 * @return the BGP op for the inner query
 */
protected Op getInnerQueryOp() {
    int rank = this.getPropertyPath().size() - 1;
    Var memberVar = Var.alloc("member");
    Var reasonVar = Var.alloc("reason");
    // intermediate join variables for multi-step paths
    // (removed the unused intermediateExprsSource list from the original)
    List<Var> intermediateVarsSource = new ArrayList<Var>(rank);
    for (int i = 0; i < rank; i++) {
        intermediateVarsSource.add(Var.alloc("intermediatevarsource" + i));
    }
    BasicPattern bp = new BasicPattern();
    // ?member rdf:type <topic>.
    bp.add(Triple.create(memberVar, RDF.type.asNode(), this.getTopic().asNode()));
    if (rank == 0) {
        // single-step path: ?member <prop0> ?reason.
        bp.add(Triple.create(memberVar, this.getPropertyPath().get(0).asNode(), reasonVar));
    } else {
        // ?member <prop0> ?ivs0
        bp.add(Triple.create(memberVar, this.getPropertyPath().get(0).asNode(),
                intermediateVarsSource.get(0)));
        for (int i = 0; i < (rank - 1); i++) {
            // ?ivs(i) <prop(i+1)> ?ivs(i+1)
            bp.add(Triple.create(intermediateVarsSource.get(i),
                    this.getPropertyPath().get(i + 1).asNode(),
                    intermediateVarsSource.get(i + 1)));
        }
        // ?ivs(rank-1) <prop(rank)> ?reason.
        bp.add(Triple.create(intermediateVarsSource.get(rank - 1),
                this.getPropertyPath().get(rank).asNode(), reasonVar));
    }
    return new OpBGP(bp);
}
示例9: getInnerQueryOp
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Builds the inner-query algebra connecting ?member to ?target through the
 * configured property path, typing both ends with the topic and filtering out
 * solutions where any intermediate hop equals the member itself.
 *
 * @return the BGP wrapped in one filter per intermediate hop
 */
protected Op getInnerQueryOp() {
    final int rank = this.getPropertyPath().size() - 1;
    final Var memberVar = Var.alloc("member");
    final Var targetVar = Var.alloc("target");
    final Expr memberExpr = new ExprVar(memberVar);
    // one fresh variable (and matching expression) per intermediate hop
    final List<Var> hopVars = new ArrayList<Var>(rank);
    final List<Expr> hopExprs = new ArrayList<Expr>(rank);
    for (int i = 0; i < rank; i++) {
        Var hop = Var.alloc("intermediatevar" + i);
        hopVars.add(hop);
        hopExprs.add(new ExprVar(hop));
    }
    BasicPattern bp = new BasicPattern();
    // both endpoints are typed with the topic
    bp.add(Triple.create(memberVar, RDF.type.asNode(), this.getTopic().asNode()));
    bp.add(Triple.create(targetVar, RDF.type.asNode(), this.getTopic().asNode()));
    // FILTER (hop(i) != ?member) for every intermediate hop
    final List<Expr> hopFilters = new ArrayList<Expr>(rank);
    for (Expr hopExpr : hopExprs) {
        hopFilters.add(new E_NotEquals(hopExpr, memberExpr));
    }
    if (rank == 0) {
        // direct edge: ?member <prop0> ?target.
        bp.add(Triple.create(memberVar, this.getPropertyPath().get(0).asNode(), targetVar));
    } else {
        // ?member <prop0> ?hop0
        bp.add(Triple.create(memberVar, this.getPropertyPath().get(0).asNode(), hopVars.get(0)));
        for (int i = 1; i < rank; i++) {
            // ?hop(i-1) <prop(i)> ?hop(i)
            bp.add(Triple.create(hopVars.get(i - 1),
                    this.getPropertyPath().get(i).asNode(), hopVars.get(i)));
        }
        // ?hop(rank-1) <prop(rank)> ?target.
        bp.add(Triple.create(hopVars.get(rank - 1),
                this.getPropertyPath().get(rank).asNode(), targetVar));
    }
    Op op = new OpBGP(bp);
    for (Expr filter : hopFilters) {
        op = OpFilter.filter(filter, op);
    }
    return op;
}
示例10: graphBaseFind
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Answers a triple-pattern lookup by translating it into a paged
 * {@code SELECT *} query against the remote SPARQL endpoint. Unbound slots of
 * the pattern become query variables ?s/?p/?o; results are fetched in slices
 * of {@code tripleSliceSize} via LIMIT/OFFSET until an empty page is seen.
 *
 * @param m the triple pattern to match (null slots mean wildcard)
 * @return an iterator over all matching triples (materialized into a set)
 */
@Override
protected ExtendedIterator<Triple> graphBaseFind(TripleMatch m) {
    // replace each unbound slot with a query variable
    Node s = m.getMatchSubject();
    if (s == null) s = Var.alloc("s");
    Node p = m.getMatchPredicate();
    if (p == null) p = Var.alloc("p");
    Node o = m.getMatchObject();
    if (o == null) o = Var.alloc("o");
    BasicPattern pattern = new BasicPattern();
    pattern.add(new Triple(s, p, o));
    Query query = new Query();
    query.setQuerySelectType();
    query.setQueryResultStar(true);
    query.setQueryPattern(new ElementTriplesBlock(pattern));
    query.setLimit(tripleSliceSize);
    Set<Triple> triples = new HashSet<Triple>();
    long offsetCounter = 0;
    boolean resNotEmpty = true;
    while (resNotEmpty) {
        query.setOffset(tripleSliceSize * offsetCounter++);
        QueryExecution qe = QueryExecutionFactory.sparqlService(serviceURI, query);
        try {
            logger.debug(query.serialize());
            ResultSet res = qe.execSelect();
            if (!res.hasNext()) resNotEmpty = false;
            while (res.hasNext()) {
                QuerySolution sol = res.nextSolution();
                triples.add(new Triple(
                        resolveNode(s, "s", sol),
                        resolveNode(p, "p", sol),
                        resolveNode(o, "o", sol)));
            }
        } finally {
            qe.close(); // original leaked the execution when execSelect/iteration threw
        }
    }
    return WrappedIterator.createNoRemove(triples.iterator());
}

/**
 * Resolves one slot of the pattern: a variable is looked up in the solution,
 * a concrete node is returned as-is.
 */
private static Node resolveNode(Node patternNode, String varName, QuerySolution sol) {
    return patternNode.isVariable() ? sol.get(varName).asNode() : patternNode;
}
示例11: createLinksetQueryClauses
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Builds, for every linkset in this collection that supports instance mapping,
 * the SERVICE sub-plans (one OpService per subject/object clause pair) that
 * join a query variable across the linkset's subject dataset and object
 * dataset via the linkset's link predicate.
 * Side effects: marks each matched query variable as linked and registers the
 * dataset/linkset associations on it.
 *
 * @param queryVars the variables of the query being planned
 * @return the accumulated set of dataset query-variable linksets
 */
public DatasetQueryVarLinksets createLinksetQueryClauses(QueryVars queryVars) {
DatasetQueryVarLinksets linksetOpServices = new DatasetQueryVarLinksets ();
for (Linkset linkset : this) {
if (linkset.supportsInstanceMapping()) {
// only variables typed with the linkset's subject class can be mapped
QueryVars variablesOfSubjectClass = queryVars.locateVariablesOfClass(linkset.getSubjectsClass());
for (QueryVar linkQueryVariable : variablesOfSubjectClass) {
// TODO need to remove the need to pass through
// queryVariables.
QueryClauses subjectQueryClauses = linkset.getSubjectsDataset().getClauseVariables(queryVars,
linkQueryVariable);
// These dataset queryClause variables could be linked to
// another dataset queryClause variable
// So now for each we need to find the 'other' end of the
// linkset and find any clauses in the objectsDataset that
// share the same variable.
// (Does it have to conform to objectsClass?)
QueryClauses objectQueryClauses = linkset.getObjectsDataset().getClauseVariables(queryVars,
linkQueryVariable);
// Now we have both ends of the link that we can create
// between clause variables, as long as not the same
// dataset
// subjectQueryClauses/subjectQueryVariable sameAs
// objectQueryClauses/objectQueryVariable
// Generate the SPARQL algebra
for (QueryClause subjectQueryClause : subjectQueryClauses) {
for (QueryClause objectQueryClause : objectQueryClauses) {
linkQueryVariable.setLinked();
// SERVICE <linkset endpoint> { ?subjVar <linkPredicate> ?objVar }
Node linksetNode = NodeFactory.createURI(linkset.getSparqlEndPoint().toString());
BasicPattern pattern = new BasicPattern();
Node pred = NodeFactory.createURI(linkset.getLinkPredicate().toString());
pattern.add(Triple.create(linkQueryVariable.getLinkedVar(linkset.getSubjectsDataset()), pred,linkQueryVariable.getLinkedVar(linkset.getObjectsDataset())));
DatasetQueryVarLinkset subjectDatasetQueryVarLinkset = linkset.getSubjectsDataset().getDatasetQueryVarLinkset(
linkQueryVariable,linkset);
DatasetQueryVarLinkset objectDatasetQueryVarLinkset = linkset.getObjectsDataset().getDatasetQueryVarLinkset(
linkQueryVariable,linkset);
LinksetOpService linksetOpService = new LinksetOpService(linkset, new OpService(
linksetNode, new OpBGP(pattern), false));
// register the link symmetrically: subject->object and object->subject
linkQueryVariable.addDatasetQueryVar(subjectDatasetQueryVarLinkset, linksetOpService,
objectDatasetQueryVarLinkset);
linkQueryVariable.addDatasetQueryVar(objectDatasetQueryVarLinkset, linksetOpService,
subjectDatasetQueryVarLinkset);
linksetOpServices.add(subjectDatasetQueryVarLinkset);
linksetOpServices.add(objectDatasetQueryVarLinkset);
}
}
}
}
}
return linksetOpServices;
}
示例12: applyConstraint
import com.hp.hpl.jena.sparql.core.BasicPattern; //导入方法依赖的package包/类
/**
 * Wraps the given algebra with this constraint's filter. The filter is applied
 * to ?reason's rdfs:label when ?reason is an IRI (joined via OPTIONAL), and to
 * ?reason itself otherwise. Returns the op unchanged when the constraint value
 * is empty or the expression yields no filter.
 *
 * @param op     the algebra to constrain
 * @param reason the variable the constraint applies to
 * @return the (possibly) filtered algebra
 */
@Override
public Op applyConstraint(Op op, Var reason) {
    logger.debug("Applying constraint with value='" + this.getValue() + "' and expression=" + this.getExpression());
    if (value == null || value.isEmpty()) {
        // nothing to constrain on
        return op;
    }
    Var reasonLabel = Var.alloc("reasonlabel");
    // OPTIONAL { ?reason rdfs:label ?reasonlabel }
    BasicPattern labelPattern = new BasicPattern();
    labelPattern.add(Triple.create(reason, RDFS.label.asNode(), reasonLabel));
    op = OpLeftJoin.create(op, new OpBGP(labelPattern), new ExprList());
    // filter target: the label when ?reason is an IRI, otherwise ?reason itself
    Expr filterTarget = new E_Conditional(
            new E_IsIRI(new ExprVar(reason)),
            new ExprVar(reasonLabel),
            new ExprVar(reason));
    Expr filter = this.getExpression().getFilter(filterTarget, value);
    return (filter == null) ? op : OpFilter.filter(filter, op);
}