This article collects typical usage examples of the Java method org.openrdf.query.impl.MapBindingSet.addBinding. If you are wondering what MapBindingSet.addBinding does, how to use it, or where to find examples of it, the curated code samples below should help. You can also explore further usage examples of its containing class, org.openrdf.query.impl.MapBindingSet.
The following presents 15 code examples of MapBindingSet.addBinding, sorted by popularity by default. You can vote for the examples you like or find useful; your feedback helps the system recommend better Java code examples.
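Before turning to the examples, here is a minimal sketch of the method itself (the variable names are purely illustrative): addBinding(String, Value) binds a variable name to an RDF Value, while addBinding(Binding) copies in an existing Binding; both overloads appear in the examples that follow.
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.impl.MapBindingSet;
// Build a binding set by hand and attach two bindings to it.
final ValueFactory vf = new ValueFactoryImpl();
final MapBindingSet bindingSet = new MapBindingSet();
bindingSet.addBinding("name", vf.createLiteral("Alice"));                    // String + Value overload
bindingSet.addBinding("homepage", vf.createURI("http://example.org/alice")); // URI value from the factory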
Example 1: sum
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void sum() throws Exception {
// A query that sums the counts of all of the items that are in the inventory.
final String sparql =
"SELECT (sum(?count) as ?itemSum) { " +
"?item <urn:count> ?count . " +
"}";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final Collection<Statement> statements = Sets.newHashSet(
vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:count"), vf.createLiteral(5)),
vf.createStatement(vf.createURI("urn:gum"), vf.createURI("urn:count"), vf.createLiteral(7)),
vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:count"), vf.createLiteral(2)));
// Create the PCJ in Fluo and load the statements into Rya.
final String pcjId = loadDataAndCreateQuery(sparql, statements);
// Create the expected results of the SPARQL query once the PCJ has been computed.
final MapBindingSet expectedResult = new MapBindingSet();
expectedResult.addBinding("itemSum", vf.createLiteral("14", XMLSchema.INTEGER));
// Ensure the last result matches the expected result.
final VisibilityBindingSet result = readLastResult(pcjId);
assertEquals(expectedResult, result);
}
Example 2: toString_URIs
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void toString_URIs() throws BindingSetConversionException {
// Setup the binding set that will be converted.
final MapBindingSet originalBindingSet = new MapBindingSet();
originalBindingSet.addBinding("x", new URIImpl("http://a"));
originalBindingSet.addBinding("y", new URIImpl("http://b"));
originalBindingSet.addBinding("z", new URIImpl("http://c"));
final VisibilityBindingSet visiSet = new VisibilityBindingSet(originalBindingSet, "A&B&C");
// Convert it to a String.
final VariableOrder varOrder = new VariableOrder("y", "z", "x");
final VisibilityBindingSetStringConverter converter = new VisibilityBindingSetStringConverter();
final String bindingSetString = converter.convert(visiSet, varOrder);
// Ensure it converted to the expected result.
final String expected =
"http://b<<~>>http://www.w3.org/2001/XMLSchema#anyURI:::" +
"http://c<<~>>http://www.w3.org/2001/XMLSchema#anyURI:::" +
"http://a<<~>>http://www.w3.org/2001/XMLSchema#anyURI" +
VISIBILITY_DELIM + "A&B&C";
assertEquals(expected, bindingSetString);
}
Example 3: doEvaluate
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Override
void doEvaluate(final Session session, final BindingSet input,
final MapBindingSet output) throws Throwable {
final URI id = (URI) Statements.parseValue(this.id.instantiate(input),
Namespaces.DEFAULT);
long numTriples = 0L;
try {
final Stream<Record> stream = session.retrieve(this.layer).ids(id)
.properties(this.properties).exec();
numTriples = Record.encode(stream, ImmutableList.of(this.layer)).count();
if (numTriples == 0) {
LOGGER.warn("No results for LOOKUP request, layer "
+ TestUtil.format(this.layer) + ", id " + id);
}
} catch (final Throwable ex) {
throw new RuntimeException("Failed LOOKUP " + TestUtil.format(this.layer)
+ ", id " + TestUtil.format(id) + ", properties " + this.properties,
ex);
} finally {
output.addBinding("size", FACTORY.createLiteral(numTriples));
}
}
Example 4: missingObject
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void missingObject() {
// Create the input binding set.
final ValueFactory vf = new ValueFactoryImpl();
final MapBindingSet bindingSet = new MapBindingSet();
bindingSet.addBinding("subject", vf.createURI("urn:Alice"));
bindingSet.addBinding("predicate", vf.createURI("urn:age"));
final VisibilityBindingSet visBs = new VisibilityBindingSet(bindingSet, "a");
// Mock the processor context that will be invoked.
final ProcessorContext context = mock(ProcessorContext.class);
// Run the test.
final StatementOutputFormatter formatter = new StatementOutputFormatter();
formatter.init(context);
formatter.process("key", ProcessorResult.make(new UnaryResult(visBs)));
// Verify the mock was never invoked.
verify(context, times(0)).forward(any(), any());
}
Example 5: binaryResult
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void binaryResult() {
// Create the input binding set.
final ValueFactory vf = new ValueFactoryImpl();
final MapBindingSet bindingSet = new MapBindingSet();
bindingSet.addBinding("subject", vf.createURI("urn:Alice"));
bindingSet.addBinding("predicate", vf.createURI("urn:age"));
bindingSet.addBinding("object", vf.createLiteral(34));
final VisibilityBindingSet visBs = new VisibilityBindingSet(bindingSet, "a");
// Mock the processor context that will be invoked.
final ProcessorContext context = mock(ProcessorContext.class);
// Run the test.
final StatementOutputFormatter formatter = new StatementOutputFormatter();
formatter.init(context);
formatter.process("key", ProcessorResult.make(new BinaryResult(Side.LEFT, visBs)));
// Verify the mock was invoked with the expected output.
final VisibilityStatement expectedStmt = new VisibilityStatement(vf.createStatement(
vf.createURI("urn:Alice"),
vf.createURI("urn:age"),
vf.createLiteral(34)), "a");
verify(context, times(1)).forward(eq("key"), eq(expectedStmt));
}
Example 6: average
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void average() throws Exception {
// A query that finds the average price for an item that is in the inventory.
final String sparql =
"SELECT (avg(?price) as ?averagePrice) { " +
"?item <urn:price> ?price . " +
"}";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final Collection<Statement> statements = Sets.newHashSet(
vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:price"), vf.createLiteral(3)),
vf.createStatement(vf.createURI("urn:gum"), vf.createURI("urn:price"), vf.createLiteral(4)),
vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(8)));
// Create the PCJ in Fluo and load the statements into Rya.
final String pcjId = loadDataAndCreateQuery(sparql, statements);
// Create the expected results of the SPARQL query once the PCJ has been computed.
final MapBindingSet expectedResult = new MapBindingSet();
expectedResult.addBinding("averagePrice", vf.createLiteral("5", XMLSchema.DECIMAL));
// Ensure the last result matches the expected result.
final VisibilityBindingSet result = readLastResult(pcjId);
assertEquals(expectedResult, result);
}
Example 7: showWithinWorks
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void showWithinWorks() throws Exception {
// Enumerate some topics that will be re-used
final String ryaInstance = UUID.randomUUID().toString();
final UUID queryId = UUID.randomUUID();
final String statementsTopic = KafkaTopics.statementsTopic(ryaInstance);
final String resultsTopic = KafkaTopics.queryResultsTopic(queryId);
// Get the RDF model objects that will be used to build the query.
final String sparql =
"PREFIX time: <http://www.w3.org/2006/time/> \n"
+ "PREFIX tempf: <" + TemporalURIs.NAMESPACE + ">\n"
+ "SELECT * \n"
+ "WHERE { \n"
+ " <urn:time> time:atTime ?date .\n"
+ " FILTER(tempf:within(?date, \"" + TIME.toString() + "/" + TIME_20.toString() + "\")) "
+ "}";
// Setup a topology.
final TopologyBuilder builder = new TopologyFactory().build(sparql, statementsTopic, resultsTopic, new RandomUUIDFactory());
// Create the statements that will be input into the query.
final ValueFactory vf = new ValueFactoryImpl();
final List<VisibilityStatement> statements = getStatements();
// Make the expected results.
final Set<VisibilityBindingSet> expected = new HashSet<>();
final MapBindingSet bs = new MapBindingSet();
bs.addBinding("date", vf.createLiteral(TIME_10.toString()));
expected.add( new VisibilityBindingSet(bs, "a") );
// Run the test.
RyaStreamsTestUtil.runStreamProcessingTest(kafka, statementsTopic, resultsTopic, builder, statements, expected, VisibilityBindingSetDeserializer.class);
}
Example 8: project
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
/**
* Applies the projection to a value. If the result has a blank node whose ID is not mapped to a value in
* {@code blankNodes}, then a random UUID will be used.
*
* @param bs - The value the projection will be applied to. (not null)
* @param blankNodes - A map from node source names to the blank nodes that will be used for those names. (not null)
* @return A new value that is the result of the projection.
*/
public VisibilityBindingSet project(final VisibilityBindingSet bs, final Map<String, BNode> blankNodes) {
requireNonNull(bs);
requireNonNull(blankNodes);
// Apply the projection elements against the original binding set.
final MapBindingSet result = new MapBindingSet();
for (final ProjectionElem elem : projectionElems.getElements()) {
final String sourceName = elem.getSourceName();
Value value = null;
// If the binding set already has the source name, then use the target name.
if (bs.hasBinding(sourceName)) {
value = bs.getValue(elem.getSourceName());
}
// If the source name represents a constant value, then use the constant.
else if(constantSources.containsKey(sourceName)) {
value = constantSources.get(sourceName);
}
// If the source name represents an anonymous value, then create a Blank Node.
else if(anonymousSources.contains(sourceName)) {
if(blankNodes.containsKey(sourceName)) {
value = blankNodes.get(sourceName);
} else {
value = vf.createBNode( UUID.randomUUID().toString() );
}
}
// Only add the value if there is one. There may not be one if a binding is optional.
if(value != null) {
result.addBinding(elem.getTargetName(), value);
}
}
return new VisibilityBindingSet(result, bs.getVisibility());
}
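A minimal usage sketch of the projection above, assuming it is invoked from a context where the method and its enclosing class are in scope (the binding and blank node names below are made up for illustration and do not come from the original snippet):
// Hypothetical usage: apply the projection to a binding set produced by the WHERE clause,
// supplying a pre-chosen blank node for one anonymous source name.
final ValueFactory vf = new ValueFactoryImpl();
final MapBindingSet bs = new MapBindingSet();
bs.addBinding("person", vf.createURI("urn:Alice"));
bs.addBinding("business", vf.createURI("urn:TacoJoint"));
final Map<String, BNode> blankNodes = new HashMap<>();
blankNodes.put("-anon-1", vf.createBNode("fixedNode")); // "-anon-1" is an invented source name
final VisibilityBindingSet projected = project(new VisibilityBindingSet(bs, "a"), blankNodes);
// Any anonymous source that is not present in blankNodes would receive a fresh, random blank node instead.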
Example 9: showAfterWorks
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void showAfterWorks() throws Exception {
// Enumerate some topics that will be re-used
final String ryaInstance = UUID.randomUUID().toString();
final UUID queryId = UUID.randomUUID();
final String statementsTopic = KafkaTopics.statementsTopic(ryaInstance);
final String resultsTopic = KafkaTopics.queryResultsTopic(queryId);
// Get the RDF model objects that will be used to build the query.
final String sparql =
"PREFIX time: <http://www.w3.org/2006/time/> \n"
+ "PREFIX tempf: <" + TemporalURIs.NAMESPACE + ">\n"
+ "SELECT * \n"
+ "WHERE { \n"
+ " <urn:time> time:atTime ?date .\n"
+ " FILTER(tempf:after(?date, \"" + TIME_10.toString() + "\")) "
+ "}";
// Setup a topology.
final TopologyBuilder builder = new TopologyFactory().build(sparql, statementsTopic, resultsTopic, new RandomUUIDFactory());
// Create the statements that will be input into the query.
final ValueFactory vf = new ValueFactoryImpl();
final List<VisibilityStatement> statements = getStatements();
// Make the expected results.
final Set<VisibilityBindingSet> expected = new HashSet<>();
final MapBindingSet bs = new MapBindingSet();
bs.addBinding("date", vf.createLiteral(TIME_20.toString()));
expected.add( new VisibilityBindingSet(bs, "a") );
// Run the test.
RyaStreamsTestUtil.runStreamProcessingTest(kafka, statementsTopic, resultsTopic, builder, statements, expected, VisibilityBindingSetDeserializer.class);
}
Example 10: showProcessorWorks
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void showProcessorWorks() throws Exception {
// Enumerate some topics that will be re-used
final String ryaInstance = UUID.randomUUID().toString();
final UUID queryId = UUID.randomUUID();
final String statementsTopic = KafkaTopics.statementsTopic(ryaInstance);
final String resultsTopic = KafkaTopics.queryResultsTopic(queryId);
// Get the RDF model objects that will be used to build the query.
final String sparql =
"SELECT * " +
"WHERE { " +
"FILTER(?age < 10)" +
"?person <urn:age> ?age " +
"}";
// Setup a topology.
final TopologyBuilder builder = new TopologyFactory().build(sparql, statementsTopic, resultsTopic, new RandomUUIDFactory());
// Create the statements that will be input into the query.
final ValueFactory vf = new ValueFactoryImpl();
final List<VisibilityStatement> statements = new ArrayList<>();
statements.add(new VisibilityStatement(vf.createStatement(vf.createURI("urn:Bob"), vf.createURI("urn:age"), vf.createLiteral(11)), "a"));
statements.add(new VisibilityStatement(vf.createStatement(vf.createURI("urn:Alice"), vf.createURI("urn:age"), vf.createLiteral(9)), "a"));
// Make the expected results.
final Set<VisibilityBindingSet> expected = new HashSet<>();
final MapBindingSet bs = new MapBindingSet();
bs.addBinding("person", vf.createURI("urn:Alice"));
bs.addBinding("age", vf.createLiteral(9));
expected.add( new VisibilityBindingSet(bs, "a") );
// Run the test.
RyaStreamsTestUtil.runStreamProcessingTest(kafka, statementsTopic, resultsTopic, builder, statements, expected, VisibilityBindingSetDeserializer.class);
}
Example 11: constantSubjQuery_Test
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void constantSubjQuery_Test() throws Exception {
final Sail sail = GeoRyaSailFactory.getInstance(conf);
final SailRepositoryConnection conn = new SailRepository(sail).getConnection();
try {
addStatements(conn);
final String query =
"PREFIX time: <http://www.w3.org/2006/time#> \n"
+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"
+ "PREFIX geo: <http://www.opengis.net/ont/geosparql#>"
+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>"
+ "SELECT * "
+ "WHERE { "
+ " <urn:event1> time:atTime ?time . "
+ " <urn:event1> geo:asWKT ?point . "
+ " FILTER(geof:sfWithin(?point, \"POLYGON((-3 -2, -3 2, 1 2, 1 -2, -3 -2))\"^^geo:wktLiteral)) "
+ " FILTER(tempo:equals(?time, \"2015-12-30T12:00:00Z\")) "
+ "}";
final TupleQueryResult rez = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate();
final Set<BindingSet> results = new HashSet<>();
while(rez.hasNext()) {
final BindingSet bs = rez.next();
results.add(bs);
}
final MapBindingSet expected = new MapBindingSet();
expected.addBinding("point", VF.createLiteral("POINT (0 0)"));
expected.addBinding("time", VF.createLiteral("2015-12-30T12:00:00Z"));
assertEquals(1, results.size());
assertEquals(expected, results.iterator().next());
} finally {
conn.close();
sail.shutDown();
}
}
Example 12: showBeforeWorks
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void showBeforeWorks() throws Exception {
// Enumerate some topics that will be re-used
final String ryaInstance = UUID.randomUUID().toString();
final UUID queryId = UUID.randomUUID();
final String statementsTopic = KafkaTopics.statementsTopic(ryaInstance);
final String resultsTopic = KafkaTopics.queryResultsTopic(queryId);
// Get the RDF model objects that will be used to build the query.
final String sparql =
"PREFIX time: <http://www.w3.org/2006/time/> \n"
+ "PREFIX tempf: <" + TemporalURIs.NAMESPACE + ">\n"
+ "SELECT * \n"
+ "WHERE { \n"
+ " <urn:time> time:atTime ?date .\n"
+ " FILTER(tempf:before(?date, \"" + TIME_10.toString() + "\")) "
+ "}";
// Setup a topology.
final TopologyBuilder builder = new TopologyFactory().build(sparql, statementsTopic, resultsTopic, new RandomUUIDFactory());
// Create the statements that will be input into the query.
final ValueFactory vf = new ValueFactoryImpl();
final List<VisibilityStatement> statements = getStatements();
// Make the expected results.
final Set<VisibilityBindingSet> expected = new HashSet<>();
final MapBindingSet bs = new MapBindingSet();
bs.addBinding("date", vf.createLiteral(TIME.toString()));
expected.add( new VisibilityBindingSet(bs, "a") );
// Run the test.
RyaStreamsTestUtil.runStreamProcessingTest(kafka, statementsTopic, resultsTopic, builder, statements, expected, VisibilityBindingSetDeserializer.class);
}
Example 13: keepBindings
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
/**
* Create a new {@link BindingSet} that only includes the bindings whose names appear within the {@code variableOrder}.
* If no binding is found for a variable, then that binding is just omitted from the resulting object.
*
* @param variableOrder - Defines which bindings will be kept. (not null)
* @param bindingSet - Contains the source {@link Binding}s. (not null)
* @return A new {@link BindingSet} containing only the specified bindings.
*/
public static BindingSet keepBindings(final VariableOrder variableOrder, final BindingSet bindingSet) {
requireNonNull(variableOrder);
requireNonNull(bindingSet);
final MapBindingSet result = new MapBindingSet();
for(final String bindingName : variableOrder) {
if(bindingSet.hasBinding(bindingName)) {
final Binding binding = bindingSet.getBinding(bindingName);
result.addBinding(binding);
}
}
return result;
}
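A minimal usage sketch of the helper above (the values are illustrative, and the unqualified call assumes the snippet's enclosing utility class is in scope):
// Hypothetical usage: keep only the bindings named by the variable order.
final ValueFactory vf = new ValueFactoryImpl();
final MapBindingSet original = new MapBindingSet();
original.addBinding("x", vf.createURI("urn:Alice"));
original.addBinding("y", vf.createURI("urn:Bob"));
original.addBinding("z", vf.createURI("urn:TacoJoint"));
// "w" is in the variable order but has no binding, so it is simply omitted from the result.
final BindingSet filtered = keepBindings(new VariableOrder("x", "y", "w"), original);
// filtered now contains only x -> urn:Alice and y -> urn:Bob.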
Example 14: showProjectionFunctionIsCalled
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@Test
public void showProjectionFunctionIsCalled() throws Exception {
// A query whose projection does not change anything.
final Projection projection = RdfTestUtil.getProjection(
"SELECT (?person AS ?p) (?employee AS ?e) ?business " +
"WHERE { " +
"?person <urn:talksTo> ?employee . " +
"?employee <urn:worksAt> ?business . " +
"}");
// Create a Binding Set that contains the result of the WHERE clause.
final ValueFactory vf = new ValueFactoryImpl();
final MapBindingSet inputBs = new MapBindingSet();
inputBs.addBinding("person", vf.createURI("urn:Alice"));
inputBs.addBinding("employee", vf.createURI("urn:Bob"));
inputBs.addBinding("business", vf.createURI("urn:TacoJoint"));
final VisibilityBindingSet inputVisBs = new VisibilityBindingSet(inputBs, "a");
// The expected binding set changes the "person" binding name to "p" and "employee" to "e".
final MapBindingSet expectedBs = new MapBindingSet();
expectedBs.addBinding("p", vf.createURI("urn:Alice"));
expectedBs.addBinding("e", vf.createURI("urn:Bob"));
expectedBs.addBinding("business", vf.createURI("urn:TacoJoint"));
final VisibilityBindingSet expectedVisBs = new VisibilityBindingSet(expectedBs, "a");
// Show it resulted in the correct output BindingSet.
final ProcessorContext context = mock(ProcessorContext.class);
final ProjectionProcessor processor = new ProjectionProcessor(
ProjectionEvaluator.make(projection),
result -> ProcessorResult.make(new UnaryResult(result)));
processor.init(context);
processor.process("key", ProcessorResult.make(new UnaryResult(inputVisBs)));
// Verify the expected binding set was emitted.
final ProcessorResult expected = ProcessorResult.make(new UnaryResult(expectedVisBs));
verify(context, times(1)).forward(eq("key"), eq(expected));
}
Example 15: batchbindingSetsQuery
import org.openrdf.query.impl.MapBindingSet; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Test
public void batchbindingSetsQuery() throws Exception {
final MongoDBRyaDAO dao = new MongoDBRyaDAO();
try(final MongoDBQueryEngine engine = new MongoDBQueryEngine()) {
engine.setConf(conf);
// Add data.
dao.setConf(conf);
dao.init();
dao.add(getStatement("u:a", "u:tt", "u:b"));
dao.add(getStatement("u:a", "u:tt", "u:c"));
// Run the test.
final RyaStatement s1 = getStatement(null, null, "u:b");
final MapBindingSet bs1 = new MapBindingSet();
bs1.addBinding("foo", new URIImpl("u:x"));
final Map.Entry<RyaStatement, BindingSet> e1 = new RdfCloudTripleStoreUtils.CustomEntry<>(s1, bs1);
final Collection<Entry<RyaStatement, BindingSet>> stmts1 = Lists.newArrayList(e1);
assertEquals(1, size(engine.queryWithBindingSet(stmts1, conf)));
final MapBindingSet bs2 = new MapBindingSet();
bs2.addBinding("foo", new URIImpl("u:y"));
final RyaStatement s2 = getStatement(null, null, "u:c");
final Map.Entry<RyaStatement, BindingSet> e2 = new RdfCloudTripleStoreUtils.CustomEntry<>(s2, bs2);
final Collection<Entry<RyaStatement, BindingSet>> stmts2 = Lists.newArrayList(e1, e2);
assertEquals(2, size(engine.queryWithBindingSet(stmts2, conf)));
final Map.Entry<RyaStatement, BindingSet> e3 = new RdfCloudTripleStoreUtils.CustomEntry<>(s2, bs1);
final Map.Entry<RyaStatement, BindingSet> e4 = new RdfCloudTripleStoreUtils.CustomEntry<>(s1, bs2);
final Collection<Entry<RyaStatement, BindingSet>> stmts3 = Lists.newArrayList(e1, e2, e3, e4);
assertEquals(4, size(engine.queryWithBindingSet(stmts3, conf)));
} finally {
dao.destroy();
}
}