This article collects typical usage examples of the Java class org.openrdf.query.Update. If you are unsure what the Update class is for, or how to use it in practice, the curated code examples below should help.
The Update class belongs to the org.openrdf.query package. A total of 15 code examples of the Update class are shown below, ordered by popularity.
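Before the examples, here is a minimal, self-contained sketch of the typical Update workflow: prepare a SPARQL update on a RepositoryConnection, then execute it. The in-memory MemoryStore-backed repository and the INSERT DATA statement are illustrative assumptions and do not come from the examples below.

import org.openrdf.query.QueryLanguage;
import org.openrdf.query.Update;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class UpdateSketch {
    public static void main(String[] args) throws Exception {
        // Assumed setup: a throwaway in-memory repository, used only for illustration.
        Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        RepositoryConnection conn = repo.getConnection();
        try {
            // Prepare a SPARQL 1.1 update on the connection and execute it.
            Update update = conn.prepareUpdate(QueryLanguage.SPARQL,
                    "INSERT DATA { <urn:example:s> <urn:example:p> <urn:example:o> }");
            update.execute();
        } finally {
            conn.close();
            repo.shutDown();
        }
    }
}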
Example 1: doUpdate
import org.openrdf.query.Update; // import the required package/class
public static final void doUpdate( UpdateExecutor query,
        RepositoryConnection rc, boolean dobindings ) throws RepositoryException,
        MalformedQueryException, UpdateExecutionException {
    String sparql = processNamespaces( dobindings ? query.getSparql()
            : query.bindAndGetSparql(), query.getNamespaces() );
    ValueFactory vfac = new ValueFactoryImpl();

    Update upd = rc.prepareUpdate( QueryLanguage.SPARQL, sparql );
    if ( dobindings ) {
        upd.setIncludeInferred( query.usesInferred() );
        query.setBindings( upd, vfac );
    }

    upd.execute();
    query.done();
}
Example 2: performUpdate
import org.openrdf.query.Update; // import the required package/class
private void performUpdate(final String query, final SailRepositoryConnection conn, final ServletOutputStream os,
        final String infer, final String vis) throws RepositoryException, MalformedQueryException, IOException {
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);

    if (infer != null && infer.length() > 0) {
        update.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
    }

    if (conn.getSailConnection() instanceof RdfCloudTripleStoreConnection && vis != null) {
        final RdfCloudTripleStoreConnection<?> sailConnection = (RdfCloudTripleStoreConnection<?>) conn.getSailConnection();
        sailConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, vis);
    }

    final long startTime = System.currentTimeMillis();
    try {
        update.execute();
    } catch (final UpdateExecutionException e) {
        final String message = "Update could not be successfully completed for query: ";
        os.print(String.format(message + "%s\n\n", StringEscapeUtils.escapeHtml4(query)));
        log.error(message + LogUtils.clean(query), e);
    }

    log.info(String.format("Update Time = %.3f\n", (System.currentTimeMillis() - startTime) / 1000.));
}
Example 3: clear
import org.openrdf.query.Update; // import the required package/class
/**
 * Clears the Anno4j underlying triplestore.
 * This is required in order to prevent a drop in throughput while parsing.
 *
 * @throws RepositoryException Thrown if no connection to the object repository could be made.
 * @throws UpdateExecutionException Thrown if an error occurred while executing the clearing query.
 */
private void clear() throws RepositoryException, UpdateExecutionException {
    String deleteUpdate = "DELETE {?s ?p ?o}\n" +
            "WHERE {?s ?p ?o}";

    ObjectConnection connection = anno4j.getObjectRepository().getConnection();

    Update update;
    try {
        update = connection.prepareUpdate(deleteUpdate);
    } catch (MalformedQueryException e) {
        e.printStackTrace();
        return;
    }

    update.execute();
}
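A side note on Example 3: Example 9 below empties a store through RepositoryConnection.clear() rather than a DELETE/WHERE update. Assuming the same connection object, either of the commented alternatives below (a sketch, not part of the original example) would have a similar effect:

// Sketch of alternative ways to empty the store (assumed, not from the original example):
// connection.clear();                                // drop all explicit statements via the repository API
// connection.prepareUpdate("CLEAR ALL").execute();   // or issue a SPARQL 1.1 CLEAR update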
Example 4: removeExistingConcepts
import org.openrdf.query.Update; // import the required package/class
public void removeExistingConcepts(Vector<String> subVector)
{
    for (int remIndex = 0; remIndex < subVector.size(); remIndex++)
    {
        try {
            String remQuery = subVector.elementAt(remIndex);
            logger.warn("Removing query " + remQuery);

            Update update = rc.prepareUpdate(QueryLanguage.SPARQL, remQuery);
            update.execute();

            this.baseRelEngine.execInsertQuery(remQuery);
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
Example 5: execInsertQuery
import org.openrdf.query.Update; // import the required package/class
@Override
public void execInsertQuery(String query) throws SailException, UpdateExecutionException, RepositoryException, MalformedQueryException {
    Update up = rc.prepareUpdate(QueryLanguage.SPARQL, query);
    //sc.addStatement(vf.createURI("<http://health.mil/ontologies/dbcm/Concept/Service/tom2>"),vf.createURI("<http://health.mil/ontologies/dbcm/Relation/Exposes>"),vf.createURI("<http://health.mil/ontologies/dbcm/Concept/BusinessLogicUnit/tom1>"));
    System.out.println("\nSPARQL: " + query);
    //tq.setIncludeInferred(true /* includeInferred */);
    //tq.evaluate();
    rc.setAutoCommit(false);
    up.execute();
    //rc.commit();
    InferenceEngine ie = ((BigdataSail) bdSail).getInferenceEngine();
    ie.computeClosure(null);
    rc.commit();
}
Example 6: copyCatalogAttributes
import org.openrdf.query.Update; // import the required package/class
private void copyCatalogAttributes(Value graph, Value catalogUri, Value rawGraph, RepositoryConnection connection)
        throws RepositoryException, MalformedQueryException, UpdateExecutionException {
    String query = "" +
            " PREFIX dcterms:<http://purl.org/dc/terms/>" +
            " insert into ?graph {" +
            " ?catalogUri ?p ?t. " +
            " }\n" +
            " where { " +
            " graph ?graph {\n" +
            " ?rawCatalogUri a <http://www.w3.org/ns/dcat#Catalog>." +
            " ?rawCatalogUri ?p ?t. " +
            " VALUES ?p { dcterms:publisher dcterms:description dcterms:title}" +
            " }" +
            "}";

    Update u = connection.prepareUpdate(QueryLanguage.SPARQL, query);
    u.setBinding("catalogUri", catalogUri);
    u.setBinding("graph", graph);
    u.execute();
}
Example 7: transform
import org.openrdf.query.Update; // import the required package/class
@Override
public void transform(Repository repository, URI graph, TransformContext context) throws TransformException {
    try {
        RepositoryConnection con = repository.getConnection();
        try {
            con.setAutoCommit(false);

            Update update = con.prepareUpdate(QueryLanguage.SPARQL, config.getQuery());

            DatasetImpl dataset = new DatasetImpl();
            dataset.setDefaultInsertGraph(graph);
            update.setDataset(dataset);

            update.execute();
            con.commit();
        } finally {
            con.close();
        }
    } catch (Exception ex) {
        throw new TransformException(ex);
    }
}
Example 8: testInsertDeleteData
import org.openrdf.query.Update; // import the required package/class
public void testInsertDeleteData() throws Exception {
    RepositoryConnection conn = repository.getConnection();

    String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "INSERT DATA\n" +
            "{ <http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();

    String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "select * where { <http://example/book3> ?p ?o. }";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(2, tupleHandler.getCount());

    String delete = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "\n" +
            "DELETE DATA\n" +
            "{ <http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
    update.execute();

    query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "select * where { <http://example/book3> ?p ?o. }";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(0, tupleHandler.getCount());

    conn.close();
}
Example 9: testClearAllGraph
import org.openrdf.query.Update; // import the required package/class
public void testClearAllGraph() throws Exception {
    RepositoryConnection conn = repository.getConnection();

    String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "PREFIX ex: <http://example/addresses#>\n" +
            "INSERT DATA\n" +
            "{ GRAPH ex:G1 {\n" +
            "<http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}\n" +
            "}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();

    insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "PREFIX ex: <http://example/addresses#>\n" +
            "INSERT DATA\n" +
            "{ GRAPH ex:G2 {\n" +
            "<http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}\n" +
            "}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();

    String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "select * where { <http://example/book3> ?p ?o. }";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(4, tupleHandler.getCount());

    tupleHandler = new CountTupleHandler();
    conn.clear();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(0, tupleHandler.getCount());

    conn.close();
}
Example 10: testGraphConfiguration
import org.openrdf.query.Update; // import the required package/class
@Test
public void testGraphConfiguration() throws Exception {
    // build a connection
    RdfCloudTripleStore store = new RdfCloudTripleStore();
    store.setConf(conf);
    store.setRyaDAO(ryaDAO);

    InferenceEngine inferenceEngine = new InferenceEngine();
    inferenceEngine.setRyaDAO(ryaDAO);
    store.setInferenceEngine(inferenceEngine);
    inferenceEngine.refreshGraph();
    store.initialize();

    SailRepository repository = new SailRepository(store);
    SailRepositoryConnection conn = repository.getConnection();

    String query = "INSERT DATA\n"//
            + "{ GRAPH <http://updated/test> {\n"//
            + " <urn:greatMother> owl:propertyChainAxiom <urn:12342> . " +
            " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . " +
            " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . " +
            " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . " +
            " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . " +
            " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> . }}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    inferenceEngine.refreshGraph();

    List<URI> chain = inferenceEngine.getPropertyChain(vf.createURI("urn:greatMother"));
    Assert.assertEquals(chain.size(), 2);
    Assert.assertEquals(chain.get(0), new InverseURI(vf.createURI("urn:isChildOf")));
    Assert.assertEquals(chain.get(1), vf.createURI("urn:MotherOf"));
}
Example 11: testDeleteTemporalData
import org.openrdf.query.Update; // import the required package/class
private static void testDeleteTemporalData(
        final SailRepositoryConnection conn) throws Exception {
    // Delete all stored dates
    final String sparqlDelete = "PREFIX time: <http://www.w3.org/2006/time#>\n"
            + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
            + "DELETE {\n" //
            + " ?event time:inXSDDateTime ?time . \n"
            + "}\n"
            + "WHERE { \n" + " ?event time:inXSDDateTime ?time . \n"//
            + "}";//
    final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL,
            sparqlDelete);
    deleteUpdate.execute();

    // Find all stored dates.
    final String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
            + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
            + "SELECT ?event ?time \n" //
            + "WHERE { \n"
            + " ?event time:inXSDDateTime ?time . \n"//
            + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
            + "}";//

    final CountingResultHandler tupleHandler = new CountingResultHandler();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(
            QueryLanguage.SPARQL, queryString);
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == 0);
}
Example 12: testInfer
import org.openrdf.query.Update; // import the required package/class
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
        UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n"//
            + "{ \n"//
            + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. "
            + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }";

    log.info("Performing Query");

    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();

    // refresh the graph for inferencing (otherwise there is a five minute wait)
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();

    query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    resultHandler.resetCount();
}
Example 13: testDeleteGeoData
import org.openrdf.query.Update; // import the required package/class
private static void testDeleteGeoData(final SailRepositoryConnection conn)
        throws Exception {
    // Delete all stored points
    final String sparqlDelete = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
            + "DELETE {\n" //
            + " ?feature a geo:Feature . "//
            + " ?feature geo:hasGeometry ?point . "//
            + " ?point a geo:Point . "//
            + " ?point geo:asWKT ?wkt . "//
            + "}\n" + "WHERE { \n" + " ?feature a geo:Feature . "//
            + " ?feature geo:hasGeometry ?point . "//
            + " ?point a geo:Point . "//
            + " ?point geo:asWKT ?wkt . "//
            + "}";//

    final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL,
            sparqlDelete);
    deleteUpdate.execute();

    String queryString;
    TupleQuery tupleQuery;
    CountingResultHandler tupleHandler;

    // Find all stored points
    queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
            + "SELECT ?feature ?point ?wkt " //
            + "{" //
            + " ?feature a geo:Feature . "//
            + " ?feature geo:hasGeometry ?point . "//
            + " ?point a geo:Point . "//
            + " ?point geo:asWKT ?wkt . "//
            + "}";//
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == 0);
}
Example 14: testUpdateQuery
import org.openrdf.query.Update; // import the required package/class
@Test
public void testUpdateQuery() throws Exception {
    String defGraphQuery = "INSERT DATA { GRAPH <http://marklogic.com/test/g27> { <http://marklogic.com/test> <pp1> <oo1> } }";
    String checkQuery = "ASK WHERE { <http://marklogic.com/test> <pp1> <oo1> }";

    Update updateQuery = conn.prepareUpdate(QueryLanguage.SPARQL, defGraphQuery);
    updateQuery.execute();

    BooleanQuery booleanQuery = conn.prepareBooleanQuery(QueryLanguage.SPARQL, checkQuery);
    boolean results = booleanQuery.evaluate();
    Assert.assertEquals(true, results);

    conn.clear(conn.getValueFactory().createURI("http://marklogic.com/test/g27"));
}
Example 15: testUpdateQueryWithBaseURI
import org.openrdf.query.Update; // import the required package/class
@Test
public void testUpdateQueryWithBaseURI() throws Exception {
    String defGraphQuery = "INSERT DATA { GRAPH <http://marklogic.com/example/context1> { <http://marklogic.com/test/subject> <relative1> <relative2> } }";
    String checkQuery = "ASK WHERE { <http://marklogic.com/test/subject> <relative1> <relative2> }";

    Update updateQuery = conn.prepareUpdate(QueryLanguage.SPARQL, defGraphQuery, "http://marklogic.com/test/baseuri");
    updateQuery.execute();

    BooleanQuery booleanQuery = conn.prepareBooleanQuery(QueryLanguage.SPARQL, checkQuery, "http://marklogic.com/test/baseuri");
    boolean results = booleanQuery.evaluate();
    Assert.assertEquals(true, results);

    conn.clear(conn.getValueFactory().createURI("http://marklogic.com/example/context1"));
}