Java Update.execute Method Code Examples

This article collects typical usage examples of the Java method org.openrdf.query.Update.execute. If you are wondering what Update.execute does, how to call it, or what working examples look like, the curated code samples below may help. You can also explore further usage examples of the enclosing class, org.openrdf.query.Update.


Below are 15 code examples of the Update.execute method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
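
Before the individual examples, the basic calling pattern they all share is worth showing once in isolation: obtain a RepositoryConnection, prepare an Update with prepareUpdate(QueryLanguage.SPARQL, ...), and call execute(). The following is a minimal, self-contained sketch; the in-memory MemoryStore repository and the example triple are assumptions chosen purely for illustration and are not taken from any of the projects below.

import org.openrdf.query.QueryLanguage;
import org.openrdf.query.Update;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class UpdateExecuteSketch {
    public static void main(String[] args) throws Exception {
        // In-memory Sesame repository, used only to make the sketch runnable.
        Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        RepositoryConnection conn = repo.getConnection();
        try {
            // Prepare a SPARQL Update; execute() returns nothing and throws
            // UpdateExecutionException if the update fails.
            Update update = conn.prepareUpdate(QueryLanguage.SPARQL,
                    "INSERT DATA { <urn:example:s> <urn:example:p> <urn:example:o> }");
            update.execute();
        } finally {
            conn.close();
            repo.shutDown();
        }
    }
}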

Example 1: doUpdate

import org.openrdf.query.Update; // import the package/class on which this method depends
public static final void doUpdate( UpdateExecutor query,
		RepositoryConnection rc, boolean dobindings ) throws RepositoryException,
		MalformedQueryException, UpdateExecutionException {

	String sparql = processNamespaces( dobindings ? query.getSparql()
			: query.bindAndGetSparql(), query.getNamespaces() );

	ValueFactory vfac = new ValueFactoryImpl();
	Update upd = rc.prepareUpdate( QueryLanguage.SPARQL, sparql );

	if ( dobindings ) {
		upd.setIncludeInferred( query.usesInferred() );
		query.setBindings( upd, vfac );
	}

	upd.execute();
	query.done();
}
 
Developer: Ostrich-Emulators | Project: semtool | Lines: 19 | Source: AbstractSesameEngine.java

Example 2: clear

import org.openrdf.query.Update; // import the package/class on which this method depends
/**
 * Clears the Anno4j underlying triplestore.
 * This is required in order to prevent a drop in throughput while parsing.
 *
 * @throws RepositoryException      Thrown if no connection to the object repository could be made.
 * @throws UpdateExecutionException Thrown if an error occurred while executing the clearing query.
 */
private void clear() throws RepositoryException, UpdateExecutionException {
    String deleteUpdate = "DELETE {?s ?p ?o}\n" +
            "WHERE {?s ?p ?o}";

    ObjectConnection connection = anno4j.getObjectRepository().getConnection();

    Update update;
    try {
        update = connection.prepareUpdate(deleteUpdate);
    } catch (MalformedQueryException e) {
        e.printStackTrace();
        return;
    }

    update.execute();
}
 
Developer: anno4j | Project: anno4j | Lines: 24 | Source: ObjectParser.java

Example 3: removeExistingConcepts

import org.openrdf.query.Update; // import the package/class on which this method depends
public void removeExistingConcepts(Vector <String> subVector)
{

	for(int remIndex = 0;remIndex < subVector.size();remIndex++)
	{
		try {
			String remQuery = subVector.elementAt(remIndex);
			logger.warn("Removing query " + remQuery);
			
			Update update = rc.prepareUpdate(QueryLanguage.SPARQL, remQuery);
			update.execute();
			this.baseRelEngine.execInsertQuery(remQuery);
		
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}
}
 
Developer: SEMOSS | Project: semoss | Lines: 20 | Source: GraphPlaySheet.java

Example 4: execInsertQuery

import org.openrdf.query.Update; // import the package/class on which this method depends
@Override
public void execInsertQuery(String query) throws SailException, UpdateExecutionException, RepositoryException, MalformedQueryException {

	Update up = rc.prepareUpdate(QueryLanguage.SPARQL, query);
	//sc.addStatement(vf.createURI("<http://health.mil/ontologies/dbcm/Concept/Service/tom2>"),vf.createURI("<http://health.mil/ontologies/dbcm/Relation/Exposes>"),vf.createURI("<http://health.mil/ontologies/dbcm/Concept/BusinessLogicUnit/tom1>"));
	System.out.println("\nSPARQL: " + query);
	//tq.setIncludeInferred(true /* includeInferred */);
	//tq.evaluate();
	rc.setAutoCommit(false);
	up.execute();
	//rc.commit();
	InferenceEngine ie = ((BigdataSail)bdSail).getInferenceEngine();
	ie.computeClosure(null);
	rc.commit();
	

}
 
Developer: SEMOSS | Project: semoss | Lines: 18 | Source: RemoteSparqlEngine.java

Example 5: copyCatalogAttributes

import org.openrdf.query.Update; // import the package/class on which this method depends
private void copyCatalogAttributes(Value graph, Value catalogUri, Value rawGraph, RepositoryConnection connection) throws RepositoryException, MalformedQueryException, UpdateExecutionException {
  String query = "" +
          " PREFIX dcterms:<http://purl.org/dc/terms/>" +
          " insert into ?graph {" +
          " ?catalogUri ?p  ?t. " +
          " }\n" +
          " where { " +
          " graph ?graph {\n" +
          "   ?rawCatalogUri a <http://www.w3.org/ns/dcat#Catalog>." +
          "   ?rawCatalogUri ?p ?t. " +
          "   VALUES ?p { dcterms:publisher dcterms:description dcterms:title}" +
          " }" +
          "}";
  Update u = connection.prepareUpdate(QueryLanguage.SPARQL, query);
  u.setBinding("catalogUri", catalogUri);
  u.setBinding("graph", graph);
  u.execute();
}
 
Developer: nvdk | Project: ods-lodms-plugins | Lines: 19 | Source: ODSDCatGenerator.java

Example 6: transform

import org.openrdf.query.Update; // import the package/class on which this method depends
@Override
public void transform(Repository repository, URI graph, TransformContext context) throws TransformException {
    try {
        RepositoryConnection con = repository.getConnection();
        try {
            con.setAutoCommit(false);
            Update update = con.prepareUpdate(QueryLanguage.SPARQL, config.getQuery());
            DatasetImpl dataset = new DatasetImpl();
            dataset.setDefaultInsertGraph(graph);
            update.setDataset(dataset);
            update.execute();
            con.commit();
        } finally {
            con.close();
        }
    } catch (Exception ex) {
        throw new TransformException(ex);
    }
}
 
Developer: lodms | Project: lodms-plugins | Lines: 20 | Source: SPARQLUpdateTransformer.java

Example 7: testDeleteGeoData

import org.openrdf.query.Update; // import the package/class on which this method depends
private static void testDeleteGeoData(final SailRepositoryConnection conn)
		throws Exception {
	// Delete all stored points
	final String sparqlDelete = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
			+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
			+ "DELETE {\n" //
			+ "  ?feature a geo:Feature . "//
			+ "  ?feature geo:hasGeometry ?point . "//
			+ "  ?point a geo:Point . "//
			+ "  ?point geo:asWKT ?wkt . "//
			+ "}\n" + "WHERE { \n" + "  ?feature a geo:Feature . "//
			+ "  ?feature geo:hasGeometry ?point . "//
			+ "  ?point a geo:Point . "//
			+ "  ?point geo:asWKT ?wkt . "//
			+ "}";//

	final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL,
			sparqlDelete);
	deleteUpdate.execute();

	String queryString;
	TupleQuery tupleQuery;
	CountingResultHandler tupleHandler;

	// Find all stored points
	queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
			+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
			+ "SELECT ?feature ?point ?wkt " //
			+ "{" //
			+ "  ?feature a geo:Feature . "//
			+ "  ?feature geo:hasGeometry ?point . "//
			+ "  ?point a geo:Point . "//
			+ "  ?point geo:asWKT ?wkt . "//
			+ "}";//
	tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
	tupleHandler = new CountingResultHandler();
	tupleQuery.evaluate(tupleHandler);
	log.info("Result count : " + tupleHandler.getCount());
	Validate.isTrue(tupleHandler.getCount() == 0);
}
 
Developer: apache | Project: incubator-rya | Lines: 41 | Source: GeowaveDirectExample.java

Example 8: persistInverseFunctional

import org.openrdf.query.Update; // import the package/class on which this method depends
/**
 * Persists the information that a property is inverse functional to the default graph of the connected triplestore.
 * All properties with {@link InverseFunctional} or {@link Bijective} annotation are considered.
 * @param annotatedObjects The {@link Iri} annotated objects that should be considered.
 * @throws RepositoryException Thrown on error regarding the connected triplestore.
 * @throws UpdateExecutionException Thrown if an error occurred while executing the update.
 */
private void persistInverseFunctional(Collection<AccessibleObject> annotatedObjects) throws RepositoryException, UpdateExecutionException {
    // Get those methods and fields that have the @InverseFunctional annotation:
    Collection<AccessibleObject> inverseFunctionalObjects = filterObjectsWithAnnotation(annotatedObjects, InverseFunctional.class);
    // All those objects that are declared bijective are also inverse functional:
    inverseFunctionalObjects.addAll(filterObjectsWithAnnotation(annotatedObjects, Bijective.class));

    // Prepare the update query and execute it:
    try {
        Update update = buildInstanceUpdate(getIrisFromObjects(inverseFunctionalObjects), OWL.INVERSE_FUNCTIONAL_PROPERTY);
        update.execute();
    } catch (MalformedQueryException e) {
        throw new UpdateExecutionException();
    }
}
 
Developer: anno4j | Project: anno4j | Lines: 22 | Source: OWLSchemaPersistingManager.java

Example 9: persistSubPropertyOf

import org.openrdf.query.Update; // import the package/class on which this method depends
/**
 * Persists the information that a property is a subproperty of another to the default graph of the connected triplestore.
 * All properties with {@link SubPropertyOf} annotation are considered.
 * @param annotatedObjects The {@link Iri} annotated objects that should be considered.
 * @throws RepositoryException Thrown on error regarding the connected triplestore.
 * @throws UpdateExecutionException Thrown if an error occurred while executing the update.
 */
private void persistSubPropertyOf(Collection<AccessibleObject> annotatedObjects) throws RepositoryException, UpdateExecutionException {
    // Get those methods and fields that have the @SubPropertyOf annotation:
    Collection<AccessibleObject> subPropertyObjects = filterObjectsWithAnnotation(annotatedObjects, SubPropertyOf.class);

    for (AccessibleObject object : subPropertyObjects) {
        String iri = getIriFromObject(object);

        SubPropertyOf subPropertyAnnotation = object.getAnnotation(SubPropertyOf.class);

        StringBuilder query = new StringBuilder(QUERY_PREFIX)
                                .append(" INSERT DATA { ");
        for (String superPropertyIri : subPropertyAnnotation.value()) {
            query.append("<").append(iri).append("> ")
                 .append("<").append(RDFS.SUB_PROPERTY_OF).append("> ")
                 .append("<").append(superPropertyIri).append("> . ");
        }
        query.append("}");

        // Prepare the update query and execute it:
        try {
            Update update = getConnection().prepareUpdate(query.toString());
            update.execute();
        } catch (MalformedQueryException e) {
            throw new UpdateExecutionException();
        }
    }
}
 
Developer: anno4j | Project: anno4j | Lines: 35 | Source: OWLSchemaPersistingManager.java

Example 10: executeUpdate

import org.openrdf.query.Update; // import the package/class on which this method depends
/**
 * Executes a given update command both on remote and local model.
 * 
 * @param data the object holding test data (i.e. commands, queries, datafiles).
 * @throws Exception hopefully never; otherwise the corresponding test fails.
 */
@SuppressWarnings("deprecation")
void executeUpdate(final MisteryGuest data) throws Exception {
	load(data);
	
	final String updateCommand = readFile(data.query);
	final Update localUpdate = localConnection.prepareUpdate(QueryLanguage.SPARQL, updateCommand);
	final Update cumulusUpdate = cumulusConnection.prepareUpdate(QueryLanguage.SPARQL, updateCommand);
	
	localUpdate.execute();
	cumulusUpdate.execute();
	
	try {
		assertTrue(ModelUtil.equals(
				statements(localConnection.getStatements(null, null, null, false).asList()), 
				statements(cumulusConnection.getStatements(null, null, null, false).asList())));
		
	} catch (final AssertionError exception) {
		final String queryString = "CONSTRUCT {?s ?p ?o} WHERE {?s ?p ?o}";
		final GraphQuery localQuery = localConnection.prepareGraphQuery(QueryLanguage.SPARQL, queryString);
		final GraphQuery cumulusQuery = cumulusConnection.prepareGraphQuery(QueryLanguage.SPARQL, queryString);
		
		final GraphQueryResult debugLocalResult = localQuery.evaluate();
		final GraphQueryResult debugCumulusResult = cumulusQuery.evaluate();
			
		System.err.println("***** LOCAL ******");
		QueryResultIO.write(debugLocalResult, RDFFormat.NTRIPLES, System.err);

		System.err.println("***** CRDF ******");
		QueryResultIO.write(debugCumulusResult, RDFFormat.NTRIPLES, System.err);
		
		debugCumulusResult.close();
		debugLocalResult.close();
		throw exception;
	}
}
 
Developer: cumulusrdf | Project: cumulusrdf | Lines: 42 | Source: LearningSparql_UPDATE_ITCase.java

Example 11: testGraphConfiguration

import org.openrdf.query.Update; // import the package/class on which this method depends
@Test
public void testGraphConfiguration() throws Exception {
    // build a connection
    RdfCloudTripleStore store = new RdfCloudTripleStore();
    store.setConf(conf);
    store.setRyaDAO(ryaDAO);
    InferenceEngine inferenceEngine = new InferenceEngine();
    inferenceEngine.setRyaDAO(ryaDAO);
    store.setInferenceEngine(inferenceEngine);
    inferenceEngine.refreshGraph();
    store.initialize();
    SailRepository repository = new SailRepository(store);
    SailRepositoryConnection conn = repository.getConnection();

	String query = "INSERT DATA\n"//
			+ "{ GRAPH <http://updated/test> {\n"//
			+ "  <urn:greatMother> owl:propertyChainAxiom <urn:12342>  . " + 
			" <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . " + 
			" _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . " + 
			" <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . " + 
   			" _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . " + 
			" _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> .  }}";
	Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
	update.execute();
    inferenceEngine.refreshGraph();
    List<URI> chain = inferenceEngine.getPropertyChain(vf.createURI("urn:greatMother"));
    Assert.assertEquals(chain.size(), 2);
    Assert.assertEquals(chain.get(0), new InverseURI(vf.createURI("urn:isChildOf")));
    Assert.assertEquals(chain.get(1), vf.createURI("urn:MotherOf"));
}
 
Developer: apache | Project: incubator-rya | Lines: 34 | Source: PropertyChainTest.java

Example 12: testDeleteTemporalData

import org.openrdf.query.Update; // import the package/class on which this method depends
private static void testDeleteTemporalData(
		final SailRepositoryConnection conn) throws Exception {
	// Delete all stored dates
	final String sparqlDelete = "PREFIX time: <http://www.w3.org/2006/time#>\n"
			+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
			+ "DELETE {\n" //
			+ "  ?event time:inXSDDateTime ?time . \n"
			+ "}\n"
			+ "WHERE { \n" + "  ?event time:inXSDDateTime ?time . \n"//
			+ "}";//

	final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL,
			sparqlDelete);
	deleteUpdate.execute();

	// Find all stored dates.
	final String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
			+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
			+ "SELECT ?event ?time \n" //
			+ "WHERE { \n"
			+ "  ?event time:inXSDDateTime ?time . \n"//
			+ "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after
																				// 3
																				// seconds
			+ "}";//

	final CountingResultHandler tupleHandler = new CountingResultHandler();
	final TupleQuery tupleQuery = conn.prepareTupleQuery(
			QueryLanguage.SPARQL, queryString);
	tupleQuery.evaluate(tupleHandler);
	log.info("Result count : " + tupleHandler.getCount());
	Validate.isTrue(tupleHandler.getCount() == 0);
}
 
Developer: apache | Project: incubator-rya | Lines: 34 | Source: RyaDirectExample.java

Example 13: persistTransitive

import org.openrdf.query.Update; // import the package/class on which this method depends
/**
 * Persists the information that a property is transitive to the default graph of the connected triplestore.
 * All properties with {@link com.github.anno4j.annotations.Transitive} annotation are considered.
 * @param annotatedObjects The {@link Iri} annotated objects that should be considered.
 * @throws RepositoryException Thrown on error regarding the connected triplestore.
 * @throws UpdateExecutionException Thrown if an error occurred while executing the update.
 */
private void persistTransitive(Collection<AccessibleObject> annotatedObjects) throws RepositoryException, UpdateExecutionException {
    // Get those methods and fields that have the @Transitive annotation:
    Collection<AccessibleObject> transitiveObjects = filterObjectsWithAnnotation(annotatedObjects, Transitive.class);

    // Prepare the update query and execute it:
    try {
        Update update = buildInstanceUpdate(getIrisFromObjects(transitiveObjects), OWL.TRANSITIVE_PROPERTY);
        update.execute();
    } catch (MalformedQueryException e) {
        throw new UpdateExecutionException();
    }
}
 
Developer: anno4j | Project: anno4j | Lines: 20 | Source: OWLSchemaPersistingManager.java

Example 14: testInfer

import org.openrdf.query.Update; // import the package/class on which this method depends
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {

    // Add data
    String query = "INSERT DATA\n"//
            + "{ \n"//
            + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>.  "
            + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>.  }";

    log.info("Performing Query");

    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();

    // refresh the graph for inferencing (otherwise there is a five minute wait)
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();

    query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());

    Validate.isTrue(resultHandler.getCount() == 1);

    resultHandler.resetCount();
}
 
Developer: apache | Project: incubator-rya | Lines: 28 | Source: MongoRyaDirectExample.java

Example 15: testUpdateQueryUpdateExecutionException

import org.openrdf.query.Update; // import the package/class on which this method depends
@Test(expected=org.openrdf.query.UpdateExecutionException.class)
public void testUpdateQueryUpdateExecutionException()
        throws Exception {
    String defGraphQuery = "INSERT DATA GRAPH <http://marklogic.com/test/g27> { <http://marklogic.com/test> <pp1> <oo1> } }";
    Update updateQuery = conn.prepareUpdate(QueryLanguage.SPARQL, defGraphQuery);
    updateQuery.execute();
}
 
Developer: marklogic | Project: marklogic-sesame | Lines: 8 | Source: MarkLogicUpdateQueryTest.java


Note: The org.openrdf.query.Update.execute examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce this article without permission.