This article collects typical usage examples of the Java class org.eclipse.rdf4j.query.UpdateExecutionException. If you are unsure what UpdateExecutionException is for or how to use it, the selected class code examples below may help.
The UpdateExecutionException class belongs to the org.eclipse.rdf4j.query package. Twelve code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
Example 1: checkGraph
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
protected void checkGraph(RepositoryConnection connection, final List<RDFDataUnit.Entry> entries) throws DPUException {
SparqlUtils.SparqlAskObject ask = null;
try {
ask = SparqlUtils.createAsk(config.getAskQuery(), entries);
SparqlUtils.execute(connection, ask);
} catch (RepositoryException | MalformedQueryException | UpdateExecutionException | QueryEvaluationException | SparqlProblemException | DataUnitException e) {
throw new DPUException(e.getLocalizedMessage(), e);
}
if (!ask.result) {
reportFailure();
}
else {
//everything OK:
ContextUtils.sendShortInfo(ctx, "rdfvalidation.finished.ok");
}
}
Example 2: testMultipleReposWithDifferentUsers
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
@Test
public void testMultipleReposWithDifferentUsers() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
readerRep.initialize();
MarkLogicRepositoryConnection testReaderCon = readerRep.getConnection();
exception.expect(Exception.class);
testReaderCon.prepareUpdate("CREATE GRAPH <abc>").execute();
writerRep.initialize();
MarkLogicRepositoryConnection testWriterCon = writerRep.getConnection();
testWriterCon.prepareUpdate("CREATE GRAPH <abcdef10>").execute();
writerRep.shutDown();
readerRep.shutDown();
}
Example 3: updateWithWrongPerms
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
@Test
public void updateWithWrongPerms() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
readerRep.initialize();
MarkLogicRepositoryConnection testReaderCon = readerRep.getConnection();
exception.expect(UpdateExecutionException.class);
testReaderCon.prepareUpdate("CREATE GRAPH <abc>").execute();
}
Example 4: execUpdate
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
public void execUpdate() throws OData2SparqlException {
try {
super.connection = rdfRoleRepository.getRepository().getConnection();
log.info( super.query);
updateQuery = connection.prepareUpdate(QueryLanguage.SPARQL, super.query);
updateQuery.execute();
} catch (RepositoryException | MalformedQueryException |UpdateExecutionException e) {
log.error( e.getMessage());
throw new OData2SparqlException("RdfUpdate execUpdate failure",e);
}
}
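Note that this snippet obtains a connection but never closes it. Below is a minimal sketch, not part of the original project, of the same prepareUpdate/execute flow with the connection closed automatically (RepositoryConnection is AutoCloseable in RDF4J); the names "repository" and "sparqlUpdate" are placeholders.
// Minimal sketch, not part of the original project: the same prepare/execute flow
// with try-with-resources. "repository" is an initialized Repository and
// "sparqlUpdate" a SPARQL Update string - both are placeholders.
try (RepositoryConnection con = repository.getConnection()) {
    Update update = con.prepareUpdate(QueryLanguage.SPARQL, sparqlUpdate);
    update.execute();
} catch (RepositoryException | MalformedQueryException | UpdateExecutionException e) {
    throw new RuntimeException("SPARQL update failed", e);
}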
Example 5: queryUpdate
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
/**
 * Executes a SPARQL Update query.
 *
 * @param sparql the SPARQL Update string to execute
 * @throws RepositoryException if the update is malformed or fails to execute
 */
public void queryUpdate(String sparql) throws RepositoryException {
try {
Update upd = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
upd.execute();
} catch (MalformedQueryException | UpdateExecutionException ex) {
throw new RepositoryException(ex);
}
}
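A hypothetical caller of queryUpdate might look like the sketch below; the inserted triple is illustrative only.
// Hypothetical caller of queryUpdate() - the triple inserted is illustrative only.
String sparql =
        "INSERT DATA { <http://example.org/s> <http://example.org/p> \"object value\" }";
try {
    queryUpdate(sparql);
} catch (RepositoryException e) {
    // MalformedQueryException and UpdateExecutionException arrive here wrapped.
    throw new IllegalStateException("SPARQL update failed", e);
}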
Example 6: executeUpdateQuery
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
/**
 * Executes the given SPARQL Update query.
 *
 * @param query SPARQL Update query to execute.
 * @param sourceEntries Entries whose data graphs are added as USING graphs.
 * @param targetGraph Graph used in the WITH clause (insert/delete target).
 * @param connection Connection on which the update is executed.
 * @throws eu.unifiedviews.dpu.DPUException
 * @throws eu.unifiedviews.dataunit.DataUnitException
 */
protected void executeUpdateQuery(String query, final List<RDFDataUnit.Entry> sourceEntries,
IRI targetGraph,
RepositoryConnection connection) throws DPUException, DataUnitException {
// Prepare query.
if (!useDataset()) {
if (Pattern.compile(Pattern.quote("DELETE"), Pattern.CASE_INSENSITIVE).matcher(query).find()) {
query = query.replaceFirst("(?i)DELETE", prepareWithClause(targetGraph) + " DELETE");
} else {
query = query.replaceFirst("(?i)INSERT", prepareWithClause(targetGraph) + " INSERT");
}
query = query.replaceFirst("(?i)WHERE", prepareUsingClause(sourceEntries) + "WHERE");
}
LOG.debug("Query to execute: {}", query);
try {
// Execute query.
final Update update = connection.prepareUpdate(QueryLanguage.SPARQL, query);
if (useDataset()) {
final DatasetImpl dataset = new DatasetImpl();
for (RDFDataUnit.Entry entry : sourceEntries) {
dataset.addDefaultGraph(entry.getDataGraphURI());
}
dataset.addDefaultRemoveGraph(targetGraph);
dataset.setDefaultInsertGraph(targetGraph);
update.setDataset(dataset);
}
update.execute();
} catch (RepositoryException | MalformedQueryException | UpdateExecutionException ex) {
throw ContextUtils.dpuException(ctx, ex, "SparqlConstruct.execute.exception.updateExecute");
}
}
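To illustrate the non-dataset branch above, assuming prepareWithClause() and prepareUsingClause() emit standard SPARQL 1.1 WITH/USING clauses (their exact output is project-specific and the graph IRIs below are placeholders):
// Illustration of the non-dataset branch above, assuming prepareWithClause() and
// prepareUsingClause() produce standard SPARQL 1.1 clauses (their exact output is
// project-specific). An update like this:
String original = "INSERT { ?s ?p ?o } WHERE { ?s ?p ?o }";
// is rewritten to roughly:
String rewritten = "WITH <http://example.org/target> INSERT { ?s ?p ?o } "
        + "USING <http://example.org/source1> USING <http://example.org/source2> "
        + "WHERE { ?s ?p ?o }";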
Example 7: copyMetadata
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
@Override
public void copyMetadata(String symbolicName) throws DataUnitException {
try {
if (connection == null) {
connection = source.getConnection();
}
// Select all triples <bnode> symbolicName "symbolicName"
// and add them to the destination data unit
// (we use the source connection - both data units run on the same storage).
final Update update = connection.prepareUpdate(
QueryLanguage.SPARQL, UPDATE);
update.setBinding(SYMBOLIC_NAME_BINDING,
connection.getValueFactory().createLiteral(symbolicName));
final SimpleDataset dataset = new SimpleDataset();
for (IRI item : source.getMetadataGraphnames()) {
dataset.addDefaultGraph(item);
}
dataset.setDefaultInsertGraph(
destination.getMetadataWriteGraphname());
update.setDataset(dataset);
update.execute();
} catch (RepositoryException | UpdateExecutionException | MalformedQueryException ex) {
throw new DataUnitException("", ex);
}
}
Example 8: flush
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
/**
 * Flushes the cache to the server, writing the cached triples into their graphs
 * with a single INSERT DATA update.
 *
 * @throws RepositoryException
 * @throws MalformedQueryException
 * @throws UpdateExecutionException
 * @throws IOException
 */
protected synchronized void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException {
if (cache.isEmpty()) { return; }
StringBuffer entireQuery = new StringBuffer();
SPARQLQueryBindingSet bindingSet = new SPARQLQueryBindingSet();
for (Namespace ns :cache.getNamespaces()){
entireQuery.append("PREFIX "+ns.getPrefix()+": <"+ns.getName()+">. ");
}
entireQuery.append("INSERT DATA { ");
Set<Resource> distinctCtx = new HashSet<Resource>();
for (Resource context :cache.contexts()) {
distinctCtx.add(context);
}
for (Resource ctx : distinctCtx) {
if (ctx != null) {
entireQuery.append(" GRAPH <" + ctx + "> { ");
}
for (Statement stmt : cache.filter(null, null, null, ctx)) {
if (stmt.getSubject() instanceof org.eclipse.rdf4j.model.BNode) {
entireQuery.append("<http://marklogic.com/semantics/blank/" + stmt.getSubject().stringValue() + "> ");
}else {
entireQuery.append("<" + stmt.getSubject().stringValue() + "> ");
}
if (stmt.getPredicate() instanceof org.eclipse.rdf4j.model.BNode) {
entireQuery.append("<http://marklogic.com/semantics/blank/" + stmt.getPredicate().stringValue() + "> ");
}else{
entireQuery.append("<" + stmt.getPredicate().stringValue() + "> ");
}
Value object=stmt.getObject();
if (object instanceof Literal) {
Literal lit = (Literal) object;
entireQuery.append("\"");
entireQuery.append(SPARQLUtil.encodeString(lit.getLabel()));
entireQuery.append("\"");
if(null == lit.getLanguage().orElse(null)) {
entireQuery.append("^^<" + lit.getDatatype().stringValue() + ">");
}else{
entireQuery.append("@" + lit.getLanguage().orElse(null));
}
} else {
if (stmt.getObject() instanceof org.eclipse.rdf4j.model.BNode) {
entireQuery.append("<http://marklogic.com/semantics/blank/" + stmt.getObject().stringValue() + "> ");
}else {
entireQuery.append("<" + object.stringValue() + "> ");
}
}
entireQuery.append(".");
}
if (ctx != null) {
entireQuery.append(" }");
}
}
entireQuery.append("} ");
log.debug(entireQuery.toString());
client.sendUpdateQuery(entireQuery.toString(),bindingSet,false,null);
lastCacheAccess = new Date();
log.debug("success writing cache: {}",String.valueOf(cache.size()));
cache.clear();
}
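For orientation, the update text assembled above for a single cached statement in one named graph has roughly the shape sketched below; prefix declarations are omitted and all values are illustrative, the real output depends on the cache contents.
// Rough shape of the INSERT DATA update assembled above for a single cached
// statement in one named graph (prefix declarations omitted; values illustrative).
String example =
        "INSERT DATA {  GRAPH <http://example.org/graph1> { "
        + "<http://example.org/s> <http://example.org/p> \"hello\"@en ."
        + " }} ";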
Example 9: flush
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
/**
 * Flushes the cache to the server, deleting the cached triples from their graphs
 * with a single DELETE DATA update.
 *
 * @throws RepositoryException
 * @throws MalformedQueryException
 * @throws UpdateExecutionException
 * @throws IOException
 */
protected synchronized void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException {
if (cache.isEmpty()) { return; }
StringBuffer entireQuery = new StringBuffer();
SPARQLQueryBindingSet bindingSet = new SPARQLQueryBindingSet();
for (Namespace ns :cache.getNamespaces()){
entireQuery.append("PREFIX "+ns.getPrefix()+": <"+ns.getName()+">. ");
}
entireQuery.append("DELETE DATA { ");
Set<Resource> distinctCtx = new HashSet<Resource>();
for (Resource context :cache.contexts()) {
distinctCtx.add(context);
}
for (Resource ctx : distinctCtx) {
if (ctx != null) {
entireQuery.append(" GRAPH <" + ctx + "> { ");
}
for (Statement stmt : cache.filter(null, null, null, ctx)) {
entireQuery.append("<" + stmt.getSubject().stringValue() + "> ");
entireQuery.append("<" + stmt.getPredicate().stringValue() + "> ");
Value object=stmt.getObject();
if (object instanceof Literal) {
Literal lit = (Literal) object;
entireQuery.append("\"");
entireQuery.append(SPARQLUtil.encodeString(lit.getLabel()));
entireQuery.append("\"");
if(null == lit.getLanguage().orElse(null)) {
entireQuery.append("^^<" + lit.getDatatype().stringValue() + ">");
}else{
entireQuery.append("@" + lit.getLanguage().toString());
}
} else {
entireQuery.append("<" + object.stringValue() + "> ");
}
entireQuery.append(".");
}
if (ctx != null) {
entireQuery.append(" }");
}
}
entireQuery.append("} ");
log.info(entireQuery.toString());
client.sendUpdateQuery(entireQuery.toString(),bindingSet,false,null);
lastCacheAccess = new Date();
//log.info("success writing cache: {}",String.valueOf(cache.size()));
cache.clear();
}
Example 10: TestMalformedUpdateQUery
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
@Test
public void TestMalformedUpdateQUery() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
exception.expect(UpdateExecutionException.class);
conn.prepareUpdate("A malformed query").execute();
}
Example 11: execute
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
@Override
protected void execute(
UpdateExpr updateExpr, Dataset dataset, BindingSet bindings, boolean includeInferred, int maxExecutionTime
) throws UpdateExecutionException {
throw new UpdateExecutionException("This repository is read only");
}
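From a caller's perspective, any update prepared against such a read-only repository fails at execute() time. A hypothetical sketch, where "readOnlyConnection" is a placeholder for a connection to that repository:
// Hypothetical caller: with the override above, every prepared update fails at
// execute() time. "readOnlyConnection" is a placeholder for a connection to the
// read-only repository.
Update update = readOnlyConnection.prepareUpdate(
        QueryLanguage.SPARQL, "INSERT DATA { <urn:ex:s> <urn:ex:p> <urn:ex:o> }");
try {
    update.execute();
} catch (UpdateExecutionException expected) {
    // "This repository is read only"
}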
Example 12: flush
import org.eclipse.rdf4j.query.UpdateExecutionException; // import the required package/class
protected abstract void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException;