This article collects typical usage examples of the Java method org.apache.jena.query.Dataset.commit. If you have been wondering what Dataset.commit does, how to use it, or where to find working examples, the curated method examples below may help. You can also explore further usage of the enclosing class org.apache.jena.query.Dataset.
The following shows 9 code examples of Dataset.commit, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
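Before diving into the examples, here is a minimal sketch of the write-transaction pattern that every example below follows: begin a WRITE transaction, modify the dataset, call Dataset.commit, and always call Dataset.end in a finally block. The TDB directory path and the inserted triple are illustrative assumptions, not taken from any of the examples.

import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.tdb.TDBFactory;

public class CommitSketch {
    public static void main(String[] args) {
        // Hypothetical TDB directory; any transactional Dataset works the same way.
        Dataset dataset = TDBFactory.createDataset("target/sketch-db");
        dataset.begin(ReadWrite.WRITE);          // start a write transaction
        try {
            Model m = dataset.getDefaultModel();
            // Illustrative triple, for demonstration only.
            m.add(ResourceFactory.createResource("http://example.org/s"),
                  ResourceFactory.createProperty("http://example.org/p"),
                  "example value");
            dataset.commit();                    // publish the changes atomically
        } finally {
            dataset.end();                       // aborts if commit() was never reached
        }
    }
}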
Example 1: delete
import org.apache.jena.query.Dataset; // import the package/class this method depends on
@Override
public void delete(URI uri, Map<String, String> parameters, InputStream payload) throws RESTException {
    Dataset dataset = ThingDirectory.get().dataset;
    dataset.begin(ReadWrite.WRITE);
    try {
        dataset.getDefaultModel().getResource(uri.toString()).removeProperties();
        dataset.removeNamedModel(uri.toString());
        deleteToAll(uri.getPath());
        dataset.commit();
    } catch (Exception e) {
        // TODO distinguish between client and server errors
        throw new RESTException();
    } finally {
        dataset.end();
    }
}
Example 2: delete
import org.apache.jena.query.Dataset; // import the package/class this method depends on
@Override
public void delete(URI uri, Map<String, String> parameters, InputStream payload) throws RESTException {
    Dataset dataset = ThingDirectory.get().dataset;
    dataset.begin(ReadWrite.WRITE);
    try {
        dataset.getDefaultModel().createResource(uri.toString()).removeProperties();
        dataset.removeNamedModel(uri.toString());
        deleteToAll(uri.getPath());
        dataset.commit();
        // Remove from priority queue
        ThingDescription td = new ThingDescription(uri.toString());
        ThingDirectory.get().tdQueue.remove(td);
        ThingDirectory.get().setTimer();
    } catch (Exception e) {
        // TODO distinguish between client and server errors
        throw new RESTException();
    } finally {
        dataset.end();
    }
}
Example 3: delete
import org.apache.jena.query.Dataset; // import the package/class this method depends on
@Override
public void delete(URI uri, Map<String, String> parameters, InputStream payload) throws RESTException {
    Dataset dataset = Repository.get().dataset;
    dataset.begin(ReadWrite.WRITE);
    try {
        dataset.getDefaultModel().getResource(uri.toString()).removeProperties();
        dataset.removeNamedModel(uri.toString());
        deleteToAll(uri.getPath());
        dataset.commit();
    } catch (Exception e) {
        // TODO distinguish between client and server errors
        throw new RESTException();
    } finally {
        dataset.end();
    }
}
Example 4: delete
import org.apache.jena.query.Dataset; // import the package/class this method depends on
@Override
public void delete(URI uri, Map<String, String> parameters, InputStream payload) throws RESTException {
    Dataset dataset = Repository.get().dataset;
    dataset.begin(ReadWrite.WRITE);
    try {
        dataset.getDefaultModel().createResource(uri.toString()).removeProperties();
        dataset.removeNamedModel(uri.toString());
        deleteToAll(uri.getPath());
        dataset.commit();
        // Remove from priority queue
        ThingDescription td = new ThingDescription(uri.toString());
        Repository.get().tdQueue.remove(td);
        Repository.get().setTimer();
    } catch (Exception e) {
        // TODO distinguish between client and server errors
        throw new RESTException();
    } finally {
        dataset.end();
    }
}
Example 5: loadOntology
import org.apache.jena.query.Dataset; // import the package/class this method depends on
/**
 * Loads an ontology into the triple store, in the
 * default graph.
 * @param fileName File name with the ontology content.
 */
public static void loadOntology(InputStream fileName) {
    List<String> ont = new ArrayList<>();
    // Check if the ontology is already there
    Dataset dataset = ThingDirectory.get().dataset;
    dataset.begin(ReadWrite.READ);
    try {
        String prefix = StrUtils.strjoinNL
            ( "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>"
            , "PREFIX owl: <http://www.w3.org/2002/07/owl#>");
        String query = prefix + "SELECT ?s WHERE {?s rdf:type owl:Ontology}";
        try (QueryExecution qexec = QueryExecutionFactory.create(query, dataset)) {
            ResultSet result = qexec.execSelect();
            while (result.hasNext()) {
                ont.add(result.next().get("s").toString());
            }
        }
    } finally {
        dataset.end();
    }
    // Load QUDT ontology
    if (ont.isEmpty()) {
        dataset = ThingDirectory.get().dataset;
        dataset.begin(ReadWrite.WRITE);
        try {
            Model m = dataset.getDefaultModel();
            //RDFDataMgr.read(m, fileName);
            RDFDataMgr.read(m, fileName, Lang.TURTLE);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
}
Example 6: loadOntology
import org.apache.jena.query.Dataset; // import the package/class this method depends on
/**
 * Loads an ontology into the triple store, in the
 * default graph.
 * @param fileName File name with the ontology content.
 */
public static void loadOntology(InputStream fileName) {
    List<String> ont = new ArrayList<>();
    // Check if the ontology is already there
    Dataset dataset = Repository.get().dataset;
    dataset.begin(ReadWrite.READ);
    try {
        String prefix = StrUtils.strjoinNL
            ( "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>"
            , "PREFIX owl: <http://www.w3.org/2002/07/owl#>");
        String query = prefix + "SELECT ?s WHERE {?s rdf:type owl:Ontology}";
        try (QueryExecution qexec = QueryExecutionFactory.create(query, dataset)) {
            ResultSet result = qexec.execSelect();
            while (result.hasNext()) {
                ont.add(result.next().get("s").toString());
            }
        }
    } finally {
        dataset.end();
    }
    // Load QUDT ontology
    if (ont.isEmpty()) {
        dataset = Repository.get().dataset;
        dataset.begin(ReadWrite.WRITE);
        try {
            Model m = dataset.getDefaultModel();
            //RDFDataMgr.read(m, fileName);
            RDFDataMgr.read(m, fileName, Lang.TURTLE);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
}
Example 7: main
import org.apache.jena.query.Dataset; // import the package/class this method depends on
public static void main(String... argv)
{
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;

    // Start a WRITE transaction.
    // It's possible to read from the dataset inside the write transaction.
    // An application can have other Datasets, in the same JVM,
    // tied to the same TDB database performing read
    // transactions concurrently. If another write transaction
    // starts, the call to dataset.begin(WRITE) blocks until the
    // existing writer finishes.
    dataset.begin(ReadWrite.WRITE) ;
    try
    {
        GraphStore graphStore = GraphStoreFactory.create(dataset) ;
        // Do a SPARQL Update.
        String sparqlUpdateString = StrUtils.strjoinNL(
            "PREFIX : <http://example/>",
            "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }"
        ) ;
        execUpdate(sparqlUpdateString, graphStore) ;
        dataset.commit() ;
        // Or call .abort()
    } finally
    {
        // Notify the end of the transaction.
        // The transaction finished at the point .commit() or .abort() was called.
        // .end() will force an abort() if no previous call to .commit() or .abort()
        // has occurred, so .end() helps track the state of the transaction.
        // .end() can be called multiple times for the same .begin(WRITE).
        dataset.end() ;
    }
}
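Example 7 relies on the GraphStore/GraphStoreFactory API together with an execUpdate helper that is not shown here; that API has been removed in recent Jena releases. As a hedged alternative (assuming a current Jena version), the same update can be executed directly against the Dataset with UpdateAction, as sketched below; the database directory is reused from the example above.

import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.tdb.TDBFactory;
import org.apache.jena.update.UpdateAction;

public class UpdateSketch {
    public static void main(String[] args) {
        Dataset dataset = TDBFactory.createDataset("MyDatabases/DB1");
        dataset.begin(ReadWrite.WRITE);
        try {
            String sparqlUpdateString =
                "PREFIX : <http://example/> " +
                "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }";
            // Parse and execute the SPARQL Update directly on the dataset.
            UpdateAction.parseExecute(sparqlUpdateString, dataset);
            dataset.commit();
        } finally {
            dataset.end();
        }
    }
}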
Example 8: post
import org.apache.jena.query.Dataset; // import the package/class this method depends on
@Override
public RESTResource post(URI uri, Map<String, String> parameters, InputStream payload) throws RESTException {
    String data = "";
    String ontologyUri = null;
    try {
        data = ThingDescriptionUtils.streamToString(payload);
        ontologyUri = new URI(data).toString();
        data = null;
    } catch (IOException e1) {
        e1.printStackTrace();
        throw new BadRequestException();
    } catch (URISyntaxException e2) {
        // do nothing
    }
    Dataset dataset = ThingDirectory.get().dataset;
    dataset.begin(ReadWrite.WRITE);
    try {
        String rootId = null;
        OntModel ontology = ModelFactory.createOntologyModel();
        if (data == null) {
            ontology.read(ontologyUri.toString(), "Turtle");
        } else {
            ontologyUri = "http://example.org/"; // TODO
            ontology.read(new ByteArrayInputStream(data.getBytes("UTF-8")), ontologyUri, "Turtle");
        }
        Model tdb = dataset.getDefaultModel();
        ExtendedIterator<Ontology> it = ontology.listOntologies();
        if (!it.hasNext()) {
            throw new BadRequestException();
        }
        while (it.hasNext()) {
            Ontology o = it.next();
            String prefix = ontology.getNsURIPrefix(o.getURI());
            // if no prefix is found, generate an id
            String id = (prefix != null && !prefix.isEmpty()) ? prefix : generateID();
            URI resourceUri = URI.create(normalize(uri) + "/" + id);
            OntModel axioms;
            if (isRootOntology(o.getURI(), ontology)) {
                rootId = id;
                axioms = ontology;
            } else {
                axioms = ontology.getImportedModel(o.getURI());
            }
            // TODO Check if the vocab isn't already registered in the dataset
            dataset.addNamedModel(resourceUri.toString(), axioms);
            Date currentDate = new Date(System.currentTimeMillis());
            DateFormat f = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
            tdb.getResource(resourceUri.toString()).addProperty(DCTerms.source, ontologyUri);
            tdb.getResource(resourceUri.toString()).addProperty(DCTerms.created, f.format(currentDate));
            addToAll("/vocab/" + id, new VocabularyHandler(id, instances));
            ThingDirectory.LOG.info(String.format("Registered RDFS/OWL vocabulary %s (id: %s)", o.getURI(), id));
        }
        dataset.commit();
        RESTResource resource = new RESTResource("/vocab/" + rootId, new VocabularyHandler(rootId, instances));
        return resource;
    } catch (Exception e) {
        e.printStackTrace();
        throw new RESTException();
    } finally {
        dataset.end();
    }
}
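Once dataset.commit() returns in the example above, the registered vocabulary models become visible to other transactions. The following is a minimal sketch of reading one of the committed named models back; the method name and the assumption that the caller passes in the same Dataset instance and a registered resource URI are illustrative.

import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Model;

public class ReadBackSketch {
    // 'dataset' would be the same Dataset instance the handler above writes to.
    static void printVocabulary(Dataset dataset, String resourceUri) {
        dataset.begin(ReadWrite.READ);  // committed writes are visible to new read transactions
        try {
            Model vocab = dataset.getNamedModel(resourceUri);
            vocab.write(System.out, "TURTLE");
        } finally {
            dataset.end();
        }
    }
}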
Example 9: post
import org.apache.jena.query.Dataset; // import the package/class this method depends on
@Override
public RESTResource post(URI uri, Map<String, String> parameters, InputStream payload) throws RESTException {
    String data = "";
    String ontologyUri = null;
    try {
        data = ThingDescriptionUtils.streamToString(payload);
        ontologyUri = new URI(data).toString();
        data = null;
    } catch (IOException e1) {
        e1.printStackTrace();
        throw new BadRequestException();
    } catch (URISyntaxException e2) {
        // do nothing
    }
    Dataset dataset = Repository.get().dataset;
    dataset.begin(ReadWrite.WRITE);
    try {
        String rootId = null;
        OntModel ontology = ModelFactory.createOntologyModel();
        if (data == null) {
            ontology.read(ontologyUri.toString(), "Turtle");
        } else {
            ontologyUri = "http://example.org/"; // TODO
            ontology.read(new ByteArrayInputStream(data.getBytes("UTF-8")), ontologyUri, "Turtle");
        }
        Model tdb = dataset.getDefaultModel();
        ExtendedIterator<Ontology> it = ontology.listOntologies();
        if (!it.hasNext()) {
            throw new BadRequestException();
        }
        while (it.hasNext()) {
            Ontology o = it.next();
            String prefix = ontology.getNsURIPrefix(o.getURI());
            // if no prefix is found, generate an id
            String id = (prefix != null && !prefix.isEmpty()) ? prefix : generateID();
            URI resourceUri = URI.create(normalize(uri) + "/" + id);
            OntModel axioms;
            if (isRootOntology(o.getURI(), ontology)) {
                rootId = id;
                axioms = ontology;
            } else {
                axioms = ontology.getImportedModel(o.getURI());
            }
            // TODO Check if the vocab isn't already registered in the dataset
            dataset.addNamedModel(resourceUri.toString(), axioms);
            Date currentDate = new Date(System.currentTimeMillis());
            DateFormat f = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
            tdb.getResource(resourceUri.toString()).addProperty(DCTerms.source, ontologyUri);
            tdb.getResource(resourceUri.toString()).addProperty(DCTerms.created, f.format(currentDate));
            addToAll("/vocab/" + id, new VocabularyHandler(id, instances));
            Repository.LOG.info(String.format("Registered RDFS/OWL vocabulary %s (id: %s)", o.getURI(), id));
        }
        dataset.commit();
        RESTResource resource = new RESTResource("/vocab/" + rootId, new VocabularyHandler(rootId, instances));
        return resource;
    } catch (Exception e) {
        e.printStackTrace();
        throw new RESTException();
    } finally {
        dataset.end();
    }
}