This article collects typical usage examples of the Java method com.hp.hpl.jena.tdb.TDBFactory.createDataset. If you are wondering how TDBFactory.createDataset is used in practice, how to call it, or what real code that uses it looks like, the hand-picked examples below may help. You can also explore further usage examples of the enclosing class, com.hp.hpl.jena.tdb.TDBFactory.
The following shows 15 code examples of the TDBFactory.createDataset method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
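Before the excerpts, here is a minimal, self-contained sketch (not taken from any of the projects below; the class name, the directory path "tdb", and the printed message are placeholders) showing the two forms of TDBFactory.createDataset that appear throughout this page: the directory-backed, persistent form and the no-argument, in-memory form, each accessed inside a read transaction.
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.ReadWrite;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.tdb.TDBFactory;

public class TdbCreateDatasetDemo {
    public static void main(String[] args) {
        // Persistent dataset: TDB files are created under ./tdb if absent, reused otherwise.
        Dataset dataset = TDBFactory.createDataset("tdb");
        dataset.begin(ReadWrite.READ);
        try {
            Model model = dataset.getDefaultModel();
            System.out.println("Triples in default graph: " + model.size());
        } finally {
            dataset.end();
        }
        dataset.close();

        // No-argument form: a purely in-memory, transactional dataset, handy for tests.
        Dataset memDataset = TDBFactory.createDataset();
        memDataset.close();
    }
}
In both forms the returned Dataset supports begin/commit/end transactions, which several of the examples below rely on.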
Example 1: copyToTdb
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
private void copyToTdb() throws RepositoryException {
    if ( !needsSave || null == tdbdir ) {
        return;
    }
    final Dataset dataset = TDBFactory.createDataset( tdbdir.getAbsolutePath() );
    try {
        rc.export( new TdbExporter( dataset ) );
    }
    catch ( RepositoryException | RDFHandlerException e ) {
        log.error( "Problem exporting data to TDB", e );
        dataset.abort();
    }
    finally {
        dataset.close();
    }
}
Example 2: loadOntology
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
private void loadOntology(java.awt.event.ActionEvent evt) {// GEN-FIRST:event_loadOntology
    if (null != dataset) {
        dataset.end();
    }
    JFileChooser fc = new JFileChooser();
    fc.setCurrentDirectory(new File(System.getProperty("user.dir")));
    fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
    int returnVal = fc.showOpenDialog(this);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        dataset = TDBFactory.createDataset(fc.getSelectedFile().toString());
        ontologyNameField.setText(fc.getSelectedFile().getName());
        ontoPath = fc.getSelectedFile().toPath();
    } else {
        JOptionPane.showMessageDialog(this, "Loading ontology failed");
    }
}
Example 3: testSomeMethod2
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
@Test
public void testSomeMethod2() throws Exception {
    Dataset ds = TDBFactory.createDataset("/scratch/WORK2/jena/dataset2/");
    OntModel model1 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym1"));
    OntModel model2 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym2"));
    OntClass thing = model1.createClass("http://www.w3.org/2002/07/owl#Thing");
    model1.createIndividual("http://example.com/onto1#VijayRaj", thing);
    model2.createIndividual("http://example.com/onto2#VijayRaj", thing);
    Model m = model1.union(model2);
    FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/mergetestds.xml");
    RDFDataMgr.write(fw, ds, RDFFormat.NQUADS_UTF8);
    fw.close();
}
Example 4: createPersistentDatasetFromCode
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
/**
 * Creates a persistent Jena TDB data set and Lucene index.
 * @return the text dataset joining the TDB data set and the Lucene index
 * @throws IOException
 */
public Dataset createPersistentDatasetFromCode() throws IOException {
    log.info("Construct a persistent Jena data set with lucene index using code") ;
    // Build a text dataset by code.
    TextQuery.init();
    // Remove old files and folders
    deleteFiles(JENA_TDB_TEMP_FOLDER);
    deleteFiles(LUCENE_INDEX_TEMP_FOLDER);
    // Create new folders
    JENA_TDB_TEMP_FOLDER.mkdirs();
    LUCENE_INDEX_TEMP_FOLDER.mkdirs();
    // Create the persisted Jena data set and Lucene index
    Dataset jenaDataset = TDBFactory.createDataset(JENA_TDB_TEMP_FOLDER.getAbsolutePath()) ;
    // Lucene, persisted.
    Directory luceneIndex = FSDirectory.open(LUCENE_INDEX_TEMP_FOLDER);
    // Define the index mapping
    EntityDefinition entDef = new EntityDefinition("uri", "text", RDFS.label.asNode()) ;
    // Join together into a dataset
    return TextDatasetFactory.createLucene(jenaDataset, luceneIndex, entDef) ;
}
Example 5: TDBloading
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
/**
 * Load Jena TDB
 */
private void TDBloading() {
    logger.info("TDB loading");
    // create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);
    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ);
    model = dataset.getDefaultModel();
    dataset.end();
    // if the model is null, load the local dataset into Jena TDB
    if (model == null)
        TDBloading(datasetFile);
}
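The TDBloading(datasetFile) overload invoked above is not part of this excerpt. As a rough idea only, the signature and body below are assumptions rather than code from the source project (RDFDataMgr here is org.apache.jena.riot.RDFDataMgr, and datasetFile is assumed to be a java.io.File): such a loader could bulk-load the local file into the TDB default graph inside a write transaction.
private void TDBloading(File datasetFile) {
    // Assumed overload: load a local RDF file into the TDB-backed default graph.
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);
    dataset.begin(ReadWrite.WRITE);
    try {
        RDFDataMgr.read(dataset.getDefaultModel(), datasetFile.getAbsolutePath());
        dataset.commit();
        model = dataset.getDefaultModel();
    } finally {
        dataset.end();
    }
}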
Example 6: demoOfUsingADirectory
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
static void demoOfUsingADirectory() {
    // Make a TDB-backed dataset
    String directory = TDB_DIR;
    // read something
    Dataset dataset = TDBFactory.createDataset(directory);
    logger.debug("read tx start!!!");
    demoOfReadTransaction(dataset);
    logger.debug("read tx end!!!");
    dataset.close();
    // write something
    dataset = TDBFactory.createDataset(directory);
    logger.debug("write tx start!!!");
    demoOfWriteTransaction(dataset);
    logger.debug("write tx end!!!");
    dataset.close();
    // read again
    dataset = TDBFactory.createDataset(directory);
    logger.debug("read tx start!!!");
    demoOfReadTransaction(dataset);
    logger.debug("read tx end!!!");
    dataset.close();
}
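The demoOfReadTransaction and demoOfWriteTransaction helpers are not shown in this excerpt; the sketches below are hypothetical (the logged message and the example triple are made up) and only illustrate the usual TDB transaction pattern: begin(ReadWrite.READ)/end() for reads, begin(ReadWrite.WRITE)/commit()/end() for writes.
static void demoOfReadTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.READ);
    try {
        Model model = dataset.getDefaultModel();
        logger.debug("default graph size = " + model.size());
    } finally {
        dataset.end();
    }
}

static void demoOfWriteTransaction(Dataset dataset) {
    dataset.begin(ReadWrite.WRITE);
    try {
        Model model = dataset.getDefaultModel();
        // add a single example triple so the write has something to commit
        model.add(model.createResource("http://example.org/s"),
                  model.createProperty("http://example.org/p"),
                  "example value");
        dataset.commit();
    } finally {
        dataset.end();
    }
}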
Example 7: inferMissingPropertyNames
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
@Test
public void inferMissingPropertyNames() throws Exception {
    final Dataset ds = TDBFactory.createDataset();
    final DatasetPopulator dsp = new DatasetPopulator(ds);
    dsp.addModel(loadModel("infer-property-names/data.ttl"));
    final Model x = loadModel("infer-property-names/expected.ttl");
    ds.begin(ReadWrite.READ);
    try {
        final Model m = ds.getDefaultModel();
        assertTrue(m.containsAll(x));
    } finally {
        ds.end();
    }
}
Example 8: test_rdfcreation_fb
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
@Test
public void test_rdfcreation_fb() throws SAXException, IOException, ParserConfigurationException, Exception {
    Document dataDoc = parser.parse(RdfFactoryTest.class.getResourceAsStream(
            "/data/fb-20121231.xml"), -1);
    RdfFactory factory = new RdfFactory(new RunConfig(domain));
    factory.createRdfs(dataDoc, testTdbDir);
    Dataset dataset = TDBFactory.createDataset(testTdbDir);
    dataset.begin(ReadWrite.READ);
    Model model = dataset.getDefaultModel();
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());
    dataset.end();
}
Example 9: mainNodeId
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
public static void mainNodeId(String datafile, String queryFile) {
    Quack.init() ;
    Query query = QueryFactory.read(queryFile) ;
    Dataset dsMem = TDBFactory.createDataset() ;
    RDFDataMgr.read(dsMem, datafile) ;
    // // TDB current execution.
    // Quack.setOpExecutorFactory(dsMem, OpExecutorQuackTDB.factoryTDB1) ;
    // doOne("TDB", dsMem, query) ;
    //ARQ.setExecutionLogging(InfoLevel.ALL) ;
    Quack.explain(true) ;
    Quack.setOpExecutorFactory(dsMem, OpExecutorQuackTDB.factoryPredicateObject) ;
    doOne("Quack/PredObj", dsMem, query) ;
    System.out.flush() ;
    // Quack.setOpExecutorFactory(dsMem, OpExecutorQuackTDB.factorySubstitute) ;
    // doOne("Quack/Plain", dsMem, query) ;
    // System.out.flush() ;
    // try {
    //     StepPredicateObjectList.UseNaiveExecution = true ;
    //     doOne("QuackPredObj[simple]", dsMem, query) ;
    // } finally { StepPredicateObjectList.UseNaiveExecution = false ; }
}
Example 10: perspectiveRelationsToTrig
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
public static void perspectiveRelationsToTrig (String pathToTrigFile, ArrayList<PerspectiveObject> perspectiveObjects) {
    try {
        OutputStream fos = new FileOutputStream(pathToTrigFile);
        Dataset ds = TDBFactory.createDataset();
        Model defaultModel = ds.getDefaultModel();
        //ResourcesUri.prefixModel(defaultModel);
        // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
        ResourcesUri.prefixModelGaf(defaultModel);
        String attrBase = pathToTrigFile + "_";
        JenaSerialization.addJenaPerspectiveObjects(attrBase, ResourcesUri.grasp, "wasAttributedTo", perspectiveObjects, 1);
        RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
        fos.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 11: createModel
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
public static Model createModel(String... dbDirs) {
    Model mainModel = null;
    Dataset dataset = null;
    for (String dbDir : dbDirs) {
        dataset = TDBFactory.createDataset(dbDir);
        if (mainModel == null) {
            mainModel = dataset.getDefaultModel();
        } else {
            Model secondaryModel = dataset.getDefaultModel();
            mainModel = ModelFactory.createUnion(mainModel, secondaryModel);
        }
    }
    mainModel = ModelFactory.createRDFSModel(mainModel);
    return mainModel;
}
Example 12: copyFromTdb
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
private void copyFromTdb( String file ) throws RepositoryException {
    tdbdir = new File( file );
    Dataset dataset = TDBFactory.createDataset( file );
    try {
        copyFromTdb( dataset );
    }
    finally {
        dataset.close();
    }
}
Example 13: main
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
public static void main(String[] args0) {
    // load dataset
    Dataset dataset;
    JFileChooser fc = new JFileChooser();
    fc.setCurrentDirectory(new File(System.getProperty("user.dir")));
    fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
    int returnVal = fc.showOpenDialog(null);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        dataset = TDBFactory.createDataset(fc.getSelectedFile().toString());
        TransformationProcessor tp = new TransformationProcessor(dataset);
        Map<String,String> pmap = new HashMap<>();
        tp.transform("deletex.sparql", pmap);
    }
}
Example 14: perspectiveRelationsToTrig
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
public static void perspectiveRelationsToTrig (String pathToTrigFile, ArrayList<PerspectiveObject> perspectiveObjects) {
    try {
        OutputStream fos = new FileOutputStream(pathToTrigFile);
        Dataset ds = TDBFactory.createDataset();
        Model defaultModel = ds.getDefaultModel();
        ResourcesUri.prefixModel(defaultModel);
        // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
        ResourcesUri.prefixModelGaf(defaultModel);
        JenaSerialization.addJenaPerspectiveObjects(ds, perspectiveObjects);
        RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
        fos.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 15: perspectiveRelationsToTrigStream
import com.hp.hpl.jena.tdb.TDBFactory; // import the package/class this method depends on
public static void perspectiveRelationsToTrigStream (OutputStream fos, ArrayList<PerspectiveObject> perspectiveObjects) {
    Dataset ds = TDBFactory.createDataset();
    Model defaultModel = ds.getDefaultModel();
    ResourcesUri.prefixModel(defaultModel);
    // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
    ResourcesUri.prefixModelGaf(defaultModel);
    JenaSerialization.addJenaPerspectiveObjects(ds, perspectiveObjects);
    RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
}