This article collects typical usage examples of the Java class org.openrdf.rio.Rio. If you are wondering what the Rio class does, how to use it, or are looking for examples of Rio in action, the curated class code examples below may help.
The Rio class belongs to the org.openrdf.rio package. A total of 15 code examples of the Rio class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
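Before the examples, here is a minimal standalone sketch (not taken from any of the projects below) of the two Rio helpers that several of the examples rely on, Rio.parse and Rio.write. The class name RioQuickStart, the Turtle snippet and the base URI http://example.org/ are made up purely for illustration.

import java.io.StringReader;
import org.openrdf.model.Model;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;

public class RioQuickStart {
    public static void main(String[] args) throws Exception {
        // Illustrative Turtle data and base URI.
        String turtle = "<http://example.org/s> <http://example.org/p> \"o\" .";
        // Parse the Turtle document into an in-memory Model.
        Model model = Rio.parse(new StringReader(turtle), "http://example.org/", RDFFormat.TURTLE);
        // Serialize the same statements back out as N-Triples to stdout.
        Rio.write(model, System.out, RDFFormat.NTRIPLES);
    }
}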
Example 1: parseConfig
import org.openrdf.rio.Rio; // import the required package/class
protected Graph parseConfig(File file) throws SailConfigException, IOException {
RDFFormat format = Rio.getParserFormatForFileName(file.getAbsolutePath());
if (format==null)
throw new SailConfigException("Unsupported file format: " + file.getAbsolutePath());
RDFParser parser = Rio.createParser(format);
Graph model = new GraphImpl();
parser.setRDFHandler(new StatementCollector(model));
InputStream stream = new FileInputStream(file);
try {
parser.parse(stream, file.getAbsolutePath());
} catch (Exception e) {
throw new SailConfigException("Error parsing file: " + file.getAbsolutePath(), e);
} finally {
stream.close();
}
return model;
}
Example 2: runSPARQL
import org.openrdf.rio.Rio; // import the required package/class
/**
* Execute a CONSTRUCT/DESCRIBE SPARQL query against the graph
*
* @param qs
* CONSTRUCT or DESCRIBE SPARQL query
* @param format
* the serialization format for the returned graph
* @return serialized graph of results
*/
public String runSPARQL(String qs, RDFFormat format) {
try {
RepositoryConnection con = therepository.getConnection();
try {
GraphQuery query = con.prepareGraphQuery(org.openrdf.query.QueryLanguage.SPARQL, qs);
StringWriter stringout = new StringWriter();
RDFWriter w = Rio.createWriter(format, stringout);
query.evaluate(w);
return stringout.toString();
} finally {
con.close();
}
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
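A hedged usage sketch for the runSPARQL method above; the instance name rdfStore (an instance of the class that declares runSPARQL) and the CONSTRUCT query are assumptions made for illustration, and the usual org.openrdf imports are assumed.

// rdfStore stands for an instance of the class that defines runSPARQL above.
String construct = "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 10";
String turtle = rdfStore.runSPARQL(construct, RDFFormat.TURTLE);
System.out.println(turtle);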
Example 3: map
import org.openrdf.rio.Rio; // import the required package/class
@Override
protected void map(Text key, RyaStatementWritable value, Context context) throws IOException, InterruptedException {
// receives a RyaStatementWritable; convert to a Statement
RyaStatement rstmt = value.getRyaStatement();
Statement st = RyaToRdfConversions.convertStatement(rstmt);
logger.info("Mapper receives: " + rstmt);
// then convert to an RDF string
StringWriter writer = new StringWriter();
try {
RDFWriter rdfWriter = Rio.createWriter(rdfFormat, writer);
rdfWriter.startRDF();
rdfWriter.handleStatement(st);
rdfWriter.endRDF();
} catch (RDFHandlerException e) {
throw new IOException("Error writing RDF data", e);
}
// Write the string to the output
String line = writer.toString().trim();
logger.info("Serialized to RDF: " + line);
textOut.set(line);
context.write(NullWritable.get(), textOut);
}
Example 4: writeRDF
import org.openrdf.rio.Rio; // import the required package/class
@SuppressWarnings({ "unchecked", "rawtypes" })
public static RDFHandler writeRDF(final OutputStream out, final RDFFormat format,
@Nullable final Map<String, String> namespaces,
@Nullable final Map<? extends RioSetting<?>, ? extends Object> settings)
throws IOException, RDFHandlerException {
final RDFWriter writer = Rio.createWriter(format, out);
final WriterConfig config = writer.getWriterConfig();
config.set(BasicWriterSettings.PRETTY_PRINT, true);
config.set(BasicWriterSettings.RDF_LANGSTRING_TO_LANG_LITERAL, true);
config.set(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL, true);
if (format.equals(RDFFormat.RDFXML)) {
config.set(XMLWriterSettings.INCLUDE_XML_PI, true);
config.set(XMLWriterSettings.INCLUDE_ROOT_RDF_TAG, true);
}
if (settings != null) {
for (final Map.Entry entry : settings.entrySet()) {
config.set((RioSetting) entry.getKey(), entry.getValue());
}
}
return namespaces == null ? writer : newNamespaceHandler(writer, namespaces, null);
}
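A hedged usage sketch for the static writeRDF helper above; the enclosing class name RDFUtil is an assumption, the single statement being written is built purely for illustration, and the usual org.openrdf imports are assumed.

// Build one illustrative statement with the Sesame value factory.
ValueFactory vf = ValueFactoryImpl.getInstance();
Statement st = vf.createStatement(
        vf.createURI("http://example.org/s"),
        vf.createURI("http://example.org/p"),
        vf.createLiteral("o"));
// RDFUtil is assumed to be the class that declares writeRDF.
RDFHandler handler = RDFUtil.writeRDF(System.out, RDFFormat.TURTLE, null, null);
handler.startRDF();
handler.handleStatement(st);
handler.endRDF();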
Example 5: getRDF
import org.openrdf.rio.Rio; // import the required package/class
@GET
public Response getRDF(@QueryParam("entity") String entity,
@QueryParam("topK") Integer topK,
@QueryParam("fixedProperty") String fixedProperty,
@QueryParam("language") String language,
@QueryParam("maxHops") Integer maxHops,
@HeaderParam("Accept") String outputMime)
{
// Guard against a missing Accept header before looking up the output format.
RDFFormat outputFormat = null;
if (outputMime != null && !outputMime.isEmpty()) {
outputFormat = Rio.getParserFormatForMIMEType(outputMime.split(",")[0]);
}
if (outputFormat == null) {
outputFormat = RDFFormat.TURTLE;
}
String [] fixedProperties = new String[0];
if (fixedProperty != null) {
fixedProperties = fixedProperty.split(",");
}
Response r = executeQuery(entity, topK, maxHops, fixedProperties, language, outputFormat);
return Response.fromResponse(r).status(200).header("Location", null).build();
}
Example 6: toStatements
import org.openrdf.rio.Rio; // import the required package/class
public static Iterator<Statement> toStatements(Iterator<Row> iterator)
throws RDFParseException, RDFHandlerException, IOException {
if (!iterator.hasNext()) {
return Collections.emptyIterator();
}
Set<Statement> ret = new HashSet<Statement>();
RDFParser rdfParser = Rio.createParser(RDFFormat.BINARY);
StatementCollector collector = new StatementCollector(ret);
rdfParser.setRDFHandler(collector);
while (iterator.hasNext()) {
toStatements(
rdfParser,
Bytes.getArray(iterator.next().getBytes(
DATA_TABLE_ATTRIBUTE_3)));
}
return Collections.unmodifiableSet(ret).iterator();
}
Example 7: removeIndirectTriples
import org.openrdf.rio.Rio; // import the required package/class
/** Takes as input a set of triples and a list of important URIs, and keeps only those
* triples that have one of the given URIs as their subject or object.
* If we imagine the given set of triples as a graph, this method practically
* returns a subgraph containing only the direct neighbours of the given URIs.
*
* @param nTriples a set of triples in NTriples format
* @param urisToKeep the URIs used to decide which triples to keep (those appearing in the subject or object position)
* @return a subgraph, as triples in NTriples format, containing only the direct neighbours of the given URIs. */
public static String removeIndirectTriples(String nTriples, List<String> urisToKeep) {
String triplesContext = "http://triplesContext";
String subTriplesContext = "http://subgraphTriplesContext";
Repository repository = new SailRepository(new ForwardChainingRDFSInferencer(new MemoryStore()));
try {
repository.initialize();
RepositoryConnection repoConn = repository.getConnection();
repoConn.add(new StringReader(nTriples), triplesContext, RDFFormat.NTRIPLES, repository.getValueFactory().createURI(triplesContext));
RepositoryResult<Statement> results = repoConn.getStatements(null, null, null, false, repository.getValueFactory().createURI(triplesContext));
while (results.hasNext()) {
Statement result = results.next();
if (urisToKeep.contains(result.getSubject().stringValue()) || urisToKeep.contains(result.getObject().stringValue())) {
repoConn.add(result, repository.getValueFactory().createURI(subTriplesContext));
}
}
results.close();
ByteArrayOutputStream out = new ByteArrayOutputStream();
RDFWriter writer = Rio.createWriter(RDFFormat.NTRIPLES, out);
repoConn.export(writer, repository.getValueFactory().createURI(subTriplesContext));
repoConn.close();
return new String(out.toByteArray(), "UTF-8");
} catch (RepositoryException | IOException | RDFParseException | RDFHandlerException ex) {
logger.error("Cannot parse NTriples input - returning the original NTriples content", ex);
return nTriples;
}
}
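A hedged call sketch for removeIndirectTriples above, assumed to run inside the same class; the two N-Triples statements and the URI list are invented to show the filtering behaviour, and java.util.Arrays is assumed to be imported.

String nt = "<http://example.org/a> <http://example.org/p> <http://example.org/b> .\n"
        + "<http://example.org/b> <http://example.org/p> <http://example.org/c> .\n";
// Only the first triple matches the filter: its subject is in the URI list,
// while the second triple has neither its subject nor its object in the list.
String subgraph = removeIndirectTriples(nt, Arrays.asList("http://example.org/a"));
System.out.println(subgraph);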
Example 8: buildIndex
import org.openrdf.rio.Rio; // import the required package/class
public void buildIndex(String inputFilename) {
solrClient.init();
DnbTitleHandler handler = new DnbTitleHandler(solrClient);
RDFParser rdfParser = Rio.createParser(RDFFormat.RDFXML);
rdfParser.setRDFHandler(handler);
try (InputStream in = new FileInputStream(inputFilename)) {
rdfParser.parse(in, "http://dnb.de");
} catch (Exception e) {
log.error("Error parsing " + inputFilename, e);
} finally {
solrClient.commit();
solrClient.optimize();
solrClient.destroy();
}
}
Example 9: getResource
import org.openrdf.rio.Rio; // import the required package/class
private Model getResource(UriBuilder uriBuilder) {
RDFFormat format = RDFFormat.TURTLE;
try {
java.net.URI target = credentials.buildUrl(uriBuilder);
log.debug("Exporting {} data from resource {}", format.getName(), target.toString());
String entity = client.get(target, format.getDefaultMIMEType());
return Rio.parse(new StringReader(entity), target.toString(), format, new ParserConfig(), ValueFactoryImpl.getInstance(), new ParseErrorLogger());
} catch (IllegalArgumentException | URISyntaxException | RDFParseException | IOException e) {
if (e instanceof ClientProtocolException && "Unexpected response status: 404".equals(e.getMessage())) {
// keeping the old behaviour: should a missing resource really fail silently (i.e. return an empty model)?
return new LinkedHashModel();
} else {
throw new RuntimeException(e);
}
}
}
Example 10: importResource
import org.openrdf.rio.Rio; // import the required package/class
@Override
public boolean importResource(String resource, Model data, String dataset, boolean cleanBefore) {
RDFFormat format = RDFFormat.TURTLE;
log.debug("Importing {} data for resource {} in {}", format.getName(), resource, dataset);
try {
java.net.URI target = credentials.buildUrl(getResourceUriBuilder(dataset, resource));
ByteArrayOutputStream out = new ByteArrayOutputStream();
Rio.write(data, out, format);
InputStream in = new ByteArrayInputStream(out.toByteArray());
CloseableHttpResponse response;
if (cleanBefore) {
response = client.put(target, in, format);
} else {
response = client.post(target, in, format);
}
try {
log.debug("Request resolved with {} status code: {}", response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase());
return (response.getStatusLine().getStatusCode() == 200);
} finally {
response.close();
}
} catch (IllegalArgumentException | URISyntaxException | RDFHandlerException | IOException e) {
log.error("Error importing resource: {}", e.getMessage(), e);
throw new RuntimeException(e);
}
}
Example 11: testImportCheckDataHub
import org.openrdf.rio.Rio; // import the required package/class
@Test
public void testImportCheckDataHub() throws IOException, RDFParseException, RDFHandlerException, URISyntaxException {
InputStream in = this.getClass().getResourceAsStream(TEST_FILE);
Assume.assumeNotNull(in);
String base = buildDatasetBaseUri(credentials, status.getOwner(), TEST_DATASET);
final Model model = Rio.parse(in, base, TEST_FILE_FORMAT);
Assert.assertTrue(redlink.importDataset(model, TEST_DATASET));
RestAssured
.given()
.header("Accept", "text/turtle")
.expect()
.statusCode(200)
.contentType("text/turtle")
.get(base + TEST_RESOURCE);
}
Example 12: testResourceImported
import org.openrdf.rio.Rio; // import the required package/class
@Test
public void testResourceImported() throws IOException, RDFParseException, RDFHandlerException, URISyntaxException {
//first import data
InputStream in = this.getClass().getResourceAsStream(TEST_FILE);
Assume.assumeNotNull(in);
final Model model = Rio.parse(in, buildDatasetBaseUri(credentials, status.getOwner(), TEST_DATASET), TEST_FILE_FORMAT);
Assert.assertTrue(redlink.importDataset(model, TEST_DATASET, true));
final SPARQLResult triples = redlink.sparqlTupleQuery(QUERY_SELECT, TEST_DATASET);
Assert.assertNotNull(triples);
Assert.assertEquals(TEST_FILE_TRIPLES, triples.size());
//and then the actual test
String resource = buildDatasetBaseUri(credentials, status.getOwner(), TEST_DATASET) + TEST_RESOURCE;
Model resourceModel = redlink.getResource(resource, TEST_DATASET);
Assert.assertNotNull(resourceModel);
Assert.assertTrue(resourceModel.size() < triples.size());
Assert.assertEquals(TEST_RESOUCE_TRIPLES, resourceModel.size());
}
Example 13: testResourceReImported
import org.openrdf.rio.Rio; // import the required package/class
@Test
@Ignore
public void testResourceReImported() throws IOException, RDFParseException, RDFHandlerException, URISyntaxException {
//first import data
InputStream in = this.getClass().getResourceAsStream(TEST_FILE);
Assume.assumeNotNull(in);
final Model model = Rio.parse(in, buildDatasetBaseUri(credentials, status.getOwner(), TEST_DATASET), TEST_FILE_FORMAT);
Assert.assertTrue(redlink.importDataset(model, TEST_DATASET, true));
final SPARQLResult triples = redlink.sparqlTupleQuery(QUERY_SELECT, TEST_DATASET);
Assert.assertNotNull(triples);
Assert.assertEquals(TEST_FILE_TRIPLES, triples.size());
//and then the actual test
final String resource = buildDatasetBaseUri(credentials, status.getOwner(), TEST_DATASET) + TEST_RESOURCE;
final ValueFactoryImpl vf = new ValueFactoryImpl();
final Resource sesameResource = vf.createURI(resource);
final Model resourceModel = new LinkedHashModel();
resourceModel.add(sesameResource, vf.createURI("http://example.org/foo"), vf.createLiteral("foo"));
Assert.assertTrue(redlink.importResource(resource, resourceModel, TEST_DATASET, true));
final Model resourceModelFromApi = redlink.getResource(resource, TEST_DATASET);
Assert.assertNotNull(resourceModelFromApi);
Assert.assertEquals(resourceModel.size(), resourceModelFromApi.size());
}
Example 14: testDatasetCleanImportDescribe
import org.openrdf.rio.Rio; // import the required package/class
@Test
public void testDatasetCleanImportDescribe() throws IOException, RDFParseException, RDFHandlerException, URISyntaxException {
Assume.assumeTrue(redlink.sparqlUpdate(QUERY_CLEAN, TEST_DATASET));
Assert.assertEquals(0, getCurrentSize(TEST_DATASET));
InputStream in = this.getClass().getResourceAsStream(TEST_FILE);
Assume.assumeNotNull(in);
String dataset = buildDatasetBaseUri(credentials, status.getOwner(), TEST_DATASET);
final Model model = Rio.parse(in, dataset, TEST_FILE_FORMAT);
Assert.assertTrue(redlink.importDataset(model, TEST_DATASET));
String resource = dataset + TEST_RESOURCE;
final Model result = redlink.sparqlGraphQuery("DESCRIBE <" + resource + ">", TEST_DATASET);
Assert.assertNotNull(result);
Assert.assertFalse(result.isEmpty());
Assert.assertEquals(8, result.size());
}
Example 15: parseFile
import org.openrdf.rio.Rio; // import the required package/class
public Model parseFile(InputStream configurationFile, RDFFormat format,
String defaultNamespace) throws Exception {
RDFParser parser = Rio.createParser(format);
org.openrdf.model.Model myGraph = new org.openrdf.model.impl.LinkedHashModel();
// Collect all parsed statements directly into the model instead of a custom RDFHandler.
parser.setRDFHandler(new StatementCollector(myGraph));
parser.parse(configurationFile, defaultNamespace);
return myGraph;
}
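A hedged usage sketch for the parseFile method above; the instance name configParser, the file name config.ttl and the default namespace are assumptions made for illustration, and the usual java.io and org.openrdf imports are assumed.

// configParser stands for an instance of the class that defines parseFile above.
try (InputStream in = new FileInputStream("config.ttl")) {
    Model config = configParser.parseFile(in, RDFFormat.TURTLE, "http://example.org/");
    System.out.println(config.size() + " statements parsed");
}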