This article collects and summarizes typical usage examples of the Rio.write method from the Java class org.openrdf.rio.Rio. If you are wondering how Rio.write is used in practice, or are looking for concrete examples of calling it, the hand-picked code samples below should help. You can also explore further usage examples of the containing class, org.openrdf.rio.Rio.
The following presents 13 code examples of the Rio.write method, sorted by popularity by default.
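
Before the examples, a minimal self-contained sketch of the typical call pattern may help: Rio.write takes a collection of statements (usually a Model), a target OutputStream or Writer, and an RDFFormat, and serializes the statements in that format. The resource URI and label below are illustrative only and do not come from the examples that follow.

import java.io.StringWriter;

import org.openrdf.model.Model;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.Rio;

public class RioWriteSketch {
    public static void main(String[] args) throws RDFHandlerException {
        ValueFactory vf = ValueFactoryImpl.getInstance();
        Model model = new LinkedHashModel();
        // one illustrative triple: <http://example.com/s> rdfs:label "example"
        model.add(vf.createURI("http://example.com/s"), RDFS.LABEL, vf.createLiteral("example"));
        // serialize the model as Turtle into an in-memory writer
        StringWriter out = new StringWriter();
        Rio.write(model, out, RDFFormat.TURTLE);
        System.out.println(out);
    }
}

The same call works with any registered RDFFormat (Turtle, N-Quads, JSON-LD, RDF/JSON, TriG, and so on), which is exactly the variation the examples below exercise.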
Example 1: importResource
import org.openrdf.rio.Rio; // import the package/class required by the method
@Override
public boolean importResource(String resource, Model data, String dataset, boolean cleanBefore) {
    RDFFormat format = RDFFormat.TURTLE;
    log.debug("Importing {} data for resource {} in {}", format.getName(), resource, dataset);
    try {
        java.net.URI target = credentials.buildUrl(getResourceUriBuilder(dataset, resource));
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Rio.write(data, out, format);
        InputStream in = new ByteArrayInputStream(out.toByteArray());
        CloseableHttpResponse response;
        if (cleanBefore) {
            response = client.put(target, in, format);
        } else {
            response = client.post(target, in, format);
        }
        try {
            log.debug("Request resolved with {} status code: {}", response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase());
            return (response.getStatusLine().getStatusCode() == 200);
        } finally {
            response.close();
        }
    } catch (IllegalArgumentException | URISyntaxException | RDFHandlerException | IOException e) {
        log.error("Error importing resource: {}", e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
Example 2: SaveRdf
import org.openrdf.rio.Rio; // import the package/class required by the method
/**
 * Saves the created graph as RDF.
 *
 * @throws RepositoryException
 * @throws RDFHandlerException
 * @throws IOException
 */
private void SaveRdf() throws RepositoryException, RDFHandlerException, IOException {
    RepositoryResult<Statement> statements = src.getStatements(null, null, null, true);
    Model model = new LinkedHashModel();
    java.util.ArrayList<Statement> arr = new java.util.ArrayList<Statement>();
    while (statements.hasNext()) {
        arr.add(statements.next());
    }
    model.addAll(arr);
    model.setNamespace("rdf", RDF.NAMESPACE);
    model.setNamespace("rdfs", RDFS.NAMESPACE);
    model.setNamespace("xsd", XMLSchema.NAMESPACE);
    model.setNamespace("foaf", FOAF.NAMESPACE);
    model.setNamespace("ex", "");
    // Note: the FileWriter is never explicitly closed here; see the alternative sketch below.
    Rio.write(model, new java.io.FileWriter(new java.io.File(outputFileName), false), RDFFormat.TURTLE);
    CloseAllConnections();
}
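
The copy loop above works, but the FileWriter is never closed and the intermediate ArrayList is unnecessary. A minimal alternative sketch, assuming src is a RepositoryConnection and that Sesame's info.aduna.iteration.Iterations helper is on the classpath (the field and file names follow the example; everything else is assumed):

import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import info.aduna.iteration.Iterations;
import org.openrdf.model.Model;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;

public class SaveRdfSketch {
    // Sketch: pour the connection's statements straight into a Model, then let
    // try-with-resources flush and close the file writer once Rio.write finishes.
    public static void save(RepositoryConnection src, String outputFileName) throws Exception {
        Model model = new LinkedHashModel();
        Iterations.addAll(src.getStatements(null, null, null, true), model);
        try (Writer writer = Files.newBufferedWriter(Paths.get(outputFileName), StandardCharsets.UTF_8)) {
            Rio.write(model, writer, RDFFormat.TURTLE);
        }
    }
}

The try-with-resources block guarantees the writer is closed even if Rio.write throws.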
Example 3: iteratesOverStatementsWhenParsingValidFile
import org.openrdf.rio.Rio; // import the package/class required by the method
@Test
public void iteratesOverStatementsWhenParsingValidFile() throws Exception {
    // Arrange
    ArrayList<Statement> statements = new ArrayList<Statement>();
    statements.add(createHttpStatement("s1", "p1", "o1", "g1"));
    statements.add(createHttpStatement("s2", "p1", "o1", "g2"));
    statements.add(createHttpStatement("s3", "p1", "o3", "g1"));
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    Rio.write(statements, outputStream, RDFFormat.NQUADS);
    outputStream.close();
    // Act
    Reader inputReader = new InputStreamReader(new ByteArrayInputStream(outputStream.toByteArray()));
    NQuadsParserIterator parserIterator = new NQuadsParserIterator(inputReader, LDFTConfigConstants.DEFAULT_FILE_PARSER_CONFIG);
    ArrayList<Statement> result = new ArrayList<Statement>();
    while (parserIterator.hasNext()) {
        result.add(parserIterator.next());
    }
    // Assert
    assertThat(result.size(), equalTo(statements.size()));
    for (int i = 0; i < result.size(); i++) {
        assertThat(result.get(i), contextAwareStatementIsEqual(statements.get(i)));
    }
}
Example 4: throwsExceptionOnSyntacticError
import org.openrdf.rio.Rio; // import the package/class required by the method
@Test(expected = Exception.class)
public void throwsExceptionOnSyntacticError() throws Exception {
    // Arrange
    ArrayList<Statement> statements = new ArrayList<Statement>();
    statements.add(createHttpStatement("s1", "p1", "o1", "g1"));
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    Rio.write(statements, outputStream, RDFFormat.NQUADS);
    outputStream.write(";".getBytes());
    outputStream.close();
    // Act
    Reader inputReader = new InputStreamReader(new ByteArrayInputStream(outputStream.toByteArray()));
    NQuadsParserIterator parserIterator = new NQuadsParserIterator(inputReader, LDFTConfigConstants.DEFAULT_FILE_PARSER_CONFIG);
    while (parserIterator.hasNext()) {
        parserIterator.next();
    }
}
Example 5: getAnnotationAsJsonLd
import org.openrdf.rio.Rio; // import the package/class required by the method
/**
 * This is a fast way to print the annotations. Much much faster using the
 * appropriate SPARQL query. If this change is accepted in anno4j then we
 * will remove this.
 *
 * @param anno
 * @return
 */
public String getAnnotationAsJsonLd(Annotation anno) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        /*
        http://stackoverflow.com/questions/33241812/sparql-query-to-get-all-triples-related-to-a-specific-subject
        CONSTRUCT {
            <http://lifewatchgreece.eu/entity/annotations/6fe690c7-371e-476b-affd-919ba062ed13> ?prop ?val .
            ?child ?childProp ?childPropVal .
            ?someSubj ?incomingChildProp ?child .
        }
        WHERE {
            <http://lifewatchgreece.eu/entity/annotations/6fe690c7-371e-476b-affd-919ba062ed13> ?prop ?val ; (<>|!<>)+ ?child .
            ?child ?childProp ?childPropVal .
            ?someSubj ?incomingChildProp ?child .
        }
        */
        // Get the whole graph of a specific annotation
        String query = "CONSTRUCT {\n"
                + " <" + anno.getResourceAsString() + "> ?prop ?val .\n"
                + " ?child ?childProp ?childPropVal . \n"
                //+ " ?someSubj ?incomingChildProp ?child .\n"
                + "}\n"
                + "WHERE {\n"
                + " <" + anno.getResourceAsString() + "> ?prop ?val ; (<>|!<>)+ ?child . \n"
                + " ?child ?childProp ?childPropVal.\n"
                //+ " ?someSubj ?incomingChildProp ?child. \n"
                + "}";
        RDFWriter writer = Rio.createWriter(RDFFormat.JSONLD, out);
        // Execute the query
        GraphQueryResult results = sparqlGraphQuery(query);
        Rio.write(QueryResults.asModel(results), writer);
    } catch (RDFHandlerException e) {
        e.printStackTrace();
    } catch (QueryEvaluationException ex) {
        Logger.getLogger(Anno4jRepository.class.getName()).log(Level.SEVERE, null, ex);
    }
    return out.toString();
}
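
When a specific JSON-LD shape is needed (compacted output, for instance), the writer created with Rio.createWriter can be configured before Rio.write is called. A hedged sketch, assuming Sesame's JSON-LD writer module and its JSONLDSettings are available; only the writer setup differs from the example above:

import java.io.ByteArrayOutputStream;

import org.openrdf.model.Model;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFWriter;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.JSONLDMode;
import org.openrdf.rio.helpers.JSONLDSettings;

public class JsonLdWriterSketch {
    public static String toCompactJsonLd(Model model) throws RDFHandlerException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        RDFWriter writer = Rio.createWriter(RDFFormat.JSONLD, out);
        // ask the JSON-LD writer for compacted output instead of the default shape
        writer.getWriterConfig().set(JSONLDSettings.JSONLD_MODE, JSONLDMode.COMPACT);
        Rio.write(model, writer);
        return out.toString();
    }
}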
Example 6: importDataset
import org.openrdf.rio.Rio; // import the package/class required by the method
@Override
public boolean importDataset(Model data, String dataset, boolean cleanBefore) throws RDFHandlerException {
    RDFFormat format = RDFFormat.TURTLE;
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Rio.write(data, out, format);
    return importDataset(new ByteArrayInputStream(out.toByteArray()), format, dataset, cleanBefore);
}
Example 7: testGetRepositoryConfig
import org.openrdf.rio.Rio; // import the package/class required by the method
/**
 * Test method for
 * {@link org.openrdf.repository.manager.RepositoryManager#getRepositoryConfig(java.lang.String)}.
 *
 * @throws Exception
 */
@Test
public void testGetRepositoryConfig() throws Exception
{
    RepositoryConfig repositoryConfig = testRepositoryManager.getRepositoryConfig("SYSTEM");
    assertNotNull(repositoryConfig);
    Model exportGraph = new LinkedHashModel();
    repositoryConfig.export(exportGraph);
    assertEquals(23, exportGraph.size());
    Rio.write(exportGraph, System.out, RDFFormat.NQUADS);
    assertEquals(5, exportGraph.filter(null, StardogRepositoryConfig.NAMESPACE_NAME_URI, null).size());
    assertEquals(5, exportGraph.filter(null, StardogRepositoryConfig.NAMESPACE_PREFIX_URI, null).size());
    Resource topNode = GraphUtil.getUniqueSubject(exportGraph, RDF.TYPE, RepositoryConfigSchema.REPOSITORY);
    System.out.println(topNode);
    RepositoryConfig imported = new RepositoryConfig();
    imported.parse(exportGraph, topNode);
    // StardogRepositoryConfig test = new StardogRepositoryConfig();
    // test.parse(exportGraph, topNode);
    Model secondExport = new LinkedHashModel();
    imported.export(secondExport);
    assertEquals(23, secondExport.size());
    System.out.println("Round-tripped configuration...");
    Rio.write(secondExport, System.out, RDFFormat.NQUADS);
    // Test round-tripping of the configuration
    assertTrue(ModelUtil.equals(exportGraph, secondExport));
}
Example 8: setUp
import org.openrdf.rio.Rio; // import the package/class required by the method
@Before
public void setUp() throws Exception {
    testDir = tempDir.newFolder("vocabbuildertest").toPath();
    ValueFactory vf = ValueFactoryImpl.getInstance();
    String ns = "http://example.com/ns/ontology#";
    testOntologyUri = vf.createURI(ns);
    testProperty1 = vf.createURI(ns, "property1");
    testProperty2 = vf.createURI(ns, "property_2");
    testProperty3 = vf.createURI(ns, "property-3");
    testProperty4 = vf.createURI(ns, "propertyLocalised4");
    testProperty1Description = vf.createLiteral("property 1 description");
    testProperty2Description = vf.createLiteral("property 2 description");
    testProperty3Description = vf.createLiteral("property 3 description");
    testProperty4DescriptionEn = vf.createLiteral("property 4 description english", "en");
    testProperty4DescriptionFr = vf.createLiteral("Description de la propriété français", "fr");
    Model testOntology = new LinkedHashModel();
    testOntology.add(testOntologyUri, RDF.TYPE, OWL.ONTOLOGY);
    testOntology.add(testProperty1, RDF.TYPE, OWL.DATATYPEPROPERTY);
    testOntology.add(testProperty2, RDF.TYPE, OWL.OBJECTPROPERTY);
    testOntology.add(testProperty3, RDF.TYPE, OWL.ANNOTATIONPROPERTY);
    testOntology.add(testProperty4, RDF.TYPE, OWL.ANNOTATIONPROPERTY);
    testOntology.add(testProperty1, DCTERMS.DESCRIPTION, testProperty1Description);
    testOntology.add(testProperty2, RDFS.COMMENT, testProperty2Description);
    testOntology.add(testProperty3, SKOS.DEFINITION, testProperty3Description);
    testOntology.add(testProperty4, SKOS.PREF_LABEL, testProperty4DescriptionEn);
    testOntology.add(testProperty4, SKOS.PREF_LABEL, testProperty4DescriptionFr);
    String fileName = "test." + format.getDefaultFileExtension();
    inputPath = testDir.resolve(fileName);
    try (final OutputStream outputStream = Files.newOutputStream(inputPath)) {
        Rio.write(testOntology, outputStream, format);
    }
}
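
A natural companion to this setup is reading the written file back to confirm the round trip. A minimal sketch, assuming the same inputPath and format values as above (Rio.parse returns a Model given an InputStream, a base URI, and the format):

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.openrdf.model.Model;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;

public class RoundTripSketch {
    // Parse the file written by setUp() back into a Model for further assertions.
    public static Model readBack(Path inputPath, RDFFormat format) throws Exception {
        try (InputStream in = Files.newInputStream(inputPath)) {
            return Rio.parse(in, "", format);
        }
    }
}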
Example 9: uploadArtifacts
import org.openrdf.rio.Rio; // import the package/class required by the method
public ConcurrentMap<InferredOWLOntologyID, InferredOWLOntologyID> uploadArtifacts(
        final ConcurrentMap<InferredOWLOntologyID, Model> uploadQueue) throws PoddClientException
{
    final ConcurrentMap<InferredOWLOntologyID, InferredOWLOntologyID> resultMap = new ConcurrentHashMap<>();
    for(final InferredOWLOntologyID nextUpload : uploadQueue.keySet())
    {
        try
        {
            final StringWriter writer = new StringWriter(4096);
            Rio.write(uploadQueue.get(nextUpload), writer, RDFFormat.RDFJSON);
            final InferredOWLOntologyID newID =
                    this.appendArtifact(nextUpload,
                            new ByteArrayInputStream(writer.toString().getBytes(Charset.forName("UTF-8"))),
                            RDFFormat.RDFJSON);
            if(newID == null)
            {
                this.log.error("Did not find a valid result from append artifact: {}", nextUpload);
            }
            else if(nextUpload.equals(newID))
            {
                this.log.error("Result from append artifact was not changed, as expected. {} {}", nextUpload, newID);
            }
            else
            {
                resultMap.putIfAbsent(nextUpload, newID);
            }
        }
        catch(final RDFHandlerException e)
        {
            this.log.error("Found exception generating upload body: ", e);
        }
    }
    return resultMap;
}
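
The StringWriter-to-bytes round trip above can be skipped by writing directly into a byte buffer. A small sketch, under the assumption that the upload only needs an InputStream plus the RDFFormat (class and method names here are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;

import org.openrdf.model.Model;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.Rio;

public class UploadBodySketch {
    // Serialize the model straight into a byte buffer and expose it as an InputStream,
    // skipping the intermediate String and explicit charset conversion.
    public static InputStream asRdfJsonStream(Model model) throws RDFHandlerException {
        ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
        Rio.write(model, out, RDFFormat.RDFJSON);
        return new ByteArrayInputStream(out.toByteArray());
    }
}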
Example 10: serializeRDFJSON
import org.openrdf.rio.Rio; // import the package/class required by the method
public static void serializeRDFJSON(Map<String,Metadata> data, OutputStream out) throws IOException {
    ValueFactory vf = ValueFactoryImpl.getInstance();
    Model results = new LinkedHashModel();
    for (Map.Entry<String,Metadata> subject : data.entrySet()) {
        Resource subjectResource = stringToResource(subject.getKey(), vf);
        for (Map.Entry<String,Set<RDFNode>> predicate : subject.getValue().entrySet()) {
            org.openrdf.model.URI predicateURI = vf.createURI(predicate.getKey());
            for (RDFNode objectNode : predicate.getValue()) {
                org.openrdf.model.Value objectValue;
                if (objectNode instanceof Literal) {
                    if (((Literal) objectNode).getLanguage() != null)
                        objectValue = vf.createLiteral(((Literal) objectNode).getContent(),
                                ((Literal) objectNode).getLanguage());
                    else if (((Literal) objectNode).getType() != null)
                        objectValue = vf.createLiteral(((Literal) objectNode).getContent(),
                                vf.createURI(((Literal) objectNode).getType().getUri()));
                    else
                        objectValue = vf.createLiteral(((Literal) objectNode).getContent());
                } else {
                    if (objectNode instanceof URI) {
                        objectValue = vf.createURI(((URI) objectNode).getUri());
                    } else {
                        objectValue = vf.createBNode(((BNode) objectNode).getAnonId());
                    }
                }
                results.add(subjectResource, predicateURI, objectValue);
            }
        }
    }
    try {
        Rio.write(results, out, RDFFormat.RDFJSON);
    } catch (RDFHandlerException e) {
        throw new IOException(e);
    }
}
Example 11: post
import org.openrdf.rio.Rio; // import the package/class required by the method
@Test
public void post() throws IOException, CumulusStoreException, RDFHandlerException, ServletException {
    for (int i = 0; i < TRIPLES_NT.size(); i++) {
        for (String mime_type : MimeTypes.RDF_SERIALIZATIONS) {
            /*
             * clear data ...
             */
            TRIPLE_STORE.clear();
            assertTrue("store should be empty", !TRIPLE_STORE.query(new Value[] { null, null, null }).hasNext());
            /*
             * prepare data in desired RDF serialization
             */
            Model model = new LinkedHashModel(parseNX(TRIPLES_NT.get(i)));
            final ByteArrayOutputStream out = new ByteArrayOutputStream();
            Rio.write(model, out, RDFFormat.forMIMEType(mime_type));
            /*
             * prepare mock ...
             */
            when(_request.getHeader(Headers.CONTENT_TYPE)).thenReturn(mime_type);
            when(_request.getInputStream()).thenReturn(new ServletInputStream() {
                final InputStream _inputStream = new ByteArrayInputStream(out.toByteArray());
                @Override
                public int read() throws IOException {
                    return _inputStream.read();
                }
            });
            /*
             * HTTP POST
             */
            _ld_servlet.doPost(_request, _response);
            /*
             * verify the HTTP POST ...
             */
            verify(_response, atLeastOnce()).setStatus(HttpServletResponse.SC_CREATED);
            for (Statement stmt : model) {
                assertTrue("statement '" + stmt + "' has not been added correctly for serialization '" + mime_type + "'",
                        TRIPLE_STORE.query(Util.toValueArray(stmt)).hasNext());
            }
        }
    }
}
Example 12: toRDF
import org.openrdf.rio.Rio; // import the package/class required by the method
public void toRDF(File file, RDFFormat format) throws RDFHandlerException, IOException {
    Writer writer = Files.newWriter(file, Charset.forName("UTF-8"));
    Rio.write(model, writer, format);
    writer.flush();
    writer.close();
}
Example 13: asModel
import org.openrdf.rio.Rio; // import the package/class required by the method
private Model asModel(Set<Statement> rdf) throws RDFHandlerException {
    Model m = new TreeModel(rdf);
    Rio.write(m, System.out, RDFFormat.TRIG);
    return m;
}