本文整理汇总了Java中org.apache.jena.riot.system.StreamRDF类的典型用法代码示例。如果您正苦于以下问题:Java StreamRDF类的具体用法?Java StreamRDF怎么用?Java StreamRDF使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
StreamRDF类属于org.apache.jena.riot.system包,在下文中一共展示了StreamRDF类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: exec
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * {@inheritDoc}
 *
 * Substitutes {@code binding} into each triple pattern of the BGP, renames
 * blank nodes via {@code bNodeMap}, and emits every resulting concrete
 * triple to {@code outputStream}.
 */
final void exec(
        final Dataset inputDataset,
        final StreamRDF outputStream,
        final BindingHashMapOverwrite binding,
        final BNodeMap bNodeMap) {
    // Evaluate trace-enablement once: the original built the StringBuilder and
    // called LOG.trace unconditionally, doing string work even when TRACE is off.
    final boolean traceEnabled = LOG.isTraceEnabled();
    final StringBuilder sb = traceEnabled ? new StringBuilder("Output triples") : null;
    bgp.getList().stream()
            .map((t) -> TemplateLib.subst(t, binding, bNodeMap.asMap()))
            // Only fully-ground triples (no remaining variables) are emitted.
            .filter((t2) -> (t2.isConcrete()))
            .forEach((t2) -> {
                if (traceEnabled) {
                    sb.append("\n\t").append(t2);
                }
                outputStream.triple(t2);
            });
    if (traceEnabled) {
        LOG.trace(sb.toString());
    }
}
示例2: expand
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Loads the vocabulary and data models, applies RDFS inference through an
 * {@link InferenceProcessorStreamRDF} wrapper, materializes the expanded
 * triples into a graph, and writes the result to stdout as Turtle.
 *
 * @throws IOException declared for source compatibility with callers
 */
public static void expand() throws IOException {
    boolean combined = false;
    // NOTE(review): the original resolved only the rules file against a
    // "testing/Inf" directory while the data/vocab files were loaded from the
    // working directory — confirm the intended locations.
    String DATA_FILE = "data.ttl";
    String VOCAB_FILE = "vocab.ttl";
    Model vocab = RDFDataMgr.loadModel(VOCAB_FILE);
    Model data = RDFDataMgr.loadModel(DATA_FILE);
    // Removed dead code: a rules file was read, comment-stripped, and used to
    // build a GenericRuleReasoner and an InfModel ("InfModel m") that was
    // never referenced again. Expansion actually happens via the
    // InferenceProcessorStreamRDF pipeline below.
    InferenceSetupRDFS setup = new InferenceSetupRDFS(vocab, combined);
    // Expansion graph: inferred triples are collected here.
    Graph graphExpanded = Factory.createDefaultGraph();
    StreamRDF stream = StreamRDFLib.graph(graphExpanded);
    // Wrap the sink so inferences are applied as triples stream through.
    stream = new InferenceProcessorStreamRDF(stream, setup);
    sendToStream(data.getGraph(), stream);
    RDFDataMgr.write(System.out, graphExpanded, Lang.TTL);
}
示例3: bulkLoad
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Bulk-loads the given files into a TDB dataset inside a single write
 * transaction, batching triples and reporting progress.
 *
 * @param ds    the target dataset (must be backed by a DatasetGraphTDB)
 * @param files RDF files to load
 */
public static void bulkLoad(Dataset ds, String... files) {
    // c.f. TDB2 Loader.bulkLoad (which does not currently batch split).
    DatasetGraphTDB dsg = (DatasetGraphTDB) ds.asDatasetGraph();
    // Batch triples in groups of 100 before handing them to the dataset.
    StreamRDF batcher = new StreamRDFBatchSplit(dsg, 100);
    // Log progress every 100,000 triples, averaging over 10 ticks.
    ProgressMonitor plog = ProgressMonitor.create(log, "Triples", 100000, 10);
    // Removed the redundant alias "s3 = sMonitor" and commented-out
    // plog.startMessage() from the original.
    StreamRDF sink = new ProgressStreamRDF(batcher, plog);
    plog.start();
    // One write transaction around the whole multi-file load.
    Txn.executeWrite(ds, () -> {
        for (String fn : files) {
            if (files.length > 1) {
                FmtLog.info(log, "File: %s", fn);
            }
            RDFDataMgr.parse(sink, fn);
        }
    });
    plog.finish();
    plog.finishMessage();
}
示例4: doExportTree
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Recursively exports a register item: for registers, first exports the
 * current version of every member, then emits the quads of every graph
 * associated with any version of this item.
 */
private void doExportTree(RegisterItem item, StreamRDF out) {
    // Depth-first descent into register members before this item's own data.
    if (item.isRegister()) {
        Register register = item.getAsRegister(this);
        for (RegisterEntryInfo entry : listMembers(register)) {
            doExportTree(getCurrentVersion(entry.getItemURI()).asRegisterItem(), out);
        }
    }
    // scanAllVersions returns the graph resources to export (it also receives
    // 'out' — whether it writes to it directly is project-defined).
    Collection<Resource> graphs = scanAllVersions(item.getRoot(), out, null);
    for (Resource graph : graphs) {
        Iterator<Quad> quads = store.asDataset().asDatasetGraph()
                .findNG(graph.asNode(), Node.ANY, Node.ANY, Node.ANY);
        while (quads.hasNext()) {
            out.quad(quads.next());
        }
    }
}
示例5: testImport
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Round-trips a register subtree: exports it to a temp file, deletes part of
 * the store, re-imports from the export, and checks the size signature is
 * restored.
 */
@Test
public void testImport() throws IOException {
    long baseSize = sizeSig();
    createTestTree();
    long testSize = sizeSig();
    assertTrue(testSize > baseSize);
    // Suffix kept as "nq" (no dot) to preserve the original file naming.
    File exportFile = File.createTempFile("reg-export", "nq");
    try {
        exportTo(REG1, exportFile);
        store.delete(REG1 + "/reg3");
        assertTrue(sizeSig() < testSize);
        StreamRDF stream = store.importTree(REG1);
        // try-with-resources: the original leaked this FileInputStream.
        try (FileInputStream in = new FileInputStream(exportFile)) {
            RDFDataMgr.parse(stream, in, Lang.NQUADS);
        }
        assertEquals(testSize, sizeSig());
    } finally {
        // Clean up the temp file even when an assertion above fails.
        exportFile.delete();
    }
}
示例6: enrich
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Enriches the output stream with triples about {@code match}. URI nodes
 * only: either the full closure description of the resource, or just the
 * statements for the configured enrichment properties.
 */
@Override
public void enrich(StreamRDF stream, Node match) {
    // Only URI nodes can be looked up as resources.
    if (!match.isURI()) {
        return;
    }
    Resource subject = rdf.getResource(match.getURI());
    if (enrichDescribe) {
        // Emit the whole closure description of the resource
        // (Closure.closure with testThisNode = false).
        Model description = Closure.closure(subject, false);
        ExtendedIterator<Triple> found = description.getGraph().find(null, null, null);
        while (found.hasNext()) {
            stream.triple(found.next());
        }
    } else {
        // Emit only the statements for each configured property.
        for (Property property : enrich) {
            StmtIterator statements = subject.listProperties(property);
            while (statements.hasNext()) {
                stream.triple(statements.next().asTriple());
            }
        }
    }
}
示例7: fetch
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Fetches the model behind this value's URI and copies the matching
 * statements for each given property (prefixed names allowed) to the
 * converter's output stream. Returns this value for chaining.
 */
public Value fetch(String... strings) {
    ConverterProcess proc = ConverterProcess.get();
    String subjectUri = asURI();
    Model model = proc.fetchModel(subjectUri);
    // Nothing fetched: emit nothing, still return this for chaining.
    if (model == null) {
        return this;
    }
    StreamRDF out = proc.getOutputStream();
    for (String property : strings) {
        // Expand prefixed names against the conversion context's prefix map.
        String expanded = proc.getDataContext().getPrefixes().expandPrefix(property);
        Node predicate = NodeFactory.createURI(expanded);
        Node subject = NodeFactory.createURI(subjectUri);
        ExtendedIterator<Triple> matches = model.getGraph().find(subject, predicate, null);
        while (matches.hasNext()) {
            out.triple(matches.next());
        }
    }
    return this;
}
示例8: writeTurtle
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Writes the collected triples as streaming Turtle, optionally emitting a
 * base IRI first and deduplicating within a sliding window.
 */
public void writeTurtle(String baseIRI, PrefixMapping prefixes, boolean writeBase) {
    // Auto-register the RDF prefix so rdf:type is displayed well; all other
    // prefixes come from the query and are kept as the author intended.
    prefixes = ensureRDFPrefix(prefixes);
    if (writeBase) {
        // Jena's streaming Turtle writers don't output the base even when
        // provided, so write it directly.
        IndentedWriter baseWriter = new IndentedWriter(out);
        RiotLib.writeBase(baseWriter, baseIRI);
        baseWriter.flush();
    }
    StreamRDF sink = new WriterStreamRDFBlocks(out);
    if (dedupWindowSize > 0) {
        // Suppress repeated triples within the configured window.
        sink = new StreamRDFDedup(sink, dedupWindowSize);
    }
    sink.start();
    sink.base(baseIRI);
    prefixes.getNsPrefixMap().forEach(sink::prefix);
    StreamOps.sendTriplesToStream(triples, sink);
    sink.finish();
}
示例9: write
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Serializes an RdfStream to the output stream in the requested format.
 * Block-streamable formats are written triple-by-triple; all others are
 * collected into a model first.
 *
 * @param rdfStream     the triples to serialize
 * @param output        destination stream
 * @param dataFormat    requested RDF language
 * @param dataMediaType media type, used to select the JSON-LD variant
 * @throws IOException on serialization failure
 */
private static void write(final RdfStream rdfStream,
        final OutputStream output,
        final Lang dataFormat,
        final MediaType dataMediaType) throws IOException {
    final RDFFormat format = defaultSerialization(dataFormat);
    // For formats that can be block-streamed (n-triples, turtle).
    if (format != null) {
        // Pass the object itself: SLF4J formats lazily, so the original's
        // eager dataFormat.toString() did work even when DEBUG was disabled.
        LOGGER.debug("Stream-based serialization of {}", dataFormat);
        final StreamRDF stream = getWriterStream(output, format);
        stream.start();
        rdfStream.forEach(stream::triple);
        stream.finish();
    } else {
        // Formats requiring the whole model in memory (rdfxml, n3).
        LOGGER.debug("Non-stream serialization of {}", dataFormat);
        final Model model = rdfStream.collect(toModel());
        if (RDFXML.equals(dataFormat)) {
            // Use plain (block) output streaming for RDF/XML.
            RDFDataMgr.write(output, model.getGraph(), RDFXML_PLAIN);
        } else if (JSONLD.equals(dataFormat)) {
            // The JSON-LD variant is chosen from the requested media type.
            final RDFFormat jsonldFormat = getFormatFromMediaType(dataMediaType);
            RDFDataMgr.write(output, model.getGraph(), jsonldFormat);
        } else {
            RDFDataMgr.write(output, model.getGraph(), dataFormat);
        }
    }
}
示例10: write
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Writes the triple stream in the requested RDF syntax. RDFa is rendered by
 * the HTML serializer; other syntaxes are mapped to a Jena Lang and written
 * either streaming (when a default stream serialization exists) or buffered
 * via an in-memory graph.
 */
@Override
public void write(final Stream<? extends Triple> triples, final OutputStream output, final RDFSyntax syntax,
        final IRI... profiles) {
    requireNonNull(triples, "The triples stream may not be null!");
    requireNonNull(output, "The output stream may not be null!");
    requireNonNull(syntax, "The RDF syntax value may not be null!");
    try {
        if (RDFA.equals(syntax)) {
            // HTML/RDFa output; the first profile (if any) selects the template.
            final IRI profile = profiles.length > 0 ? profiles[0] : null;
            htmlSerializer.write(output, triples, profile);
            return;
        }
        final Lang lang = rdf.asJenaLang(syntax).orElseThrow(() ->
                new RuntimeTrellisException("Invalid content type: " + syntax.mediaType()));
        final RDFFormat format = defaultSerialization(lang);
        if (nonNull(format)) {
            LOGGER.debug("Writing stream-based RDF: {}", format);
            final StreamRDF stream = getWriterStream(output, format);
            stream.start();
            // Emit namespace prefixes first when a namespace service is configured.
            ofNullable(nsService).ifPresent(svc -> svc.getNamespaces().forEach(stream::prefix));
            triples.map(rdf::asJenaTriple).forEachOrdered(stream::triple);
            stream.finish();
        } else {
            // No streaming writer: accumulate into a graph, then serialize at once.
            LOGGER.debug("Writing buffered RDF: {}", lang);
            final org.apache.jena.graph.Graph graph = createDefaultGraph();
            ofNullable(nsService).map(NamespaceService::getNamespaces)
                    .ifPresent(graph.getPrefixMapping()::setNsPrefixes);
            triples.map(rdf::asJenaTriple).forEachOrdered(graph::add);
            if (JSONLD.equals(lang)) {
                writeJsonLd(output, DatasetGraphFactory.create(graph), profiles);
            } else {
                RDFDataMgr.write(output, graph, lang);
            }
        }
    } catch (final AtlasException ex) {
        // Jena's unchecked AtlasException is rewrapped in the project exception.
        throw new RuntimeTrellisException(ex);
    }
}
示例11: write
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
// Writes the triple stream in the requested RDF syntax: RDFa/HTML goes
// through the HTML serializer; other syntaxes are mapped to a Jena Lang and
// written either streaming (when a default stream serialization exists) or
// buffered via an in-memory graph.
@Override
public void write(final Stream<? extends Triple> triples, final OutputStream output, final RDFSyntax syntax,
final IRI... profiles) {
requireNonNull(triples, "The triples stream may not be null!");
requireNonNull(output, "The output stream may not be null!");
requireNonNull(syntax, "The RDF syntax value may not be null!");
try {
if (RDFA_HTML.equals(syntax)) {
// HTML/RDFa output; the first profile (if any) selects the template.
htmlSerializer.write(output, triples, profiles.length > 0 ? profiles[0] : null);
} else {
// NOTE(review): 'syntax.mediaType' is accessed as a field here rather than
// a method call — confirm this matches the commons-rdf version in use.
final Lang lang = rdf.asJenaLang(syntax).orElseThrow(() ->
new RuntimeRepositoryException("Invalid content type: " + syntax.mediaType));
final RDFFormat format = defaultSerialization(lang);
if (nonNull(format)) {
// Streaming path: emit prefixes (when a namespace service exists), then triples.
LOGGER.debug("Writing stream-based RDF: {}", format);
final StreamRDF stream = getWriterStream(output, format);
stream.start();
ofNullable(nsService).ifPresent(svc -> svc.getNamespaces().forEach(stream::prefix));
triples.map(rdf::asJenaTriple).forEachOrdered(stream::triple);
stream.finish();
} else {
// Buffered path: accumulate into a graph, then serialize in one go.
LOGGER.debug("Writing buffered RDF: {}", lang);
final org.apache.jena.graph.Graph graph = createDefaultGraph();
ofNullable(nsService).map(NamespaceService::getNamespaces)
.ifPresent(graph.getPrefixMapping()::setNsPrefixes);
triples.map(rdf::asJenaTriple).forEachOrdered(graph::add);
if (JSONLD.equals(lang)) {
writeJsonLd(output, DatasetGraphFactory.create(graph), profiles);
} else {
RDFDataMgr.write(output, graph, lang);
}
}
}
// Jena's unchecked AtlasException is rewrapped in the project exception.
} catch (final AtlasException ex) {
throw new RuntimeRepositoryException(ex);
}
}
示例12: toPatch
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Converts a file to an RDFPatch. Files in an RDF syntax (triples or quads,
 * judged by extension) are parsed into a patch of changes; anything else is
 * assumed to already be a text patch and is read directly.
 */
protected RDFPatch toPatch(String fn) {
    // TODO: handle .gz inputs?
    Lang lang = RDFLanguages.filenameToLang(fn);
    boolean isRdfSyntax =
            lang != null && (RDFLanguages.isTriples(lang) || RDFLanguages.isQuads(lang));
    if (isRdfSyntax) {
        RDFChangesCollector collector = new RDFChangesCollector();
        // RDF2Patch drives start/finish on the collector during parsing.
        StreamRDF sink = new RDF2Patch(collector);
        RDFDataMgr.parse(sink, fn);
        return collector.getRDFPatch();
    }
    // Not RDF — assume a text patch and read it from the file.
    Path path = Paths.get(fn);
    try (InputStream in = Files.newInputStream(path)) {
        return RDFPatchOps.read(in);
    } catch (IOException ex) {
        throw IOX.exception(ex);
    }
}
示例13: exec
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Emits a patch stream to stdout: reads each positional input file, or
 * stdin when no positional arguments were supplied.
 */
@Override
protected void exec() {
    StreamRDF dest = new StreamPatch(System.out);
    dest.start();
    if (getPositional().isEmpty()) {
        // No files named on the command line: consume standard input.
        execOne(System.in);
    }
    for (String fn : getPositional()) {
        RDFDataMgr.parse(dest, fn);
    }
    dest.finish();
}
示例14: exec
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * {@inheritDoc}
 *
 * Delegates to {@code exec(Dataset, QuerySolution, StreamRDF)} with an empty
 * dataset and empty initial bindings, capturing the emitted triples in a
 * fresh model, which is returned.
 */
@Override
public final Model exec() {
    final Model resultModel = ModelFactory.createDefaultModel();
    // Generated triples are written straight into the result model.
    final StreamRDF sink = new StreamRDFModel(resultModel);
    final Dataset emptyDataset = DatasetFactory.create();
    final QuerySolution noBindings = new QuerySolutionMap();
    exec(emptyDataset, noBindings, sink);
    return resultModel;
}
示例15: load
import org.apache.jena.riot.system.StreamRDF; //导入依赖的package包/类
/**
 * Prepares streaming consumption of {@code source}: wires a
 * PipedRDFIterator/PipedTriplesStream pair, starts the parser on a dedicated
 * executor thread, and returns the start-of-RDF event immediately. Parsed
 * triples are consumed afterwards through {@code this.iter}.
 */
private AQuiXEvent load(final RDFStreamSource source) {
// Create a PipedRDFStream to accept input and a PipedRDFIterator to consume
// it. A buffer size may optionally be supplied to the PipedRDFIterator; see
// its documentation for recommended sizes.
this.iter = new PipedRDFIterator<Triple>();
final StreamRDF tripleStream = new PipedTriplesStream(this.iter);
final TypedInputStream tis = source.asTypedInputStream();
// PipedRDFStream and PipedRDFIterator must run on different threads.
this.executor = Executors.newSingleThreadExecutor();
// The parser runnable feeds the piped stream on the executor thread.
// NOTE(review): the input is always parsed as N3 regardless of the source's
// actual content type — confirm that is intended.
final Runnable parser = () -> {
RDFDataMgr.parse(tripleStream, tis, Lang.N3);
};
// Start the parser on another thread.
this.executor.execute(parser);
return AQuiXEvent.getStartRDF();
}