本文整理汇总了Java中org.apache.clerezza.rdf.core.serializedform.Serializer.getInstance方法的典型用法代码示例。如果您正苦于以下问题:Java Serializer.getInstance方法的具体用法?Java Serializer.getInstance怎么用?Java Serializer.getInstance使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.clerezza.rdf.core.serializedform.Serializer
的用法示例。
在下文中一共展示了Serializer.getInstance方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: start
import org.apache.clerezza.rdf.core.serializedform.Serializer; //导入方法依赖的package包/类
/**
 * Starts the transformer server.
 *
 * @param arguments contains the port on which the server will listen
 * @throws Exception
 */
private static void start(Arguments arguments) throws Exception {
    final TransformerServer transformerServer =
            new TransformerServer(arguments.getPort(), arguments.enableCors());
    // Eagerly create the Serializer and Parser singletons so they are
    // initialized before the first request arrives.
    Serializer.getInstance();
    Parser.getInstance();
    transformerServer.start(new TransformerFactory() {
        @Override
        public Transformer getTransformer(HttpServletRequest request) {
            final String method = request.getMethod();
            if ("GET".equals(method)) {
                return new DictionaryMatcherTransformer();
            }
            if ("POST".equals(method)) {
                // POST carries matcher configuration in the query string.
                return new DictionaryMatcherTransformer(request.getQueryString());
            }
            throw new TransformerException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                    "ERROR: Method \"" + method + "\" is not allowed!");
        }
    });
    // Block until the server shuts down.
    transformerServer.join();
}
示例2: findSameEntities
import org.apache.clerezza.rdf.core.serializedform.Serializer; //导入方法依赖的package包/类
/**
 * Runs a Silk interlinking task against the client RDF data and returns the
 * input graph enriched with the discovered equivalences.
 *
 * <p>The client RDF data is always used as the source data source, of type file, for the
 * comparisons with a target data source. The target data source can be of type file or a
 * SPARQL endpoint. If the target data source in the Silk config file is set to be of type
 * file then the same client data will be used and the task is a deduplication task (Silk
 * works only with local files). The updated configuration file, the input RDF data and the
 * output files are stored as temporary files and removed when the task finishes.
 *
 * @param inputRdf  the client RDF data to interlink
 * @param rdfFormat the MIME type / format of {@code inputRdf}
 * @param configIn  an optional Silk configuration; when {@code null} the bundled
 *                  default {@code silk-config-file.xml} is used
 * @return the input graph plus the equivalence triples found by Silk
 * @throws IOException if any temporary file cannot be created or written
 */
protected TripleCollection findSameEntities(InputStream inputRdf, String rdfFormat, InputStream configIn) throws IOException {
    // Silk config file: prefer the client-supplied one, fall back to the bundled default.
    File configFile;
    if (configIn != null) {
        configFile = FileUtil.inputStreamToFile(configIn, "silk-config-", ".xml");
    } else {
        configFile = FileUtil.inputStreamToFile(getClass().getResourceAsStream("silk-config-file.xml"), "silk-config-", ".xml");
    }
    // file with original data serialized in N-TRIPLE format
    File ntFile = File.createTempFile("input-rdf", ".nt");
    // file containing the equivalences
    File outFile = File.createTempFile("output-", ".nt");
    try {
        // update the config file with the paths of the source datasource and output files and the format
        // if the type of target datasource is "file" update the path (deduplication)
        SilkConfigFileParser silkParser = new SilkConfigFileParser(configFile.getAbsolutePath());
        silkParser.updateOutputFile(outFile.getAbsolutePath());
        silkParser.updateSourceDataSourceFile(ntFile.getAbsolutePath(), "N-TRIPLE");
        if (silkParser.getTargetDataSourcetype().equals("file")) {
            silkParser.updateTargetDataSourceFile(ntFile.getAbsolutePath(), "N-TRIPLE"); //deduplication
        }
        silkParser.saveChanges();
        // change the format into N-TRIPLE
        Parser parser = Parser.getInstance();
        TripleCollection origGraph = parser.parse(inputRdf, rdfFormat);
        Serializer serializer = Serializer.getInstance();
        // FIX: close the output stream (the original leaked it) so the serialized
        // data is flushed to disk before Silk reads the file.
        try (FileOutputStream ntOut = new FileOutputStream(ntFile)) {
            serializer.serialize(ntOut, origGraph, SupportedFormat.N_TRIPLE);
        }
        // interlink entities
        Silk.executeFile(configFile, null, 1, true);
        log.info("Interlinking task completed.");
        TripleCollection equivalences = parseResult(outFile);
        // add the equivalence set to the input rdf data to be sent back to the client
        TripleCollection resultGraph = new SimpleMGraph();
        resultGraph.addAll(origGraph);
        resultGraph.addAll(equivalences);
        // returns the result to the client
        return resultGraph;
    } finally {
        // FIX: remove all temporary files even when parsing or interlinking fails
        // (the original only deleted them on the success path).
        configFile.delete();
        ntFile.delete();
        outFile.delete();
    }
}