This article collects typical usage examples of the Java class org.openrdf.rio.RDFParseException. If you are wondering what RDFParseException is for or how to use it, the code samples selected below may help.
The RDFParseException class belongs to the org.openrdf.rio package. Fifteen code examples of the class are shown below, sorted by popularity by default.
Example 1: addToGraphstore
import org.openrdf.rio.RDFParseException; // import the required package/class
/**
* Helper method for handleAdd.
*/
private void addToGraphstore(
RepositoryConnection conn,
InputStream in,
String base,
RDFFormat format,
Resource dctx,
boolean chunked) throws IOException, RDFParseException,
RDFHandlerException, RepositoryException {
if (chunked) {
RDFParser parser = getRDFParser(format);
parser.setRDFHandler(
new ChunkedCommitHandler(conn, chunksize, dctx));
parser.parse(in, base);
} else {
if (dctx != null) {
conn.add(in, base, format, dctx);
} else {
conn.add(in, base, format);
}
}
}
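The getRDFParser helper called above is not shown in this snippet; a minimal sketch, assuming it simply delegates to Rio's parser registry, might look like this:

import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;

// Hypothetical sketch of the getRDFParser helper used above: look up a parser
// for the requested format in Rio's parser registry.
private RDFParser getRDFParser(RDFFormat format) {
    // Rio.createParser throws the unchecked UnsupportedRDFormatException
    // if no parser is registered for the given format.
    return Rio.createParser(format);
}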
Example 2: before
import org.openrdf.rio.RDFParseException; // import the required package/class
@Before
public void before()
throws RepositoryException, IOException, RDFParseException,
MalformedQueryException, QueryResultParseException,
QueryResultHandlerException {
repo = new SailRepository(new MemoryStore());
repo.initialize();
conn = repo.getConnection();
vf = conn.getValueFactory();
conn.add(getResource(data), "file://", RDFFormat.TURTLE);
SPARQLResultsXMLParserFactory factory =
new SPARQLResultsXMLParserFactory();
parser = factory.getParser();
parser.setValueFactory(vf);
List<Rule> rules;
rules = Rules.fromOntology(getResource(data));
QueryRewriter rewriter = new QueryRewriter(conn, rules);
query = (TupleQuery) rewriter.rewrite(QueryLanguage.SPARQL, queryString);
nonInfQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
System.out.println("== QUERY (" + this.name + ") ==");
System.out.println(nonInfQuery);
System.out.println("== REWRITTEN QUERY (" + this.name + ") ==");
System.out.println(query);
}
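The getResource helper used above is not part of the snippet; a minimal sketch, assuming data is a classpath resource name and that an InputStream is what both conn.add and Rules.fromOntology expect, could be:

import java.io.InputStream;

// Hypothetical sketch of the getResource helper: the resource name and the
// classpath layout are assumptions made for illustration.
private InputStream getResource(String name) {
    InputStream in = getClass().getResourceAsStream("/" + name);
    if (in == null) {
        throw new IllegalArgumentException("test resource not found: " + name);
    }
    return in;
}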
Example 3: setUp
import org.openrdf.rio.RDFParseException; // import the required package/class
@BeforeClass
public static void setUp() throws IOException, RepositoryException, RDFParseException {
int port;
try (final ServerSocket serverSocket = new ServerSocket(0)) {
port = serverSocket.getLocalPort();
}
server = CommunityServerBuilder.server()
.onPort(port)
.withThirdPartyJaxRsPackage("de.unikiel.inf.comsys.neo4j", "/rdf")
.build();
server.start();
GraphDatabaseService db = server.getDatabase().getGraph();
Repository rep = RepositoryRegistry.getInstance(db).getRepository();
conn = rep.getConnection();
InputStream testdata = RDFServerExtensionTest.class.getResourceAsStream("/sp2b.n3");
conn.add(testdata, "http://example.com/", RDFFormat.N3);
}
Example 4: load
import org.openrdf.rio.RDFParseException; // import the required package/class
/**
* Load the T-Box and A-Box Turtle data from strings defined within this class.
*
* @throws RepositoryException
* @throws RDFParseException
* @throws IOException
*/
private void load() throws RepositoryException, RDFParseException, IOException {
final RepositoryConnection conn = repository.getConnection();
// T-Box
String ttlString = MODEL_TTL;
InputStream stringInput = new ByteArrayInputStream(ttlString.getBytes());
conn.add(stringInput, "http://dragon-research.com/cham/model/model1", RDFFormat.TURTLE, new Resource[]{});
// A-Box
ttlString = BUCKET_TTL;
stringInput = new ByteArrayInputStream(ttlString.getBytes());
conn.add(stringInput, "http://dragon-research.com/cham/bucket/bucket1", RDFFormat.TURTLE, new Resource[]{});
conn.commit();
conn.close();
}
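The MODEL_TTL and BUCKET_TTL constants referenced above are not shown; tiny made-up stand-ins, purely for illustration of the expected shape (a small T-Box and A-Box in Turtle), might look like:

// Made-up stand-ins for the Turtle constants used above, for illustration only.
private static final String MODEL_TTL =
      "@prefix ex: <http://dragon-research.com/cham/model/> .\n"
    + "ex:Bucket a <http://www.w3.org/2000/01/rdf-schema#Class> .\n";

private static final String BUCKET_TTL =
      "@prefix ex: <http://dragon-research.com/cham/model/> .\n"
    + "<http://dragon-research.com/cham/bucket/bucket1#b1> a ex:Bucket .\n";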
Example 5: performAdd
import org.openrdf.rio.RDFParseException; // import the required package/class
/**
* Merges the triples from the given file into the store.
*
* @param file
* @param baseURI
* @param dataFormat
* @param tx
* @param contexts
* @throws RDFParseException
*/
// performAdd
// Because mergeGraphs is used, the base URI is always file.toURI().
public void performAdd(File file, String baseURI, RDFFormat dataFormat, Transaction tx, Resource... contexts) throws RDFParseException {
try {
graphManager.setDefaultMimetype(dataFormat.getDefaultMIMEType());
if (dataFormat.equals(RDFFormat.NQUADS) || dataFormat.equals(RDFFormat.TRIG)) {
graphManager.mergeGraphs(new FileHandle(file),tx);
} else {
if (notNull(contexts) && contexts.length>0) {
for (int i = 0; i < contexts.length; i++) {
if(notNull(contexts[i])){
graphManager.mergeAs(contexts[i].toString(), new FileHandle(file), getGraphPerms(),tx);
}else{
graphManager.mergeAs(DEFAULT_GRAPH_URI, new FileHandle(file),getGraphPerms(), tx);
}
}
} else {
graphManager.mergeAs(DEFAULT_GRAPH_URI, new FileHandle(file), getGraphPerms(),tx);
}
}
} catch (FailedRequestException e) {
logger.error(e.getLocalizedMessage());
throw new RDFParseException("Request to MarkLogic server failed, check file and format.");
}
}
Example 6: testAddMalformedLiteralsStrictConfig
import org.openrdf.rio.RDFParseException; // import the required package/class
@Test
public void testAddMalformedLiteralsStrictConfig()
throws Exception
{
Assert.assertEquals(0L, testAdminCon.size());
Set<RioSetting<?>> empty = Collections.emptySet();
testAdminCon.getParserConfig().setNonFatalErrors(empty);
try {
testAdminCon.add(
MarkLogicRepositoryConnectionTest.class.getResourceAsStream(TEST_DIR_PREFIX + "malformed-literals.ttl"),
"", RDFFormat.TURTLE);
fail("upload of malformed literals should fail with error in strict configuration");
}
catch (Exception e) {
Assert.assertTrue(e instanceof RDFParseException);
}
}
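For comparison, a lenient configuration would register the literal checks as non-fatal so that the same upload is accepted; a minimal sketch using the standard Sesame parser settings (which settings to relax is an assumption):

import org.openrdf.rio.ParserConfig;
import org.openrdf.rio.helpers.BasicParserSettings;

// Sketch only: treat datatype verification problems as non-fatal so the add()
// call above no longer aborts with an RDFParseException.
ParserConfig config = testAdminCon.getParserConfig();
config.addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES);
config.addNonFatalError(BasicParserSettings.FAIL_ON_UNKNOWN_DATATYPES);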
Example 7: importModelToDatabase
import org.openrdf.rio.RDFParseException; // import the required package/class
/**
* Imports ontology RDF directly to database. No OWL checks are performed.
* @param file
* @throws OWLOntologyCreationException
* @throws IOException
* @throws RepositoryException
*/
public void importModelToDatabase(File file) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException {
synchronized(repo) {
final BigdataSailRepositoryConnection connection = repo.getUnisolatedConnection();
try {
connection.begin();
try {
java.util.Optional<URI> ontIRIOpt = scanForOntologyIRI(file).map(id -> new URIImpl(id));
if (ontIRIOpt.isPresent()) {
URI graph = ontIRIOpt.get();
connection.clear(graph);
//FIXME Turtle format is hard-coded here
connection.add(file, "", RDFFormat.TURTLE, graph);
connection.commit();
} else {
throw new OWLOntologyCreationException("Detected anonymous ontology; must have IRI");
}
} catch (Exception e) {
connection.rollback();
throw e;
}
} finally {
connection.close();
}
}
}
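The scanForOntologyIRI helper is only referenced above; a sketch of what it might do, assuming Turtle input (matching the hard-coded format above) and that the ontology IRI is the subject of an rdf:type owl:Ontology statement:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Optional;

import org.openrdf.model.Model;
import org.openrdf.model.Statement;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.StatementCollector;

// Hypothetical sketch of scanForOntologyIRI: parse the file and return the
// subject of the first rdf:type owl:Ontology statement, if any.
private Optional<String> scanForOntologyIRI(File file)
        throws IOException, RDFParseException, RDFHandlerException {
    Model model = new LinkedHashModel();
    RDFParser parser = Rio.createParser(RDFFormat.TURTLE); // format is assumed
    parser.setRDFHandler(new StatementCollector(model));
    try (FileInputStream in = new FileInputStream(file)) {
        parser.parse(in, "");
    }
    for (Statement st : model.filter(null, RDF.TYPE, OWL.ONTOLOGY)) {
        return Optional.of(st.getSubject().stringValue());
    }
    return Optional.empty();
}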
Example 8: toStatements
import org.openrdf.rio.RDFParseException; // import the required package/class
public static Iterator<Statement> toStatements(Iterator<Row> iterator)
throws RDFParseException, RDFHandlerException, IOException {
if (!iterator.hasNext()) {
return Collections.emptyIterator();
}
Set<Statement> ret = new HashSet<Statement>();
RDFParser rdfParser = Rio.createParser(RDFFormat.BINARY);
StatementCollector collector = new StatementCollector(ret);
rdfParser.setRDFHandler(collector);
while (iterator.hasNext()) {
toStatements(
rdfParser,
Bytes.getArray(iterator.next().getBytes(
DATA_TABLE_ATTRIBUTE_3)));
}
return Collections.unmodifiableSet(ret).iterator();
}
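The two-argument toStatements overload called in the loop is not shown; a minimal sketch, assuming it simply feeds one binary-encoded value to the shared parser, might be:

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;

// Hypothetical sketch: parse one binary-encoded chunk with the shared parser;
// the statements end up in the StatementCollector configured by the caller.
private static void toStatements(RDFParser rdfParser, byte[] bytes)
        throws RDFParseException, RDFHandlerException, IOException {
    rdfParser.parse(new ByteArrayInputStream(bytes), "");
}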
Example 9: testGetSensors
import org.openrdf.rio.RDFParseException; // import the required package/class
@Test
@FileParameters("src/test/resources/SesameKnowledgeStoreTest-testGetSensors.csv")
public void testGetSensors(
String kb,
@ConvertParam(value = ParamsConverterTest.StringToStatementsConverter.class) Set<Statement> statements,
String assertType) throws RepositoryException, RDFParseException,
IOException {
SesameKnowledgeStore ks = new SesameKnowledgeStore(new SailRepository(new MemoryStore()));
ks.load(new File(kb));
RDFEntityRepresenter er = new RDFEntityRepresenter();
Set<Sensor> e = er.createSensors(statements);
Set<Sensor> a = ks.getSensors();
if (assertType.equals("assertEquals")) {
assertTrue(CollectionUtils.isEqualCollection(e, a));
return;
}
assertFalse(CollectionUtils.isEqualCollection(e, a));
ks.close();
}
Example 10: testAddSensor
import org.openrdf.rio.RDFParseException; // import the required package/class
@Test
@FileParameters("src/test/resources/SesameKnowledgeStoreTest-testAddSensor.csv")
public void testAddSensor(
@ConvertParam(value = ParamsConverterTest.StringToURIConverter.class) URI sensorId,
@ConvertParam(value = ParamsConverterTest.StringToURIConverter.class) URI propertyId,
@ConvertParam(value = ParamsConverterTest.StringToURIConverter.class) URI featureId,
@ConvertParam(value = ParamsConverterTest.StringToStatementsConverter.class) Set<Statement> statements,
String assertType) throws RepositoryException, RDFParseException,
IOException {
SesameKnowledgeStore ks = new SesameKnowledgeStore(new SailRepository(new MemoryStore()));
ks.addSensor(new Sensor(sensorId, new Property(propertyId, new FeatureOfInterest(featureId))));
RDFEntityRepresenter er = new RDFEntityRepresenter();
Set<Sensor> e = er.createSensors(statements);
Set<Sensor> a = ks.getSensors();
if (assertType.equals("assertEquals")) {
assertTrue(CollectionUtils.isEqualCollection(e, a));
return;
}
assertFalse(CollectionUtils.isEqualCollection(e, a));
ks.close();
}
Example 11: removeIndirectTriples
import org.openrdf.rio.RDFParseException; // import the required package/class
/** Takes as input a set of triples and some important URIs and keeps only those
 * triples that have one of the given URIs as their subject or object.
 * If we imagine the given set of triples as a graph, this method practically
 * returns a subgraph containing only the direct neighbours of the given URIs.
 *
 * @param nTriples a set of triples in NTriples format
 * @param urisToKeep the URIs used to determine which triples to keep (those appearing in the subject or object field)
 * @return a subgraph in the form of triples in NTriples format, containing only the direct neighbours of the given URIs. */
public static String removeIndirectTriples(String nTriples, List<String> urisToKeep){
String triplesContext="http://triplesContext";
String subTriplesContext="http://subgraphTriplesContext";
Repository repository=new SailRepository(new ForwardChainingRDFSInferencer(new MemoryStore()));
try{
repository.initialize();
RepositoryConnection repoConn=repository.getConnection();
repoConn.add(new StringReader(nTriples), triplesContext, RDFFormat.NTRIPLES, repository.getValueFactory().createURI(triplesContext));
RepositoryResult<Statement> results=repoConn.getStatements(null, null, null, false, repository.getValueFactory().createURI(triplesContext));
while(results.hasNext()){
Statement result=results.next();
if(urisToKeep.contains(result.getSubject().stringValue()) || urisToKeep.contains(result.getObject().stringValue())){
repoConn.add(result, repository.getValueFactory().createURI(subTriplesContext));
}
}
ByteArrayOutputStream out=new ByteArrayOutputStream();
RDFWriter writer=Rio.createWriter(RDFFormat.NTRIPLES, out);
repoConn.export(writer, repository.getValueFactory().createURI(subTriplesContext));
repoConn.close();
return new String(out.toByteArray(),"UTF-8");
}catch(RepositoryException | IOException | RDFParseException | RDFHandlerException ex) {
logger.error("Cannot parse ntriples file - Return the original NTriples file",ex);
return nTriples;
}
}
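A short usage sketch (the triples and the URI to keep are made up for illustration):

import java.util.Arrays;

// Illustrative input: two triples, only the first one touches the URI we keep.
String nTriples =
      "<http://example.org/a> <http://example.org/p> <http://example.org/b> .\n"
    + "<http://example.org/c> <http://example.org/p> <http://example.org/d> .\n";

// Keep only triples whose subject or object is http://example.org/a;
// the second triple is expected to be filtered out.
String subgraph = removeIndirectTriples(nTriples, Arrays.asList("http://example.org/a"));
System.out.println(subgraph);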
Example 12: storeURL
import org.openrdf.rio.RDFParseException; // import the required package/class
private void storeURL(URL url, String baseURI, URI context, RDFFormat format)
throws RDFParseException, RepositoryException, IOException, RDFHandlerException {
logger.info("[Strabon.storeURL] Storing file.");
logger.info("[Strabon.storeURL] URL : {}", url.toString());
logger.info("[Strabon.storeURL] Context : {}", ((context == null) ? "default" : context));
logger.info("[Strabon.storeURL] Base URI : {}", ((baseURI == null) ? "null" : baseURI));
logger.info("[Strabon.storeURL] Format : {}", ((format == null) ? "null" : format));
if (context == null) {
con.add(url, baseURI, format);
} else {
con.add(url, baseURI, format, context);
}
logger.info("[Strabon.storeURL] Storing was successful.");
}
Example 13: storeString
import org.openrdf.rio.RDFParseException; // import the required package/class
private void storeString(String text, String baseURI, URI context, RDFFormat format)
throws RDFParseException, RepositoryException, IOException, RDFHandlerException {
if (baseURI == null) {
baseURI = "";
}
logger.info("[Strabon.storeString] Storing triples.");
logger.info("[Strabon.storeString] Text : " + text);
logger.info("[Strabon.storeString] Base URI : " + baseURI);
logger.info("[Strabon.storeString] Context : " + ((context == null) ? "null" : context));
logger.info("[Strabon.storeString] Format : "
+ ((format == null) ? "null" : format.toString()));
StringReader reader = new StringReader(text);
if (context == null) {
con.add(reader, baseURI, format);
} else {
con.add(reader, baseURI, format, context);
}
reader.close();
logger.info("[Strabon.storeString] Storing was successful.");
}
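A minimal call of this method from within the same class might look like the following (the triple and base URI are made up for illustration; the context is left as the default graph):

import org.openrdf.rio.RDFFormat;

// Illustrative call: store one N-Triples statement into the default context.
storeString("<http://example.org/s> <http://example.org/p> \"o\" .",
        "http://example.org/", null, RDFFormat.NTRIPLES);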
Example 14: store
import org.openrdf.rio.RDFParseException; // import the required package/class
/**
* Stores the given data, in the given format, into the Strabon repository. If url is true, the
* input comes from a URL.
*
* Returns true on success, false otherwise.
*
* @param src
* @param context
* @param format
* @param inference
* @param url
* @return
* @throws MalformedQueryException
* @throws RepositoryException
* @throws InvalidDatasetFormatFault
* @throws RDFHandlerException
* @throws RDFParseException
* @throws QueryEvaluationException
* @throws TupleQueryResultHandlerException
* @throws IOException
* @throws ClassNotFoundException
*/
public boolean store(String src, String context, String format, Boolean inference, Boolean url)
throws RepositoryException, RDFParseException, RDFHandlerException, IOException,
InvalidDatasetFormatFault {
logger.info("[StrabonEndpoint] Received STORE request.");
if ((this.strabon == null) && (!init())) {
throw new RepositoryException("Could not connect to Strabon.");
}
if (url) {
URL source = new URL(src);
if (source.getProtocol().equalsIgnoreCase(FILE_PROTOCOL) && !loadFromFile) {
// it would be a security issue if we read from the server's filesystem
throw new IllegalArgumentException("The protocol of the URL should be one of http or ftp.");
}
}
strabon.storeInRepo(src, null, context, format, inference);
logger.info("[StrabonEndpoint] STORE was successful.");
return true;
}
Example 15: parse
import org.openrdf.rio.RDFParseException; // import the required package/class
/**
* Parses the data from the supplied InputStream, using the supplied baseURI
* to resolve any relative URI references.
*
* @param in The InputStream from which to read the data.
* @param baseURI The URI associated with the data in the InputStream.
* @throws java.io.IOException If an I/O error occurred while data was read from the InputStream.
* @throws org.openrdf.rio.RDFParseException
* If the parser has found an unrecoverable parse error.
* @throws org.openrdf.rio.RDFHandlerException
* If the configured statement handler has encountered an
* unrecoverable error.
*/
@Override
public void parse(InputStream in, String baseURI) throws IOException, RDFParseException, RDFHandlerException {
Preconditions.checkNotNull(baseURI);
setBaseURI(baseURI);
WireFeedInput input = new WireFeedInput();
try {
WireFeed feed = input.build(new InputSource(in));
if(feed instanceof Feed) {
parseFeed((Feed) feed);
} else {
throw new RDFParseException("data stream is not an RSS feed");
}
} catch (FeedException e) {
throw new RDFParseException(e);
}
}
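A usage sketch, assuming the class above is an RDFParser implementation named FeedParser (the class name and the feed URL are hypothetical):

import java.io.InputStream;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;

import org.openrdf.model.Statement;
import org.openrdf.rio.helpers.StatementCollector;

// Hypothetical usage: collect the statements produced from an RSS/Atom feed.
List<Statement> statements = new LinkedList<Statement>();
FeedParser parser = new FeedParser(); // assumed class name of the parser shown above
parser.setRDFHandler(new StatementCollector(statements));
try (InputStream in = new URL("http://example.com/feed.rss").openStream()) {
    parser.parse(in, "http://example.com/feed.rss");
}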