This article collects typical usage examples of the Java class org.openrdf.rio.helpers.RDFHandlerBase. If you are wondering what RDFHandlerBase is, what it is for, or how to use it, the curated code samples below should help.
RDFHandlerBase belongs to the org.openrdf.rio.helpers package. The following 15 code examples show the class in use; by default they are ordered by popularity.
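Before the collected examples, here is a minimal, self-contained sketch of the common pattern (not taken from any of the projects below; the file name and base URI are placeholders): subclass RDFHandlerBase anonymously, override only the callback you need, and hand the handler to a Rio parser.

import java.io.FileInputStream;
import java.io.InputStream;
import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.RDFHandlerBase;

public class CountStatements {
    public static void main(String[] args) throws Exception {
        final int[] count = {0};
        RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
        // RDFHandlerBase provides empty implementations of all RDFHandler
        // callbacks, so only the method of interest needs to be overridden.
        parser.setRDFHandler(new RDFHandlerBase() {
            @Override
            public void handleStatement(Statement st) throws RDFHandlerException {
                count[0]++;
            }
        });
        try (InputStream in = new FileInputStream("data.ttl")) { // placeholder file name
            parser.parse(in, "http://example.org/base/");
        }
        System.out.println("Parsed " + count[0] + " statements.");
    }
}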
Example 1: refreshSomeValuesFromRestrictions
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void refreshSomeValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException {
    someValuesFromByRestrictionType.clear();
    ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource restrictionClass = statement.getSubject();
            if (restrictions.containsKey(restrictionClass) && statement.getObject() instanceof Resource) {
                final URI property = restrictions.get(restrictionClass);
                final Resource valueClass = (Resource) statement.getObject();
                // Should also be triggered by subclasses of the value class
                final Set<Resource> valueClasses = new HashSet<>();
                valueClasses.add(valueClass);
                if (valueClass instanceof URI) {
                    valueClasses.addAll(getSubClasses((URI) valueClass));
                }
                for (final Resource valueSubClass : valueClasses) {
                    if (!someValuesFromByRestrictionType.containsKey(restrictionClass)) {
                        someValuesFromByRestrictionType.put(restrictionClass, new ConcurrentHashMap<>());
                    }
                    someValuesFromByRestrictionType.get(restrictionClass).put(valueSubClass, property);
                }
            }
        }
    });
}
Example 2: refreshAllValuesFromRestrictions
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void refreshAllValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException {
    allValuesFromByValueType.clear();
    ryaDaoQueryWrapper.queryAll(null, OWL.ALLVALUESFROM, null, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource directRestrictionClass = statement.getSubject();
            if (restrictions.containsKey(directRestrictionClass) && statement.getObject() instanceof Resource) {
                final URI property = restrictions.get(directRestrictionClass);
                final Resource valueClass = (Resource) statement.getObject();
                // Should also be triggered by subclasses of the property restriction
                final Set<Resource> restrictionClasses = new HashSet<>();
                restrictionClasses.add(directRestrictionClass);
                if (directRestrictionClass instanceof URI) {
                    restrictionClasses.addAll(getSubClasses((URI) directRestrictionClass));
                }
                for (final Resource restrictionClass : restrictionClasses) {
                    if (!allValuesFromByValueType.containsKey(valueClass)) {
                        allValuesFromByValueType.put(valueClass, new ConcurrentHashMap<>());
                    }
                    allValuesFromByValueType.get(valueClass).put(restrictionClass, property);
                }
            }
        }
    });
}
Example 3: exportEmptyStore
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
@Test
public final void exportEmptyStore()
    throws OpenRDFException
{
    URI dirgraph = conn.getValueFactory().createURI("http://marklogic.com/dirgraph");
    Assert.assertEquals(0L, conn.size());
    conn.exportStatements(null, null, null, false, new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st1)
                throws RDFHandlerException {
            Assert.assertNull(st1);
        }
    }, dirgraph);
}
Example 4: testAddDelete
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
@Test
public void testAddDelete()
    throws OpenRDFException
{
    final Statement st1 = vf.createStatement(john, fname, johnfname);
    testWriterCon.begin();
    testWriterCon.add(st1);
    testWriterCon.prepareUpdate(QueryLanguage.SPARQL,
            "DELETE DATA {<" + john.stringValue() + "> <" + fname.stringValue() + "> \"" + johnfname.stringValue() + "\"}").execute();
    testWriterCon.commit();
    testWriterCon.exportStatements(null, null, null, false, new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st)
            throws RDFHandlerException
        {
            assertThat(st, is(not(equalTo(st1))));
        }
    });
}
Example 5: processNsMap
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void processNsMap() throws Exception {
    InputStream in = new FileInputStream("downloads/nsmap.ttl");
    RDFParser p = Rio.createParser(RDFFormat.TURTLE);
    p.setRDFHandler(new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            processNsMapStatement(st);
        }
    });
    try {
        p.parse(in, baseUri);
    } finally {
        in.close();
    }
}
Example 6: readStatements
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void readStatements(InputStream in, RDFFormat format, String baseUri)
        throws MalformedNanopubException, OpenRDFException, IOException {
    try {
        RDFParser p = NanopubUtils.getParser(format);
        p.setRDFHandler(new RDFHandlerBase() {
            @Override
            public void handleNamespace(String prefix, String uri) throws RDFHandlerException {
                nsPrefixes.add(prefix);
                ns.put(prefix, uri);
            }
            @Override
            public void handleStatement(Statement st) throws RDFHandlerException {
                statements.add(st);
            }
        });
        p.parse(new InputStreamReader(in, Charset.forName("UTF-8")), baseUri);
    } finally {
        in.close();
    }
}
Example 7: fetchInstances
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
/**
 * Query for and collect all instances of a given type. Should only be called for types expected
 * to have few members, such as ontology vocabulary terms, as instances will be collected in
 * memory.
 */
private Set<URI> fetchInstances(final URI type) throws QueryEvaluationException {
    final Set<URI> instances = new HashSet<>();
    ryaDaoQueryWrapper.queryAll(null, RDF.TYPE, type, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement st) throws RDFHandlerException {
            if (st.getSubject() instanceof URI) {
                instances.add((URI) st.getSubject());
            }
        }
    });
    return instances;
}
Example 8: createModelHandler
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
/**
 * Create an RDFHandler that uses the model passed as parameter as its underlying triple store.
 * @param model the model to wrap in an RDFHandler
 * @param filters an optional list of filters; if any of the filters rejects the statement it is not added
 * @return an RDFHandler that adds every accepted statement to the model
 */
public static RDFHandler createModelHandler(final Model model, final Predicate<Statement>... filters) {
    return new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            for (Predicate<Statement> f : filters) {
                if (!f.test(st)) {
                    return;
                }
            }
            model.add(st);
        }
    };
}
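A quick usage sketch for the helper above (hypothetical caller: the class name, file name, base URI, and filter predicate are made up for illustration, and org.openrdf.model.impl.LinkedHashModel is assumed as the Model implementation): parse a Turtle stream into a Model while skipping statements whose subject is a blank node.

import java.io.FileInputStream;
import java.io.InputStream;
import org.openrdf.model.BNode;
import org.openrdf.model.Model;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;

public class ModelHandlerDemo {
    public static void main(String[] args) throws Exception {
        Model model = new LinkedHashModel();
        RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
        // Reject statements whose subject is a blank node; everything else goes into the model.
        // Assumes a static import of createModelHandler from the utility class shown above.
        parser.setRDFHandler(createModelHandler(model, st -> !(st.getSubject() instanceof BNode)));
        try (InputStream in = new FileInputStream("data.ttl")) { // placeholder file name
            parser.parse(in, "http://example.org/base/");
        }
        System.out.println("Collected " + model.size() + " statements.");
    }
}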
Example 9: refreshOneOf
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void refreshOneOf() throws QueryEvaluationException {
    final Map<Resource, Set<Resource>> enumTypes = new HashMap<>();

    // First query for all the owl:oneOf's.
    // If we have the following oneOf:
    //     :A owl:oneOf (:B, :C)
    // It will be represented by triples following a pattern similar to:
    //     <:A> owl:oneOf _:bnode1 .
    //     _:bnode1 rdf:first <:B> .
    //     _:bnode1 rdf:rest _:bnode2 .
    //     _:bnode2 rdf:first <:C> .
    //     _:bnode2 rdf:rest rdf:nil .
    ryaDaoQueryWrapper.queryAll(null, OWL.ONEOF, null, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource enumType = statement.getSubject();
            // listHead will point to a type class of the enumeration.
            final URI listHead = (URI) statement.getObject();
            if (!enumTypes.containsKey(enumType)) {
                enumTypes.put(enumType, new LinkedHashSet<Resource>());
            }

            // listHead should point to a list of items that forms the
            // enumeration.
            try {
                final Set<Resource> enumeration = new LinkedHashSet<>(getList(listHead));
                if (!enumeration.isEmpty()) {
                    // Add this enumeration for this type.
                    enumTypes.get(enumType).addAll(enumeration);
                }
            } catch (final QueryEvaluationException e) {
                throw new RDFHandlerException("Error getting enumeration list.", e);
            }
        }
    });

    synchronized (enumerations) {
        enumerations.clear();
        enumerations.putAll(enumTypes);
    }
}
Example 10: fromFile
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
@Override
public void fromFile(final Path statementsPath, final String visibilities) throws RyaStreamsException {
    requireNonNull(statementsPath);
    requireNonNull(visibilities);

    if (!statementsPath.toFile().exists()) {
        throw new RyaStreamsException("Could not load statements at path '" + statementsPath + "' because that " +
                "does not exist. Make sure you've entered the correct path.");
    }

    // Create an RDF Parser whose format is derived from the statementPath's file extension.
    final RDFFormat format = RDFFormat.forFileName(statementsPath.getFileName().toString());
    final RDFParser parser = Rio.createParser(format);

    // Set a handler that writes the statements to the specified kafka topic.
    parser.setRDFHandler(new RDFHandlerBase() {
        @Override
        public void startRDF() throws RDFHandlerException {
            log.trace("Starting loading statements.");
        }

        @Override
        public void handleStatement(final Statement stmnt) throws RDFHandlerException {
            final VisibilityStatement visiStatement = new VisibilityStatement(stmnt, visibilities);
            producer.send(new ProducerRecord<>(topic, visiStatement));
        }

        @Override
        public void endRDF() throws RDFHandlerException {
            producer.flush();
            log.trace("Done.");
        }
    });

    // Do the parse and load.
    try {
        parser.parse(Files.newInputStream(statementsPath), "");
    } catch (RDFParseException | RDFHandlerException | IOException e) {
        throw new RyaStreamsException("Could not load the RDF file's Statements into Rya Streams.", e);
    }
}
Example 11: testInsertRemove
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
@Test
public final void testInsertRemove()
    throws OpenRDFException
{
    Statement st = null;
    try {
        testAdminCon.begin();
        testAdminCon.prepareUpdate(
                "INSERT DATA {GRAPH <" + dirgraph.stringValue() + "> { <" + john.stringValue() + "> <" + homeTel.stringValue() + "> \"" + johnhomeTel.doubleValue() + "\"^^<http://www.w3.org/2001/XMLSchema#double>}}").execute();
        RepositoryResult<Statement> result = testAdminCon.getStatements(null, null, null, false);
        try {
            assertNotNull("Iterator should not be null", result);
            assertTrue("Iterator should not be empty", result.hasNext());
            Assert.assertEquals("There should be only one statement in repository", 1L, testAdminCon.size());
            while (result.hasNext()) {
                st = result.next();
                assertNotNull("Statement should be in a context", st.getContext());
                assertTrue("Statement predicate should be equal to homeTel", st.getPredicate().equals(homeTel));
            }
        } finally {
            result.close();
        }
        testAdminCon.remove(st, dirgraph);
        testAdminCon.commit();
    } catch (Exception e) {
    } finally {
        if (testAdminCon.isActive())
            testAdminCon.rollback();
    }
    Assert.assertEquals(0L, testAdminCon.size());
    testAdminCon.exportStatements(null, null, null, false, new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st1)
            throws RDFHandlerException
        {
            assertThat(st1, is(equalTo(null)));
        }
    }, dirgraph);
}
Example 12: testInsertDeleteInsertWhere
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
@Test
public void testInsertDeleteInsertWhere()
    throws Exception
{
    Assert.assertEquals(0L, testAdminCon.size());
    final Statement st1 = vf.createStatement(john, email, johnemail, dirgraph);
    final Statement st2 = vf.createStatement(john, lname, johnlname);
    testAdminCon.add(st1);
    testAdminCon.add(st2, dirgraph);
    Assert.assertEquals(2L, testAdminCon.size());
    try {
        testAdminCon.begin();
        testAdminCon.prepareUpdate(QueryLanguage.SPARQL,
                "INSERT DATA {GRAPH <" + dirgraph.stringValue() + "> { <" + john.stringValue() + "> <" + fname.stringValue() + "> \"" + johnfname.stringValue() + "\"} }").execute();
        Assert.assertEquals(3L, testAdminCon.size());
        testAdminCon.prepareUpdate(
                "DELETE DATA {GRAPH <" + dirgraph.stringValue() + "> { <" + john.stringValue() + "> <" + email.stringValue() + "> \"" + johnemail.stringValue() + "\"} }").execute();
        Assert.assertEquals(2L, testAdminCon.size());
        String query1 = "PREFIX ad: <http://marklogicsparql.com/addressbook#>"
                + " INSERT {GRAPH <"
                + dirgraph.stringValue()
                + "> { <#1111> ad:email \"[email protected]\"}}"
                + " where { GRAPH <" + dirgraph.stringValue() + ">{<#1111> ad:lastName ?name .} } ";
        testAdminCon.prepareUpdate(QueryLanguage.SPARQL, query1, "http://marklogicsparql.com/id").execute();
        Assert.assertEquals(3L, testAdminCon.size());
        testAdminCon.commit();
    } catch (Exception e) {
        logger.debug(e.getMessage());
    } finally {
        if (testAdminCon.isActive())
            testAdminCon.rollback();
    }
    final Statement expSt = vf.createStatement(john, email, vf.createLiteral("[email protected]"));
    Assert.assertEquals("Dirgraph's size must be 3", 3L, testAdminCon.size(dirgraph));
    testAdminCon.exportStatements(null, email, null, false, new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st)
            throws RDFHandlerException
        {
            assertThat(st, equalTo(expSt));
        }
    }, dirgraph);
}
Example 13: check
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
public boolean check() throws IOException, TrustyUriException {
    md = RdfHasher.getDigest();
    r = new TrustyUriResource(file);
    if (r.getArtifactCode() == null) {
        System.out.println("ERROR: Not a trusty URI or unknown module");
        System.exit(1);
    }
    String moduleId = r.getModuleId();
    if (!moduleId.equals(RdfModule.MODULE_ID)) {
        System.out.println("ERROR: Unsupported module: " + moduleId +
                " (this function only supports " + RdfModule.MODULE_ID + ")");
        System.exit(1);
    }
    RDFFormat format = r.getFormat(RDFFormat.TURTLE);
    RDFParser p = Rio.createParser(format);
    previous = null;
    p.getParserConfig().set(RDFaParserSettings.FAIL_ON_RDFA_UNDEFINED_PREFIXES, true);
    p.setRDFHandler(new RdfPreprocessor(new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            if (previous != null && StatementComparator.compareStatement(previous, st) > 0) {
                throw new RuntimeException("File not sorted");
            }
            if (!st.equals(previous)) {
                RdfHasher.digest(st, md);
            }
            previous = st;
        }
    }, r.getArtifactCode()));
    BufferedReader reader = new BufferedReader(r.getInputStreamReader(), 64 * 1024);
    try {
        p.parse(reader, "");
    } catch (OpenRDFException ex) {
        throw new TrustyUriException(ex);
    } finally {
        reader.close();
    }
    String artifactCode = RdfHasher.getArtifactCode(md);
    return artifactCode.equals(r.getArtifactCode());
}
Example 14: transformLiteral
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void transformLiteral(final RepositoryConnection con, final URI graph) throws QueryEvaluationException, RepositoryException, MalformedQueryException, RDFHandlerException {
    final Collection<Statement> toRemove = new ArrayList<Statement>();
    final Collection<Statement> toAdd = new ArrayList<Statement>();
    final ValueFactory factory = con.getValueFactory();

    GraphQuery q = con.prepareGraphQuery(QueryLanguage.SPARQL, LITERAL_QUERY.replaceAll("%%%regex%%%", config.getRegex()));
    DatasetImpl enforcedDataset = new DatasetImpl();
    enforcedDataset.addDefaultGraph(graph);
    q.setDataset(enforcedDataset);

    q.evaluate(new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            if (!(st.getObject() instanceof Literal)) {
                return;
            }
            Literal value = (Literal) st.getObject();
            String label = value.getLabel();
            if (label == null) {
                return;
            }
            String newLabel = label;
            if (config.isReplaceAll()) {
                newLabel = newLabel.replaceAll(config.getRegex(), config.getReplacement());
            } else {
                newLabel = newLabel.replaceFirst(config.getRegex(), config.getReplacement());
            }
            if (!newLabel.equals(label)) {
                toRemove.add(st);
                if (value.getLanguage() != null && !value.getLanguage().isEmpty()) {
                    toAdd.add(factory.createStatement(st.getSubject(), st.getPredicate(), factory.createLiteral(newLabel, value.getLanguage())));
                } else if (value.getDatatype() != null) {
                    toAdd.add(factory.createStatement(st.getSubject(), st.getPredicate(), factory.createLiteral(newLabel, value.getDatatype())));
                } else {
                    toAdd.add(factory.createStatement(st.getSubject(), st.getPredicate(), factory.createLiteral(newLabel)));
                }
            }
        }
    });

    try {
        con.remove(toRemove, graph);
        con.add(toAdd, graph);
        con.commit();
    } catch (RepositoryException ex) {
        throw new RDFHandlerException(ex);
    }
}
Example 15: transformLiteral
import org.openrdf.rio.helpers.RDFHandlerBase; // import the dependency package/class
private void transformLiteral(final RepositoryConnection con, final URI graph) throws QueryEvaluationException, RepositoryException, MalformedQueryException, RDFHandlerException {
    final Collection<Statement> toRemove = new ArrayList<Statement>();
    final Collection<Statement> toAdd = new ArrayList<Statement>();
    final ValueFactory factory = con.getValueFactory();

    GraphQuery q = con.prepareGraphQuery(QueryLanguage.SPARQL, LITERAL_QUERY);
    DatasetImpl enforcedDataset = new DatasetImpl();
    enforcedDataset.addDefaultGraph(graph);
    q.setDataset(enforcedDataset);

    q.evaluate(new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            if (!(st.getObject() instanceof Literal)) {
                return;
            }
            Literal value = (Literal) st.getObject();
            String label = value.getLabel();
            if (label == null) {
                return;
            }
            String newLabel = label;
            if (config.isStripHtmlTags()) {
                newLabel = HTML_TAG_PATTERN.matcher(newLabel).replaceAll("");
            }
            if (config.isConvertHtmlEntities()) {
                newLabel = StringEscapeUtils.unescapeHtml4(newLabel);
            }
            if (!newLabel.equals(label)) {
                toRemove.add(st);
                if (value.getLanguage() != null && !value.getLanguage().isEmpty()) {
                    toAdd.add(factory.createStatement(st.getSubject(), st.getPredicate(), factory.createLiteral(newLabel, value.getLanguage())));
                } else if (value.getDatatype() != null) {
                    toAdd.add(factory.createStatement(st.getSubject(), st.getPredicate(), factory.createLiteral(newLabel, value.getDatatype())));
                } else {
                    toAdd.add(factory.createStatement(st.getSubject(), st.getPredicate(), factory.createLiteral(newLabel)));
                }
            }
        }
    });

    con.remove(toRemove, graph);
    con.add(toAdd, graph);
    con.commit();
}