本文整理汇总了Java中org.apache.clerezza.rdf.core.MGraph.add方法的典型用法代码示例。如果您正苦于以下问题:Java MGraph.add方法的具体用法?Java MGraph.add怎么用?Java MGraph.add使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.clerezza.rdf.core.MGraph
的用法示例。
在下文中一共展示了MGraph.add方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: init
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
@BeforeClass
public static void init(){
    // Build a small FOAF test graph that covers the literal variants relevant
    // for RDF 1.1 handling: a language-tagged literal, a plain literal
    // without a language tag and an xsd:string typed literal.
    final LiteralFactory literalFactory = LiteralFactory.getInstance();
    final UriRef person1 = new UriRef("http://www.example.org/test#pers1");
    final UriRef person2 = new UriRef("http://www.example.org/test#pers2");
    final MGraph graph = new SimpleMGraph();
    graph.add(new TripleImpl(person1, RDF.type, FOAF.Person));
    // language-tagged literal
    graph.add(new TripleImpl(person1, FOAF.name,
            new PlainLiteralImpl("Rupert Westenthaler", new Language("de"))));
    // plain literal without language
    graph.add(new TripleImpl(person1, FOAF.nick, new PlainLiteralImpl("westei")));
    // typed literals (xsd:string and xsd:int)
    graph.add(new TripleImpl(person1, FOAF.mbox,
            literalFactory.createTypedLiteral("[email protected]")));
    graph.add(new TripleImpl(person1, FOAF.age, literalFactory.createTypedLiteral(38)));
    graph.add(new TripleImpl(person1, FOAF.knows, person2));
    graph.add(new TripleImpl(person2, FOAF.name, new PlainLiteralImpl("Reto Bachmann-Gmür")));
    // expose the data as an immutable Graph for the tests
    rdfData = graph.getGraph();
}
示例2: createBudgetLine
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Creates a budget line entity, adds its triples to {@code trpList} and
 * recursively creates one child budget line for every entry of
 * {@code childrens} (the children themselves get no further children).
 *
 * @param trpList   the graph that receives the generated triples
 * @param name      the preferred label (skos:prefLabel) of the budget line
 * @param childrens names of the child budget lines to create; may be empty
 * @return the id of the newly created budget line entity
 */
private UriRef createBudgetLine(MGraph trpList,String name, List<String> childrens){
    final UriRef budgetLine = Entities.generateNewEntity();
    // attach the name of the line
    // TODO: real language-aware literal creation
    trpList.add(new TripleImpl(budgetLine, SKOS.prefLabel, lf.createTypedLiteral(name)));
    // attach the rdf:type of the entity
    trpList.add(new TripleImpl(budgetLine, RDF.type, Onthology.budgetLineType.getUri()));
    // recursively create and link each child line
    for (String childName : childrens) {
        final UriRef childLine = createBudgetLine(trpList, childName, Collections.<String> emptyList());
        trpList.add(new TripleImpl(budgetLine, Onthology.hasBudgetLine.getUri(), childLine));
    }
    return budgetLine;
}
示例3: writeEntityInformation
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Writes the information of an extracted {@code Keyword} to the given
 * graph: its label, its type(s) (including a derived DBpedia type when
 * available), its abstract and the first depiction/thumbnail pair.
 *
 * @param writer  the graph the triples are written to
 * @param keyword the extracted keyword describing the entity
 * @param entity  the URI of the entity the triples are attached to
 * @param lang    the language used for label and comment literals
 */
private void writeEntityInformation(MGraph writer, Keyword keyword, UriRef entity, Language lang) {
    // rdfs:label taken from the surface form of the keyword
    writer.add(new TripleImpl(entity, RDFS_LABEL,
            new PlainLiteralImpl(keyword.getForm(), lang)));
    // one rdf:type per class; additionally map to a DBpedia type when possible
    for (Clazz clazz : keyword.getClasses()) {
        writer.add(new TripleImpl(entity, RDF_TYPE, new UriRef(clazz.getUrl().toString())));
        final UriRef dbpediaType = createDbpediaTypeUri(clazz);
        if (dbpediaType != null) {
            writer.add(new TripleImpl(entity, RDF_TYPE, dbpediaType));
        }
    }
    // rdfs:comment from the abstract, when one is available
    if (keyword.getAbstract() != null) {
        writer.add(new TripleImpl(entity, RDFS_COMMENT,
                new PlainLiteralImpl(keyword.getAbstract(), lang)));
    }
    // only the first image is written (iterating all of them was
    // deliberately disabled in the original code)
    if (keyword.getImages() != null && keyword.getImages().length > 0) {
        final Image firstImage = keyword.getImages()[0];
        writer.add(new TripleImpl(entity, FOAF_DEPICTION, new UriRef(firstImage.getImage().toString())));
        writer.add(new TripleImpl(entity, FOAF_THUMBNAIL, new UriRef(firstImage.getThumb().toString())));
    }
}
开发者ID:michelemostarda,项目名称:machinelinking-stanbol-enhancement-engine,代码行数:25,代码来源:MLAnnotateEnhancementEngine.java
示例4: generateDemoData
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Populates the graph with the budget template for a demo city plus one
 * fake budget plan that holds a random percentage value for every budget
 * line found in the graph.
 *
 * @param graph   the graph the demo triples are added to
 * @param rootUri the root entity (the city) the data is attached to
 */
private void generateDemoData(MGraph graph, UriRef rootUri) {
    generateTemplate(graph, rootUri);
    String cityName = "demoCity";
    // TODO: create a real literal with a language tag (cf. importer in xplor)
    graph.add(new TripleImpl(rootUri, SKOS.prefLabel, lf.createTypedLiteral(cityName)));
    // create a fake budget plan owned by a fake user
    String userName = "fakeUser";
    UriRef rootFakeBudget = Entities.generateNewEntity();
    graph.add(new TripleImpl(rootFakeBudget, RDF.type, Onthology.budgetPlanType.getUri()));
    // TODO: create a real language object
    graph.add(new TripleImpl(rootFakeBudget, Onthology.fromUser.getUri(), lf.createTypedLiteral(userName)));
    Random r = new Random();
    // get all "lines" of the budget and attach one random plan value to each
    // TODO: make the random percentages sum up to 100
    Iterator<Triple> lines = graph.filter(null, RDF.type, Onthology.budgetLineType.getUri());
    List<Triple> trps = new ArrayList<Triple>();
    while (lines.hasNext()) {
        Triple line = lines.next();
        UriRef valueId = Entities.generateNewEntity();
        trps.add(new TripleImpl(valueId, RDF.type, Onthology.budgetPlanValueType.getUri()));
        trps.add(new TripleImpl(valueId, Onthology.forBudgetLine.getUri(), line.getSubject()));
        // NOTE(review): the original computed an unused local
        // "double amount = r.nextDouble() * (10 + r.nextDouble());" here — removed.
        trps.add(new TripleImpl(valueId, Onthology.percentage.getUri(), lf.createTypedLiteral(r.nextDouble())));
        trps.add(new TripleImpl(rootFakeBudget, Onthology.hasBudgetPlanValue.getUri(), valueId));
    }
    // add the collected triples only after iteration so the graph is not
    // mutated while its filter() iterator is still open
    graph.addAll(trps);
}
示例5: addGraphsSize
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Add the size of the graphs within each dataset/pipe to the rdf data for
 * visualization.
 *
 * @param responseGraph the graph the dlc:size triples are added to
 */
private void addGraphsSize(MGraph responseGraph){
    Iterator<Triple> datasets = getDlcGraph().filter(DlcGraphProvider.DATA_LIFECYCLE_GRAPH_REFERENCE, DLC.pipe, null);
    while (datasets.hasNext()) {
        final UriRef datasetRef = (UriRef) datasets.next().getObject();
        final DataSet dataSet = dataSetFactory.getDataSet(datasetRef);
        // one dlc:size triple per graph of the dataset
        addGraphSize(responseGraph, dataSet.getSourceGraphRef(), dataSet.getSourceGraph().size());
        addGraphSize(responseGraph, dataSet.getDigestGraphRef(), dataSet.getDigestGraph().size());
        addGraphSize(responseGraph, dataSet.getEnhancementsGraphRef(), dataSet.getEnhancementsGraph().size());
        addGraphSize(responseGraph, dataSet.getInterlinksGraphRef(), dataSet.getInterlinksGraph().size());
        addGraphSize(responseGraph, dataSet.getSmushGraphRef(), dataSet.getSmushGraph().size());
        addGraphSize(responseGraph, dataSet.getPublishGraphRef(), dataSet.getPublishGraph().size());
    }
}

/** Adds a single dlc:size triple stating {@code size} (as a plain literal) for {@code graphRef}. */
private void addGraphSize(MGraph responseGraph, UriRef graphRef, int size) {
    responseGraph.add(new TripleImpl(graphRef, DLC.size, new PlainLiteralImpl(Integer.toString(size))));
}
示例6: addSubjects
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Add dc:subject property to items pointing to entities extracted by NLP
 * engines in the default chain. Given a node and a TripleCollection
 * containing fise:Enhancements about that node dc:subject properties are
 * added to an item pointing to entities referenced by those enhancements if
 * the enhancement confidence value is above a threshold.
 *
 * @param targetGraph the graph the dc:subject triples and entity descriptions are added to
 * @param itemRef the item the dc:subject properties are attached to
 * @param metadata the triple collection containing the fise:Enhancements
 */
private void addSubjects(MGraph targetGraph, UriRef itemRef, TripleCollection metadata) {
final GraphNode enhancementType = new GraphNode(TechnicalClasses.ENHANCER_ENHANCEMENT, metadata);
final Set<UriRef> entities = new HashSet<UriRef>();
// get all the enhancements
final Iterator<GraphNode> enhancements = enhancementType.getSubjectNodes(RDF.type);
while (enhancements.hasNext()) {
final GraphNode enhhancement = enhancements.next();
final Iterator<Literal> confidenceLiterals = enhhancement.getLiterals(org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_CONFIDENCE);
//look the confidence value for each enhancement
// an enhancement without an explicit confidence literal is treated as fully confident (1.0)
double enhancementConfidence = confidenceLiterals.hasNext() ?
LiteralFactory.getInstance().createObject(Double.class,
(TypedLiteral) confidenceLiterals.next()) : 1;
if (enhancementConfidence >= confidenceThreshold) {
// get entities referenced in the enhancement
final Iterator<Resource> referencedEntities = enhhancement.getObjects(org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE);
while (referencedEntities.hasNext()) {
// NOTE(review): assumes every fise:entity-reference value is a UriRef —
// a literal or bnode here would throw ClassCastException; confirm upstream guarantees
final UriRef entity = (UriRef) referencedEntities.next();
// Add dc:subject to the patent for each referenced entity
targetGraph.add(new TripleImpl(itemRef, DC.subject, entity));
entities.add(entity);
}
}
}
// entities is a Set, so each referenced entity gets its description added exactly once
for (UriRef uriRef : entities) {
// We don't get the entity description directly from metadata
// as the context there would include
addResourceDescription(uriRef, targetGraph);
}
}
示例7: perform
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Smush the union of the source, digest and enhancements graphs using the
 * interlinking graph. More precisely collates URIs coming from different
 * equivalent resources in a single one chosen among them. All the triples
 * in the union graph are copied in the smush graph that is then smushed
 * using the interlinking graph. URIs are canonicalized to http://
 */
void perform() {
messageWriter.println("Smushing task.");
// smusher that prefers an http URI among a set of equivalent resources
final SameAsSmusher smusher = new SameAsSmusher() {
@Override
protected UriRef getPreferedIri(Set<UriRef> uriRefs
) {
// collect the http(s) URIs out of the equivalence set
Set<UriRef> httpUri = new HashSet<UriRef>();
for (UriRef uriRef : uriRefs) {
if (uriRef.getUnicodeString().startsWith("http")) {
httpUri.add(uriRef);
}
}
// exactly one http URI: use it as-is
if (httpUri.size() == 1) {
return httpUri.iterator().next();
}
// There is no http URI in the set of equivalent resource. The entity was unknown.
// A new representation of the entity with http URI will be created.
if (httpUri.size() == 0) {
return generateNewHttpUri(dataSet, uriRefs);
}
// several http URIs: delegate the choice
if (httpUri.size() > 1) {
return chooseBest(httpUri);
}
// unreachable: size is 1, 0 or >1
throw new Error("Negative size set.");
}
};
// start from an empty smush graph
if (dataSet.getSmushGraph().size() > 0) {
dataSet.getSmushGraph().clear();
}
dataSet.getSmushGraph().addAll(dataSet.getDigestGraph());
dataSet.getSmushGraph().addAll(dataSet.getEnhancementsGraph());
log.info("All triples from the union of digest and enhancements graph are now in the smush graph.");
log.info("Starting smushing.");
smusher.smush(dataSet.getSmushGraph(), dataSet.getInterlinksGraph(), true);
log.info("Smush task completed.");
// Remove from smush graph equivalences between temporary uri (urn:x-temp) and http uri that are added by the clerezza smusher.
// These equivalences must be removed as only equivalences between known entities (http uri) must be maintained and then published
MGraph equivToRemove = new SimpleMGraph();
// read under the graph's read lock while iterating over filter() results
Lock srl = dataSet.getSmushGraph().getLock().readLock();
srl.lock();
try {
Iterator<Triple> isameas = dataSet.getSmushGraph().filter(null, OWL.sameAs, null);
while (isameas.hasNext()) {
Triple sameas = isameas.next();
NonLiteral subject = sameas.getSubject();
Resource object = sameas.getObject();
// NOTE(review): relies on toString() rendering URIs as "<uri>" — confirm against the UriRef implementation
if (subject.toString().startsWith("<" + URN_SCHEME) || object.toString().startsWith("<" + URN_SCHEME)) {
equivToRemove.add(sameas);
}
}
} finally {
srl.unlock();
}
// removal happens after the iterator is closed to avoid concurrent modification
dataSet.getSmushGraph().removeAll(equivToRemove);
messageWriter.println("Smushing of " + dataSet.getUri()
+ "Smushed graph size = " + dataSet.getSmushGraph().size());
canonicalizeResources();
}
示例8: canonicalizeResources
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * All the resources in the smush graph must be http dereferencable when
 * published. All the triples in the smush graph are copied into a temporary
 * graph. For each triple the subject and the object that have a non-http
 * URI are changed in http uri and an equivalence link is added in the
 * interlinking graph for each resource (subject and object) that has been
 * changed.
 */
private void canonicalizeResources() {
LockableMGraph graph = dataSet.getSmushGraph();
MGraph graphCopy = new SimpleMGraph();
// graph containing the same triple with the http URI for each subject and object
MGraph canonicGraph = new SimpleMGraph();
// snapshot the smush graph under its read lock so iteration below is safe
Lock rl = graph.getLock().readLock();
rl.lock();
try {
graphCopy.addAll(graph);
} finally {
rl.unlock();
}
Iterator<Triple> ismushTriples = graphCopy.iterator();
while (ismushTriples.hasNext()) {
Triple triple = ismushTriples.next();
// NOTE(review): assumes all subjects are UriRef (no blank nodes) — a bnode subject would throw ClassCastException
UriRef subject = (UriRef) triple.getSubject();
Resource object = triple.getObject();
// generate an http URI for both subject and object and add an equivalence link into the interlinking graph
if (subject.getUnicodeString().startsWith(URN_SCHEME)) {
subject = generateNewHttpUri(dataSet, Collections.singleton(subject));
}
// NOTE(review): relies on toString() rendering URIs as "<uri>" so literals never match — confirm
if (object.toString().startsWith("<" + URN_SCHEME)) {
object = generateNewHttpUri(dataSet, Collections.singleton((UriRef) object));
}
// add the triple with the http uris to the canonic graph
canonicGraph.add(new TripleImpl(subject, triple.getPredicate(), object));
}
// atomically replace the smush graph content under the write lock
Lock wl = graph.getLock().writeLock();
wl.lock();
try {
graph.clear();
graph.addAll(canonicGraph);
} finally {
wl.unlock();
}
}
示例9: generateTemplate
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * "Template" for a community budget. Budget lines names issued from http://www.metropolitiques.eu/Dans-l-ombre-des-maires.html
 *
 * @param graph   the graph the template triples are added to
 * @param rootUri the root entity (the city) the template belongs to
 */
private void generateTemplate(MGraph graph, UriRef rootUri){
    // generate and attach a random city id
    String cityId = UUID.randomUUID().toString();
    graph.add(new TripleImpl(rootUri, Onthology.cityId.getUri(), lf.createTypedLiteral(cityId)));
    // one top-level budget line per template entry, none of them having children.
    // Collections.<String>emptyList() replaces the raw Collections.EMPTY_LIST to
    // avoid the unchecked conversion and match createBudgetLine's own usage.
    for (String budgetLine : linesTemplates) {
        createBudgetLine(graph, budgetLine, Collections.<String>emptyList());
    }
}
示例10: addLanguageProperty
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Adds language specific properties to the given entity annotation:
 * the dc:language value, a confidence of 1.0 and the dc:type marking it
 * as a dcterms:LinguisticSystem annotation.
 *
 * @param entityAnnotation the annotation resource the triples are attached to
 * @param g                the graph the triples are added to
 * @param lang             the language code to state
 */
public static void addLanguageProperty(UriRef entityAnnotation, MGraph g, String lang) {
    // the detected language as a plain literal
    g.add(new TripleImpl(entityAnnotation, DC_LANGUAGE, new PlainLiteralImpl(lang)));
    // this annotation is stated with full confidence
    g.add(new TripleImpl(entityAnnotation, ENHANCER_CONFIDENCE,
            literalFactory.createTypedLiteral(1.0)));
    // classify the annotation as a linguistic-system statement
    g.add(new TripleImpl(entityAnnotation, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
}