本文整理汇总了Java中org.apache.clerezza.rdf.core.MGraph.addAll方法的典型用法代码示例。如果您正苦于以下问题:Java MGraph.addAll方法的具体用法?Java MGraph.addAll怎么用?Java MGraph.addAll使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.clerezza.rdf.core.MGraph
的用法示例。
在下文中一共展示了MGraph.addAll方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getSkosOnto
import java.io.IOException;
import java.io.InputStream;
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
@Deprecated // use the model endpoint instead
@Path("/skosontology/all")
@GET
@Consumes(WILDCARD)
@Produces(RDF_XML)
//TODO : change query parameter name from skos to "model"
//TODO : change endpoint from skosontology to ontologiesmodel
//TODO : remove the "/all" and remove the /skosontology model
//TODO : without model parameter, this endpoint should provide the list of available ontologies
//TODO : remove the skos_ prefix in the file name as it does not really mean anything
/**
 * Returns the ontology graph for the given model name as RDF/XML.
 * On first access the graph is created in the TcManager under
 * "urn:x-onto-utils:skosOntology&lt;modelName&gt;" and populated from the
 * bundled classpath resource "/skos_&lt;modelName&gt;.rdf"; subsequent calls
 * reuse the cached graph.
 *
 * @param modelName name of the model whose ontology is requested ("skos" query parameter)
 * @param headers   request headers, used for content negotiation by okMGraphResponse
 * @return the ontology graph wrapped in a JAX-RS Response
 * @throws JSONException declared for compatibility with the response helper
 */
public Response getSkosOnto(@QueryParam(value = "skos") String modelName, @Context HttpHeaders headers) throws JSONException {
    log.warn("This endpoint is deprecated, use the model endpoint instead");
    UriRef soURI = new UriRef("urn:x-onto-utils:skosOntology" + modelName);
    MGraph g;
    Set<UriRef> l = tcManager.listMGraphs();
    if (l.contains(soURI)) {
        g = tcManager.getMGraph(soURI);
    } else {
        // initialisation: create the named graph and load it from the bundled RDF/XML file
        g = tcManager.createMGraph(soURI);
        // try-with-resources closes the classpath stream (previously leaked)
        try (InputStream in = this.getClass().getResourceAsStream("/skos_" + modelName + ".rdf")) {
            if (in == null) {
                // fail with a clear message instead of an opaque NPE inside the parser
                throw new IllegalArgumentException("No bundled ontology found for model: " + modelName);
            }
            g.addAll(parser.parse(in, "Application/rdf+xml"));
        } catch (IOException e) {
            // only close() can raise this; surface it rather than swallowing
            throw new RuntimeException("Failed to read ontology resource for model: " + modelName, e);
        }
    }
    return okMGraphResponse(headers, g);
}
示例2: extractTextFromRdf
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Extracts text from dcterms:title and dcterms:abstract fields in the source
 * graph and adds a sioc:content property with that text in the digest
 * graph. The text is used by the ECS for indexing, so that the resource can
 * be retrieved by its keywords.
 *
 * @param dataSet          the dataset whose source graph is digested
 * @param selectedDigester key of the RdfDigester to use (looked up in {@code digesters})
 * @param messageWriter    sink for the human-readable progress message
 */
private void extractTextFromRdf(DataSet dataSet, String selectedDigester, PrintWriter messageWriter) {
    RdfDigester digester = digesters.get(selectedDigester);
    MGraph tempGraph = new IndexedMGraph();
    LockableMGraph sourceGraph = dataSet.getSourceGraph();
    Lock rl = sourceGraph.getLock().readLock();
    // copy the source triples under the read lock
    rl.lock();
    try {
        tempGraph.addAll(sourceGraph);
    } finally {
        rl.unlock();
    }
    digester.extractText(tempGraph);
    // removeAll iterates over sourceGraph as well, so it must also run under
    // the read lock (the original version accessed the lockable graph unlocked here)
    rl.lock();
    try {
        tempGraph.removeAll(sourceGraph);
    } finally {
        rl.unlock();
    }
    // only the newly digested triples remain; store them in the digest graph
    dataSet.getDigestGraph().addAll(tempGraph);
    messageWriter.println("Extracted text from " + dataSet.getDigestGraphRef().getUnicodeString() + " by " + selectedDigester + " digester");
}
示例3: generateDemoData
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Populates the given graph with demo data: a template, a fake city label,
 * and a fake budget plan holding one random percentage value per budget line.
 *
 * @param graph   the graph the demo triples are added to
 * @param rootUri subject the demo city label is attached to
 */
private void generateDemoData(MGraph graph, UriRef rootUri) {
    generateTemplate(graph, rootUri);
    String cityName = "demoCity";
    //TODO : create a real literal with a lang (cf importer in xplor)
    graph.add(new TripleImpl(rootUri, SKOS.prefLabel, lf.createTypedLiteral(cityName)));
    // create a fake budget plan
    String userName = "fakeUser";
    UriRef rootFakeBudget = Entities.generateNewEntity();
    graph.add(new TripleImpl(rootFakeBudget, RDF.type, Onthology.budgetPlanType.getUri()));
    //TODO : create a real language object
    graph.add(new TripleImpl(rootFakeBudget, Onthology.fromUser.getUri(), lf.createTypedLiteral(userName)));
    Random r = new Random();
    // get all "lines" of the budget
    // TODO : get to 100% with random generation int maxPercentage = 100;
    Iterator<Triple> lines = graph.filter(null, RDF.type, Onthology.budgetLineType.getUri());
    // collect new triples in a list: adding to the graph while iterating its
    // filter() result would risk a ConcurrentModificationException
    List<Triple> trps = new ArrayList<Triple>();
    while (lines.hasNext()) {
        Triple line = lines.next();
        UriRef valueId = Entities.generateNewEntity();
        trps.add(new TripleImpl(valueId, RDF.type, Onthology.budgetPlanValueType.getUri()));
        trps.add(new TripleImpl(valueId, Onthology.forBudgetLine.getUri(), line.getSubject()));
        // NOTE(review): the original also computed an unused local
        // "amount = r.nextDouble() * (10 + r.nextDouble())" — dead code removed;
        // if an amount triple was intended, it was never added.
        trps.add(new TripleImpl(valueId, Onthology.percentage.getUri(), lf.createTypedLiteral(r.nextDouble())));
        trps.add(new TripleImpl(rootFakeBudget, Onthology.hasBudgetPlanValue.getUri(), valueId));
    }
    graph.addAll(trps);
}
示例4: serviceEntry
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Returns an RdfViewable: an RDF serviceUri together with the associated
 * presentational information.
 */
@GET
public RdfViewable serviceEntry(@Context final UriInfo uriInfo,
        @QueryParam("url") final UriRef url,
        @HeaderParam("user-agent") String userAgent) throws Exception {
    // Ensure we are not invoked with a trailing slash, which would affect
    // relative resolution of links (e.g. css).
    TrailingSlash.enforcePresent(uriInfo);
    final String resourcePath = uriInfo.getAbsolutePath().toString();
    if (url != null) {
        log.info(url.toString());
    }
    // The URI at which this service was accessed; it becomes the central
    // serviceUri in the response.
    final UriRef serviceUri = new UriRef(resourcePath);
    // In-memory graph collecting the triples for the response.
    final MGraph responseGraph = new IndexedMGraph();
    final Lock readLock = getDlcGraph().getLock().readLock();
    readLock.lock();
    try {
        responseGraph.addAll(getDlcGraph());
        // Add the size info of the graphs of all the datasets.
        addGraphsSize(responseGraph);
    } finally {
        readLock.unlock();
    }
    // This GraphNode represents the service within our result graph.
    final GraphNode node = new GraphNode(serviceUri, responseGraph);
    node.addProperty(DLC.graph, DlcGraphProvider.DATA_LIFECYCLE_GRAPH_REFERENCE);
    // Return the GraphNode to the template with the same path and name.
    return new RdfViewable("PipesAdmin", node, PipesAdmin.class);
}
示例5: deletePipe
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Deletes all the graphs created with the pipe: rdf.graph, enhance.graph,
 * interlink.graph, smush.graph, publish.graph.
 * Removes from the DLC meta graph all the pipe metadata.
 *
 * @param uriInfo    request context, used to build the redirect response
 * @param dataSetUri URI of the pipe/dataset to delete ("pipe" form parameter)
 * @return a see-other redirect back to the admin page
 * @throws Exception if graph deletion or metadata removal fails
 */
@POST
@Path("delete_pipe")
@Produces("text/plain")
public Response deletePipe(@Context final UriInfo uriInfo,
        @FormParam("pipe") final UriRef dataSetUri) throws Exception {
    AccessController.checkPermission(new AllPermission());
    String message = "";
    final DataSet dataSet = dataSetFactory.getDataSet(dataSetUri);
    // remove all the graphs belonging to the dataset
    tcManager.deleteTripleCollection(dataSet.getSourceGraphRef());
    tcManager.deleteTripleCollection(dataSet.getDigestGraphRef());
    tcManager.deleteTripleCollection(dataSet.getEnhancementsGraphRef());
    tcManager.deleteTripleCollection(dataSet.getInterlinksGraphRef());
    tcManager.deleteTripleCollection(dataSet.getSmushGraphRef());
    tcManager.deleteTripleCollection(dataSet.getLogGraphRef());
    // NOTE(review): the original copied publish.graph into an in-memory graph
    // (under a read lock) before deleting it, but never used that copy — dead
    // code removed. If the published triples must also be withdrawn from the
    // content graph, call the unpublish logic before deleting the pipe.
    tcManager.deleteTripleCollection(dataSet.getPublishGraphRef());
    // remove pipe metadata
    removePipeMetaData(dataSetUri);
    message += "The dataset: " + dataSetUri + " has been deleted";
    return RedirectUtil.createSeeOtherResponse("./", uriInfo);
    //return message;
}
示例6: canonicalizeResources
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * All the resources in the smush graph must be http dereferencable when
 * published. All the triples in the smush graph are copied into a temporary
 * graph. For each triple the subject and the object that have a non-http
 * URI are changed in http uri and an equivalence link is added in the
 * interlinking graph for each resource (subject and object) that has been
 * changed.
 */
private void canonicalizeResources() {
    LockableMGraph graph = dataSet.getSmushGraph();
    MGraph graphCopy = new SimpleMGraph();
    // graph containing the same triples with an http URI for each subject and object
    MGraph canonicGraph = new SimpleMGraph();
    // snapshot the smush graph under its read lock
    Lock rl = graph.getLock().readLock();
    rl.lock();
    try {
        graphCopy.addAll(graph);
    } finally {
        rl.unlock();
    }
    Iterator<Triple> ismushTriples = graphCopy.iterator();
    while (ismushTriples.hasNext()) {
        Triple triple = ismushTriples.next();
        // NOTE(review): assumes smushed subjects are always UriRefs (no blank
        // nodes) — a BNode subject would throw ClassCastException here, as in
        // the original code.
        UriRef subject = (UriRef) triple.getSubject();
        Resource object = triple.getObject();
        // generate an http URI for both subject and object and add an equivalence
        // link into the interlinking graph
        if (subject.getUnicodeString().startsWith(URN_SCHEME)) {
            subject = generateNewHttpUri(dataSet, Collections.singleton(subject));
        }
        // check the object the same way as the subject: the original matched on
        // toString().startsWith("<" + URN_SCHEME), which depends on UriRef's
        // toString() formatting; testing the type and the unicode string directly
        // is equivalent and cannot misfire on non-UriRef objects
        if (object instanceof UriRef
                && ((UriRef) object).getUnicodeString().startsWith(URN_SCHEME)) {
            object = generateNewHttpUri(dataSet, Collections.singleton((UriRef) object));
        }
        // add the triple with the http uris to the canonic graph
        canonicGraph.add(new TripleImpl(subject, triple.getPredicate(), object));
    }
    // atomically replace the smush graph content under the write lock
    Lock wl = graph.getLock().writeLock();
    wl.lock();
    try {
        graph.clear();
        graph.addAll(canonicGraph);
    } finally {
        wl.unlock();
    }
}
示例7: unpublishDataset
import org.apache.clerezza.rdf.core.MGraph; //导入方法依赖的package包/类
/**
 * Removes the published triples from the content graph. More precisely, the
 * triples stored in the publish.graph of a dataset are removed from the
 * content graph. Then all the triples in publish.graph are deleted so that
 * data can be published again starting from smush.graph.
 */
@POST
@Path("unpublish_dataset")
@Produces("text/plain")
public Response unpublishDataset(@Context final UriInfo uriInfo,
        @FormParam("pipe") final UriRef pipeRef) {
    final DataSet dataSet = dataSetFactory.getDataSet(pipeRef);
    String message = "";
    final LockableMGraph publishGraph = dataSet.getPublishGraph();
    final int numberOfTriples = publishGraph.size();
    if (numberOfTriples == 0) {
        message += "There are no triples in " + pipeRef;
    } else {
        // snapshot the published triples under the publish graph's read lock
        final MGraph publishedTriples = new IndexedMGraph();
        final Lock readLock = publishGraph.getLock().readLock();
        readLock.lock();
        try {
            publishedTriples.addAll(publishGraph);
        } finally {
            readLock.unlock();
        }
        // remove published triples from content graph
        final LockableMGraph contentGraph =
                tcManager.getMGraph(new UriRef(SourcingAdmin.CONTENT_GRAPH_NAME));
        contentGraph.removeAll(publishedTriples);
        // removes all the triples in publish.graph
        publishGraph.clear();
        message += "All " + numberOfTriples + " triples have been removed from the content graph.";
    }
    // update the dataset status (unpublished)
    updateDatasetStatus(pipeRef);
    return RedirectUtil.createSeeOtherResponse("./", uriInfo);
    //return message;
}