This article collects typical usage examples of the Java class com.hp.hpl.jena.query.ARQ. If you are unsure what the ARQ class is for, how to use it, or what it looks like in real code, the selected examples below may help.
The ARQ class belongs to the com.hp.hpl.jena.query package. Fifteen code examples are shown below, sorted by popularity by default.
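Before the individual examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the typical ARQ pattern: set an option on the global ARQ context, then build and run a query. The model, query string, and timeout value are placeholders.
import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
public class ArqContextDemo {
    public static void main(String[] args) {
        // ARQ exposes a global Context of execution symbols (timeouts, parser
        // options, optimizer flags, ...); the examples below read and set it.
        ARQ.getContext().set(ARQ.queryTimeout, "10000"); // 10 s overall timeout
        Model model = ModelFactory.createDefaultModel();
        Query query = QueryFactory.create("SELECT * WHERE { ?s ?p ?o } LIMIT 10");
        QueryExecution qe = QueryExecutionFactory.create(query, model);
        try {
            ResultSet rs = qe.execSelect();
            while (rs.hasNext()) {
                System.out.println(rs.nextSolution());
            }
        } finally {
            qe.close();
        }
    }
}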
Example 1: crm2AliadaClass
import com.hp.hpl.jena.query.ARQ; // import the required package/class
public String crm2AliadaClass(final String crmClass) {
final Query query = QueryFactory.create(CRM_TO_ALIADA_CLASS_P1 + crmClass + CRM_TO_ALIADA_CLASS_P2);
ARQ.getContext().setTrue(ARQ.useSAX);
QueryExecution execution = null;
try {
execution = QueryExecutionFactory.sparqlService("http://172.25.5.15:8890/sparql", query);
execution.setTimeout(2000, 5000);
final ResultSet results = execution.execSelect();
//Iterating over the SPARQL Query results
while (results.hasNext()) {
QuerySolution soln = results.nextSolution();
//Print the matched binding and return its URI.
System.out.println(soln.get("?abstract"));
return soln.get("?abstract").asResource().getURI();
}
return "NULL";
} finally {
if (execution != null) {
try {
execution.close();
} catch (Exception exception) {
// Ignore failures while closing the remote execution.
}
}
}
}
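In the example above, execution.setTimeout(2000, 5000) sets a 2-second timeout for the first result and a 5-second timeout for the whole query. A hypothetical call to this helper might look like the following; the CIDOC-CRM class name is only an illustration and does not come from the original project.
// Hypothetical usage; "E21_Person" is an illustrative CRM class name.
String aliadaClass = crm2AliadaClass("E21_Person");
System.out.println(aliadaClass); // URI of the mapped ALIADA class, or "NULL"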
Example 2: visit
import com.hp.hpl.jena.query.ARQ; // import the required package/class
@Override
public void visit(OpService opService) {
Table input = pop();
if (!input.isEmpty()) {
TableFactory tableFactory = new TableFactory();
ArrayList<Var> vars= new ArrayList<Var>( OpVars.mentionedVars(opService));
//Table inputVars = TableFactory.create(input,vars);
Table inputVars = new TableFiltered(input,vars);
Op op1 = OpSequence.create(opService.getSubOp(),
OpTable.create(inputVars));
opService = new OpService(opService.getService(), op1,
opService.getSilent());
}
QueryIterator qIter = Service.exec(opService, ARQ.getContext());
Table table = TableFactory.create(qIter);
push(table);
}
Example 3: getContext
import com.hp.hpl.jena.query.ARQ; // import the required package/class
public static Context getContext(boolean fastMode) {
Context result = ARQ.getContext().copy();
String defaultValue = fastMode ? "true" : "false";
result.set(D2RQOptions.MULTIPLEX_QUERIES, defaultValue);
result.set(D2RQOptions.FILTER_TO_SQL, defaultValue);
return result;
}
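The Context returned here is a copy of the global ARQ context, so the D2RQ flags affect only executions that are explicitly given this copy. How the copy reaches the engine depends on the surrounding D2RQ code; as a rough sketch (the query and model are placeholders, and the per-execution approach is an assumption, not taken from the original), the symbols could be applied to a single QueryExecution like this:
// Sketch only: copy the prepared D2RQ symbols onto one execution's context.
Context prepared = getContext(true); // fast mode
QueryExecution qe = QueryExecutionFactory.create(query, model);
qe.getContext().set(D2RQOptions.MULTIPLEX_QUERIES, prepared.get(D2RQOptions.MULTIPLEX_QUERIES));
qe.getContext().set(D2RQOptions.FILTER_TO_SQL, prepared.get(D2RQOptions.FILTER_TO_SQL));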
Example 4: configure
import com.hp.hpl.jena.query.ARQ; // import the required package/class
@Override
public void configure(BrokerPool pool, Path dataDir, Element config) throws DatabaseConfigurationException {
super.configure(pool, dataDir, config);
if (LOG.isDebugEnabled()) {
LOG.debug("Configuring SPARQL index");
}
/*
* Some configurables.
*/
NamedNodeMap attributes = config.getAttributes();
for (int i = 0; i < attributes.getLength(); i++) {
Attr attr = (Attr) attributes.item(i);
if (attr.getName().equals(CFG_FILE_MODE)) {
if (attr.getValue().equals(CFG_FILE_MODE_MAPPED)) {
SystemTDB.setFileMode(FileMode.mapped);
} else if (attr.getValue().equals(CFG_FILE_MODE_DIRECT)) {
SystemTDB.setFileMode(FileMode.direct);
}
} else if (attr.getName().equals(CFG_LOG_EXEC)) {
if (attr.getValue().equals(CFG_LOG_EXEC_TRUE)) {
// Enable execution logging; ARQ.isTrue(...) would only read the flag without setting it.
ARQ.getContext().setTrue(ARQ.symLogExec);
}
}
}
// TDB.transactionJournalWriteBlockMode
}
Example 5: executeBGP
import com.hp.hpl.jena.query.ARQ; // import the required package/class
private static QueryIterator executeBGP(BasicPattern pattern, QueryIterator input, ExecutionContext execCxt) {
if (pattern.isEmpty())
return input ;
boolean hideBNodeVars = execCxt.getContext().isTrue(ARQ.hideNonDistiguishedVariables) ;
StageGenerator gen = StageBuilder.executeInline ;
QueryIterator qIter = gen.execute(pattern, input, execCxt) ;
// Remove non-distinguished variables here.
// Project out only named variables.
if (hideBNodeVars)
qIter = new QueryIterDistinguishedVars(qIter, execCxt) ;
return qIter ;
}
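executeBGP uses StageBuilder.executeInline directly. Normally, the stage generator that evaluates basic graph patterns is looked up from the ARQ context, and a custom one can be registered there. The following is a minimal sketch of that registration pattern, not part of the original code; the logging generator and its class name are made up for illustration.
import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.sparql.core.BasicPattern;
import com.hp.hpl.jena.sparql.engine.ExecutionContext;
import com.hp.hpl.jena.sparql.engine.QueryIterator;
import com.hp.hpl.jena.sparql.engine.main.StageBuilder;
import com.hp.hpl.jena.sparql.engine.main.StageGenerator;
public class LoggingStageGenerator implements StageGenerator {
    private final StageGenerator fallback;
    public LoggingStageGenerator(StageGenerator fallback) {
        this.fallback = fallback;
    }
    @Override
    public QueryIterator execute(BasicPattern pattern, QueryIterator input, ExecutionContext execCxt) {
        // Log the pattern, then delegate to the previously registered generator.
        System.out.println("Executing BGP with " + pattern.size() + " triple patterns");
        return fallback.execute(pattern, input, execCxt);
    }
    public static void install() {
        StageGenerator current = StageBuilder.getGenerator(ARQ.getContext());
        StageBuilder.setGenerator(ARQ.getContext(), new LoggingStageGenerator(current));
    }
}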
Example 6: queryEndpoint
import com.hp.hpl.jena.query.ARQ; // import the required package/class
protected ResultSet queryEndpoint ( String endpoint, String sparqlQueryString )
{
Query query = QueryFactory.create(sparqlQueryString);
ARQ.getContext().setTrue(ARQ.useSAX);
QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, query);
ResultSet results = qexec.execSelect();
// Materialise the results (com.hp.hpl.jena.query.ResultSetFactory) before closing
// the execution; a streaming ResultSet is unusable once its QueryExecution is closed.
ResultSet copy = ResultSetFactory.copyResults(results);
qexec.close();
return copy;
}
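A hypothetical call to queryEndpoint; the endpoint URL and query string are placeholders.
ResultSet rs = queryEndpoint("http://example.org/sparql",
        "SELECT * WHERE { ?s ?p ?o } LIMIT 10");
while (rs.hasNext()) {
    System.out.println(rs.nextSolution());
}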
Example 7: tdbindexdump
import com.hp.hpl.jena.query.ARQ; // import the required package/class
protected tdbindexdump(String[] argv) {
super(argv) ;
super.modVersion.addClass(ARQ.class) ;
super.modVersion.addClass(RIOT.class) ;
super.modVersion.addClass(TDB.class) ;
super.add(argLocation) ;
}
Example 8: tdbindexcopy
import com.hp.hpl.jena.query.ARQ; // import the required package/class
protected tdbindexcopy(String[] argv) {
super(argv) ;
super.modVersion.addClass(ARQ.class) ;
super.modVersion.addClass(RIOT.class) ;
super.modVersion.addClass(TDB.class) ;
super.add(argLocation) ;
}
Example 9: modifyOp
import com.hp.hpl.jena.query.ARQ; // import the required package/class
@Override
protected Op modifyOp(Op op)
{
if ( context.isFalse(ARQ.optimization) )
return minimalModifyOp(op) ;
return Algebra.optimize(op, super.context) ;
}
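For completeness, this is how the optimization flag checked above can be switched off globally; a one-line sketch, not taken from the original project.
import com.hp.hpl.jena.query.ARQ;
// Disable the algebra optimizer; modifyOp() above will then return
// minimalModifyOp(op) instead of Algebra.optimize(op, context).
ARQ.getContext().setFalse(ARQ.optimization);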
Example 10: create
import com.hp.hpl.jena.query.ARQ; // import the required package/class
@Override
public Plan create(Op op, DatasetGraph dataset, Binding input, Context context)
{
if ( context == null )
context = ARQ.getContext().copy() ;
DatasetGraphTDB dsgtdb = dsgToQuery(dataset) ;
setup(dsgtdb, context) ;
// This is the route for op execution, not from a Query.
QueryEngineQuackTDB engine = new QueryEngineQuackTDB(op, dsgtdb, input, context) ;
return engine.getPlan() ;
}
Example 11: explain
import com.hp.hpl.jena.query.ARQ; // import the required package/class
public static void explain(ExplainCategory eCat, Object obj) {
if ( active.contains(eCat) ) {
Logger log = ARQ.getExecLogger() ;
if ( log.isInfoEnabled() ) {
output(log, eCat, String.valueOf(obj)) ;
}
}
}
Example 12: setVerbose
import com.hp.hpl.jena.query.ARQ; // import the required package/class
public static void setVerbose(boolean verbose) {
if ( verbose ) {
// Force statistics output
LogCtl.enable(ARQ.logExecName) ;
JOIN_EXPLAIN = true ;
ARQ.setExecutionLogging(InfoLevel.ALL) ;
Explain2.setActive(Quack.quackExec);
Explain2.setActive(Quack.quackPlan);
} else {
ARQ.setExecutionLogging(InfoLevel.NONE) ;
Explain2.remove(Quack.quackExec);
Explain2.remove(Quack.quackPlan);
JOIN_EXPLAIN = false ;
}
}
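The Explain2/Quack calls are specific to that project. In plain ARQ, the core of the verbose branch is a single call, sketched below; the logger named by ARQ.logExecName must also be enabled at INFO level in the logging configuration, as the example does with LogCtl.
import com.hp.hpl.jena.query.ARQ;
import com.hp.hpl.jena.sparql.mgt.Explain.InfoLevel;
// Log queries, algebra plans and BGP execution through the ARQ execution logger.
ARQ.setExecutionLogging(InfoLevel.ALL);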
Example 13: make
import com.hp.hpl.jena.query.ARQ; // import the required package/class
/**
* <p>Method that constructs a QueryExecution object from the given query, dataset, and context</p>
* @param query - the Query object
* @param dataset - the input dataset for the given model
* @param context - the context for this query
* @return a QueryExecution object
*/
private static QueryExecution make( Query query, Dataset dataset, Context context )
{
query.validate() ;
if ( context == null ) context = ARQ.getContext().copy();
DatasetGraph dsg = null ;
if ( dataset != null ) dsg = dataset.asDatasetGraph() ;
QueryEngineFactory f = findFactory( query, dsg, context );
if ( f == null )
{
ALog.warn( QueryExecutionFactory.class, "Failed to find a QueryEngineFactory for query: " + query );
return null ;
}
return new QueryExecutionBase( query, dataset, context, f );
}
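A hypothetical use of this factory helper; the dataset and query are placeholders, and passing null for the context falls back to a copy of ARQ.getContext(), as the code above shows.
Dataset dataset = DatasetFactory.create(ModelFactory.createDefaultModel());
Query query = QueryFactory.create("ASK { ?s ?p ?o }");
QueryExecution qe = make(query, dataset, null);
boolean any = qe.execAsk();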
Example 14: Server
import com.hp.hpl.jena.query.ARQ; // import the required package/class
public Server(int port, String pagePath, String subDomain, long timeout, Model model, Listener listener) {
this(port, pagePath, subDomain, model, listener);
ARQ.getContext().set(ARQ.queryTimeout, Long.toString(timeout));
}
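ARQ.queryTimeout accepts either a single value (the overall timeout) or a pair "first,overall" in milliseconds. A short sketch, not from the original project:
import com.hp.hpl.jena.query.ARQ;
// 2 s to the first result, 10 s for the whole query.
ARQ.getContext().set(ARQ.queryTimeout, "2000,10000");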
Example 15: start
import com.hp.hpl.jena.query.ARQ; // import the required package/class
public void start() {
startupError = true;
if (config.isLocalMappingFile()) {
this.dataset = new AutoReloadableDataset(loader,
config.getLocalMappingFilename(),
config.getAutoReloadMapping());
} else {
this.dataset = new AutoReloadableDataset(loader, null, false);
}
if (loader.getMapping().configuration().getUseAllOptimizations()) {
log.info("Fast mode (all optimizations)");
} else {
log.info("Safe mode (launch using --fast to use all optimizations)");
}
// Set up a custom DescribeHandler that calls out to
// {@link ResourceDescriber}
DescribeHandlerRegistry.get().clear();
DescribeHandlerRegistry.get().add(new DescribeHandlerFactory() {
public DescribeHandler create() {
return new DescribeHandler() {
private BulkUpdateHandler adder;
public void start(Model accumulateResultModel,
Context qContext) {
adder = accumulateResultModel.getGraph()
.getBulkUpdateHandler();
}
public void describe(Resource resource) {
log.info("DESCRIBE <" + resource + ">");
boolean outgoingTriplesOnly = isVocabularyResource(resource)
&& !getConfig().getVocabularyIncludeInstances();
adder.add(new ResourceDescriber(getMapping(), resource
.asNode(), outgoingTriplesOnly,
Relation.NO_LIMIT,
Math.round(config.getSPARQLTimeout())).description());
}
public void finish() {
}
};
}
});
Registry.add(RDFServer.ServiceRegistryName,
createJosekiServiceRegistry());
if (config.getSPARQLTimeout() > 0) {
ARQ.getContext().set(ARQ.queryTimeout, config.getSPARQLTimeout() * 1000);
}
startupError = false;
}