This page collects typical usage examples of the Java method org.apache.solr.client.solrj.SolrQuery.setParam. If you are unsure what SolrQuery.setParam does, how to call it, or want to see it in context, the curated code samples below may help. You can also explore further usage of the containing class, org.apache.solr.client.solrj.SolrQuery.
The following shows 14 code examples of SolrQuery.setParam, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
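Before the collected examples, here is a minimal, self-contained sketch of the typical setParam pattern: build a SolrQuery, set arbitrary request parameters by name, and execute it against a SolrClient. The Solr URL, the collection name "gettingstarted", and the field names id and title are assumptions chosen for illustration only.

import java.io.IOException;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;

public class SetParamDemo {
    public static void main(String[] args) throws SolrServerException, IOException {
        // Assumed local Solr core; adjust the URL and collection for your setup.
        SolrClient client = new HttpSolrClient.Builder()
                .withBaseSolrUrl("http://localhost:8983/solr/gettingstarted").build();

        SolrQuery query = new SolrQuery("*:*");
        // setParam(String, String...) sets an arbitrary request parameter,
        // here the field list and the default query operator.
        query.setParam("fl", "id,title");
        query.setParam("q.op", "AND");
        // There is also a boolean overload, e.g. for flags such as "distrib".
        query.setParam("distrib", false);

        QueryResponse response = client.query(query);
        SolrDocumentList results = response.getResults();
        for (SolrDocument doc : results) {
            System.out.println(doc.getFieldValue("id"));
        }
        client.close();
    }
}

setParam simply writes the given name/value pair into the query's parameter map, so it is useful for any request parameter that has no dedicated setter, as the examples below show.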
Example 1: checkSitemapFile
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
/**
 * Computes the number of sitemap files. If there are fewer than
 * <em>numberOfURLs</em> URLs and only one MyCoRe type, sitemap_google.xml
 * contains all URLs. Otherwise the sitemap is split into a sitemap_google.xml
 * index file and a number of sitemap_google_xxxx.xml URL files.
 *
 * @return the number of files: one for a single sitemap_google.xml file, more than
 *         one for the index plus all parts
 */
protected final int checkSitemapFile() throws IOException {
    int number = 0;
    QueryResponse response;
    SolrQuery query = new SolrQuery();
    query.setQuery(SOLR_QUERY);
    query.setRows(Integer.MAX_VALUE);
    query.setParam("fl", "id,modified");
    try {
        response = MCRSolrClientFactory.getSolrClient().query(query);
        objidlist = response.getResults().stream().map((document) -> {
            String id = (String) document.getFieldValue("id");
            Date modified = (Date) document.getFieldValue("modified");
            return new MCRObjectIDDateImpl(modified, id);
        }).collect(Collectors.toList());
    } catch (SolrServerException e) {
        LOGGER.error(e);
    }
    number = objidlist.size() / numberOfURLs;
    if (objidlist.size() % numberOfURLs != 0)
        number++;
    return number;
}
Example 2: main
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
public static void main(String[] args) {
    SolrClient solr = new HttpSolrClient.Builder().withBaseSolrUrl("http://localhost:8983/solr/chronix/").build();

    // Define a group-by function for the time series records
    Function<GenericTimeSeries<Long, Double>, String> groupBy = ts -> ts.getAttribute("name") + "-" + ts.getAttribute("host");

    // Define a reduce function for the grouped time series records. We use the average.
    BinaryOperator<GenericTimeSeries<Long, Double>> reduce = (ts1, ts2) -> merge(ts1, ts2, (y1, y2) -> (y1 + y2) / 2);

    // Instantiate a Chronix client
    ChronixClient<GenericTimeSeries<Long, Double>, SolrClient, SolrQuery> chronix = new ChronixClient<>(
            new GenericTimeSeriesConverter(), new ChronixSolrStorage<>(200, groupBy, reduce));

    // We want the maximum of all time series whose name matches *Load*.
    SolrQuery query = new SolrQuery("name:*Load*");
    query.setParam("cf", "metric{max}");

    // The result is a Java Stream. We simply collect the result into a list.
    List<GenericTimeSeries<Long, Double>> maxTS = chronix.stream(solr, query).collect(Collectors.toList());

    // Just print it out.
    LOGGER.info("Result for query {} is: {}", query, maxTS);
}
Example 3: doConstructSolrQuery
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
/**
 * Convert the given query into a {@link SolrQuery} executable via {@link org.apache.solr.client.solrj.SolrClient}.
 *
 * @param query the query to convert; must not be null
 * @return the constructed {@link SolrQuery}
 */
@Override
public final SolrQuery doConstructSolrQuery(SolrDataQuery query) {
    Assert.notNull(query, "Cannot construct solrQuery from null value.");
    Assert.notNull(query.getCriteria(), "Query has to have a criteria.");

    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setParam(CommonParams.Q, getQueryString(query));
    if (query instanceof Query) {
        processQueryOptions(solrQuery, (Query) query);
    }
    if (query instanceof FacetQuery) {
        processFacetOptions(solrQuery, (FacetQuery) query);
    }
    if (query instanceof HighlightQuery) {
        processHighlightOptions(solrQuery, (HighlightQuery) query);
    }
    return solrQuery;
}
Example 4: exec
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
@Override
public void exec(PatientSearch search, FrameTask task) throws FrameException {
    // create + exec SOLR query
    SolrQuery query = search.initQuery();
    query.setParam("q.op", "OR");
    query.setQuery("+all:\"" + ClientUtils.escapeQueryChars(search.getQueryStr()) + '"');

    // search for the specific domains we don't have frames for
    String domains = "obs procedure "; // encounter stuff
    domains += "mh "; // other patient stuff
    domains += "roadtrip auxiliary "; // CPE specific stuff
    domains += "pov skin diagnosis ptf exam treatment"; // encounter flags
    query.addFilterQuery("+domain: (" + domains + ")");
    query.addFilterQuery("-removed:true");

    query.addSort(SearchService.DATETIME, ORDER.desc);
    query.setFields(SearchService.UID, SearchService.DATETIME,
            SearchService.SUMMARY, SearchService.URL_FIELD,
            SearchService.DOMAIN, SearchService.KIND,
            SearchService.FACILITY);

    task.addAction(new SolrSearchAction(query, execSolrQuery(query, task)));
}
Example 5: queryWatson
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
public static QueryResponse queryWatson(String question) {
    solrClient = getSolrClient(service.getSolrUrl(SOLR_CLUSTER_ID), USERNAME, PASSWORD);
    SolrQuery query = new SolrQuery(question);
    query.setParam("ranker_id", RANKER_ID);
    // use if your solrconfig.xml file does not specify fcselect as the default request handler
    query.setRequestHandler("/fcselect");
    QueryResponse response = null;
    try {
        response = solrClient.query("example_collection", query);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return response;
}
Example 6: complete
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
public List<String> complete(String query) {
    List<String> results = new ArrayList<>();
    SolrServer solrServer = new HttpSolrServer("http://" + systemConfig.getSolrHostColonPort() + "/solr");
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setParam("qt", "/terms");
    solrQuery.setTermsLower(query);
    solrQuery.setTermsPrefix(query);
    // dataverses and files use SearchFields.NAME
    // solrQuery.addTermsField(SearchFields.NAME);
    solrQuery.addTermsField("text");
    // long datasetFieldDescription = 33L;
    // String solrFieldDatasetFieldDescription = datasetFieldService.find(datasetFieldDescription).getSolrField();
    // solrQuery.addTermsField(solrFieldDatasetFieldDescription);
    List<Term> items = null;
    try {
        logger.info("Solr query: " + solrQuery);
        QueryResponse qr = solrServer.query(solrQuery);
        TermsResponse resp = qr.getTermsResponse();
        // items = resp.getTerms(SearchFields.NAME);
        items = resp.getTerms("text");
        // items = resp.getTerms(solrFieldDatasetFieldDescription);
    } catch (SolrServerException e) {
        items = null;
    }
    if (items != null) {
        for (Term term : items) {
            logger.info("term: " + term.getTerm());
            results.add(term.getTerm());
        }
    } else {
        logger.info("no terms found");
    }
    return results;
}
Developer: pengchengluo, Project: Peking-University-Open-Research-Data-Platform, Lines: 39, Source: AutoCompleteBean.java
Example 7: getSolrQuery
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
@SuppressWarnings("rawtypes")
public static SolrQuery getSolrQuery(MCRQuery query, Document input, HttpServletRequest request) {
    int rows = Integer.parseInt(input.getRootElement().getAttributeValue("numPerPage", "10"));
    MCRCondition condition = query.getCondition();
    HashMap<String, List<MCRCondition>> table;
    if (condition instanceof MCRSetCondition) {
        table = MCRConditionTransformer.groupConditionsByIndex((MCRSetCondition) condition);
    } else {
        // a single condition is not a set condition, so there is nothing to group
        LOGGER.warn("Condition is not SetCondition.");
        table = new HashMap<>();
        ArrayList<MCRCondition> conditionList = new ArrayList<>();
        conditionList.add(condition);
        table.put("metadata", conditionList);
    }
    boolean booleanAnd = !(condition instanceof MCROrCondition<?>);
    SolrQuery mergedSolrQuery = MCRConditionTransformer.buildMergedSolrQuery(query.getSortBy(), false, booleanAnd,
            table, rows);
    String mask = input.getRootElement().getAttributeValue("mask");
    if (mask != null) {
        mergedSolrQuery.setParam("mask", mask);
        mergedSolrQuery.setParam("_session", request.getParameter("_session"));
    }
    return mergedSolrQuery;
}
Example 8: transform
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
@Override
public SolrQuery transform(final Query query) {
    final SolrQuery solrQuery = new SolrQuery(query.getQuery());
    solrQuery.setParam("q.op", query.getDefaultOperator().name());
    if (query.getCursorMark() != null) {
        solrQuery.setParam(CursorMarkParams.CURSOR_MARK_PARAM, query.getCursorMark());
    } else {
        solrQuery.setStart(query.getOffset());
    }
    solrQuery.setRows(query.getRows());
    if (query.getReturnFields() != null) {
        query.getReturnFields().stream().forEach(f -> solrQuery.addField(f.getName()));
    }
    if (query.getHighlightFields() != null && !query.getHighlightFields().isEmpty()) {
        solrQuery.setHighlight(true);
        query.getHighlightFields().stream().forEach(hf -> solrQuery.addHighlightField(hf.getName()));
    }
    if (query.getFacetFields() != null) {
        query.getFacetFields().stream().forEach(ff -> solrQuery.addFacetField(ff.getName()));
    }
    if (query.getSorts() != null) {
        for (Sort sort : query.getSorts()) {
            SolrQuery.ORDER solrOrder = sort.getSortOrder() == SortOrder.ASC ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc;
            SolrQuery.SortClause sortClause = new SolrQuery.SortClause(sort.getField().getName(), solrOrder);
            solrQuery.addSort(sortClause);
        }
    }
    if (query.getFilterQueries() != null) {
        query.getFilterQueries().stream().forEach(fq -> solrQuery.addFilterQuery(fq.getField().getName() + ":" + fq.getValue()));
    }
    if (query.getParams() != null) {
        query.getParams().entrySet().stream().forEach(e -> solrQuery.add(e.getKey(), e.getValue()));
    }
    return solrQuery;
}
Example 9: main
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
public static void main(String[] args) {
    SolrClient solr = new HttpSolrClient.Builder().withBaseSolrUrl("http://localhost:8983/solr/chronix/").build();

    // Define a group-by function for the time series records
    Function<MetricTimeSeries, String> groupBy = ts -> ts.getName() + "-" + ts.attribute("host");

    // Define a reduce function for the grouped time series records
    BinaryOperator<MetricTimeSeries> reduce = (ts1, ts2) -> {
        if (ts1 == null || ts2 == null) {
            return new MetricTimeSeries.Builder("empty", "metric").build();
        }
        ts1.addAll(ts2.getTimestampsAsArray(), ts2.getValuesAsArray());
        return ts1;
    };

    // Instantiate a Chronix client
    ChronixClient<MetricTimeSeries, SolrClient, SolrQuery> chronix = new ChronixClient<>(
            new MetricTimeSeriesConverter(), new ChronixSolrStorage<>(200, groupBy, reduce));

    // We want the maximum of all time series whose name matches *Load*.
    SolrQuery query = new SolrQuery("name:*Load*");
    query.setParam("cf", "metric{max}");

    // The result is a Java Stream. We simply collect the result into a list.
    List<MetricTimeSeries> maxTS = chronix.stream(solr, query).collect(Collectors.toList());

    // Just print it out.
    LOGGER.info("Result for query {} is: {}", query, prettyPrint(maxTS));
}
Example 10: execute
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext context) {
    SolrQuery query = new SolrQuery();
    query.setRequestHandler("/suggest");
    query.setParam("suggest.dictionary", suggester);
    query.setParam("suggest.build", true);
    try {
        solrClient.query(query);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return RepeatStatus.FINISHED;
}
Example 11: doConstructSolrQuery
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
@Override
public SolrQuery doConstructSolrQuery(TermsQuery query) {
    Assert.notNull(query, "Cannot construct solrQuery from null value.");

    SolrQuery solrQuery = new SolrQuery();
    String queryString = getQueryString(query);
    if (StringUtils.hasText(queryString)) {
        solrQuery.setParam(CommonParams.Q, queryString);
    }
    appendTermsOptionsToSolrQuery(query.getTermsOptions(), solrQuery);
    processTermsFields(solrQuery, query);
    appendRequestHandler(solrQuery, query.getRequestHandler());
    return solrQuery;
}
Example 12: appendProjectionOnFields
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
/**
 * Append a field list (projection) to the given {@link org.apache.solr.client.solrj.SolrQuery}.
 *
 * @param solrQuery the query to modify
 * @param fields the fields to project on; may be empty
 */
protected void appendProjectionOnFields(SolrQuery solrQuery, List<Field> fields) {
    if (CollectionUtils.isEmpty(fields)) {
        return;
    }
    List<String> solrReadableFields = new ArrayList<String>();
    for (Field field : fields) {
        if (field instanceof CalculatedField) {
            solrReadableFields.add(createCalculatedFieldFragment((CalculatedField) field));
        } else {
            solrReadableFields.add(field.getName());
        }
    }
    solrQuery.setParam(CommonParams.FL, StringUtils.join(solrReadableFields, ","));
}
Example 13: doTest
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
@Override
public void doTest() throws Exception {
    handle.clear();
    handle.put("timestamp", SKIPVAL);

    // start a couple of indexing threads
    int[] maxDocList = new int[] {300, 700, 1200, 1350, 3000};
    int[] maxDocNightlyList = new int[] {3000, 7000, 12000, 30000, 45000, 60000};
    int maxDoc;
    if (!TEST_NIGHTLY) {
        maxDoc = maxDocList[random().nextInt(maxDocList.length - 1)];
    } else {
        maxDoc = maxDocNightlyList[random().nextInt(maxDocList.length - 1)];
    }
    indexThread = new StopableIndexingThread(controlClient, cloudClient, "1", true, maxDoc);
    indexThread.start();
    indexThread2 = new StopableIndexingThread(controlClient, cloudClient, "2", true, maxDoc);
    indexThread2.start();

    // give some time to index...
    int[] waitTimes = new int[] {200, 2000, 3000};
    Thread.sleep(waitTimes[random().nextInt(waitTimes.length - 1)]);

    // bring a shard replica down
    JettySolrRunner replica = chaosMonkey.stopShard("shard1", 1).jetty;

    // wait a moment - allow some docs to be indexed so replication time is non-zero
    Thread.sleep(waitTimes[random().nextInt(waitTimes.length - 1)]);

    // bring the shard replica back up
    replica.start();

    // make sure replication can start
    Thread.sleep(3000);
    ZkStateReader zkStateReader = cloudClient.getZkStateReader();

    // stop indexing threads
    indexThread.safeStop();
    indexThread2.safeStop();
    indexThread.join();
    indexThread2.join();

    Thread.sleep(1000);
    waitForThingsToLevelOut(120);
    Thread.sleep(2000);
    waitForThingsToLevelOut(30);
    Thread.sleep(5000);
    waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, false, true);

    // test that leader and replica have the same doc count
    String fail = checkShardConsistency("shard1", false, false);
    if (fail != null) {
        fail(fail);
    }

    SolrQuery query = new SolrQuery("*:*");
    query.setParam("distrib", "false");
    long client1Docs = shardToJetty.get("shard1").get(0).client.solrClient.query(query).getResults().getNumFound();
    long client2Docs = shardToJetty.get("shard1").get(1).client.solrClient.query(query).getResults().getNumFound();
    assertTrue(client1Docs > 0);
    assertEquals(client1Docs, client2Docs);

    // won't always pass yet...
    // query("q", "*:*", "sort", "id desc");
}
Example 14: initQuery
import org.apache.solr.client.solrj.SolrQuery; // import the package/class the method depends on
public SolrQuery initQuery() {
    // create the default SOLR query
    SolrQuery query = new SolrQuery(ClientUtils.escapeQueryChars(getQueryStr()));
    query.addFilterQuery("pid:" + pid); // specific to the current patient
    query.setParam("q.op", "AND");
    query.setRows(101); // fetch up to 101 rows

    // default fields to fetch
    query.setFields(UID, DATETIME, SUMMARY, URL_FIELD, DOMAIN, KIND, FACILITY);

    // apply range filter (if specified)
    if (hasFilter("range")) {
        String range = getFilter("range");
        query.addFilterQuery("{!tag=dt}datetime:[" + new PointInTime(range) + " TO *]");
    }

    // apply kind(s) filter (if specified)
    if (hasFilter("kinds")) {
        Set<String> kinds = StringUtils.commaDelimitedListToSet(getFilter("kinds"));
        String kindFilter = StringUtils.collectionToDelimitedString(kinds, " OR ", "\"", "\"");
        query.addFilterQuery("kind:(" + kindFilter + ")");
    }

    // apply types filter (if specified)
    if (hasFilter("types")) {
        Set<String> types = StringUtils.commaDelimitedListToSet(getFilter("types"));
        String typeFilter = StringUtils.collectionToDelimitedString(types, " OR ", "\"", "\"");
        query.addFilterQuery("{!tag=domain}domain:(" + typeFilter + ")");
    }

    // add date range facets
    for (String s : facetsNames.keySet()) {
        query.addFacetQuery(s);
    }
    query.addFacetField("{!ex=domain}domain");
    query.setFacetMinCount(1);

    // extra debug stuff
    if (isDebugEnabled()) {
        query.set("debug", "true");
    }
    // System.out.println(URLDecoder.decode(query.toString()));
    return query;
}