This article compiles typical usage examples of the Java method org.elasticsearch.ExceptionsHelper.convertToElastic. If you are unsure what ExceptionsHelper.convertToElastic does, how to use it, or where to find examples, the code samples selected below may help. You can also read further about its enclosing class, org.elasticsearch.ExceptionsHelper.
The following shows 11 code examples of ExceptionsHelper.convertToElastic, ordered by popularity by default.
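Before the examples, here is a minimal, self-contained sketch of what the call does, assuming the behaviour the examples below rely on: a checked exception (here an IOException) goes in and an unchecked Elasticsearch exception comes back, with the original exception carried along. The class name and message are illustrative only; depending on the Elasticsearch version, the declared return type is RuntimeException or ElasticsearchException.
import org.elasticsearch.ExceptionsHelper;

import java.io.IOException;

public class ConvertToElasticSketch {
    public static void main(String[] args) {
        try {
            throw new IOException("simulated I/O failure");
        } catch (IOException e) {
            // wrap the checked exception so it can be rethrown without a throws clause;
            // the original IOException travels along with the wrapper (typically as its cause)
            RuntimeException wrapped = ExceptionsHelper.convertToElastic(e);
            System.out.println(wrapped);
        }
    }
}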
Example 1: hitExecute
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
List<String> matchedQueries = new ArrayList<>(2);
try {
addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries);
if (context.parsedPostFilter() != null) {
addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries);
}
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
} finally {
SearchContext.current().clearReleasables(Lifetime.COLLECTION);
}
hitContext.hit().matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
}
Example 2: shardOperation
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
protected FieldStatsShardResponse shardOperation(FieldStatsShardRequest request) {
ShardId shardId = request.shardId();
Map<String, FieldStats> fieldStats = new HashMap<>();
IndexService indexServices = indicesService.indexServiceSafe(shardId.getIndex());
MapperService mapperService = indexServices.mapperService();
IndexShard shard = indexServices.shardSafe(shardId.id());
try (Engine.Searcher searcher = shard.acquireSearcher("fieldstats")) {
for (String field : request.getFields()) {
MappedFieldType fieldType = mapperService.fullName(field);
if (fieldType != null) {
IndexReader reader = searcher.reader();
Terms terms = MultiFields.getTerms(reader, field);
if (terms != null) {
fieldStats.put(field, fieldType.stats(terms, reader.maxDoc()));
}
} else {
throw new IllegalArgumentException("field [" + field + "] doesn't exist");
}
}
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
}
return new FieldStatsShardResponse(shardId, fieldStats);
}
Example 3: topDocs
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
Query rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = Queries.newNonNestedFilter();
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
Query childFilter = childObjectMapper.nestedTypeFilter();
Query q = Queries.filtered(query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext));
if (size() == 0) {
return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector topDocsCollector;
if (sort() != null) {
try {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores());
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
}
} else {
topDocsCollector = TopScoreDocCollector.create(topN);
}
try {
context.searcher().search(q, topDocsCollector);
} finally {
clearReleasables(Lifetime.COLLECTION);
}
return topDocsCollector.topDocs(from(), size());
}
}
Example 4: getParentId
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) {
try {
SortedDocValues docValues = reader.getSortedDocValues(fieldMapper.name());
if (docValues == null) {
// hit has no _parent field.
return null;
}
BytesRef parentId = docValues.get(docId);
return parentId.length > 0 ? parentId.utf8ToString() : null;
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
Example 5: getBitSet
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
public BitSet getBitSet(LeafReaderContext context) throws IOException {
try {
return getAndLoadIfNotPresent(query, context);
} catch (ExecutionException e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
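Example 5 converts an ExecutionException rather than an IOException: the bitset is loaded through a cache, and cache loaders report failures wrapped in ExecutionException. The sketch below shows the same pattern with Guava's Cache; the cache setup, key type and expensiveLoad helper are illustrative assumptions, not the actual BitsetFilterCache internals.
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import org.elasticsearch.ExceptionsHelper;

import java.util.concurrent.ExecutionException;

public class CachedLoadSketch {

    private final Cache<String, long[]> cache = CacheBuilder.newBuilder().build();

    // Mirrors getBitSet above: the loader's failure surfaces as an ExecutionException
    // from Cache.get and is rethrown as an unchecked Elasticsearch exception.
    public long[] getOrLoad(String key) {
        try {
            return cache.get(key, () -> expensiveLoad(key));
        } catch (ExecutionException e) {
            throw ExceptionsHelper.convertToElastic(e);
        }
    }

    // Stand-in for the expensive bitset construction.
    private long[] expensiveLoad(String key) {
        return new long[] { key.hashCode() };
    }
}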
Example 6: attemptSyncedFlush
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
/**
* Blocking version of {@link SyncedFlushService#attemptSyncedFlush(ShardId, ActionListener)}
*/
public static ShardsSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, ShardId shardId) {
SyncedFlushService service = cluster.getInstance(SyncedFlushService.class);
LatchedListener<ShardsSyncedFlushResult> listener = new LatchedListener<>();
service.attemptSyncedFlush(shardId, listener);
try {
listener.latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (listener.error != null) {
throw ExceptionsHelper.convertToElastic(listener.error);
}
return listener.result;
}
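Examples 6 and 7 wait on a LatchedListener and then rethrow any captured failure through convertToElastic. The real helper is part of Elasticsearch's test utilities; the version below is a hypothetical minimal reconstruction, assuming the standard ActionListener onResponse/onFailure callbacks (in older versions onFailure takes a Throwable instead of an Exception).
import org.elasticsearch.action.ActionListener;

import java.util.concurrent.CountDownLatch;

public class LatchedListener<T> implements ActionListener<T> {

    public final CountDownLatch latch = new CountDownLatch(1);
    public volatile T result;
    public volatile Exception error;

    @Override
    public void onResponse(T response) {
        result = response;  // capture the asynchronous result
        latch.countDown();  // unblock the caller awaiting the latch
    }

    @Override
    public void onFailure(Exception e) {
        error = e;          // captured failure is later rethrown via convertToElastic
        latch.countDown();
    }
}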
Example 7: sendPreSyncRequests
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
/**
* Blocking version of {@link SyncedFlushService#sendPreSyncRequests(List, ClusterState, ShardId, ActionListener)}
*/
public static Map<String, Engine.CommitId> sendPreSyncRequests(SyncedFlushService service, List<ShardRouting> activeShards, ClusterState state, ShardId shardId) {
LatchedListener<Map<String, Engine.CommitId>> listener = new LatchedListener<>();
service.sendPreSyncRequests(activeShards, state, shardId, listener);
try {
listener.latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (listener.error != null) {
throw ExceptionsHelper.convertToElastic(listener.error);
}
return listener.result;
}
Example 8: topDocs
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
Query rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = Queries.newNonNestedFilter();
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
Query childFilter = childObjectMapper.nestedTypeFilter();
Query q = Queries.filtered(query.query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext));
if (size() == 0) {
return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector topDocsCollector;
if (sort() != null) {
try {
topDocsCollector = TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores());
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
}
} else {
topDocsCollector = TopScoreDocCollector.create(topN);
}
try {
context.searcher().search(q, topDocsCollector);
} finally {
clearReleasables(Lifetime.COLLECTION);
}
return topDocsCollector.topDocs(from(), size());
}
}
Example 9: hitsExecute
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
public void hitsExecute(SearchContext context, SearchHit[] hits) {
if (hits.length == 0 ||
// in case the request has only suggest, parsed query is null
context.parsedQuery() == null) {
return;
}
hits = hits.clone(); // don't modify the incoming hits
Arrays.sort(hits, (a, b) -> Integer.compare(a.docId(), b.docId()));
@SuppressWarnings("unchecked")
List<String>[] matchedQueries = new List[hits.length];
for (int i = 0; i < matchedQueries.length; ++i) {
matchedQueries[i] = new ArrayList<>();
}
Map<String, Query> namedQueries = new HashMap<>(context.parsedQuery().namedFilters());
if (context.parsedPostFilter() != null) {
namedQueries.putAll(context.parsedPostFilter().namedFilters());
}
try {
for (Map.Entry<String, Query> entry : namedQueries.entrySet()) {
String name = entry.getKey();
Query query = entry.getValue();
int readerIndex = -1;
int docBase = -1;
Weight weight = context.searcher().createNormalizedWeight(query, false);
Bits matchingDocs = null;
final IndexReader indexReader = context.searcher().getIndexReader();
for (int i = 0; i < hits.length; ++i) {
SearchHit hit = hits[i];
int hitReaderIndex = ReaderUtil.subIndex(hit.docId(), indexReader.leaves());
if (readerIndex != hitReaderIndex) {
readerIndex = hitReaderIndex;
LeafReaderContext ctx = indexReader.leaves().get(readerIndex);
docBase = ctx.docBase;
// scorers can be costly to create, so reuse them across docs of the same segment
Scorer scorer = weight.scorer(ctx);
matchingDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), scorer);
}
if (matchingDocs.get(hit.docId() - docBase)) {
matchedQueries[i].add(name);
}
}
}
for (int i = 0; i < hits.length; ++i) {
hits[i].matchedQueries(matchedQueries[i].toArray(new String[matchedQueries[i].size()]));
}
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
} finally {
context.clearReleasables(Lifetime.COLLECTION);
}
}
Example 10: newConfigurationException
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
public static ElasticsearchException newConfigurationException(String processorType, String processorTag,
String propertyName, Exception cause) {
ElasticsearchException exception = ExceptionsHelper.convertToElastic(cause);
addHeadersToException(exception, processorType, processorTag, propertyName);
return exception;
}
Example 11: parse
import org.elasticsearch.ExceptionsHelper; // import the package/class the method depends on
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
SubSearchContext subSearchContext = new SubSearchContext(context);
XContentParser.Token token;
String currentFieldName = null;
try {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if ("sort".equals(currentFieldName)) {
sortParseElement.parse(parser, subSearchContext);
} else if ("_source".equals(currentFieldName)) {
sourceParseElement.parse(parser, subSearchContext);
} else if ("fields".equals(currentFieldName)) {
fieldsParseElement.parse(parser, subSearchContext);
} else if (token.isValue()) {
switch (currentFieldName) {
case "from":
subSearchContext.from(parser.intValue());
break;
case "size":
subSearchContext.size(parser.intValue());
break;
case "track_scores":
case "trackScores":
subSearchContext.trackScores(parser.booleanValue());
break;
case "version":
subSearchContext.version(parser.booleanValue());
break;
case "explain":
subSearchContext.explain(parser.booleanValue());
break;
default:
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_OBJECT) {
switch (currentFieldName) {
case "highlight":
highlighterParseElement.parse(parser, subSearchContext);
break;
case "scriptFields":
case "script_fields":
scriptFieldsParseElement.parse(parser, subSearchContext);
break;
default:
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
switch (currentFieldName) {
case "fielddataFields":
case "fielddata_fields":
fieldDataFieldsParseElement.parse(parser, subSearchContext);
break;
default:
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].",
parser.getTokenLocation());
}
}
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
return new TopHitsAggregator.Factory(aggregationName, fetchPhase, subSearchContext);
}