This article compiles typical usage examples of the Java method org.elasticsearch.search.SearchHit.getSource. If you are wondering what SearchHit.getSource does, how to call it, or what it looks like in real code, the curated method examples below should help. You can also explore further usage of its declaring class, org.elasticsearch.search.SearchHit.
The following presents 15 code examples of SearchHit.getSource, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
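Before the examples, here is a minimal, self-contained sketch of the pattern they all share: run a search with the (legacy) transport-client API, iterate the hits, and read fields out of the Map<String, Object> returned by SearchHit.getSource(). The index name, field names, and the GetSourceSketch class are hypothetical placeholders rather than code taken from the examples; note also that newer Elasticsearch clients expose the same data through getSourceAsMap() instead of getSource().

import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;

public class GetSourceSketch {

  // Query a (hypothetical) index and print one field from each hit's _source map.
  public static void printTitles(Client client) {
    SearchResponse response = client.prepareSearch("my_index")              // hypothetical index name
        .setQuery(QueryBuilders.termQuery("status", "published"))           // hypothetical field and value
        .setSize(10)
        .execute().actionGet();

    for (SearchHit hit : response.getHits().getHits()) {
      Map<String, Object> source = hit.getSource();                         // the document _source as a map
      // Values carry the JSON-mapped Java types (String, Integer, Double, List, Map, ...),
      // so they must be cast to the expected type, as the examples below do.
      String title = (String) source.get("title");
      System.out.println(hit.getId() + " -> " + title);
    }
  }
}

All 15 examples below follow this same hit-iteration pattern, differing mainly in the query they build and the fields they extract from the source map.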
Example 1: dateSearchBraces
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
@Test
public void dateSearchBraces() throws IOException, SqlParseException, SQLFeatureNotSupportedException, ParseException {
  DateTimeFormatter formatter = DateTimeFormat.forPattern(TS_DATE_FORMAT);
  DateTime dateToCompare = new DateTime(2015, 3, 15, 0, 0, 0);

  SearchHits response = query(String.format("SELECT odbc_time FROM %s/odbc WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", TEST_INDEX));
  SearchHit[] hits = response.getHits();
  for (SearchHit hit : hits) {
    Map<String, Object> source = hit.getSource();
    String insertTimeStr = (String) source.get("odbc_time");
    insertTimeStr = insertTimeStr.replace("{ts '", "").replace("'}", "");

    DateTime insertTime = formatter.parseDateTime(insertTimeStr);
    String errorMessage = String.format("insert_time must be smaller than 2015-03-15. found: %s", insertTime);
    Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare));
  }
}
Example 2: aggregateRelatedTerms
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
public void aggregateRelatedTerms(String input, String model) {
  // fetch the top 11 hits so the input term itself can be filtered out below
  SearchResponse usrhis = es.getClient().prepareSearch(props.getProperty(INDEX_NAME))
      .setTypes(model)
      .setQuery(QueryBuilders.termQuery("keywords", input))
      .addSort(WEIGHT, SortOrder.DESC)
      .setSize(11)
      .execute().actionGet();

  LOG.info("\n************************ {} results***************************", model);

  for (SearchHit hit : usrhis.getHits().getHits()) {
    Map<String, Object> result = hit.getSource();
    String keywords = (String) result.get("keywords");
    String relatedKey = extractRelated(keywords, input);

    if (!relatedKey.equals(input)) {
      LinkedTerm lTerm = new LinkedTerm(relatedKey, (double) result.get(WEIGHT), model);
      LOG.info("( {} {} )", relatedKey, (double) result.get(WEIGHT));
      termList.add(lTerm);
    }
  }
}
Example 3: getRelatedDataFromES
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
/**
 * Get recommended datasets for a given dataset
 *
 * @param type recommendation method
 * @param input a given dataset
 * @param num the number of recommended datasets
 * @return list of recommended datasets
 */
public List<LinkedTerm> getRelatedDataFromES(String type, String input, int num) {
  SearchRequestBuilder builder = es.getClient().prepareSearch(props.getProperty(INDEX_NAME))
      .setTypes(type)
      .setQuery(QueryBuilders.termQuery("concept_A", input))
      .addSort(WEIGHT, SortOrder.DESC)
      .setSize(num);
  SearchResponse usrhis = builder.execute().actionGet();

  for (SearchHit hit : usrhis.getHits().getHits()) {
    Map<String, Object> result = hit.getSource();
    String conceptB = (String) result.get("concept_B");
    if (!conceptB.equals(input)) {
      LinkedTerm lTerm = new LinkedTerm(conceptB, (double) result.get(WEIGHT), type);
      termList.add(lTerm);
    }
  }
  return termList;
}
Example 4: complexConditionQuery
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
@Test
public void complexConditionQuery() throws IOException, SqlParseException, SQLFeatureNotSupportedException {
  String errorMessage = "Result does not match the condition (gender='m' AND (age > 25 OR account_number > 5)) OR (gender='f' AND (age > 30 OR account_number < 8))";
  SearchHits response = query(String.format("SELECT * FROM %s/account WHERE (gender='m' AND (age> 25 OR account_number>5)) OR (gender='f' AND (age>30 OR account_number < 8))", TEST_INDEX));
  SearchHit[] hits = response.getHits();
  for (SearchHit hit : hits) {
    Map<String, Object> source = hit.getSource();
    String gender = ((String) source.get("gender")).toLowerCase();
    int age = (int) source.get("age");
    int account_number = (int) source.get("account_number");
    Assert.assertTrue(errorMessage, (gender.equals("m") && (age > 25 || account_number > 5)) || (gender.equals("f") && (age > 30 || account_number < 8)));
  }
}
Example 5: getOnServiceMetadata
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
/**
 * getOnServiceMetadata: Get the names of on-service metadata; the key is the
 * lowercase short name and the value is the original short name
 *
 * @param es
 *          the Elasticsearch client
 * @return a map from lowercase metadata name to original metadata name
 */
private Map<String, String> getOnServiceMetadata(ESDriver es) {
  String indexName = props.getProperty(MudrodConstants.ES_INDEX_NAME);
  String metadataType = props.getProperty("recom_metadataType");

  Map<String, String> shortnameMap = new HashMap<>();
  SearchResponse scrollResp = es.getClient().prepareSearch(indexName)
      .setTypes(metadataType)
      .setScroll(new TimeValue(60000))
      .setQuery(QueryBuilders.matchAllQuery())
      .setSize(100)
      .execute().actionGet();

  while (true) {
    for (SearchHit hit : scrollResp.getHits().getHits()) {
      Map<String, Object> metadata = hit.getSource();
      String shortName = (String) metadata.get("Dataset-ShortName");
      shortnameMap.put(shortName.toLowerCase(), shortName);
    }
    scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
    if (scrollResp.getHits().getHits().length == 0) {
      break;
    }
  }
  return shortnameMap;
}
Example 6: getMaxSimilarity
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
private static double getMaxSimilarity(ESDriver es, String index, String type, String concept) {
  double maxSim = 1.0;
  SearchRequestBuilder builder = es.getClient().prepareSearch(index)
      .setTypes(type)
      .setQuery(QueryBuilders.termQuery("concept_A", concept))
      .addSort("weight", SortOrder.DESC)
      .setSize(1);
  SearchResponse usrhis = builder.execute().actionGet();

  SearchHit[] hits = usrhis.getHits().getHits();
  if (hits.length == 1) {
    SearchHit hit = hits[0];
    Map<String, Object> result = hit.getSource();
    maxSim = (double) result.get("weight");
  }
  if (maxSim == 0.0) {
    maxSim = 1.0;
  }
  return maxSim;
}
Example 7: getSessions
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
/**
 * getSessions: Get sessions from logs
 *
 * @param props
 *          the Mudrod configuration
 * @param es
 *          the Elasticsearch driver
 * @param logIndex
 *          a log index name
 * @return list of session names
 */
protected List<String> getSessions(Properties props, ESDriver es, String logIndex) {
  String cleanupPrefix = props.getProperty(MudrodConstants.CLEANUP_TYPE_PREFIX);
  String sessionStatPrefix = props.getProperty(MudrodConstants.SESSION_STATS_PREFIX);

  List<String> sessions = new ArrayList<>();
  SearchResponse scrollResp = es.getClient().prepareSearch(logIndex)
      .setTypes(sessionStatPrefix)
      .setScroll(new TimeValue(60000))
      .setQuery(QueryBuilders.matchAllQuery())
      .setSize(100)
      .execute().actionGet();

  while (true) {
    for (SearchHit hit : scrollResp.getHits().getHits()) {
      Map<String, Object> session = hit.getSource();
      String sessionID = (String) session.get("SessionID");
      sessions.add(sessionID + "," + logIndex + "," + cleanupPrefix);
    }
    scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
    if (scrollResp.getHits().getHits().length == 0) {
      break;
    }
  }
  return sessions;
}
Example 8: getSessionTree
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
/**
 * Method of converting a given session to a tree structure
 *
 * @param indexName index name in Elasticsearch
 * @param type session type name in Elasticsearch
 * @param sessionID ID of the session
 * @return an instance of the session tree structure
 * @throws UnsupportedEncodingException UnsupportedEncodingException
 */
private SessionTree getSessionTree(String indexName, String type, String sessionID) throws UnsupportedEncodingException {
  SearchResponse response = es.getClient().prepareSearch(indexName)
      .setTypes(type)
      .setQuery(QueryBuilders.termQuery("SessionID", sessionID))
      .setSize(100)
      .addSort("Time", SortOrder.ASC)
      .execute().actionGet();

  SessionTree tree = new SessionTree(this.props, this.es, sessionID, type);
  int seq = 1;
  for (SearchHit hit : response.getHits().getHits()) {
    Map<String, Object> result = hit.getSource();
    String request = (String) result.get("Request");
    String time = (String) result.get("Time");
    String logType = (String) result.get("LogType");
    String referer = (String) result.get("Referer");
    SessionNode node = new SessionNode(request, logType, referer, time, seq);
    tree.insert(node);
    seq++;
  }
  return tree;
}
Example 9: SearchResult
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
public SearchResult(SearchResponse resp) {
  SearchHits hits = resp.getHits();
  this.total = hits.getTotalHits();
  results = new ArrayList<>(hits.getHits().length);
  for (SearchHit searchHit : hits.getHits()) {
    if (searchHit.getSource() != null) {
      results.add(searchHit.getSource());
    } else if (searchHit.getFields() != null) {
      Map<String, SearchHitField> fields = searchHit.getFields();
      results.add(toFieldsMap(fields));
    }
  }
}
Example 10: notBetweenTest
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
@Test
public void notBetweenTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException {
  int min = 20;
  int max = 37;
  SearchHits response = query(String.format("SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", TEST_INDEX, min, max));
  SearchHit[] hits = response.getHits();
  for (SearchHit hit : hits) {
    Map<String, Object> source = hit.getSource();
    // ignore documents that do not contain the age field
    if (source.containsKey("age")) {
      int age = (int) source.get("age");
      assertThat(age, not(allOf(greaterThanOrEqualTo(min), lessThanOrEqualTo(max))));
    }
  }
}
Example 11: dateBetweenSearch
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
@Test
public void dateBetweenSearch() throws IOException, SqlParseException, SQLFeatureNotSupportedException {
  DateTimeFormatter formatter = DateTimeFormat.forPattern(DATE_FORMAT);
  DateTime dateLimit1 = new DateTime(2014, 8, 18, 0, 0, 0);
  DateTime dateLimit2 = new DateTime(2014, 8, 21, 0, 0, 0);

  SearchHits response = query(String.format("SELECT insert_time FROM %s/online WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' LIMIT 3", TEST_INDEX));
  SearchHit[] hits = response.getHits();
  for (SearchHit hit : hits) {
    Map<String, Object> source = hit.getSource();
    DateTime insertTime = formatter.parseDateTime((String) source.get("insert_time"));
    boolean isBetween =
        (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) &&
        (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2));
    Assert.assertTrue("insert_time must be between 2014-08-18 and 2014-08-21", isBetween);
  }
}
Example 12: aggregateRelatedTermsSWEET
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
/**
 * Method of querying related terms from the ontology
 *
 * @param input input query
 * @param model source name
 */
public void aggregateRelatedTermsSWEET(String input, String model) {
  SearchResponse usrhis = es.getClient().prepareSearch(props.getProperty(INDEX_NAME))
      .setTypes(model)
      .setQuery(QueryBuilders.termQuery("concept_A", input))
      .addSort(WEIGHT, SortOrder.DESC)
      .setSize(11)
      .execute().actionGet();

  LOG.info("\n************************ {} results***************************", model);

  for (SearchHit hit : usrhis.getHits().getHits()) {
    Map<String, Object> result = hit.getSource();
    String conceptB = (String) result.get("concept_B");
    if (!conceptB.equals(input)) {
      LinkedTerm lTerm = new LinkedTerm(conceptB, (double) result.get(WEIGHT), model);
      LOG.info("( {} {} )", conceptB, (double) result.get(WEIGHT));
      termList.add(lTerm);
    }
  }
}
Example 13: loadMetadataFromES
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
public List<Tuple2<String, String>> loadMetadataFromES(ESDriver es, List<String> variables) throws Exception {
  SearchResponse scrollResp = es.getClient().prepareSearch(indexName)
      .setTypes(metadataType)
      .setQuery(QueryBuilders.matchAllQuery())
      .setScroll(new TimeValue(60000))
      .setSize(100)
      .execute().actionGet();

  List<Tuple2<String, String>> datasetsTokens = new ArrayList<>();
  while (true) {
    for (SearchHit hit : scrollResp.getHits().getHits()) {
      Map<String, Object> result = hit.getSource();
      String shortName = (String) result.get("Dataset-ShortName");

      String fieldStr = "";
      int size = variables.size();
      for (int i = 0; i < size; i++) {
        String field = variables.get(i);
        Object fieldValue = result.get(field);
        if (fieldValue != null) {
          fieldStr = es.customAnalyzing(indexName, fieldValue.toString());
        }
      }
      datasetsTokens.add(new Tuple2<String, String>(shortName, fieldStr));
    }
    scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
    if (scrollResp.getHits().getHits().length == 0) {
      break;
    }
  }
  return datasetsTokens;
}
Example 14: normalizeMetadataVariables
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
public void normalizeMetadataVariables(ESDriver es) {
  es.createBulkProcessor();
  SearchResponse scrollResp = es.getClient().prepareSearch(indexName)
      .setTypes(metadataType)
      .setScroll(new TimeValue(60000))
      .setQuery(QueryBuilders.matchAllQuery())
      .setSize(100)
      .execute().actionGet();

  while (true) {
    for (SearchHit hit : scrollResp.getHits().getHits()) {
      Map<String, Object> metadata = hit.getSource();
      Map<String, Object> updatedValues = new HashMap<>();

      this.normalizeSpatialVariables(metadata, updatedValues);
      this.normalizeTemporalVariables(metadata, updatedValues);
      this.normalizeOtherVariables(metadata, updatedValues);

      UpdateRequest ur = es.generateUpdateRequest(indexName, metadataType, hit.getId(), updatedValues);
      es.getBulkProcessor().add(ur);
    }
    scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
    if (scrollResp.getHits().getHits().length == 0) {
      break;
    }
  }
  es.destroyBulkProcessor();
}
Example 15: standardTriples
import org.elasticsearch.search.SearchHit; // import the package/class that the method depends on
public static void standardTriples(ESDriver es, String index, String type) throws IOException {
  es.createBulkProcessor();
  SearchResponse sr = es.getClient().prepareSearch(index)
      .setTypes(type)
      .setQuery(QueryBuilders.matchAllQuery())
      .setSize(0)
      .addAggregation(AggregationBuilders.terms("concepts").field("concept_A").size(0))
      .execute().actionGet();
  Terms concepts = sr.getAggregations().get("concepts");

  for (Terms.Bucket entry : concepts.getBuckets()) {
    String concept = (String) entry.getKey();
    double maxSim = LinkageTriple.getMaxSimilarity(es, index, type, concept);
    if (maxSim == 1.0) {
      continue;
    }

    SearchResponse scrollResp = es.getClient().prepareSearch(index)
        .setTypes(type)
        .setScroll(new TimeValue(60000))
        .setQuery(QueryBuilders.termQuery("concept_A", concept))
        .addSort("weight", SortOrder.DESC)
        .setSize(100)
        .execute().actionGet();
    while (true) {
      for (SearchHit hit : scrollResp.getHits().getHits()) {
        Map<String, Object> metadata = hit.getSource();
        double sim = (double) metadata.get("weight");
        double newSim = sim / maxSim;
        UpdateRequest ur = es.generateUpdateRequest(index, type, hit.getId(), "weight", Double.parseDouble(df.format(newSim)));
        es.getBulkProcessor().add(ur);
      }
      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
      if (scrollResp.getHits().getHits().length == 0) {
        break;
      }
    }
  }
  es.destroyBulkProcessor();
}