本文整理汇总了Java中org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse类的典型用法代码示例。如果您正苦于以下问题:Java AnalyzeResponse类的具体用法?Java AnalyzeResponse怎么用?Java AnalyzeResponse使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
AnalyzeResponse类属于org.elasticsearch.action.admin.indices.analyze包,在下文中一共展示了AnalyzeResponse类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testAnalyzerWithFieldOrTypeTests
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Verifies that analyzing with a "type.field" reference ("document.simple")
 * resolves and applies the field's configured analyzer (the "simple" analyzer
 * on a text field), repeating the request several times for stability.
 */
public void testAnalyzerWithFieldOrTypeTests() throws Exception {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
    ensureGreen();
    client().admin().indices().preparePutMapping("test")
        .setType("document").setSource("simple", "type=text,analyzer=simple").get();
    for (int run = 0; run < 10; run++) {
        AnalyzeRequestBuilder builder = client().admin().indices().prepareAnalyze("THIS IS A TEST");
        builder.setIndex(indexOrAlias());
        builder.setField("document.simple");
        AnalyzeResponse response = builder.get();
        assertThat(response.getTokens().size(), equalTo(4));
        // Last token: "TEST" lowercased by the simple analyzer, offsets 10..14.
        AnalyzeResponse.AnalyzeToken lastToken = response.getTokens().get(3);
        assertThat(lastToken.getTerm(), equalTo("test"));
        assertThat(lastToken.getStartOffset(), equalTo(10));
        assertThat(lastToken.getEndOffset(), equalTo(14));
        assertThat(lastToken.getPositionLength(), equalTo(1));
    }
}
示例2: testDetailAnalyzeSpecifyAttributes
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Checks that setAttributes() restricts the explain output to the requested
 * token attributes: only the "keyword" attribute should be present on the
 * snowball-filtered tokens, and the stemmed term must be "troubl".
 */
public void testDetailAnalyzeSpecifyAttributes() throws Exception {
    AnalyzeResponse response = client().admin().indices().prepareAnalyze("This is troubled")
        .setExplain(true).setTokenizer("standard").addTokenFilter("snowball").setAttributes("keyword").get();
    // Exactly one token filter (snowball) producing three tokens.
    assertThat(response.detail().tokenfilters().length, equalTo(1));
    assertThat(response.detail().tokenfilters()[0].getName(), equalTo("snowball"));
    assertThat(response.detail().tokenfilters()[0].getTokens().length, equalTo(3));
    assertThat(response.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("troubl"));
    // Only the requested attribute keys may appear, and each must have a value.
    String[] expectedAttributeKeys = {"keyword"};
    assertThat(response.detail().tokenfilters()[0].getTokens()[2].getAttributes().size(), equalTo(expectedAttributeKeys.length));
    for (String attributeKey : expectedAttributeKeys) {
        assertThat(response.detail().tokenfilters()[0].getTokens()[2].getAttributes().get(attributeKey), notNullValue());
    }
}
示例3: testCustomCharFilterInRequest
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Defines a "mapping" char filter inline in the analyze request (ph =&gt; f,
 * qu =&gt; q) and verifies via the explain output that the anonymous filter
 * rewrites the text before the keyword tokenizer sees it.
 */
public void testCustomCharFilterInRequest() throws Exception {
    Map<String, Object> mappingFilter = new HashMap<>();
    mappingFilter.put("type", "mapping");
    mappingFilter.put("mappings", new String[]{"ph => f", "qu => q"});
    AnalyzeResponse response = client().admin().indices()
        .prepareAnalyze()
        .setText("jeff quit phish")
        .setTokenizer("keyword")
        .addCharFilter(mappingFilter)
        .setExplain(true)
        .get();
    // No named analyzer was used, so the explain detail carries none.
    assertThat(response.detail().analyzer(), IsNull.nullValue());
    // Exactly one (anonymous) char filter, applied to the single input text.
    assertThat(response.detail().charfilters().length, equalTo(1));
    assertThat(response.detail().charfilters()[0].getName(), equalTo("_anonymous_charfilter_[0]"));
    assertThat(response.detail().charfilters()[0].getTexts().length, equalTo(1));
    assertThat(response.detail().charfilters()[0].getTexts()[0], equalTo("jeff qit fish"));
    // The keyword tokenizer emits the filtered text as one token spanning it fully.
    assertThat(response.detail().tokenizer().getName(), equalTo("keyword"));
    assertThat(response.detail().tokenizer().getTokens().length, equalTo(1));
    assertThat(response.detail().tokenizer().getTokens()[0].getTerm(), equalTo("jeff qit fish"));
    assertThat(response.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0));
    assertThat(response.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(15));
    assertThat(response.detail().tokenizer().getTokens()[0].getPositionLength(), equalTo(1));
}
示例4: testAnalyzeNormalizedKeywordField
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * A keyword field with a custom lowercase normalizer must be analyzed through
 * that normalizer: "ABC" becomes the single token "abc" with full-span offsets.
 */
public void testAnalyzeNormalizedKeywordField() throws IOException {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
        .setSettings(Settings.builder().put(indexSettings())
            .put("index.analysis.normalizer.my_normalizer.type", "custom")
            .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
        .addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer"));
    ensureGreen("test");
    AnalyzeResponse response = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get();
    assertThat(response.getTokens().size(), equalTo(1));
    AnalyzeResponse.AnalyzeToken onlyToken = response.getTokens().get(0);
    assertThat(onlyToken.getTerm(), equalTo("abc"));
    assertThat(onlyToken.getStartOffset(), equalTo(0));
    assertThat(onlyToken.getEndOffset(), equalTo(3));
    assertThat(onlyToken.getPosition(), equalTo(0));
    assertThat(onlyToken.getPositionLength(), equalTo(1));
}
示例5: testTokenizer
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Runs the analyze API on accented text ("œ", "é", "â") with an
 * "asciifolding" char filter and a "standard" token filter, then prints
 * each resulting term.
 */
@Test
public void testTokenizer() throws Exception {
    AnalyzeRequest analyzeRequest = new AnalyzeRequest();
    analyzeRequest.text("My œsophagus caused a débâcle");
    /*
     * Tokenizer background (translated from the original notes):
     * - "whitespace" splits on whitespace characters (spaces, tabs, newlines, ...)
     * - "letter" splits on any non-letter character
     * - "standard" uses the Unicode text segmentation algorithm
     */
    analyzeRequest.addTokenFilter("standard");
    analyzeRequest.addCharFilter("asciifolding");
    ActionFuture<AnalyzeResponse> responseFuture = client.admin().indices().analyze(analyzeRequest);
    for (AnalyzeResponse.AnalyzeToken token : responseFuture.actionGet().getTokens()) {
        System.out.println(token.getTerm());
    }
}
示例6: lexicalizeTest
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Disabled integration check for the custom "benz_index" analyzer: the input
 * text is first tokenized one character at a time; after new dictionary words
 * are registered via lexicalize(), the analyzer must emit the compound words.
 */
@Ignore
public void lexicalizeTest() {
    String text = "淘汽云修";
    AnalyzeResponse response = esClient.admin().indices().prepareAnalyze(text).setAnalyzer("benz_index").get();
    // Before adding dictionary entries: one token per character.
    List<AnalyzeToken> expected = new ArrayList<>();
    expected.add(new AnalyzeToken("淘", 0, 1, 0));
    expected.add(new AnalyzeToken("汽", 1, 2, 1));
    expected.add(new AnalyzeToken("云", 2, 3, 2));
    expected.add(new AnalyzeToken("修", 3, 4, 3));
    Assert.assertEquals(expected, AnalyzeToken.valueOf(response));
    // Register the compound words, then analyze again.
    Map<String, String> newWords = new HashMap<>();
    newWords.put("淘汽", "c");
    newWords.put("云修", "c");
    newWords.put("淘汽云修", "c");
    esClient.lexicalize().addWords(newWords).buildAcFailed().get();
    response = esClient.admin().indices().prepareAnalyze(text).setAnalyzer("benz_index").get();
    expected.clear();
    expected.add(new AnalyzeToken("淘汽", 0, 2, 0));
    expected.add(new AnalyzeToken("淘汽云修", 0, 4, 1));
    expected.add(new AnalyzeToken("云修", 2, 4, 2));
    Assert.assertEquals(expected, AnalyzeToken.valueOf(response));
}
示例7: analyze
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Runs the analyze API against {@code request.index} (falling back to this
 * client's default index) and returns the produced terms. Elapsed time is
 * always recorded and slow calls flagged, even when the request fails.
 */
@Override
public List<String> analyze(AnalyzeRequest request) {
    StopWatch watch = new StopWatch();
    String targetIndex = request.index != null ? request.index : this.index;
    try {
        AnalyzeResponse response = client().admin().indices().prepareAnalyze(targetIndex, request.text).setAnalyzer(request.analyzer).get();
        return response.getTokens().stream()
            .map(AnalyzeResponse.AnalyzeToken::getTerm)
            .collect(Collectors.toList());
    } catch (ElasticsearchException e) {
        // elasticsearch runs on an async executor; wrap so the stack trace
        // points back at the call site that caused the exception
        throw new SearchException(e);
    } finally {
        long elapsed = watch.elapsedTime();
        ActionLogContext.track("elasticsearch", elapsed);
        logger.debug("analyze, index={}, analyzer={}, elapsedTime={}", targetIndex, request.analyzer, elapsed);
        checkSlowOperation(elapsed);
    }
}
示例8: assertIncludes
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Analyzes {@code input} with each field's analyzer, indexes the same text
 * into that field, and verifies (a) every expected token is emitted by the
 * analyzer and (b) a term query for each expected token matches the indexed
 * document while a never-indexed bogus term matches nothing.
 *
 * @param input          raw text to analyze and index
 * @param expectedTokens tokens the analyzer must emit for {@code input}
 * @param fields         mapped fields to exercise
 */
private void assertIncludes(String input, List<String> expectedTokens, List<String> fields) throws ExecutionException, InterruptedException, IOException {
    for (String field : fields) {
        AnalyzeResponse response = client().admin().indices().prepareAnalyze(input).setField(field).setIndex("test").execute().get();
        index("test", "type", "1", field, input);
        // Collect the analyzer's output; every emitted term must be non-empty.
        List<String> tokens = new ArrayList<>();
        for (AnalyzeToken token : response.getTokens()) {
            assertFalse(StringUtils.isEmpty(token.getTerm()));
            tokens.add(token.getTerm());
        }
        flush();
        refresh();
        for (String expectedToken : expectedTokens) {
            assertTrue(tokens.contains(expectedToken));
            SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.termQuery(field, expectedToken)).execute().actionGet();
            assertThat(sr.getHits().getTotalHits(), is(1L));
            // A term that was never indexed must not match the document.
            sr = client().prepareSearch("test").setQuery(QueryBuilders.termQuery(field, "bogussearchterm")).execute().actionGet();
            assertThat(sr.getHits().getTotalHits(), is(0L));
        }
    }
}
示例9: analyze
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Analyzes the given text with the named analyzer and returns the resulting
 * terms joined by single spaces, wrapped in an {@link Ngram}.
 *
 * @param text     text to analyze
 * @param analyzer name of the analyzer to apply
 * @return an Ngram whose value is the space-separated token terms
 */
public Ngram analyze(String text, String analyzer){
    List<AnalyzeResponse.AnalyzeToken> tokens = client.admin().indices().prepareAnalyze(indexName, text).setAnalyzer(analyzer).get().getTokens();
    // Join terms with a separator prefixed before every token but the first.
    StringBuilder joined = new StringBuilder();
    for (AnalyzeResponse.AnalyzeToken token : tokens) {
        if (joined.length() > 0) {
            joined.append(" ");
        }
        joined.append(token.getTerm());
    }
    Ngram ngram = new Ngram();
    ngram.setNgram(joined.toString());
    return ngram;
}
示例10: test_defaultAnalyzer
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Verifies the default suggest analyzer: term and reading token lists must
 * line up one-to-one, and the reading form of a few known Japanese terms
 * must match the expected katakana readings.
 */
@Test
public void test_defaultAnalyzer() {
    String text = "Fess (フェス) は「5 分で簡単に構築可能な全文検索サーバー」です。 Java 実行環境があればどの OS でも実行可能です。 Fess は Apache ライセンスで提供され、無料 (フリーソフト) でご利用いただけます。";
    SuggestAnalyzer analyzer = SuggestUtil.createDefaultAnalyzer(runner.client(), settings);
    final List<AnalyzeResponse.AnalyzeToken> terms = analyzer.analyze(text, null);
    final List<AnalyzeResponse.AnalyzeToken> readings = analyzer.analyzeAndReading(text, null);
    assertEquals(terms.size(), readings.size());
    for (int i = 0; i < terms.size(); i++) {
        final String term = terms.get(i).getTerm();
        final String reading = readings.get(i).getTerm();
        // Spot-check a few known terms; all other terms are ignored.
        if ("fess".equals(term)) {
            assertEquals("フェス", reading);
        } else if ("全文検索".equals(term)) {
            assertEquals("ゼンブンケンサク", reading);
        } else if ("無料".equals(term)) {
            assertEquals("ムリョウ", reading);
        }
    }
}
示例11: testSimpleAnalyzerTests
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Analyzes "this is a test" with the index default analyzer and verifies
 * every token's term, offsets, position, and position length, repeating
 * the request several times for stability.
 */
public void testSimpleAnalyzerTests() throws Exception {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
    ensureGreen();
    // Parallel expectation arrays, one entry per token in order.
    String[] expectedTerms = {"this", "is", "a", "test"};
    int[] expectedStartOffsets = {0, 5, 8, 10};
    int[] expectedEndOffsets = {4, 7, 9, 14};
    for (int run = 0; run < 10; run++) {
        AnalyzeResponse response = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get();
        assertThat(response.getTokens().size(), equalTo(expectedTerms.length));
        for (int pos = 0; pos < expectedTerms.length; pos++) {
            AnalyzeResponse.AnalyzeToken token = response.getTokens().get(pos);
            assertThat(token.getTerm(), equalTo(expectedTerms[pos]));
            assertThat(token.getStartOffset(), equalTo(expectedStartOffsets[pos]));
            assertThat(token.getEndOffset(), equalTo(expectedEndOffsets[pos]));
            assertThat(token.getPosition(), equalTo(pos));
            assertThat(token.getPositionLength(), equalTo(1));
        }
    }
}
示例12: testAnalyzeWithNoIndex
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Exercises the analyze API without a target index: a named analyzer, a
 * keyword tokenizer+filter chain, a multi-filter chain ("reverse"), and a
 * stop-word filter that preserves the surviving token's position/offsets.
 */
public void testAnalyzeWithNoIndex() throws Exception {
    AnalyzeResponse response = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get();
    assertThat(response.getTokens().size(), equalTo(4));
    // keyword tokenizer + lowercase -> the whole input as one lowercased token
    response = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").addTokenFilter("lowercase").get();
    assertThat(response.getTokens().size(), equalTo(1));
    assertThat(response.getTokens().get(0).getTerm(), equalTo("this is a test"));
    // standard tokenizer + lowercase + reverse -> every term reversed
    response = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("standard").addTokenFilter("lowercase").addTokenFilter("reverse").get();
    assertThat(response.getTokens().size(), equalTo(4));
    String[] reversedTerms = {"siht", "si", "a", "tset"};
    for (int i = 0; i < reversedTerms.length; i++) {
        assertThat(response.getTokens().get(i).getTerm(), equalTo(reversedTerms[i]));
    }
    // stop filter drops "of" but keeps "course"'s original position and offsets
    response = client().admin().indices().prepareAnalyze("of course").setTokenizer("standard").addTokenFilter("stop").get();
    assertThat(response.getTokens().size(), equalTo(1));
    assertThat(response.getTokens().get(0).getTerm(), equalTo("course"));
    assertThat(response.getTokens().get(0).getPosition(), equalTo(1));
    assertThat(response.getTokens().get(0).getStartOffset(), equalTo(3));
    assertThat(response.getTokens().get(0).getEndOffset(), equalTo(9));
    assertThat(response.getTokens().get(0).getPositionLength(), equalTo(1));
}
示例13: testAnalyzeWithCharFilters
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Covers char filters in the analyze API: the built-in "html_strip" with and
 * without an index, and the index-defined "custom_mapping" char filter
 * (ph=&gt;f, qu=&gt;q), alone and chained after html_strip.
 */
public void testAnalyzeWithCharFilters() throws Exception {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
        .setSettings(Settings.builder().put(indexSettings())
            .put("index.analysis.char_filter.custom_mapping.type", "mapping")
            .putArray("index.analysis.char_filter.custom_mapping.mappings", "ph=>f", "qu=>q")
            .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
            .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "custom_mapping")));
    ensureGreen();
    // html_strip removes markup before tokenizing
    AnalyzeResponse response = client().admin().indices().prepareAnalyze("<h2><b>THIS</b> IS A</h2> <a href=\"#\">TEST</a>").setTokenizer("standard").addCharFilter("html_strip").get();
    assertThat(response.getTokens().size(), equalTo(4));
    response = client().admin().indices().prepareAnalyze("THIS IS A <b>TEST</b>").setTokenizer("keyword").addTokenFilter("lowercase").addCharFilter("html_strip").get();
    assertThat(response.getTokens().size(), equalTo(1));
    assertThat(response.getTokens().get(0).getTerm(), equalTo("this is a test"));
    // index-defined mapping char filter rewrites ph->f and qu->q
    response = client().admin().indices().prepareAnalyze(indexOrAlias(), "jeff quit phish").setTokenizer("keyword").addTokenFilter("lowercase").addCharFilter("custom_mapping").get();
    assertThat(response.getTokens().size(), equalTo(1));
    assertThat(response.getTokens().get(0).getTerm(), equalTo("jeff qit fish"));
    // both char filters applied in sequence
    response = client().admin().indices().prepareAnalyze(indexOrAlias(), "<a href=\"#\">jeff quit fish</a>").setTokenizer("standard").addCharFilter("html_strip").addCharFilter("custom_mapping").get();
    String[] expectedTerms = {"jeff", "qit", "fish"};
    assertThat(response.getTokens().size(), equalTo(expectedTerms.length));
    for (int i = 0; i < expectedTerms.length; i++) {
        assertThat(response.getTokens().get(i).getTerm(), equalTo(expectedTerms[i]));
    }
}
示例14: testThatStandardAndDefaultAnalyzersAreSame
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * The "standard" analyzer, the "default" analyzer, and the implicit analyzer
 * (no analyzer specified) must all tokenize the input identically.
 */
public void testThatStandardAndDefaultAnalyzersAreSame() throws Exception {
    assertTokens(client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("standard").get(),
        "this", "is", "a", "test");
    assertTokens(client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("default").get(),
        "this", "is", "a", "test");
    assertTokens(client().admin().indices().prepareAnalyze("this is a test").get(),
        "this", "is", "a", "test");
}
示例15: testAnalyzerWithMultiValues
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; //导入依赖的package包/类
/**
 * Analyzing multiple text values against a field mapped with
 * position_increment_gap=100 must shift token positions of the second value
 * past the gap, while character offsets continue across both values.
 */
public void testAnalyzerWithMultiValues() throws Exception {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
    ensureGreen();
    client().admin().indices().preparePutMapping("test")
        .setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get();
    String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"};
    AnalyzeRequestBuilder builder = client().admin().indices().prepareAnalyze();
    builder.setText(texts);
    builder.setIndex(indexOrAlias());
    builder.setField("simple");
    AnalyzeResponse response = builder.get();
    assertThat(response.getTokens().size(), equalTo(7));
    // Last token of the first value keeps its natural position (3).
    AnalyzeResponse.AnalyzeToken firstValueToken = response.getTokens().get(3);
    assertThat(firstValueToken.getTerm(), equalTo("test"));
    assertThat(firstValueToken.getPosition(), equalTo(3));
    assertThat(firstValueToken.getStartOffset(), equalTo(10));
    assertThat(firstValueToken.getEndOffset(), equalTo(14));
    assertThat(firstValueToken.getPositionLength(), equalTo(1));
    // A token from the second value: its position continues after the 100-position gap.
    AnalyzeResponse.AnalyzeToken secondValueToken = response.getTokens().get(5);
    assertThat(secondValueToken.getTerm(), equalTo("second"));
    assertThat(secondValueToken.getPosition(), equalTo(105));
    assertThat(secondValueToken.getStartOffset(), equalTo(19));
    assertThat(secondValueToken.getEndOffset(), equalTo(25));
    assertThat(secondValueToken.getPositionLength(), equalTo(1));
}