This article collects typical usage examples of the Java class org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider. If you are unsure what the AnalysisProvider class does, how to use it, or want to see it in context, the curated code examples below should help.
The AnalysisProvider class belongs to the org.elasticsearch.indices.analysis.AnalysisModule package. Fifteen code examples of the class are shown below, sorted by popularity by default.
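Before the examples, a quick orientation: AnalysisProvider is a small factory interface that maps (IndexSettings, Environment, name, Settings) to an analysis component, and plugins register providers by name via AnalysisPlugin. The following minimal sketch is not taken from the examples below; the plugin class, the "my_noop" filter name, and the pass-through filter body are invented purely for illustration.

import java.util.Collections;
import java.util.Map;

import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;

// Hypothetical plugin: registers a no-op token filter under the name "my_noop".
public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
    @Override
    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        // The lambda is the AnalysisProvider: it receives the index settings, node environment,
        // configured component name and component settings, and returns a TokenFilterFactory.
        return Collections.singletonMap("my_noop",
                (indexSettings, env, name, settings) -> new TokenFilterFactory() {
                    @Override
                    public String name() {
                        return name;
                    }

                    @Override
                    public TokenStream create(TokenStream tokenStream) {
                        return tokenStream; // placeholder: pass tokens through unchanged
                    }
                });
    }
}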
Example 1: getTokenFilterProvider
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
/**
 * Returns a registered {@link TokenFilterFactory} provider by {@link IndexSettings}
 * or a registered {@link TokenFilterFactory} provider by predefined name
 * or <code>null</code> if the tokenFilter was not registered
 * @param tokenFilter global or defined tokenFilter name
 * @param indexSettings an index settings
 * @return {@link TokenFilterFactory} provider or <code>null</code>
 */
public AnalysisProvider<TokenFilterFactory> getTokenFilterProvider(String tokenFilter, IndexSettings indexSettings) {
    final Map<String, Settings> tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.filter");
    if (tokenFilterSettings.containsKey(tokenFilter)) {
        Settings currentSettings = tokenFilterSettings.get(tokenFilter);
        String typeName = currentSettings.get("type");
        /*
         * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
         * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
         * hide internal data-structures as much as possible.
         */
        if ("synonym".equals(typeName)) {
            return requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings));
        } else if ("synonym_graph".equals(typeName)) {
            return requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings));
        } else {
            return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName);
        }
    } else {
        return getTokenFilterProvider(tokenFilter);
    }
}
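A hedged usage sketch for the method above: here registry is assumed to be an AnalysisRegistry instance, environment a node Environment, and "my_synonyms" a filter name configured in the index settings; all three are invented for illustration, and AnalysisProvider.get may throw IOException.

AnalysisProvider<TokenFilterFactory> provider = registry.getTokenFilterProvider("my_synonyms", indexSettings);
if (provider != null) {
    // Look up the per-filter settings block and build the factory through the provider.
    Settings filterSettings = indexSettings.getSettings().getGroups("index.analysis.filter").get("my_synonyms");
    TokenFilterFactory factory = provider.get(indexSettings, environment, "my_synonyms", filterSettings);
}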
Example 2: analyze
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
    AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
        @Override
        public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
            return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
        }
    }));
    IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings);
    Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();
    AllEntries allEntries = new AllEntries();
    allEntries.addText("field1", text, 1.0f);
    TokenStream stream = AllTokenStream.allTokenStream("_all", text, 1.0f, analyzer);
    stream.reset();
    CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
    List<String> terms = new ArrayList<>();
    while (stream.incrementToken()) {
        String tokText = termAtt.toString();
        terms.add(tokText);
    }
    return terms;
}
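A hedged example of calling the helper above. The analyzer name, filter wiring and path.home value are assumptions made for illustration; createTempDir() is expected to come from the surrounding ESTestCase.

Settings settings = Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) // Environment requires path.home
        .put("index.analysis.analyzer.my_analyzer.type", "custom")
        .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
        .put("index.analysis.analyzer.my_analyzer.filter", "myfilter") // the filter registered by the plugin above
        .build();
List<String> terms = analyze(settings, "my_analyzer", "Some Sample Text");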
Example 3: getTokenizers
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
    final Map<String, AnalysisProvider<TokenizerFactory>> extra = new HashMap<>();
    extra.put("fess_japanese_tokenizer", (indexSettings, env, name, settings) -> new JapaneseTokenizerFactory(indexSettings, env, name,
            settings, pluginComponent.getFessAnalysisService()));
    extra.put("fess_japanese_reloadable_tokenizer",
            (indexSettings, env, name, settings) -> new ReloadableJapaneseTokenizerFactory(indexSettings, env, name, settings,
                    pluginComponent.getFessAnalysisService()));
    extra.put("fess_korean_tokenizer", (indexSettings, env, name, settings) -> new KoreanTokenizerFactory(indexSettings, env, name,
            settings, pluginComponent.getFessAnalysisService()));
    extra.put("fess_vietnamese_tokenizer", (indexSettings, env, name, settings) -> new VietnameseTokenizerFactory(indexSettings, env,
            name, settings, pluginComponent.getFessAnalysisService()));
    extra.put("fess_simplified_chinese_tokenizer", (indexSettings, env, name, settings) -> new ChineseTokenizerFactory(indexSettings,
            env, name, settings, pluginComponent.getFessAnalysisService()));
    return extra;
}
Example 4: getTokenFilters
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
    final Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
    extra.put("reloadable_kuromoji_baseform", KuromojiBaseFormFilterFactory::new);
    extra.put("reloadable_kuromoji_part_of_speech", KuromojiPartOfSpeechFilterFactory::new);
    extra.put("reloadable_kuromoji_readingform", KuromojiReadingFormFilterFactory::new);
    extra.put("reloadable_kuromoji_stemmer", KuromojiKatakanaStemmerFactory::new);
    extra.put("kanji_number", KanjiNumberFilterFactory::new);
    extra.put("kuromoji_pos_concat", PosConcatenationFilterFactory::new);
    extra.put("char_type", CharTypeFilterFactory::new);
    extra.put("number_concat", NumberConcatenationFilterFactory::new);
    extra.put("pattern_concat", PatternConcatenationFilterFactory::new);
    extra.put("stop_prefix", StopTokenPrefixFilterFactory::new);
    extra.put("stop_suffix", StopTokenSuffixFilterFactory::new);
    extra.put("reloadable_keyword_marker", ReloadableKeywordMarkerFilterFactory::new);
    extra.put("reloadable_stop", ReloadableStopFilterFactory::new);
    extra.put("flexible_porter_stem", FlexiblePorterStemFilterFactory::new);
    extra.put("alphanum_word", AlphaNumWordFilterFactory::new);
    return extra;
}
Example 5: getTokenFilters
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
    final Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
    extra.put("synonym_filter", new AnalysisProvider<TokenFilterFactory>() {
        @Override
        public TokenFilterFactory get(final IndexSettings indexSettings, final Environment environment, final String name, final Settings settings)
                throws IOException {
            return new SynonymTokenFilterFactory(indexSettings, environment, name, settings, pluginComponent.getAnalysisRegistry());
        }

        @Override
        public boolean requiresAnalysisSettings() {
            return true;
        }
    });
    return extra;
}
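The anonymous class above exists only so that requiresAnalysisSettings() can return true. If a wrapper like the one shown in Example 11 below is available, the same registration can be written more compactly; this variant is a sketch rather than part of the original example.

// Compact alternative, assuming a requiresAnalysisSettings(...) helper as in Example 11.
extra.put("synonym_filter", requiresAnalysisSettings(
        (indexSettings, environment, name, settings) ->
                new SynonymTokenFilterFactory(indexSettings, environment, name, settings,
                        pluginComponent.getAnalysisRegistry())));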
Example 6: AnalysisRegistry
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
public AnalysisRegistry(Environment environment,
                        Map<String, AnalysisProvider<CharFilterFactory>> charFilters,
                        Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters,
                        Map<String, AnalysisProvider<TokenizerFactory>> tokenizers,
                        Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers,
                        Map<String, AnalysisProvider<AnalyzerProvider<?>>> normalizers) {
    this.environment = environment;
    this.charFilters = unmodifiableMap(charFilters);
    this.tokenFilters = unmodifiableMap(tokenFilters);
    this.tokenizers = unmodifiableMap(tokenizers);
    this.analyzers = unmodifiableMap(analyzers);
    this.normalizers = unmodifiableMap(normalizers);
}
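For tests or tools that only need the built-in components, the constructor can be invoked with empty provider maps; a minimal sketch, assuming settings contains a valid path.home so the Environment can be created.

AnalysisRegistry registry = new AnalysisRegistry(new Environment(settings),
        Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(),
        Collections.emptyMap(), Collections.emptyMap());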
Example 7: getAnalyzer
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
/**
 * Returns a registered {@link Analyzer} provider by name or <code>null</code> if the analyzer was not registered
 */
public Analyzer getAnalyzer(String analyzer) throws IOException {
    AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer);
    if (analyzerProvider == null) {
        AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> provider = analyzers.get(analyzer);
        return provider == null ? null : cachedAnalyzer.computeIfAbsent(analyzer, (key) -> {
            try {
                return provider.get(environment, key).get();
            } catch (IOException ex) {
                throw new ElasticsearchException("failed to load analyzer for name " + key, ex);
            }
        });
    }
    return analyzerProvider.get(environment, analyzer).get();
}
Example 8: buildTokenFilterFactories
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
public Map<String, TokenFilterFactory> buildTokenFilterFactories(IndexSettings indexSettings) throws IOException {
    final Map<String, Settings> tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER);
    Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>(this.tokenFilters);
    /*
     * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
     * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
     * hide internal data-structures as much as possible.
     */
    tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)));
    tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings)));
    return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories);
}
Example 9: getTokenizerProvider
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
/**
 * Returns a registered {@link TokenizerFactory} provider by {@link IndexSettings}
 * or a registered {@link TokenizerFactory} provider by predefined name
 * or <code>null</code> if the tokenizer was not registered
 * @param tokenizer global or defined tokenizer name
 * @param indexSettings an index settings
 * @return {@link TokenizerFactory} provider or <code>null</code>
 */
public AnalysisProvider<TokenizerFactory> getTokenizerProvider(String tokenizer, IndexSettings indexSettings) {
    final Map<String, Settings> tokenizerSettings = indexSettings.getSettings().getGroups("index.analysis.tokenizer");
    if (tokenizerSettings.containsKey(tokenizer)) {
        Settings currentSettings = tokenizerSettings.get(tokenizer);
        return getAnalysisProvider(Component.TOKENIZER, tokenizers, tokenizer, currentSettings.get("type"));
    } else {
        return getTokenizerProvider(tokenizer);
    }
}
Example 10: getCharFilterProvider
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
/**
 * Returns a registered {@link CharFilterFactory} provider by {@link IndexSettings}
 * or a registered {@link CharFilterFactory} provider by predefined name
 * or <code>null</code> if the charFilter was not registered
 * @param charFilter global or defined charFilter name
 * @param indexSettings an index settings
 * @return {@link CharFilterFactory} provider or <code>null</code>
 */
public AnalysisProvider<CharFilterFactory> getCharFilterProvider(String charFilter, IndexSettings indexSettings) {
    final Map<String, Settings> tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.char_filter");
    if (tokenFilterSettings.containsKey(charFilter)) {
        Settings currentSettings = tokenFilterSettings.get(charFilter);
        return getAnalysisProvider(Component.CHAR_FILTER, charFilters, charFilter, currentSettings.get("type"));
    } else {
        return getCharFilterProvider(charFilter);
    }
}
Example 11: requiresAnalysisSettings
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
private static <T> AnalysisModule.AnalysisProvider<T> requiresAnalysisSettings(AnalysisModule.AnalysisProvider<T> provider) {
    return new AnalysisModule.AnalysisProvider<T>() {
        @Override
        public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
            return provider.get(indexSettings, environment, name, settings);
        }

        @Override
        public boolean requiresAnalysisSettings() {
            return true;
        }
    };
}
Example 12: getAnalysisProvider
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
private <T> AnalysisProvider<T> getAnalysisProvider(Component component, Map<String, AnalysisProvider<T>> providerMap, String name, String typeName) {
    if (typeName == null) {
        throw new IllegalArgumentException(component + " [" + name + "] must specify either an analyzer type, or a tokenizer");
    }
    AnalysisProvider<T> type = providerMap.get(typeName);
    if (type == null) {
        throw new IllegalArgumentException("Unknown " + component + " type [" + typeName + "] for [" + name + "]");
    }
    return type;
}
Example 13: getNewRegistry
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
public AnalysisRegistry getNewRegistry(Settings settings) {
    try {
        return new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
            @Override
            public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
                return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
            }
        })).getAnalysisRegistry();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example 14: testDefaultsCompoundAnalysis
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
public void testDefaultsCompoundAnalysis() throws Exception {
    Settings settings = getJsonSettings();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
    AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
        @Override
        public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
            return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
        }
    }));
    TokenFilterFactory filterFactory = analysisModule.getAnalysisRegistry().buildTokenFilterFactories(idxSettings).get("dict_dec");
    MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class));
}
Example 15: getTokenFilters
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; // import the required package/class
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
    Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
    extra.put("icu_normalizer", IcuNormalizerTokenFilterFactory::new);
    extra.put("icu_folding", IcuFoldingTokenFilterFactory::new);
    extra.put("icu_collation", IcuCollationTokenFilterFactory::new);
    extra.put("icu_transform", IcuTransformTokenFilterFactory::new);
    return extra;
}
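A hedged configuration sketch showing how one of the names registered above could be referenced from index analysis settings; the "my_icu" analyzer name is invented for illustration.

Settings indexAnalysisSettings = Settings.builder()
        .put("index.analysis.analyzer.my_icu.type", "custom")
        .put("index.analysis.analyzer.my_icu.tokenizer", "standard")
        .put("index.analysis.analyzer.my_icu.filter", "icu_folding") // resolved through the provider registered above
        .build();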