This article collects typical usage examples of the Java method org.elasticsearch.common.settings.Settings.getAsArray. If you are unsure what Settings.getAsArray does, how to call it, or where to find sample code for it, the curated examples below should help. You can also explore further usage examples of its declaring class, org.elasticsearch.common.settings.Settings.
The following shows 15 code examples of the Settings.getAsArray method, sorted by popularity by default.
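Before diving into the examples, here is a minimal sketch of what getAsArray does, assuming an Elasticsearch 5.x-era Settings API (where getAsArray/putArray still exist; later versions replaced them with getAsList/putList). The keys and values are illustrative only:

import org.elasticsearch.common.settings.Settings;

public class GetAsArrayDemo {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
                .putArray("ignored_scripts", "hiragana", "katakana") // stored as ignored_scripts.0, ignored_scripts.1
                .build();

        // Without a default, a missing key yields an empty array rather than null.
        String[] scripts = settings.getAsArray("ignored_scripts");

        // With a default, the fallback array is returned when the key is absent.
        String[] hosts = settings.getAsArray("transport.publish_host", new String[] { "127.0.0.1" });

        System.out.println(String.join(",", scripts) + " | " + String.join(",", hosts));
    }
}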
Example 1: VoikkoTokenFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public VoikkoTokenFilterFactory(IndexSettings indexSettings,
                                @SuppressWarnings("unused") Environment environment,
                                String name,
                                Settings settings) {
    super(indexSettings, name, settings);
    cfg.analyzeAll = settings.getAsBoolean("analyzeAll", cfg.analyzeAll);
    cfg.minimumWordSize = settings.getAsInt("minimumWordSize", cfg.minimumWordSize);
    cfg.maximumWordSize = settings.getAsInt("maximumWordSize", cfg.maximumWordSize);
    analysisCache = new AnalysisCache(settings.getAsInt("analysisCacheSize", 1024));
    String language = settings.get("language", "fi_FI");
    String dictionaryPath = settings.get("dictionaryPath");
    for (String dir : settings.getAsArray("libraryPath"))
        Voikko.addLibraryPath(dir);
    voikkoPool = new VoikkoPool(language, dictionaryPath);
    voikkoPool.setMaxSize(settings.getAsInt("poolMaxSize", 10));
}
Developer: EvidentSolutions; Project: elasticsearch-analysis-voikko; Lines of code: 22; Source: VoikkoTokenFilterFactory.java
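For context, a hypothetical sketch of the filter settings this constructor would consume; the keys match the ones read above, the values are made up, and "libraryPath" is the one resolved via getAsArray:

import org.elasticsearch.common.settings.Settings;

static Settings exampleVoikkoSettings() {
    return Settings.builder()
            .put("type", "voikko")
            .put("language", "fi_FI")
            .put("poolMaxSize", 10)
            .putArray("libraryPath", "/usr/lib/voikko", "/opt/voikko/lib") // read via getAsArray("libraryPath")
            .build();
}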
Example 2: CJKBigramFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public CJKBigramFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
    super(indexSettings, name, settings);
    outputUnigrams = settings.getAsBooleanLenientForPreEs6Indices(
            indexSettings.getIndexVersionCreated(), "output_unigrams", false, deprecationLogger);
    final String[] asArray = settings.getAsArray("ignored_scripts");
    Set<String> scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul"));
    if (asArray != null) {
        scripts.removeAll(Arrays.asList(asArray));
    }
    int flags = 0;
    for (String script : scripts) {
        if ("han".equals(script)) {
            flags |= CJKBigramFilter.HAN;
        } else if ("hiragana".equals(script)) {
            flags |= CJKBigramFilter.HIRAGANA;
        } else if ("katakana".equals(script)) {
            flags |= CJKBigramFilter.KATAKANA;
        } else if ("hangul".equals(script)) {
            flags |= CJKBigramFilter.HANGUL;
        }
    }
    this.flags = flags;
}
Example 3: KeepWordFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public KeepWordFilterFactory(IndexSettings indexSettings,
                             Environment env, String name, Settings settings) {
    super(indexSettings, name, settings);
    final String[] arrayKeepWords = settings.getAsArray(KEEP_WORDS_KEY, null);
    final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null);
    if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) {
        // we don't allow both or none
        throw new IllegalArgumentException("keep requires either `" + KEEP_WORDS_KEY + "` or `"
                + KEEP_WORDS_PATH_KEY + "` to be configured");
    }
    if (settings.get(ENABLE_POS_INC_KEY) != null) {
        throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain");
    }
    this.keepWords = Analysis.getWordSet(env, indexSettings.getIndexVersionCreated(), settings, KEEP_WORDS_KEY);
}
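As the constructor enforces, exactly one of the two keys must be set. Below is a hedged sketch of both valid forms, assuming the standard key names for the keep filter ("keep_words" and "keep_words_path"); the values are made up:

import org.elasticsearch.common.settings.Settings;

// Inline word list: resolved through settings.getAsArray(KEEP_WORDS_KEY, null).
static Settings keepWordsInline() {
    return Settings.builder()
            .put("type", "keep")
            .putArray("keep_words", "one", "two", "three")
            .build();
}

// File-based word list: configuring this together with "keep_words" (or configuring
// neither) makes the factory throw IllegalArgumentException.
static Settings keepWordsFromFile() {
    return Settings.builder()
            .put("type", "keep")
            .put("keep_words_path", "analysis/keep_words.txt")
            .build();
}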
Example 4: parseStemExclusion
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public static CharArraySet parseStemExclusion(Settings settings, CharArraySet defaultStemExclusion) {
    String value = settings.get("stem_exclusion");
    if (value != null) {
        if ("_none_".equals(value)) {
            return CharArraySet.EMPTY_SET;
        } else {
            // LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)?
            return new CharArraySet(Strings.commaDelimitedListToSet(value), false);
        }
    }
    String[] stemExclusion = settings.getAsArray("stem_exclusion", null);
    if (stemExclusion != null) {
        // LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)?
        return new CharArraySet(Arrays.asList(stemExclusion), false);
    } else {
        return defaultStemExclusion;
    }
}
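A sketch of the inputs parseStemExclusion handles, assuming the method above is in scope and a Lucene 5+/6 package layout for CharArraySet. The string form is comma-delimited, "_none_" clears the set, and the array form goes through getAsArray:

import org.apache.lucene.analysis.CharArraySet;
import org.elasticsearch.common.settings.Settings;

static void stemExclusionForms(CharArraySet defaults) {
    // Comma-delimited string -> set containing "kala" and "kalan".
    CharArraySet fromString = parseStemExclusion(
            Settings.builder().put("stem_exclusion", "kala,kalan").build(), defaults);

    // The "_none_" marker -> CharArraySet.EMPTY_SET.
    CharArraySet none = parseStemExclusion(
            Settings.builder().put("stem_exclusion", "_none_").build(), defaults);

    // Array form (stem_exclusion.0, stem_exclusion.1, ...) -> same words, read via getAsArray.
    CharArraySet fromArray = parseStemExclusion(
            Settings.builder().putArray("stem_exclusion", "kala", "kalan").build(), defaults);

    // Key absent -> the provided default set is returned unchanged.
    CharArraySet fallback = parseStemExclusion(Settings.EMPTY, defaults);
}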
Example 5: onRefreshSettings
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

@Override
public void onRefreshSettings(Settings settings) {
    String[] newTracerLogInclude = settings.getAsArray(SETTING_TRACE_LOG_INCLUDE,
            TransportService.this.settings.getAsArray(SETTING_TRACE_LOG_INCLUDE, DEFAULT_TRACE_LOG_INCLUDE, true), true);
    String[] newTracerLogExclude = settings.getAsArray(SETTING_TRACE_LOG_EXCLUDE,
            TransportService.this.settings.getAsArray(SETTING_TRACE_LOG_EXCLUDE, DEFAULT_TRACE_LOG_EXCLUDE, true), true);
    if (newTracerLogInclude == TransportService.this.tracerLogInclude && newTracerLogExclude == TransportService.this.tracelLogExclude) {
        return;
    }
    if (Arrays.equals(newTracerLogInclude, TransportService.this.tracerLogInclude) &&
            Arrays.equals(newTracerLogExclude, TransportService.this.tracelLogExclude)) {
        return;
    }
    TransportService.this.tracerLogInclude = newTracerLogInclude;
    TransportService.this.tracelLogExclude = newTracerLogExclude;
    logger.info("tracer log updated to use include: {}, exclude: {}", newTracerLogInclude, newTracerLogExclude);
}
Example 6: KeepWordFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

@Inject
public KeepWordFilterFactory(Index index, IndexSettingsService indexSettingsService,
                             Environment env, @Assisted String name, @Assisted Settings settings) {
    super(index, indexSettingsService.getSettings(), name, settings);
    final String[] arrayKeepWords = settings.getAsArray(KEEP_WORDS_KEY, null);
    final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null);
    if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) {
        // we don't allow both or none
        throw new IllegalArgumentException("keep requires either `" + KEEP_WORDS_KEY + "` or `"
                + KEEP_WORDS_PATH_KEY + "` to be configured");
    }
    if (version.onOrAfter(Version.LUCENE_4_4) && settings.get(ENABLE_POS_INC_KEY) != null) {
        throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain or use"
                + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs.");
    }
    enablePositionIncrements = version.onOrAfter(Version.LUCENE_4_4) ? true : settings.getAsBoolean(ENABLE_POS_INC_KEY, true);
    this.keepWords = Analysis.getWordSet(env, settings, KEEP_WORDS_KEY);
}
Example 7: AwarenessAllocationDecider
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

@Inject
public AwarenessAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) {
    super(settings);
    this.awarenessAttributes = settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES);
    forcedAwarenessAttributes = Maps.newHashMap();
    Map<String, Settings> forceGroups = settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP);
    for (Map.Entry<String, Settings> entry : forceGroups.entrySet()) {
        String[] aValues = entry.getValue().getAsArray("values");
        if (aValues.length > 0) {
            forcedAwarenessAttributes.put(entry.getKey(), aValues);
        }
    }
    nodeSettingsService.addListener(new ApplySettings());
}
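For reference, a hypothetical node-level configuration that the decider above would pick up, assuming the constants resolve to the standard cluster.routing.allocation.awareness.* keys; attribute and zone names are made up:

import org.elasticsearch.common.settings.Settings;

static Settings awarenessSettings() {
    return Settings.builder()
            // read via getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES)
            .putArray("cluster.routing.allocation.awareness.attributes", "rack_id", "zone")
            // collected through getGroups(...) and then getAsArray("values") per attribute
            .putArray("cluster.routing.allocation.awareness.force.zone.values", "zone1", "zone2")
            .build();
}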
Example 8: getWordList
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

/**
 * Fetches a list of words from the specified settings. The list should either be available directly
 * under the key given by settingPrefix, or in a file whose location is given by settingPrefix + "_path".
 *
 * @throws IllegalArgumentException
 *             If the file referenced by settingPrefix + "_path" cannot be read.
 */
public static List<String> getWordList(Environment env, Settings settings, String settingPrefix) {
    String wordListPath = settings.get(settingPrefix + "_path", null);
    if (wordListPath == null) {
        String[] explicitWordList = settings.getAsArray(settingPrefix, null);
        if (explicitWordList == null) {
            return null;
        } else {
            return Arrays.asList(explicitWordList);
        }
    }
    final Path wordListFile = env.configFile().resolve(wordListPath);
    try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), Charsets.UTF_8)) {
        return loadWordList(reader, "#");
    } catch (IOException ioe) {
        String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
        throw new IllegalArgumentException(message);
    }
}
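A sketch of how callers might use getWordList, assuming the helper above is in scope and env points at the node's config directory; key names and file paths are illustrative:

import java.util.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

static void wordListForms(Environment env) {
    // Inline list under the prefix key itself: resolved through getAsArray.
    Settings inline = Settings.builder()
            .putArray("stopwords", "a", "an", "the")
            .build();
    List<String> fromSettings = getWordList(env, inline, "stopwords"); // ["a", "an", "the"]

    // File-based list: "<prefix>_path" is resolved against the config directory.
    Settings fromPath = Settings.builder()
            .put("stopwords_path", "analysis/stopwords.txt")
            .build();
    List<String> fromFile = getWordList(env, fromPath, "stopwords");

    // Neither key configured -> null.
    List<String> none = getWordList(env, Settings.EMPTY, "stopwords");
}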
Example 9: PatternCaptureGroupTokenFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public PatternCaptureGroupTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
    super(indexSettings, name, settings);
    String[] regexes = settings.getAsArray(PATTERNS_KEY, null, false);
    if (regexes == null) {
        throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]");
    }
    patterns = new Pattern[regexes.length];
    for (int i = 0; i < regexes.length; i++) {
        patterns[i] = Pattern.compile(regexes[i]);
    }
    preserveOriginal = settings.getAsBooleanLenientForPreEs6Indices(
            indexSettings.getIndexVersionCreated(), PRESERVE_ORIG_KEY, true, deprecationLogger);
}
Example 10: KeepTypesFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public KeepTypesFilterFactory(IndexSettings indexSettings,
                              Environment env, String name, Settings settings) {
    super(indexSettings, name, settings);
    final String[] arrayKeepTypes = settings.getAsArray(KEEP_TYPES_KEY, null);
    if (arrayKeepTypes == null) {
        throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured");
    }
    this.keepTypes = new HashSet<>(Arrays.asList(arrayKeepTypes));
}
Example 11: TribeService
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

@Inject
public TribeService(Settings settings, ClusterService clusterService, DiscoveryService discoveryService) {
    super(settings);
    this.clusterService = clusterService;
    Map<String, Settings> nodesSettings = Maps.newHashMap(settings.getGroups("tribe", true));
    nodesSettings.remove("blocks"); // remove prefix settings that don't indicate a client
    nodesSettings.remove("on_conflict"); // remove prefix settings that don't indicate a client
    for (Map.Entry<String, Settings> entry : nodesSettings.entrySet()) {
        Settings clientSettings = buildClientSettings(entry.getKey(), settings, entry.getValue());
        nodes.add(new TribeClientNode(clientSettings));
    }
    String[] blockIndicesWrite = Strings.EMPTY_ARRAY;
    String[] blockIndicesRead = Strings.EMPTY_ARRAY;
    String[] blockIndicesMetadata = Strings.EMPTY_ARRAY;
    if (!nodes.isEmpty()) {
        // remove the initial election / recovery blocks since we are not going to have a
        // master elected in this single tribe node local "cluster"
        clusterService.removeInitialStateBlock(discoveryService.getNoMasterBlock());
        clusterService.removeInitialStateBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK);
        if (settings.getAsBoolean("tribe.blocks.write", false)) {
            clusterService.addInitialStateBlock(TRIBE_WRITE_BLOCK);
        }
        blockIndicesWrite = settings.getAsArray("tribe.blocks.write.indices", Strings.EMPTY_ARRAY);
        if (settings.getAsBoolean("tribe.blocks.metadata", false)) {
            clusterService.addInitialStateBlock(TRIBE_METADATA_BLOCK);
        }
        blockIndicesMetadata = settings.getAsArray("tribe.blocks.metadata.indices", Strings.EMPTY_ARRAY);
        blockIndicesRead = settings.getAsArray("tribe.blocks.read.indices", Strings.EMPTY_ARRAY);
        for (Node node : nodes) {
            node.injector().getInstance(ClusterService.class).add(new TribeClusterStateListener(node));
        }
    }
    this.blockIndicesMetadata = blockIndicesMetadata;
    this.blockIndicesRead = blockIndicesRead;
    this.blockIndicesWrite = blockIndicesWrite;
    this.onConflict = settings.get("tribe.on_conflict", ON_CONFLICT_ANY);
}
Example 12: onRefreshSettings
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

@Override
public void onRefreshSettings(Settings settings) {
    String[] awarenessAttributes = settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES,
            AwarenessAllocationDecider.this.settings.getAsArray(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES));
    if ("".equals(settings.get(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES, null))) {
        awarenessAttributes = Strings.EMPTY_ARRAY; // the empty string resets this
    }
    if (awarenessAttributes != null && !Arrays.equals(AwarenessAllocationDecider.this.awarenessAttributes, awarenessAttributes)) {
        logger.info("updating [cluster.routing.allocation.awareness.attributes] from [{}] to [{}]",
                AwarenessAllocationDecider.this.awarenessAttributes, awarenessAttributes);
        AwarenessAllocationDecider.this.awarenessAttributes = awarenessAttributes;
    }
    Map<String, String[]> forcedAwarenessAttributes = new HashMap<>();
    Map<String, Settings> forceGroups = settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP);
    if (forceGroups.isEmpty()) {
        // check initial values (from config file)
        forceGroups = AwarenessAllocationDecider.this.settings.getGroups(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP);
    }
    if (!forceGroups.isEmpty()) {
        for (Map.Entry<String, Settings> entry : forceGroups.entrySet()) {
            String[] aValues = entry.getValue().getAsArray("values");
            if (aValues.length > 0) {
                forcedAwarenessAttributes.put(entry.getKey(), aValues);
            }
        }
    }
    AwarenessAllocationDecider.this.forcedAwarenessAttributes = forcedAwarenessAttributes;
}
Example 13: createBoundTransportAddress
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

private BoundTransportAddress createBoundTransportAddress(String name, Settings profileSettings, List<InetSocketAddress> boundAddresses) {
    String[] boundAddressesHostStrings = new String[boundAddresses.size()];
    TransportAddress[] transportBoundAddresses = new TransportAddress[boundAddresses.size()];
    for (int i = 0; i < boundAddresses.size(); i++) {
        InetSocketAddress boundAddress = boundAddresses.get(i);
        boundAddressesHostStrings[i] = boundAddress.getHostString();
        transportBoundAddresses[i] = new InetSocketTransportAddress(boundAddress);
    }
    final String[] publishHosts;
    if (DEFAULT_PROFILE.equals(name)) {
        publishHosts = settings.getAsArray("transport.netty.publish_host",
                settings.getAsArray("transport.publish_host", settings.getAsArray("transport.host", null)));
    } else {
        publishHosts = profileSettings.getAsArray("publish_host", boundAddressesHostStrings);
    }
    final InetAddress publishInetAddress;
    try {
        publishInetAddress = networkService.resolvePublishHostAddresses(publishHosts);
    } catch (Exception e) {
        throw new BindTransportException("Failed to resolve publish address", e);
    }
    final int publishPort = resolvePublishPort(name, settings, profileSettings, boundAddresses, publishInetAddress);
    final TransportAddress publishAddress = new InetSocketTransportAddress(new InetSocketAddress(publishInetAddress, publishPort));
    return new BoundTransportAddress(transportBoundAddresses, publishAddress);
}
Example 14: buildCorsConfig
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

private CorsConfig buildCorsConfig(Settings settings) {
    if (settings.getAsBoolean(SETTING_CORS_ENABLED, false) == false) {
        return CorsConfigBuilder.forOrigins().disable().build();
    }
    String origin = settings.get(SETTING_CORS_ALLOW_ORIGIN);
    final CorsConfigBuilder builder;
    if (Strings.isNullOrEmpty(origin)) {
        builder = CorsConfigBuilder.forOrigins();
    } else if (origin.equals(ANY_ORIGIN)) {
        builder = CorsConfigBuilder.forAnyOrigin();
    } else {
        Pattern p = RestUtils.checkCorsSettingForRegex(origin);
        if (p == null) {
            builder = CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin));
        } else {
            builder = CorsConfigBuilder.forPattern(p);
        }
    }
    if (settings.getAsBoolean(SETTING_CORS_ALLOW_CREDENTIALS, false)) {
        builder.allowCredentials();
    }
    String[] strMethods = settings.getAsArray(SETTING_CORS_ALLOW_METHODS, DEFAULT_CORS_METHODS);
    HttpMethod[] methods = new HttpMethod[strMethods.length];
    for (int i = 0; i < methods.length; i++) {
        methods[i] = HttpMethod.valueOf(strMethods[i]);
    }
    return builder.allowedRequestMethods(methods)
            .maxAge(settings.getAsInt(SETTING_CORS_MAX_AGE, DEFAULT_CORS_MAX_AGE))
            .allowedRequestHeaders(settings.getAsArray(SETTING_CORS_ALLOW_HEADERS, DEFAULT_CORS_HEADERS))
            .shortCircuit()
            .build();
}
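To make the constants above concrete, here is a hedged sketch of settings that buildCorsConfig could consume, assuming the SETTING_CORS_* constants map to the stock http.cors.* key names; the values are illustrative only:

import org.elasticsearch.common.settings.Settings;

static Settings corsSettings() {
    return Settings.builder()
            .put("http.cors.enabled", true)             // SETTING_CORS_ENABLED
            .put("http.cors.allow-origin", "*")         // SETTING_CORS_ALLOW_ORIGIN
            .put("http.cors.allow-credentials", false)  // SETTING_CORS_ALLOW_CREDENTIALS
            .putArray("http.cors.allow-methods", "GET", "POST", "HEAD")              // read via getAsArray
            .putArray("http.cors.allow-headers", "X-Requested-With", "Content-Type") // read via getAsArray
            .put("http.cors.max-age", 1728000)          // SETTING_CORS_MAX_AGE
            .build();
}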
Example 15: PatternCaptureGroupTokenFilterFactory
import org.elasticsearch.common.settings.Settings; // import the package/class this method depends on

public PatternCaptureGroupTokenFilterFactory(Index index, Settings indexSettings, String name, Settings settings) {
    super(index, indexSettings, name, settings);
    String[] regexes = settings.getAsArray(PATTERNS_KEY, null, false);
    if (regexes == null) {
        throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]");
    }
    patterns = new Pattern[regexes.length];
    for (int i = 0; i < regexes.length; i++) {
        patterns[i] = Pattern.compile(regexes[i]);
    }
    preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, true);
}