本文整理汇总了Java中org.elasticsearch.test.ESTestCase类的典型用法代码示例。如果您正苦于以下问题:Java ESTestCase类的具体用法?Java ESTestCase怎么用?Java ESTestCase使用的例子?那么恭喜您,这里精选的类代码示例或许可以为您提供帮助。
ESTestCase类属于org.elasticsearch.test包,在下文中一共展示了ESTestCase类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: buildRecoveryTarget
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Picks a {@link RecoverySource} that is valid for the given routing state.
 *
 * @param primary whether the shard is a primary copy
 * @param state   the routing state the shard is (about to be) in
 * @return a random store-based source for unassigned/initializing primaries, the peer
 *         source for replicas, or {@code null} for started/relocating shards
 * @throws IllegalStateException for any other routing state
 */
private static RecoverySource buildRecoveryTarget(boolean primary, ShardRoutingState state) {
    switch (state) {
        case UNASSIGNED:
        case INITIALIZING:
            // Primaries recover from a (possibly empty) local store; replicas always recover from a peer.
            return primary
                ? ESTestCase.randomFrom(
                    RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE,
                    RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)
                : RecoverySource.PeerRecoverySource.INSTANCE;
        case STARTED:
        case RELOCATING:
            // Started/relocating shards carry no recovery source.
            return null;
        default:
            throw new IllegalStateException("illegal state");
    }
}
示例2: ensureAllArraysAreReleased
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Fails the test run if arrays that were acquired before this call are still tracked
 * as unreleased after a busy-wait timeout.
 *
 * <p>Only the arrays present in the snapshot taken at entry are checked, because a
 * shared cluster may legitimately keep acquiring and releasing new arrays concurrently.
 *
 * @throws RuntimeException if any snapshotted array was never released; the first
 *                          recorded acquisition value is attached as the cause when it
 *                          is a {@link Throwable}
 */
public static void ensureAllArraysAreReleased() throws Exception {
    final Map<Object, Object> snapshot = new HashMap<>(ACQUIRED_ARRAYS);
    if (snapshot.isEmpty()) {
        return;
    }
    // Wait a reasonable time for every array in the snapshot to disappear from the live set.
    final boolean released =
        ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(snapshot.keySet(), ACQUIRED_ARRAYS.keySet()));
    if (released) {
        return;
    }
    // Narrow the snapshot to the arrays that are genuinely still live ...
    snapshot.keySet().retainAll(ACQUIRED_ARRAYS.keySet());
    // ... and drop them from the live set so a later check does not re-report them.
    ACQUIRED_ARRAYS.keySet().removeAll(snapshot.keySet());
    if (!snapshot.isEmpty()) {
        final Object cause = snapshot.entrySet().iterator().next().getValue();
        throw new RuntimeException(
            snapshot.size() + " arrays have not been released",
            cause instanceof Throwable ? (Throwable) cause : null);
    }
}
示例3: ensureAllPagesAreReleased
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Fails the test run if pages that were acquired before this call are still tracked
 * as unreleased after a busy-wait timeout.
 *
 * <p>Only the pages present in the snapshot taken at entry are checked, because a
 * shared cluster may legitimately keep acquiring and releasing new pages concurrently.
 *
 * @throws RuntimeException if any snapshotted page was never released, with the
 *                          recorded acquisition stack trace as the cause
 */
public static void ensureAllPagesAreReleased() throws Exception {
    final Map<Object, Throwable> snapshot = new HashMap<>(ACQUIRED_PAGES);
    if (snapshot.isEmpty()) {
        return;
    }
    // Wait a reasonable time for every page in the snapshot to disappear from the live set.
    final boolean released =
        ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(snapshot.keySet(), ACQUIRED_PAGES.keySet()));
    if (released) {
        return;
    }
    // Narrow the snapshot to the pages that are genuinely still live ...
    snapshot.keySet().retainAll(ACQUIRED_PAGES.keySet());
    // ... and drop them from the live set so a later check does not re-report them.
    ACQUIRED_PAGES.keySet().removeAll(snapshot.keySet());
    if (!snapshot.isEmpty()) {
        final Throwable acquisitionTrace = snapshot.entrySet().iterator().next().getValue();
        throw new RuntimeException(snapshot.size() + " pages have not been released", acquisitionTrace);
    }
}
示例4: testEnglishFilterFactory
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Verifies that a "stemmer" filter configured with language=english resolves to
 * {@link StemmerTokenFilterFactory} backed by Lucene's {@link PorterStemFilter},
 * across randomly chosen index-created versions.
 */
public void testEnglishFilterFactory() throws IOException {
    final int rounds = scaledRandomIntBetween(20, 100);
    for (int round = 0; round < rounds; round++) {
        // Exercise the factory against a different random index-created version each round.
        final Version indexVersion = VersionUtils.randomVersion(random());
        final Settings settings = Settings.builder()
            .put("index.analysis.filter.my_english.type", "stemmer")
            .put("index.analysis.filter.my_english.language", "english")
            .put("index.analysis.analyzer.my_english.tokenizer", "whitespace")
            .put("index.analysis.analyzer.my_english.filter", "my_english")
            .put(SETTING_VERSION_CREATED, indexVersion)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .build();
        final ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);

        final TokenFilterFactory stemmerFactory = analysis.tokenFilter.get("my_english");
        assertThat(stemmerFactory, instanceOf(StemmerTokenFilterFactory.class));

        final Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("foo bar"));
        final TokenStream filtered = stemmerFactory.create(tokenizer);
        assertThat(filtered, instanceOf(PorterStemFilter.class));

        // End-to-end check: the full analyzer applies the Porter stemmer.
        final IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
        final NamedAnalyzer analyzer = indexAnalyzers.get("my_english");
        assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"});
    }
}
示例5: testDefault
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * With no explicit configuration the built-in "min_hash" filter should emit
 * hash_count * bucket_count * hash_set_size tokens (1 * 512 * 1 by default),
 * because with_rotation pads buckets even for short inputs.
 */
public void testDefault() throws IOException {
    final int defaultHashCount = 1;
    final int defaultBucketCount = 512;
    final int defaultHashSetSize = 1;
    final int expectedTokenCount = defaultHashCount * defaultBucketCount * defaultHashSetSize;

    final Settings settings = Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
        .build();
    final ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
    final TokenFilterFactory minHashFilter = analysis.tokenFilter.get("min_hash");

    final Tokenizer tokenizer = new WhitespaceTokenizer();
    tokenizer.setReader(new StringReader("the quick brown fox"));
    // with_rotation is true by default, and hash_set_size is 1, so even though the source
    // doesn't have enough tokens to fill all the buckets, we still expect 512 tokens.
    assertStreamHasNumberOfTokens(minHashFilter.create(tokenizer), expectedTokenCount);
}
示例6: testSettings
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * A "min_hash" filter configured with with_rotation=false must not pad the bucket
 * set: a single-token input yields a single output token even with bucket_count=2.
 */
public void testSettings() throws IOException {
    final Settings settings = Settings.builder()
        .put("index.analysis.filter.test_min_hash.type", "min_hash")
        .put("index.analysis.filter.test_min_hash.hash_count", "1")
        .put("index.analysis.filter.test_min_hash.bucket_count", "2")
        .put("index.analysis.filter.test_min_hash.hash_set_size", "1")
        .put("index.analysis.filter.test_min_hash.with_rotation", false)
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
        .build();
    final ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
    final TokenFilterFactory minHashFilter = analysis.tokenFilter.get("test_min_hash");

    final Tokenizer tokenizer = new WhitespaceTokenizer();
    tokenizer.setReader(new StringReader("sushi"));
    // despite the fact that bucket_count is 2 and hash_set_size is 1,
    // because with_rotation is false, we only expect 1 token here.
    assertStreamHasNumberOfTokens(minHashFilter.create(tokenizer), 1);
}
示例7: testCorrectPositionIncrementSetting
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * A "stop" filter must resolve to {@link StopTokenFilterFactory} and build Lucene's
 * {@link StopFilter} (which handles position increments correctly), whether or not
 * an explicit filter version is configured.
 */
public void testCorrectPositionIncrementSetting() throws IOException {
    final Builder settingsBuilder = Settings.builder().put("index.analysis.filter.my_stop.type", "stop");
    // Randomly either pin the filter version or leave it unspecified.
    if (random().nextBoolean()) {
        settingsBuilder.put("index.analysis.filter.my_stop.version", Version.LATEST);
    }
    settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());

    final ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settingsBuilder.build());
    final TokenFilterFactory stopFactory = analysis.tokenFilter.get("my_stop");
    assertThat(stopFactory, instanceOf(StopTokenFilterFactory.class));

    final Tokenizer tokenizer = new WhitespaceTokenizer();
    tokenizer.setReader(new StringReader("foo bar"));
    final TokenStream filtered = stopFactory.create(tokenizer);
    assertThat(filtered, instanceOf(StopFilter.class));
}
示例8: testMultiTerms
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Word-delimiter filter with catenate_all + preserve_original: every split point
 * emits both the concatenated and the original token alongside the parts, so the
 * expected stream interleaves originals with their sub-tokens and catenations.
 */
public void testMultiTerms() throws IOException {
    final ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
        .put("index.analysis.filter.my_word_delimiter.type", type)
        .put("index.analysis.filter.my_word_delimiter.catenate_all", "true")
        .put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
        .build());
    final TokenFilterFactory delimiterFilter = analysis.tokenFilter.get("my_word_delimiter");

    final String input = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
    final Tokenizer tokenizer = new WhitespaceTokenizer();
    tokenizer.setReader(new StringReader(input));

    // Expected tokens, position increments, and position lengths, index-aligned.
    final String[] expectedTokens = new String[]{"PowerShot", "PowerShot", "Power", "Shot", "50042", "500-42", "500", "42",
        "wifi", "wi-fi", "wi", "fi", "wifi4000", "wi-fi-4000", "wi", "fi", "4000", "j2se", "j2se", "j", "2", "se",
        "ONeil", "O'Neil's", "O", "Neil" };
    final int[] expectedIncrements = new int[]{1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1};
    final int[] expectedPositionLengths = new int[]{2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 1, 3, 3, 1, 1, 1, 3, 3, 1, 1, 1, 2, 2, 1, 1};

    assertTokenStreamContents(delimiterFilter.create(tokenizer), expectedTokens, null, null, null,
        expectedIncrements, expectedPositionLengths, null);
}
示例9: testPartsAndCatenate
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Correct offset order when doing both parts and concatenation: "PowerShot" is
 * emitted as a catenated token spanning two positions ("PowerShot" is a synonym
 * of "Power") followed by the individual parts.
 */
public void testPartsAndCatenate() throws IOException {
    final ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
        .put("index.analysis.filter.my_word_delimiter.type", type)
        .put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
        .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
        .build());
    final TokenFilterFactory delimiterFilter = analysis.tokenFilter.get("my_word_delimiter");

    final Tokenizer tokenizer = new WhitespaceTokenizer();
    tokenizer.setReader(new StringReader("PowerShot"));

    // Catenated token first (length 2), then the two parts at their own positions.
    final String[] expectedTokens = new String[]{"PowerShot", "Power", "Shot" };
    final int[] expectedIncrements = new int[]{1, 0, 1};
    final int[] expectedPositionLengths = new int[]{2, 1, 1};

    assertTokenStreamContents(delimiterFilter.create(tokenizer), expectedTokens, null, null, null,
        expectedIncrements, expectedPositionLengths, null);
}
示例10: createAnalysis
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Builds a {@link ESTestCase.TestAnalysis} from the "lemmagen.json" settings resource
 * on the classpath, with path.home/path.conf pointed at the working directory, using
 * the {@code AnalysisLemmagenPlugin}.
 *
 * @return the test analysis registry for index "test"
 * @throws IOException if the settings resource is missing or cannot be read
 */
public ESTestCase.TestAnalysis createAnalysis() throws IOException {
    final Settings settings;
    // try-with-resources: the original version leaked the classpath InputStream.
    try (java.io.InputStream config = getClass().getResourceAsStream("lemmagen.json")) {
        if (config == null) {
            // Fail with a clear message instead of an opaque NPE inside loadFromStream.
            throw new IOException("classpath resource not found: lemmagen.json");
        }
        settings = Settings
            .builder()
            .loadFromStream("lemmagen.json", config)
            .build();
    }
    // Both home and conf point at the current working directory.
    final String workingDir = new File("").getAbsolutePath();
    Settings nodeSettings = Settings
        .builder()
        .put("path.home", workingDir)
        .put("path.conf", workingDir)
        .build();
    Index index = new Index("test", "_na_");
    return createTestAnalysis(index, nodeSettings, settings, new AnalysisLemmagenPlugin());
}
示例11: buildUnassignedInfo
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Builds an {@link UnassignedInfo} appropriate for the given routing state.
 *
 * @param state the routing state the shard is (about to be) in
 * @return an info with a random reason for unassigned/initializing shards, or
 *         {@code null} for started/relocating shards
 * @throws IllegalStateException for any other routing state
 */
private static UnassignedInfo buildUnassignedInfo(ShardRoutingState state) {
    switch (state) {
        case UNASSIGNED:
        case INITIALIZING: {
            // Any reason is acceptable for a not-yet-started shard.
            final UnassignedInfo.Reason reason = ESTestCase.randomFrom(UnassignedInfo.Reason.values());
            return new UnassignedInfo(reason, "auto generated for test");
        }
        case STARTED:
        case RELOCATING:
            // Active shards carry no unassigned info.
            return null;
        default:
            throw new IllegalStateException("illegal state");
    }
}
示例12: randomRecoverySource
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Returns a random {@link RecoverySource}: one of the store/peer/local-shards
 * singletons, or a freshly built snapshot recovery source with a random snapshot id.
 */
public static RecoverySource randomRecoverySource() {
    // Built eagerly every call, just like the other candidates are referenced eagerly.
    final RecoverySource.SnapshotRecoverySource snapshotSource =
        new RecoverySource.SnapshotRecoverySource(
            new Snapshot("repo", new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())),
            Version.CURRENT,
            "some_index");
    return ESTestCase.randomFrom(
        RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE,
        RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE,
        RecoverySource.PeerRecoverySource.INSTANCE,
        RecoverySource.LocalShardsRecoverySource.INSTANCE,
        snapshotSource);
}
示例13: makeStats
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Create a fake {@link NodeStats} for the given node name and disk usage, exposing a
 * single filesystem path at "/dev/null" and leaving all non-fs stats null.
 *
 * <p>NOTE(review): free bytes are passed for both the free and available slots of
 * {@code FsInfo.Path} — preserved from the original; confirm that is intentional.
 */
public static NodeStats makeStats(String nodeName, DiskUsage usage) {
    final FsInfo.Path[] fsPaths = new FsInfo.Path[] {
        new FsInfo.Path("/dev/null", null,
            usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes())
    };
    final FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), null, fsPaths);
    final DiscoveryNode node = new DiscoveryNode(
        nodeName, ESTestCase.buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    // Everything except the fs stats is left null — callers only inspect disk usage.
    return new NodeStats(node,
        System.currentTimeMillis(),
        null, null, null, null, null,
        fsInfo,
        null, null, null,
        null, null, null);
}
示例14: testForSlice
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Verifies that {@code forSlice} copies the tunable fields of a randomized parent
 * request onto the slice request, while forcing slice-specific overrides: a single
 * worker, no stored result, and requests_per_second / size divided across workers.
 * Relies on the sibling hooks {@code newRequest()} and {@code extraForSliceAssertions}.
 */
public void testForSlice() {
R original = newRequest();
// Randomize every field that forSlice is expected to carry over verbatim.
original.setAbortOnVersionConflict(randomBoolean());
original.setRefresh(randomBoolean());
original.setTimeout(parseTimeValue(randomPositiveTimeValue(), "timeout"));
original.setWaitForActiveShards(
randomFrom(ActiveShardCount.ALL, ActiveShardCount.NONE, ActiveShardCount.ONE, ActiveShardCount.DEFAULT));
original.setRetryBackoffInitialTime(parseTimeValue(randomPositiveTimeValue(), "retry_backoff_initial_time"));
original.setMaxRetries(between(0, 1000));
// At least 2 slices so the per-worker splits below are observable.
original.setSlices(between(2, 1000));
original.setRequestsPerSecond(
randomBoolean() ? Float.POSITIVE_INFINITY : randomValueOtherThanMany(r -> r < 0, ESTestCase::randomFloat));
original.setSize(randomBoolean() ? AbstractBulkByScrollRequest.SIZE_ALL_MATCHES : between(0, Integer.MAX_VALUE));
TaskId slicingTask = new TaskId(randomAsciiOfLength(5), randomLong());
SearchRequest sliceRequest = new SearchRequest();
R forSliced = original.forSlice(slicingTask, sliceRequest);
// Fields carried over unchanged from the parent request.
assertEquals(original.isAbortOnVersionConflict(), forSliced.isAbortOnVersionConflict());
assertEquals(original.isRefresh(), forSliced.isRefresh());
assertEquals(original.getTimeout(), forSliced.getTimeout());
assertEquals(original.getWaitForActiveShards(), forSliced.getWaitForActiveShards());
assertEquals(original.getRetryBackoffInitialTime(), forSliced.getRetryBackoffInitialTime());
assertEquals(original.getMaxRetries(), forSliced.getMaxRetries());
// Fields forSlice must override rather than copy.
assertEquals("only the parent task should store results", false, forSliced.getShouldStoreResult());
assertEquals("slice requests always have a single worker", 1, forSliced.getSlices());
assertEquals("requests_per_second is split between all workers", original.getRequestsPerSecond() / original.getSlices(),
forSliced.getRequestsPerSecond(), Float.MIN_NORMAL);
// SIZE_ALL_MATCHES stays unlimited; any concrete size is divided across workers.
assertEquals("size is split evenly between all workers", original.getSize() == AbstractBulkByScrollRequest.SIZE_ALL_MATCHES
? AbstractBulkByScrollRequest.SIZE_ALL_MATCHES : original.getSize() / original.getSlices(), forSliced.getSize());
assertEquals(slicingTask, forSliced.getParentTask());
// Hook for subclasses to assert request-type-specific fields.
extraForSliceAssertions(original, forSliced);
}
示例15: mutate
import org.elasticsearch.test.ESTestCase; //导入依赖的package包/类
/**
 * Returns a copy of {@code original} altered in exactly one randomly chosen way, for
 * equals/hashCode "not equal" testing. The copy is made via serialization round-trip
 * using the sibling {@code namedWriteableRegistry}, then either the window size or one
 * of the query-rescorer knobs (query weight, rescore weight, score mode, query boost)
 * is changed.
 *
 * @param original the builder to derive a mutation from; not modified
 * @return a builder that differs from {@code original} in a single property
 * @throws IOException if the serialization round-trip fails
 */
private static RescoreBuilder<?> mutate(RescoreBuilder<?> original) throws IOException {
// Deep-copy first so the original is never touched.
// NOTE(review): the copy is reconstructed via QueryRescorerBuilder::new, which
// presumes original is a QueryRescorerBuilder — the cast below relies on that too.
RescoreBuilder<?> mutation = ESTestCase.copyWriteable(original, namedWriteableRegistry, QueryRescorerBuilder::new);
if (randomBoolean()) {
// Mutate the common windowSize property (shared by all rescorers).
Integer windowSize = original.windowSize();
if (windowSize != null) {
mutation.windowSize(windowSize + 1);
} else {
// No explicit window size on the original: set a random one.
mutation.windowSize(randomIntBetween(0, 100));
}
} else {
// Mutate one of the QueryRescorerBuilder-specific properties instead.
QueryRescorerBuilder queryRescorer = (QueryRescorerBuilder) mutation;
switch (randomIntBetween(0, 3)) {
case 0:
queryRescorer.setQueryWeight(queryRescorer.getQueryWeight() + 0.1f);
break;
case 1:
queryRescorer.setRescoreQueryWeight(queryRescorer.getRescoreQueryWeight() + 0.1f);
break;
case 2:
// Pick any score mode other than the current one.
QueryRescoreMode other;
do {
other = randomFrom(QueryRescoreMode.values());
} while (other == queryRescorer.getScoreMode());
queryRescorer.setScoreMode(other);
break;
case 3:
// only increase the boost to make it a slightly different query
queryRescorer.getRescoreQuery().boost(queryRescorer.getRescoreQuery().boost() + 0.1f);
break;
default:
throw new IllegalStateException("unexpected random mutation in test");
}
}
return mutation;
}