本文整理汇总了C#中RandomIndexWriter.Commit方法的典型用法代码示例。如果您正苦于以下问题：C# RandomIndexWriter.Commit方法的具体用法？C# RandomIndexWriter.Commit怎么用？C# RandomIndexWriter.Commit使用的例子？那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类RandomIndexWriter的用法示例。
在下文中一共展示了RandomIndexWriter.Commit方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestIndexing
public virtual void TestIndexing()
{
    // Verifies that under NoDeletionPolicy no file from any commit point is
    // ever deleted while background threads keep indexing and committing.
    DirectoryInfo tmpDir = CreateTempDir("TestNeverDelete");
    BaseDirectoryWrapper d = NewFSDirectory(tmpDir);
    // We want to "see" files removed if Lucene removed them. This is still
    // worth running on Windows since some files the IR opens and closes.
    if (d is MockDirectoryWrapper)
    {
        ((MockDirectoryWrapper)d).NoDeleteOpenFile = false;
    }
    RandomIndexWriter w = new RandomIndexWriter(Random(), d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
    // Small max-buffered-docs forces frequent segment flushes.
    w.w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 5, 30));
    w.Commit();
    // 0..3 background indexing threads, each running until stopTime.
    ThreadClass[] indexThreads = new ThreadClass[Random().Next(4)];
    long stopTime = Environment.TickCount + AtLeast(1000);
    for (int x = 0; x < indexThreads.Length; x++)
    {
        indexThreads[x] = new ThreadAnonymousInnerClassHelper(w, stopTime);
        indexThreads[x].Name = "Thread " + x;
        indexThreads[x].Start();
    }
    // Union of every file name seen in any commit point so far.
    HashSet<string> allFiles = new HashSet<string>();
    DirectoryReader r = DirectoryReader.Open(d);
    while (Environment.TickCount < stopTime)
    {
        IndexCommit ic = r.IndexCommit;
        if (VERBOSE)
        {
            Console.WriteLine("TEST: check files: " + ic.FileNames);
        }
        allFiles.AddAll(ic.FileNames);
        // Make sure no old files were removed: every file ever observed in a
        // commit must still exist on disk.
        foreach (string fileName in allFiles)
        {
            Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
        }
        // Advance to the newest commit, if one appeared since the last poll.
        DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
        if (r2 != null)
        {
            r.Dispose();
            r = r2;
        }
        Thread.Sleep(1);
    }
    r.Dispose();
    foreach (ThreadClass t in indexThreads)
    {
        t.Join();
    }
    w.Dispose();
    d.Dispose();
    // NoDeletionPolicy leaves every commit behind; clean the temp dir manually.
    System.IO.Directory.Delete(tmpDir.FullName, true);
}
示例2: SetUp
public override void SetUp()
{
    base.SetUp();
    // Build an index of the English spellings of 900..1111 (dashes turned
    // into spaces, commas dropped) plus a few hand-picked phrases, tokenized
    // on whitespace only.
    dir = NewDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true), Similarity, TimeZone);
    for (int value = 900; value < 1112; value++)
    {
        string spelled = Regex.Replace(Regex.Replace(English.IntToEnglish(value), "[-]", " "), "[,]", "");
        Document numberDoc = new Document();
        numberDoc.Add(NewTextField("numbers", spelled, Field.Store.NO));
        iw.AddDocument(numberDoc);
    }
    // Extra documents with terms that are not number spellings.
    string[] extraPhrases =
    {
        "thou hast sand betwixt thy toes",
        "hundredeight eightyeight yeight",
        "tres y cinco",
    };
    foreach (string phrase in extraPhrases)
    {
        Document phraseDoc = new Document();
        phraseDoc.Add(NewTextField("numbers", phrase, Field.Store.NO));
        iw.AddDocument(phraseDoc);
    }
    iw.Commit();
    iw.Dispose();
}
示例3: SetUp
public override void SetUp()
{
    base.SetUp();
    // Index 100 documents whose id/text/class fields all store term vectors
    // (with offsets and positions), then wrap the writer's reader as a
    // single composite reader.
    _dir = NewDirectory();
    _indexWriter = new RandomIndexWriter(Random(), _dir, new MockAnalyzer(Random()), Similarity, TimeZone);
    FieldType vectorizedType = new FieldType(TextField.TYPE_STORED);
    vectorizedType.StoreTermVectors = true;
    vectorizedType.StoreTermVectorOffsets = true;
    vectorizedType.StoreTermVectorPositions = true;
    Analyzer analyzer = new MockAnalyzer(Random());
    for (int docIndex = 0; docIndex < 100; docIndex++)
    {
        Document document = new Document();
        document.Add(new Field(_idFieldName, Random().toString(), vectorizedType));
        // Text field carries three concatenated values.
        document.Add(new Field(_textFieldName, Random().toString() + Random().toString() + Random().toString(), vectorizedType));
        document.Add(new Field(_classFieldName, Random().toString(), vectorizedType));
        _indexWriter.AddDocument(document, analyzer);
    }
    _indexWriter.Commit();
    _originalIndex = SlowCompositeReaderWrapper.Wrap(_indexWriter.Reader);
}
示例4: CreateRandomIndexes
private void CreateRandomIndexes(int maxSegments)
{
    // Builds a randomized index with several commit points and occasional
    // deletes; analyzer and writer share the same fixed seed so both see
    // identical randomness.
    // NOTE(review): maxSegments is unused in this body — confirm whether
    // callers rely on it.
    dir = NewDirectory();
    numDocs = AtLeast(150);
    int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
    ISet<string> randomTerms = new HashSet<string>();
    // Draw until we have exactly numTerms distinct simple strings.
    while (randomTerms.size() < numTerms)
    {
        randomTerms.add(TestUtil.RandomSimpleString(Random()));
    }
    terms = new List<string>(randomTerms);
    int seed = Random().Next();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed)));
    // Sorting merge policy is the subject under test.
    iwc.SetMergePolicy(TestSortingMergePolicy.NewSortingMergePolicy(sort));
    iw = new RandomIndexWriter(new Random(seed), dir, iwc);
    for (int i = 0; i < numDocs; ++i)
    {
        Document doc = RandomDocument();
        iw.AddDocument(doc);
        // Commit at the midpoint, and with probability 1/8 elsewhere — but
        // never after the final doc — to produce multiple segments.
        if (i == numDocs / 2 || (i != numDocs - 1 && Random().nextInt(8) == 0))
        {
            iw.Commit();
        }
        // With probability 1/15, delete all docs carrying a random known term.
        if (Random().nextInt(15) == 0)
        {
            string term = RandomInts.RandomFrom(Random(), terms);
            iw.DeleteDocuments(new Term("s", term));
        }
    }
    // NRT reader over committed plus still-buffered changes.
    reader = iw.Reader;
}
示例5: Test
public virtual void Test()
{
    // Index 100 docs under the Lucene46 codec with occasional commits,
    // then validate the codec file headers.
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetCodec(new Lucene46Codec());
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
    // One reusable Document whose field instances are mutated per iteration;
    // these fields should sometimes get term vectors, etc.
    Document template = new Document();
    Field id = NewStringField("id", "", Field.Store.NO);
    Field body = NewTextField("body", "", Field.Store.NO);
    Field docValues = new NumericDocValuesField("dv", 5);
    template.Add(id);
    template.Add(body);
    template.Add(docValues);
    for (int docNum = 0; docNum < 100; docNum++)
    {
        id.StringValue = Convert.ToString(docNum);
        body.StringValue = TestUtil.RandomUnicodeString(Random());
        writer.AddDocument(template);
        // Roughly 1-in-7 chance of cutting a commit point mid-stream.
        if (Random().Next(7) == 0)
        {
            writer.Commit();
        }
        // TODO: we should make a new format with a clean header so deletes
        // can be exercised here as well:
        // if (Random().nextInt(20) == 0) {
        //     riw.DeleteDocuments(new Term("id", Integer.toString(i)));
        // }
    }
    writer.Dispose();
    CheckHeaders(dir);
    dir.Dispose();
}
示例6: TestReuseDocsEnumNoReuse
public virtual void TestReuseDocsEnumNoReuse()
{
    // With the 4.0 postings format, asking for a DocsEnum while passing null
    // as the reuse argument must hand back a distinct instance per term.
    Directory dir = NewDirectory();
    Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
    int numdocs = AtLeast(20);
    CreateRandomIndex(numdocs, writer, Random());
    writer.Commit();
    DirectoryReader open = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext ctx in open.Leaves())
    {
        AtomicReader indexReader = (AtomicReader)ctx.Reader();
        Terms terms = indexReader.Terms("body");
        TermsEnum iterator = terms.Iterator(null);
        // Identity-keyed map: counts distinct DocsEnum object instances,
        // not value-equal ones.
        IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
        MatchNoBits bits = new MatchNoBits(indexReader.MaxDoc());
        while ((iterator.Next()) != null)
        {
            // Reuse arg is null, so each call should allocate a fresh enum;
            // liveDocs and flags are randomized per term.
            DocsEnum docs = iterator.Docs(Random().NextBoolean() ? bits : new MatchNoBits(indexReader.MaxDoc()), null, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
            enums[docs] = true;
        }
        // Exactly one distinct instance per term in the "body" field.
        Assert.AreEqual(terms.Size(), enums.Count);
    }
    IOUtils.Close(writer, open, dir);
}
示例7: TestFloatNorms
public virtual void TestFloatNorms()
{
    // A custom similarity (MySimProvider) encodes the field boost as the
    // norm; verify the norm read back bit-equals the boost that was set.
    Directory dir = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    Similarity provider = new MySimProvider(this);
    config.SetSimilarity(provider);
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
    LineFileDocs docs = new LineFileDocs(Random());
    int num = AtLeast(100);
    for (int i = 0; i < num; i++)
    {
        Document doc = docs.NextDoc();
        float nextFloat = (float)Random().NextDouble();
        // Cast to a double to get more precision output to the string.
        Field f = new TextField(FloatTestField, "" + (double)nextFloat, Field.Store.YES);
        f.Boost = nextFloat;
        doc.Add(f);
        writer.AddDocument(doc);
        // Strip the field again: the LineFileDocs Document is reused.
        doc.RemoveField(FloatTestField);
        // Rare mid-stream commits create multiple segments.
        if (Rarely())
        {
            writer.Commit();
        }
    }
    writer.Commit();
    writer.Dispose();
    AtomicReader open = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
    NumericDocValues norms = open.GetNormValues(FloatTestField);
    Assert.IsNotNull(norms);
    for (int i = 0; i < open.MaxDoc; i++)
    {
        Document document = open.Document(i);
        // Stored field holds the boost as text; the norm holds its raw
        // float bits.
        float expected = Convert.ToSingle(document.Get(FloatTestField));
        Assert.AreEqual(expected, Number.IntBitsToFloat((int)norms.Get(i)), 0.0f);
    }
    open.Dispose();
    dir.Dispose();
    docs.Dispose();
}
示例8: SetUp
public override void SetUp()
{
    base.SetUp();
    // Build a single-document index: one stored "UUID" field with value "1".
    Directory = NewDirectory();
    Writer = new RandomIndexWriter(Random(), Directory);
    Document seedDoc = new Document();
    seedDoc.Add(NewTextField("UUID", "1", Field.Store.YES));
    Writer.AddDocument(seedDoc);
    Writer.Commit();
}
示例9: TestBasic
public virtual void TestBasic()
{
    // SortedSet doc-values faceting over two documents: counts per dimension
    // and a drill-down query combining dims "a" and "b".
    AssumeTrue("Test requires SortedSetDV support", DefaultCodecSupportsSortedSet());
    Directory dir = NewDirectory();
    FacetsConfig config = new FacetsConfig();
    // Dimension "a" carries multiple values on a single document.
    config.SetMultiValued("a", true);
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
    Document doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo"));
    doc.Add(new SortedSetDocValuesFacetField("a", "bar"));
    doc.Add(new SortedSetDocValuesFacetField("a", "zoo"));
    doc.Add(new SortedSetDocValuesFacetField("b", "baz"));
    writer.AddDocument(config.Build(doc));
    // Randomly split the two documents across segments.
    if (Random().NextBoolean())
    {
        writer.Commit();
    }
    doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo"));
    writer.AddDocument(config.Build(doc));
    // NRT open
    IndexSearcher searcher = NewSearcher(writer.Reader);
    // Per-top-reader state:
    SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.IndexReader);
    FacetsCollector c = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), c);
    SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
    // "foo" appears in both docs; "bar"/"zoo" once each (value = 4 total).
    Assert.AreEqual("dim=a path=[] value=4 childCount=3\n foo (2)\n bar (1)\n zoo (1)\n", facets.GetTopChildren(10, "a").ToString());
    Assert.AreEqual("dim=b path=[] value=1 childCount=1\n baz (1)\n", facets.GetTopChildren(10, "b").ToString());
    // DrillDown: only the first document has both a:foo and b:baz.
    DrillDownQuery q = new DrillDownQuery(config);
    q.Add("a", "foo");
    q.Add("b", "baz");
    TopDocs hits = searcher.Search(q, 1);
    Assert.AreEqual(1, hits.TotalHits);
    IOUtils.Close(writer, searcher.IndexReader, dir);
}
示例10: TestBasic
public void TestBasic()
{
    // Index a generated corpus, then verify DocumentDictionary's iterator
    // returns every valid document exactly once with the expected term,
    // weight and payload, and never returns the invalid ones.
    Directory dir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwc.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
    KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, false);
    IDictionary<string, Document> docs = res.Value;
    List<string> invalidDocTerms = res.Key;
    foreach (Document doc in docs.Values)
    {
        writer.AddDocument(doc);
    }
    writer.Commit();
    writer.Dispose();
    IndexReader ir = DirectoryReader.Open(dir);
    IDictionary dictionary = new DocumentDictionary(ir, FIELD_NAME, WEIGHT_FIELD_NAME, PAYLOAD_FIELD_NAME);
    IInputIterator inputIterator = dictionary.EntryIterator;
    BytesRef f;
    while ((f = inputIterator.Next()) != null)
    {
        string field = f.Utf8ToString();
        // Single TryGetValue instead of ContainsKey + indexer + Remove
        // (three lookups), and fail with an assertion instead of an NRE
        // when the iterator yields an unknown term.
        Document doc;
        docs.TryGetValue(field, out doc);
        docs.Remove(field);
        assertNotNull(doc);
        assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
        IndexableField weightField = doc.GetField(WEIGHT_FIELD_NAME);
        // Missing weight field is treated as weight 0.
        assertEquals(inputIterator.Weight, (weightField != null) ? Convert.ToInt64(weightField.NumericValue) : 0);
        assertTrue(inputIterator.Payload.Equals(doc.GetField(PAYLOAD_FIELD_NAME).BinaryValue));
    }
    // Invalid terms must never have been consumed by the iterator above;
    // fail with an assertion (not KeyNotFoundException) if one is missing.
    foreach (string invalidTerm in invalidDocTerms)
    {
        Document invalid;
        docs.TryGetValue(invalidTerm, out invalid);
        docs.Remove(invalidTerm);
        assertNotNull(invalid);
    }
    // Every valid entry must have been consumed exactly once.
    assertTrue(!docs.Any());
    ir.Dispose();
    dir.Dispose();
}
示例11: TestBinary
public virtual void TestBinary()
{
    // Binary doc values read through MultiDocValues over many segments must
    // equal the values read from a single force-merged segment.
    Directory dir = NewDirectory();
    Document doc = new Document();
    BytesRef @ref = new BytesRef();
    Field field = new BinaryDocValuesField("bytes", @ref);
    doc.Add(field);
    IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
    iwc.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
    int numDocs = AtLeast(500);
    for (int i = 0; i < numDocs; i++)
    {
        // Mutate the shared BytesRef and re-add the same Document instance.
        @ref.CopyChars(TestUtil.RandomUnicodeString(Random()));
        iw.AddDocument(doc);
        // ~1/17 of the time, cut a commit point to create more segments.
        if (Random().Next(17) == 0)
        {
            iw.Commit();
        }
    }
    DirectoryReader ir = iw.Reader;   // multi-segment view
    iw.ForceMerge(1);
    DirectoryReader ir2 = iw.Reader;  // single-segment view after merge
    AtomicReader merged = GetOnlySegmentReader(ir2);
    iw.Dispose();
    BinaryDocValues multi = MultiDocValues.GetBinaryValues(ir, "bytes");
    BinaryDocValues single = merged.GetBinaryDocValues("bytes");
    BytesRef actual = new BytesRef();
    BytesRef expected = new BytesRef();
    // Per-document comparison of the two views.
    for (int i = 0; i < numDocs; i++)
    {
        single.Get(i, expected);
        multi.Get(i, actual);
        Assert.AreEqual(expected, actual);
    }
    ir.Dispose();
    ir2.Dispose();
    dir.Dispose();
}
示例12: TestEmptyReader
public void TestEmptyReader()
{
    // A committed-but-empty index must yield an iterator with no entries,
    // zero weight and a null payload.
    Directory dir = NewDirectory();
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    config.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter indexWriter = new RandomIndexWriter(Random(), dir, config);
    // Commit right away so the directory contains a (doc-less) index.
    indexWriter.Commit();
    indexWriter.Dispose();
    IndexReader reader = DirectoryReader.Open(dir);
    IDictionary dictionary = new DocumentValueSourceDictionary(reader, FIELD_NAME, new DoubleConstValueSource(10), PAYLOAD_FIELD_NAME);
    IInputIterator entries = dictionary.EntryIterator;
    assertNull(entries.Next());
    assertEquals(entries.Weight, 0);
    assertNull(entries.Payload);
    reader.Dispose();
    dir.Dispose();
}
示例13: TestNumerics
public virtual void TestNumerics()
{
    // Numeric doc values read through MultiDocValues over many segments
    // must agree with the values from a single force-merged segment.
    Directory dir = NewDirectory();
    Document template = new Document();
    Field numbersField = new NumericDocValuesField("numbers", 0);
    template.Add(numbersField);
    IndexWriterConfig config = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
    config.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
    int numDocs = AtLeast(500);
    for (int docNum = 0; docNum < numDocs; docNum++)
    {
        // Mutate the shared field and re-add the same Document instance.
        numbersField.LongValue = Random().NextLong();
        writer.AddDocument(template);
        // ~1/17 of the time, cut a commit point to create more segments.
        if (Random().Next(17) == 0)
        {
            writer.Commit();
        }
    }
    DirectoryReader multiSegmentReader = writer.Reader;
    writer.ForceMerge(1);
    DirectoryReader mergedView = writer.Reader;
    AtomicReader merged = GetOnlySegmentReader(mergedView);
    writer.Dispose();
    NumericDocValues multi = MultiDocValues.GetNumericValues(multiSegmentReader, "numbers");
    NumericDocValues single = merged.GetNumericDocValues("numbers");
    for (int docNum = 0; docNum < numDocs; docNum++)
    {
        Assert.AreEqual(single.Get(docNum), multi.Get(docNum));
    }
    multiSegmentReader.Dispose();
    mergedView.Dispose();
    dir.Dispose();
}
示例14: TestNestedDocScoringWithDeletes
public void TestNestedDocScoringWithDeletes()
{
    // Block-join scoring with ScoreMode.Total before and after deleting
    // child docs; NoMergePolicy keeps segments (and deletes) in place.
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
    // Cannot assert this since we use NoMergePolicy:
    w.DoRandomForceMergeAssert = false;
    // First block: two child jobs + parent resume (parent last in block).
    IList<Document> docs = new List<Document>();
    docs.Add(MakeJob("java", 2007));
    docs.Add(MakeJob("python", 2010));
    docs.Add(MakeResume("Lisa", "United Kingdom"));
    w.AddDocuments(docs);
    // Second block: three child jobs + parent resume.
    docs.Clear();
    docs.Add(MakeJob("c", 1999));
    docs.Add(MakeJob("ruby", 2005));
    docs.Add(MakeJob("java", 2006));
    docs.Add(MakeResume("Frank", "United States"));
    w.AddDocuments(docs);
    w.Commit();
    IndexSearcher s = NewSearcher(DirectoryReader.Open(dir));
    ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(NumericRangeQuery.NewIntRange("year", 1990, 2010, true, true), new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("docType", "resume")))), ScoreMode.Total);
    TopDocs topDocs = s.Search(q, 10);
    // ScoreMode.Total: parent score = number of matching children (3 vs 2).
    assertEquals(2, topDocs.TotalHits);
    assertEquals(6, topDocs.ScoreDocs[0].Doc);
    assertEquals(3.0f, topDocs.ScoreDocs[0].Score, 0.0f);
    assertEquals(2, topDocs.ScoreDocs[1].Doc);
    assertEquals(2.0f, topDocs.ScoreDocs[1].Score, 0.0f);
    s.IndexReader.Dispose();
    // Delete the "java" child in each block; each parent loses one child.
    w.DeleteDocuments(new Term("skill", "java"));
    w.Dispose();
    s = NewSearcher(DirectoryReader.Open(dir));
    topDocs = s.Search(q, 10);
    // Same parents, each score reduced by one.
    assertEquals(2, topDocs.TotalHits);
    assertEquals(6, topDocs.ScoreDocs[0].Doc);
    assertEquals(2.0f, topDocs.ScoreDocs[0].Score, 0.0f);
    assertEquals(2, topDocs.ScoreDocs[1].Doc);
    assertEquals(1.0f, topDocs.ScoreDocs[1].Score, 0.0f);
    s.IndexReader.Dispose();
    dir.Dispose();
}
示例15: TestTermUTF16SortOrder
public virtual void TestTermUTF16SortOrder()
{
    // Indexed terms must sort in code-point (UTF-8) order even when the
    // strings contain surrogate pairs, which sort differently under raw
    // UTF-16 comparison.
    Random rnd = Random();
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(rnd, dir, Similarity, TimeZone);
    Document d = new Document();
    // Single segment
    Field f = NewStringField("f", "", Field.Store.NO);
    d.Add(f);
    char[] chars = new char[2];
    HashSet<string> allTerms = new HashSet<string>();
    int num = AtLeast(200);
    for (int i = 0; i < num; i++)
    {
        string s;
        if (rnd.NextBoolean())
        {
            // Single char
            if (rnd.NextBoolean())
            {
                // Above surrogates
                chars[0] = (char)GetInt(rnd, 1 + UnicodeUtil.UNI_SUR_LOW_END, 0xffff);
            }
            else
            {
                // Below surrogates
                chars[0] = (char)GetInt(rnd, 0, UnicodeUtil.UNI_SUR_HIGH_START - 1);
            }
            s = new string(chars, 0, 1);
        }
        else
        {
            // Surrogate pair: high surrogate followed by low surrogate.
            chars[0] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_HIGH_START, UnicodeUtil.UNI_SUR_HIGH_END);
            Assert.IsTrue(((int)chars[0]) >= UnicodeUtil.UNI_SUR_HIGH_START && ((int)chars[0]) <= UnicodeUtil.UNI_SUR_HIGH_END);
            chars[1] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_LOW_START, UnicodeUtil.UNI_SUR_LOW_END);
            s = new string(chars, 0, 2);
        }
        allTerms.Add(s);
        // Reuse the same field/doc instance with the new term value.
        f.StringValue = s;
        writer.AddDocument(d);
        // Commit every 42 docs to spread terms across segments.
        if ((1 + i) % 42 == 0)
        {
            writer.Commit();
        }
    }
    IndexReader r = writer.Reader;
    // Test each sub-segment
    foreach (AtomicReaderContext ctx in r.Leaves)
    {
        CheckTermsOrder(ctx.Reader, allTerms, false);
    }
    CheckTermsOrder(r, allTerms, true);
    // Test multi segment
    r.Dispose();
    writer.ForceMerge(1);
    // Test single segment
    r = writer.Reader;
    CheckTermsOrder(r, allTerms, true);
    r.Dispose();
    writer.Dispose();
    dir.Dispose();
}