本文整理汇总了C#中Lucene.Net.Store.MockRAMDirectory.Close方法的典型用法代码示例。如果您正苦于以下问题:C# Lucene.Net.Store.MockRAMDirectory.Close方法的具体用法?C# Lucene.Net.Store.MockRAMDirectory.Close怎么用?C# Lucene.Net.Store.MockRAMDirectory.Close使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Store.MockRAMDirectory
的用法示例。
在下文中一共展示了Lucene.Net.Store.MockRAMDirectory.Close方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestLucene
public virtual void TestLucene()
{
    // Build two independent indexes, merge them via AddIndexes into a third
    // directory, and verify the merged result is valid.
    const int docsPerIndex = 100;

    Directory indexA = new MockRAMDirectory();
    Directory indexB = new MockRAMDirectory();

    // Populate and sanity-check each source index before merging.
    FillIndex(indexA, 0, docsPerIndex);
    if (VerifyIndex(indexA, 0))
    {
        Assert.Fail("Index a is invalid");
    }

    FillIndex(indexB, docsPerIndex, docsPerIndex);
    if (VerifyIndex(indexB, docsPerIndex))
    {
        Assert.Fail("Index b is invalid");
    }

    // Merge both source indexes into a third directory.
    Directory merged = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.SetMergeFactor(2);
    writer.AddIndexes(new Directory[] { indexA, indexB });
    writer.Close();

    bool mergedIsBroken = VerifyIndex(merged, 0);
    merged.Close();
    Assert.IsFalse(mergedIsBroken, "The merged index is invalid");
}
示例2: TestLucene
public virtual void TestLucene()
{
    // Build two independent indexes, merge them with AddIndexesNoOptimize
    // followed by an explicit Optimize(), and verify the merged result.
    int docCount = 100;

    Directory indexA = new MockRAMDirectory();
    Directory indexB = new MockRAMDirectory();

    FillIndex(indexA, 0, docCount);
    Assert.IsFalse(VerifyIndex(indexA, 0), "Index a is invalid");

    FillIndex(indexB, docCount, docCount);
    Assert.IsFalse(VerifyIndex(indexB, docCount), "Index b is invalid");

    Directory merged = new MockRAMDirectory();
    var writer = new IndexWriter(merged, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.MergeFactor = 2;
    writer.AddIndexesNoOptimize(new[] { indexA, indexB });
    writer.Optimize();
    writer.Close();

    bool mergedIsBroken = VerifyIndex(merged, 0);
    merged.Close();
    Assert.IsFalse(mergedIsBroken, "The merged index is invalid");
}
示例3: TestCloneNoChangesStillReadOnly
public virtual void TestCloneNoChangesStillReadOnly()
{
    // A writable reader cloned with readOnly=false must remain writable:
    // deleting through the clone should succeed.
    Directory dir = new MockRAMDirectory();
    TestIndexReaderReopen.CreateIndex(dir, true);

    IndexReader original = IndexReader.Open(dir, false);
    IndexReader clone = original.Clone(false);
    Assert.IsTrue(DeleteWorked(1, clone), "deleting from the cloned should have worked");

    original.Close();
    clone.Close();
    dir.Close();
}
示例4: TestCloneReadOnlySegmentReader
public virtual void TestCloneReadOnlySegmentReader()
{
    // Cloning a writable reader with readOnly=true must yield a reader
    // that reports itself read-only and rejects deletes.
    Directory dir = new MockRAMDirectory();
    TestIndexReaderReopen.CreateIndex(dir, false);

    IndexReader writableReader = IndexReader.Open(dir, false);
    IndexReader readOnlyClone = writableReader.Clone(true);
    Assert.IsTrue(IsReadOnly(readOnlyClone), "reader isn't read only");
    Assert.IsFalse(DeleteWorked(1, readOnlyClone), "deleting from the original should not have worked");

    writableReader.Close();
    readOnlyClone.Close();
    dir.Close();
}
示例5: TestBinaryFieldInIndex
public virtual void TestBinaryFieldInIndex()
{
    // Round-trip a binary stored field and a plain stored string field
    // through a RAM index and verify both come back unchanged.
    byte[] binaryBytes = System.Text.Encoding.UTF8.GetBytes(binaryValStored);
    IFieldable binaryFldStored = new Field("binaryStored", binaryBytes, Field.Store.YES);
    IFieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);

    // A binary field must be stored; Store.NO is rejected at construction.
    Assert.Throws<ArgumentException>(
        () => new Field("fail", System.Text.Encoding.UTF8.GetBytes(binaryValStored), Field.Store.NO));

    Document doc = new Document();
    doc.Add(binaryFldStored);
    doc.Add(stringFldStored);
    // Both fields should be registered on the document.
    Assert.AreEqual(2, doc.fields_ForNUnit.Count);

    // Index the document into a RAM directory.
    MockRAMDirectory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(doc);
    writer.Close();

    // Open a reader and fetch the document back.
    IndexReader reader = IndexReader.Open(dir, false);
    Document docFromReader = reader.Document(0);
    Assert.IsTrue(docFromReader != null);

    // The binary stored field must decode to the original string.
    System.String binaryRoundTrip = System.Text.Encoding.UTF8.GetString(docFromReader.GetBinaryValue("binaryStored"));
    Assert.IsTrue(binaryRoundTrip.Equals(binaryValStored));

    // The plain stored string field must match its original content too.
    System.String stringRoundTrip = docFromReader.Get("stringStored");
    Assert.IsTrue(stringRoundTrip.Equals(binaryValStored));

    // Deleting the only document leaves the index empty.
    reader.DeleteDocument(0);
    Assert.AreEqual(0, reader.NumDocs());
    reader.Close();
    dir.Close();
}
示例6: TestIndexing
public virtual void TestIndexing()
{
    // Stress test: four threads concurrently add/delete documents through a
    // shared IndexWriter for ~5 seconds while the writer pools readers.
    Directory mainDir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.UseCompoundFile = false;
    IndexReader reader = writer.GetReader(); // start pooling readers
    reader.Close();
    writer.MergeFactor = 2;
    writer.SetMaxBufferedDocs(10);

    // Start the worker threads; x % 2 selects the thread's workload variant.
    RunThread[] indexThreads = new RunThread[4];
    for (int x = 0; x < indexThreads.Length; x++)
    {
        indexThreads[x] = new RunThread(this, x % 2, writer);
        indexThreads[x].Name = "Thread " + x;
        indexThreads[x].Start();
    }

    // Let the threads run for a fixed wall-clock duration of 5 seconds.
    long startTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
    long duration = 5 * 1000;
    while (((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) - startTime) < duration)
    {
        // 10000 * 100 = 1,000,000 ticks of 100 ns each = 100 ms per poll.
        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 100));
    }

    // Signal the threads to stop and tally their work.
    // NOTE(review): ex and the counters are read BEFORE Join(), so this
    // relies on the stop flag being observed promptly by each thread —
    // confirm RunThread's memory-visibility guarantees.
    int delCount = 0;
    int addCount = 0;
    for (int x = 0; x < indexThreads.Length; x++)
    {
        indexThreads[x].run_Renamed_Field = false;
        Assert.IsTrue(indexThreads[x].ex == null);
        addCount += indexThreads[x].addCount;
        delCount += indexThreads[x].delCount;
    }
    // Wait for all workers to finish before closing the shared writer.
    for (int x = 0; x < indexThreads.Length; x++)
    {
        indexThreads[x].Join();
    }
    //System.out.println("addCount:"+addCount);
    //System.out.println("delCount:"+delCount);
    writer.Close();
    mainDir.Close();
}
示例7: TestMissingTerms
public virtual void TestMissingTerms()
{
    // FieldCacheTermsFilter must match exactly the documents whose terms
    // exist in the index: indexed terms are multiples of 10, so "5" matches
    // nothing, "10" matches one doc, and {"10","20"} matches two docs.
    System.String fieldName = "field1";
    MockRAMDirectory rd = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(rd, new KeywordAnalyzer(), MaxFieldLength.UNLIMITED);
    for (int i = 0; i < 100; i++)
    {
        Document doc = new Document();
        int term = i * 10; // terms are units of 10
        doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.NOT_ANALYZED));
        w.AddDocument(doc);
    }
    w.Close();

    IndexReader reader = IndexReader.Open(rd, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    int numDocs = reader.NumDocs();
    MatchAllDocsQuery q = new MatchAllDocsQuery();
    ScoreDoc[] results;

    // Use a typed List<string> instead of the non-generic ArrayList: no
    // boxing and no ToArray(typeof(string)) casts.
    var terms = new System.Collections.Generic.List<System.String> { "5" };
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
    Assert.AreEqual(0, results.Length, "Must match nothing");

    terms = new System.Collections.Generic.List<System.String> { "10" };
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
    Assert.AreEqual(1, results.Length, "Must match 1");

    terms = new System.Collections.Generic.List<System.String> { "10", "20" };
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
    Assert.AreEqual(2, results.Length, "Must match 2");

    reader.Close();
    rd.Close();
}
示例8: TestReuseAcrossWriters
public virtual void TestReuseAcrossWriters()
{
    // A SnapshotDeletionPolicy snapshot must keep its commit's files
    // copyable across multiple writer sessions; after Release() the files
    // may be deleted, so copying is expected to fail.
    Directory dir = new MockRAMDirectory();
    SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    IndexWriter writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
    // Force frequent commits
    writer.SetMaxBufferedDocs(2);
    Document doc = new Document();
    doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
    for (int i = 0; i < 7; i++)
        writer.AddDocument(doc);

    IndexCommit cp = (IndexCommit) dp.Snapshot();
    CopyFiles(dir, cp);
    writer.Close();
    CopyFiles(dir, cp);

    // The snapshot must survive reopening a writer on the same directory.
    writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
    CopyFiles(dir, cp);
    for (int i = 0; i < 7; i++)
        writer.AddDocument(doc);
    CopyFiles(dir, cp);
    writer.Close();
    CopyFiles(dir, cp);

    // After Release() the policy may delete the snapshotted commit's files,
    // so a further copy attempt must raise an IOException.
    dp.Release();
    writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
    writer.Close();
    try
    {
        CopyFiles(dir, cp);
        Assert.Fail("did not hit expected IOException");
    }
    catch (System.IO.IOException)
    {
        // expected — the exception instance is not needed (the original
        // `catch (IOException ioe)` produced an unused-variable warning).
    }
    dir.Close();
}
示例9: TestNullOrSubScorer
public virtual void TestNullOrSubScorer()
{
    // BooleanQuery and DisjunctionMaxQuery must tolerate sub-queries whose
    // scorer is null (a PhraseQuery with no terms produces one).
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);

    Document doc = new Document();
    doc.Add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
    w.AddDocument(doc);

    IndexReader r = w.GetReader();
    IndexSearcher s = new IndexSearcher(r);

    // SHOULD clause + null-scorer SHOULD clause: the matching clause wins.
    BooleanQuery boolQuery = new BooleanQuery();
    boolQuery.Add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
    PhraseQuery emptyPhrase = new PhraseQuery(); // no terms => null scorer
    boolQuery.Add(emptyPhrase, BooleanClause.Occur.SHOULD);
    Assert.AreEqual(1, s.Search(boolQuery, 10).TotalHits);

    // A MUST clause whose scorer is null makes the whole query match nothing.
    boolQuery = new BooleanQuery();
    emptyPhrase = new PhraseQuery();
    boolQuery.Add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
    boolQuery.Add(emptyPhrase, BooleanClause.Occur.MUST);
    Assert.AreEqual(0, s.Search(boolQuery, 10).TotalHits);

    // DisjunctionMax still matches despite the null-scorer disjunct.
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
    dmq.Add(new TermQuery(new Term("field", "a")));
    dmq.Add(emptyPhrase);
    Assert.AreEqual(1, s.Search(dmq, 10).TotalHits);

    r.Close();
    w.Close();
    dir.Close();
}
示例10: TestReuseAcrossWriters
public virtual void TestReuseAcrossWriters()
{
    // Take a snapshot of the deletion policy, then repeatedly copy the
    // snapshotted commit's files while writers come and go; once Release()
    // is called, copying must fail because the files can be deleted.
    Directory dir = new MockRAMDirectory();
    SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
    // Force frequent flushes
    writer.SetMaxBufferedDocs(2);

    Document doc = new Document();
    doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

    // Add 7 docs, committing on every even iteration.
    for (int docNum = 0; docNum < 7; docNum++)
    {
        writer.AddDocument(doc);
        if (docNum % 2 == 0)
        {
            writer.Commit();
        }
    }

    IndexCommit cp = dp.Snapshot();
    CopyFiles(dir, cp);
    writer.Close();
    CopyFiles(dir, cp);

    // Reopen a writer on the same directory: the snapshot must stay valid.
    writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
    CopyFiles(dir, cp);
    for (int docNum = 0; docNum < 7; docNum++)
    {
        writer.AddDocument(doc);
        if (docNum % 2 == 0)
        {
            writer.Commit();
        }
    }
    CopyFiles(dir, cp);
    writer.Close();
    CopyFiles(dir, cp);

    // Releasing the snapshot lets the policy delete the commit's files.
    dp.Release();
    writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.Close();
    Assert.Throws<System.IO.FileNotFoundException>(() => CopyFiles(dir, cp), "did not hit expected IOException");
    dir.Close();
}
示例11: TestFilterIndexReader_Renamed
public virtual void TestFilterIndexReader_Renamed()
{
    // Index three small documents, then wrap the reader in TestReader and
    // verify the filtered view it exposes.
    RAMDirectory directory = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

    string[] contents = { "one two", "one three", "two four" };
    foreach (string text in contents)
    {
        Document d = new Document();
        d.Add(new Field("default", text, Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(d);
    }
    writer.Close();

    IndexReader reader = new TestReader(IndexReader.Open(directory));
    Assert.IsTrue(reader.IsOptimized());

    // Every term the filtered reader exposes must contain the letter 'e'.
    TermEnum terms = reader.Terms();
    while (terms.Next())
    {
        Assert.IsTrue(terms.Term().Text().IndexOf('e') != -1);
    }
    terms.Close();

    // Positions for "one" are only reported on odd-numbered documents.
    TermPositions positions = reader.TermPositions(new Term("default", "one"));
    while (positions.Next())
    {
        Assert.IsTrue((positions.Doc() % 2) == 1);
    }

    // TermDocs(null) enumerates all docs in order, each with frequency 1.
    int NUM_DOCS = 3;
    TermDocs td = reader.TermDocs(null);
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Assert.IsTrue(td.Next());
        Assert.AreEqual(i, td.Doc());
        Assert.AreEqual(1, td.Freq());
    }
    td.Close();
    reader.Close();
    directory.Close();
}
示例12: TestOperationsOnDiskFull
/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
/// </summary>
private void TestOperationsOnDiskFull(bool updates)
{
    // NOTE(review): this excerpt is truncated by the source page — the tail
    // of the method (including uses of updates/START_COUNT/END_COUNT/err)
    // is not visible here.
    bool debug = false;
    Term searchTerm = new Term("content", "aaa");
    int START_COUNT = 157;
    int END_COUNT = 144;
    // Pass 0 runs with autoCommit enabled, pass 1 without.
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        // First build up a starting index:
        MockRAMDirectory startDir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 157; i++)
        {
            Document d = new Document();
            d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
            d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
            writer.AddDocument(d);
        }
        writer.Close();
        // Start with barely enough free space, then grow it each cycle
        // until the operations complete without a disk-full error.
        long diskUsage = startDir.SizeInBytes();
        long diskFree = diskUsage + 10;
        System.IO.IOException err = null;
        bool done = false;
        // Iterate w/ ever increasing free disk space:
        while (!done)
        {
            MockRAMDirectory dir = new MockRAMDirectory(startDir);
            dir.SetPreventDoubleWrite(false);
            IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
            modifier.SetMaxBufferedDocs(1000); // use flush or close
            modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
            // For each disk size, first try to commit against
            // dir that will hit random IOExceptions & disk
            // full; after, give it infinite disk space & turn
            // off random IOExceptions & retry w/ same reader:
            bool success = false;
            for (int x = 0; x < 2; x++)
            {
                double rate = 0.1;
                double diskRatio = ((double) diskFree) / diskUsage;
                long thisDiskFree;
                System.String testName;
                if (0 == x)
                {
                    // First attempt: constrained disk plus random IO errors;
                    // ease off the injected-error rate as free space grows.
                    thisDiskFree = diskFree;
                    if (diskRatio >= 2.0)
                    {
                        rate /= 2;
                    }
                    if (diskRatio >= 4.0)
                    {
                        rate /= 2;
                    }
                    if (diskRatio >= 6.0)
                    {
                        rate = 0.0;
                    }
                    if (debug)
                    {
                        System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
                    }
                    testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
                }
                else
                {
                    // Second attempt: unlimited space, no injected errors.
                    thisDiskFree = 0;
                    rate = 0.0;
                    if (debug)
                    {
                        System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
                    }
                    testName = "reader re-use after disk full";
                }
                dir.SetMaxSizeInBytes(thisDiskFree);
                dir.SetRandomIOExceptionRate(rate, diskFree);
                try
                {
                    if (0 == x)
                    {
                        int docId = 12;
                        for (int i = 0; i < 13; i++)
                        {
                            //......... remainder of this method omitted in this excerpt .........
示例13: TestDeleteAll
public virtual void TestDeleteAll()
{
    // DeleteAll() is buffered until Commit(): readers opened before the
    // commit still see the old documents; after the commit only documents
    // added post-DeleteAll survive. Exercised with and without autoCommit.
    for (int pass = 0; pass < 2; pass++)
    {
        bool autoCommit = (0 == pass);
        Directory dir = new MockRAMDirectory();
        IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
        modifier.SetMaxBufferedDocs(2);
        modifier.SetMaxBufferedDeleteTerms(2);

        // Commit 7 documents and confirm a reader sees them.
        int id = 0;
        int baseValue = 100;
        for (int i = 0; i < 7; i++)
        {
            AddDoc(modifier, ++id, baseValue);
        }
        modifier.Commit();

        IndexReader reader = IndexReader.Open(dir);
        Assert.AreEqual(7, reader.NumDocs());
        reader.Close();

        // Buffer one more doc, then delete everything (still uncommitted).
        AddDoc(modifier, 99, baseValue);
        modifier.DeleteAll();

        // The delete-all is not on disk yet: a fresh reader still sees 7.
        reader = IndexReader.Open(dir);
        Assert.AreEqual(7, reader.NumDocs());
        reader.Close();

        // Add and update a doc after the delete-all, before the commit.
        AddDoc(modifier, 101, baseValue);
        UpdateDoc(modifier, 102, baseValue);

        // Commit the delete-all: only the two post-delete docs remain.
        modifier.Commit();
        reader = IndexReader.Open(dir);
        Assert.AreEqual(2, reader.NumDocs());
        reader.Close();

        modifier.Close();
        dir.Close();
    }
}
示例14: TestCloneWriteToOrig
public virtual void TestCloneWriteToOrig()
{
    // Deleting through the ORIGINAL reader must still work after it has
    // been cloned.
    Directory dir = new MockRAMDirectory();
    TestIndexReaderReopen.CreateIndex(dir, true);

    IndexReader original = IndexReader.Open(dir, false);
    IndexReader clone = original.Clone(false);
    Assert.IsTrue(DeleteWorked(1, original), "deleting from the original should have worked");

    original.Close();
    clone.Close();
    dir.Close();
}
示例15: TestCloseStoredFields
public virtual void TestCloseStoredFields()
{
    // Close the original reader before its clone; neither close should
    // throw even though they share the index's stored-fields files.
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    w.UseCompoundFile = false;

    Document doc = new Document();
    doc.Add(new Field("field", "yes it's stored", Field.Store.YES, Field.Index.ANALYZED));
    w.AddDocument(doc);
    w.Close();

    IndexReader original = IndexReader.Open(dir, false);
    IndexReader clone = original.Clone(false);
    original.Close();
    clone.Close();
    dir.Close();
}