This article collects typical usage examples of the C# method Lucene.Net.Search.IndexSearcher.Search. If you are wondering what Lucene.Net.Search.IndexSearcher.Search does in C#, how to call it, or what working code looks like, the curated examples below may help. You can also explore further usage of the containing class, Lucene.Net.Search.IndexSearcher.
The following presents 15 code examples of the Lucene.Net.Search.IndexSearcher.Search method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better C# examples.
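Before the numbered examples, here is a minimal, self-contained sketch of the most common calling pattern, using the TopDocs-returning overload Search(Query, int). It is only a sketch under the 2.9-era Lucene.Net API used by most examples on this page: member casing (ScoreDocs/scoreDocs, Doc/doc, Score/score) varies between Lucene.Net releases, and the field name "content" is purely illustrative.
Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
Directory directory = new RAMDirectory();                        // in-memory index, for illustration only
IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.Add(new Field("content", "hello lucene world", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Close();
IndexSearcher searcher = new IndexSearcher(directory, true);     // read-only searcher
Query query = new TermQuery(new Term("content", "hello"));
TopDocs topDocs = searcher.Search(query, 10);                    // Search(Query, int n): at most the n top hits
foreach (ScoreDoc sd in topDocs.ScoreDocs)
{
    Document hit = searcher.Doc(sd.Doc);                         // fetch the stored document for each hit
    System.Console.WriteLine(hit.Get("content") + " (score=" + sd.Score + ")");
}
searcher.Close();
directory.Close();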
Example 1: TestDemo_Renamed_Method
public virtual void TestDemo_Renamed_Method()
{
Analyzer analyzer = new StandardAnalyzer();
// Store the index in memory:
Directory directory = new RAMDirectory();
// To store an index on disk, use this instead (note that the
// parameter true will overwrite the index in that directory
// if one exists):
//Directory directory = FSDirectory.getDirectory("/tmp/testindex", true);
IndexWriter iwriter = new IndexWriter(directory, analyzer, true);
iwriter.SetMaxFieldLength(25000);
Document doc = new Document();
System.String text = "This is the text to be indexed.";
doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.TOKENIZED));
iwriter.AddDocument(doc);
iwriter.Close();
// Now search the index:
IndexSearcher isearcher = new IndexSearcher(directory);
// Parse a simple query that searches for "text":
Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("fieldname", analyzer);
Query query = parser.Parse("text");
Hits hits = isearcher.Search(query);
Assert.AreEqual(1, hits.Length());
// Iterate through the results:
for (int i = 0; i < hits.Length(); i++)
{
Document hitDoc = hits.Doc(i);
Assert.AreEqual("This is the text to be indexed.", hitDoc.Get("fieldname"));
}
isearcher.Close();
directory.Close();
}
Example 2: TestDemo_Renamed
public virtual void TestDemo_Renamed()
{
Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
// Store the index in memory:
Directory directory = new RAMDirectory();
// To store an index on disk, use this instead:
//Directory directory = FSDirectory.open("/tmp/testindex");
IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
Document doc = new Document();
System.String text = "This is the text to be indexed.";
doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
iwriter.AddDocument(doc);
iwriter.Close();
// Now search the index:
IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
// Parse a simple query that searches for "text":
QueryParser parser = new QueryParser("fieldname", analyzer);
Query query = parser.Parse("text");
ScoreDoc[] hits = isearcher.Search(query, null, 1000).scoreDocs;
Assert.AreEqual(1, hits.Length);
// Iterate through the results:
for (int i = 0; i < hits.Length; i++)
{
Document hitDoc = isearcher.Doc(hits[i].doc);
Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
}
isearcher.Close();
directory.Close();
}
Example 3: TestIterator
public virtual void TestIterator()
{
RAMDirectory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.Add(new Field("field", "iterator test doc 1", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
doc = new Document();
doc.Add(new Field("field", "iterator test doc 2", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Close();
_TestUtil.CheckIndex(directory);
IndexSearcher searcher = new IndexSearcher(directory);
Hits hits = searcher.Search(new TermQuery(new Term("field", "iterator")));
HitIterator iterator = (HitIterator) hits.Iterator();
Assert.AreEqual(2, iterator.Length());
Assert.IsTrue(iterator.MoveNext());
Hit hit = (Hit) iterator.Current;
Assert.AreEqual("iterator test doc 1", hit.Get("field"));
Assert.IsTrue(iterator.MoveNext());
hit = (Hit) iterator.Current;
Assert.AreEqual("iterator test doc 2", hit.GetDocument().Get("field"));
Assert.IsFalse(iterator.MoveNext());
bool caughtException = false;
try
{
System.Object generatedAux = iterator.Current;
}
catch (System.ArgumentOutOfRangeException e)
{
Assert.IsTrue(true);
caughtException = true;
}
Assert.IsTrue(caughtException);
}
Example 4: PreviouslyIndexed
public static bool PreviouslyIndexed(string url)
{
string indexFileLocation = indexDir;
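// FSDirectory.GetDirectory(path, create): passing false opens the existing index instead of creating a new one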
Lucene.Net.Store.Directory dir = Lucene.Net.Store.FSDirectory.GetDirectory(indexFileLocation, false);
Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(dir);
Lucene.Net.Search.Hits hits = null;
try
{
Lucene.Net.Search.Query query = new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("url", url));
hits = searcher.Search(query);
}
catch { } // swallow any search error; a failed lookup is treated as "not previously indexed"
finally
{
searcher.Close();
}
return hits != null && hits.Length() > 0; // guard against a failed search leaving hits null
}
Example 5: SearchProjects
public static List<IndexedItem> SearchProjects(string s)
{
List<IndexedItem> retVal = new List<IndexedItem>();
string indexFileLocation = indexDir;
Lucene.Net.Store.Directory dir = Lucene.Net.Store.FSDirectory.GetDirectory(indexFileLocation, false);
Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(dir);
try
{
Lucene.Net.Search.Query query = new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("content", s));
query = query.Combine(new Lucene.Net.Search.Query[] { query, new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("url", fromUrl)) });
query = query.Combine(new Lucene.Net.Search.Query[] { query, new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("title", s)) });
//execute the query
Lucene.Net.Search.Hits hits = searcher.Search(query);
//iterate over the results.
for (int i = 0; i < hits.Length(); i++)
{
Lucene.Net.Documents.Document doc = hits.Doc(i);
string article = doc.Get("content");
string title = doc.Get("title");
string url = doc.Get("url");
retVal.Add(new IndexedItem { Article = article, Href = url, Title = title });
}
foreach (IndexedItem ind in retVal)
{
Console.WriteLine(ind.Href);
}
retVal = retVal.Distinct().ToList();
}
catch { }
finally
{
searcher.Close();
}
return retVal;
}
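For reference, the three Combine calls in this example OR the term queries together in a rather roundabout way; the conventional approach is a single BooleanQuery with SHOULD clauses. A minimal sketch under the same assumptions as the example above (fully-qualified names and the Hits-based Search overload match the Lucene.Net 2.x-era API used here; fromUrl is the same field-level variable the original code references):
Lucene.Net.Search.BooleanQuery bq = new Lucene.Net.Search.BooleanQuery();
bq.Add(new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("content", s)),
       Lucene.Net.Search.BooleanClause.Occur.SHOULD);
bq.Add(new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("title", s)),
       Lucene.Net.Search.BooleanClause.Occur.SHOULD);
bq.Add(new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("url", fromUrl)),
       Lucene.Net.Search.BooleanClause.Occur.SHOULD);
// any document matching at least one SHOULD clause is returned, scored higher the more clauses it matches
Lucene.Net.Search.Hits hits = searcher.Search(bq);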
Example 6: DoTestRank
// Test that queries based on reverse/ordFieldScore score correctly
private void DoTestRank(System.String field, bool inOrder)
{
IndexSearcher s = new IndexSearcher(dir, true);
ValueSource vs;
if (inOrder)
{
vs = new OrdFieldSource(field);
}
else
{
vs = new ReverseOrdFieldSource(field);
}
Query q = new ValueSourceQuery(vs);
Log("test: " + q);
QueryUtils.Check(q, s);
ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
Assert.AreEqual(N_DOCS, h.Length, "All docs should be matched!");
System.String prevID = inOrder ? "IE" : "IC"; // "IE" sorts after all doc ids in this test ("ID0001", etc.); "IC" sorts before them
for (int i = 0; i < h.Length; i++)
{
System.String resID = s.Doc(h[i].Doc).Get(ID_FIELD);
Log(i + ". score=" + h[i].Score + " - " + resID);
Log(s.Explain(q, h[i].Doc));
if (inOrder)
{
Assert.IsTrue(String.CompareOrdinal(resID, prevID) < 0, "res id " + resID + " should be < prev res id " + prevID);
}
else
{
Assert.IsTrue(String.CompareOrdinal(resID, prevID) > 0, "res id " + resID + " should be > prev res id " + prevID);
}
prevID = resID;
}
}
Example 7: TestAddIndexOnDiskFull
public virtual void TestAddIndexOnDiskFull()
{
int START_COUNT = 57;
int NUM_DIR = 50;
int END_COUNT = START_COUNT + NUM_DIR * 25;
bool debug = false;
// Build up a bunch of dirs that have indexes which we
// will then merge together by calling addIndexes(*):
Directory[] dirs = new Directory[NUM_DIR];
long inputDiskUsage = 0;
for (int i = 0; i < NUM_DIR; i++)
{
dirs[i] = new RAMDirectory();
IndexWriter writer = new IndexWriter(dirs[i], new WhitespaceAnalyzer(), true);
for (int j = 0; j < 25; j++)
{
AddDocWithIndex(writer, 25 * i + j);
}
writer.Close();
System.String[] files = dirs[i].List();
for (int j = 0; j < files.Length; j++)
{
inputDiskUsage += dirs[i].FileLength(files[j]);
}
}
// Now, build a starting index that has START_COUNT docs. We
// will then try to addIndexes into a copy of this:
RAMDirectory startDir = new RAMDirectory();
IndexWriter writer2 = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
for (int j = 0; j < START_COUNT; j++)
{
AddDocWithIndex(writer2, j);
}
writer2.Close();
// Make sure starting index seems to be working properly:
Term searchTerm = new Term("content", "aaa");
IndexReader reader = IndexReader.Open(startDir);
Assert.AreEqual(57, reader.DocFreq(searchTerm), "first docFreq");
IndexSearcher searcher = new IndexSearcher(reader);
Hits hits = searcher.Search(new TermQuery(searchTerm));
Assert.AreEqual(57, hits.Length(), "first number of hits");
searcher.Close();
reader.Close();
// Iterate with larger and larger amounts of free
// disk space. With little free disk space,
// addIndexes will certainly run out of space &
// fail. Verify that when this happens, index is
// not corrupt and index in fact has added no
// documents. Then, we increase disk space by 2000
// bytes each iteration. At some point there is
// enough free disk space and addIndexes should
// succeed and index should show all documents were
// added.
// String[] files = startDir.list();
long diskUsage = startDir.SizeInBytes();
long startDiskUsage = 0;
System.String[] files2 = startDir.List();
for (int i = 0; i < files2.Length; i++)
{
startDiskUsage += startDir.FileLength(files2[i]);
}
for (int iter = 0; iter < 6; iter++)
{
if (debug)
System.Console.Out.WriteLine("TEST: iter=" + iter);
// Start with 100 bytes more than we are currently using:
long diskFree = diskUsage + 100;
bool autoCommit = iter % 2 == 0;
int method = iter / 2;
bool success = false;
bool done = false;
System.String methodName;
if (0 == method)
{
methodName = "addIndexes(Directory[])";
}
else if (1 == method)
{
methodName = "addIndexes(IndexReader[])";
}
else
{
methodName = "addIndexesNoOptimize(Directory[])";
}
while (!done)
//......... part of the code is omitted here .........
Example 8: TestSetBufferSize
public virtual void TestSetBufferSize()
{
System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testSetBufferSize"));
MockFSDirectory dir = new MockFSDirectory(indexDir);
try
{
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
writer.SetUseCompoundFile(false);
for (int i = 0; i < 37; i++)
{
Document doc = new Document();
doc.Add(new Field("content", "aaa bbb ccc ddd" + i, Field.Store.YES, Field.Index.TOKENIZED));
doc.Add(new Field("id", "" + i, Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
}
writer.Close();
dir.allIndexInputs.Clear();
IndexReader reader = IndexReader.Open(dir);
Term aaa = new Term("content", "aaa");
Term bbb = new Term("content", "bbb");
Term ccc = new Term("content", "ccc");
Assert.AreEqual(reader.DocFreq(ccc), 37);
reader.DeleteDocument(0);
Assert.AreEqual(reader.DocFreq(aaa), 37);
dir.TweakBufferSizes();
reader.DeleteDocument(4);
Assert.AreEqual(reader.DocFreq(bbb), 37);
dir.TweakBufferSizes();
IndexSearcher searcher = new IndexSearcher(reader);
Hits hits = searcher.Search(new TermQuery(bbb));
dir.TweakBufferSizes();
Assert.AreEqual(35, hits.Length());
dir.TweakBufferSizes();
hits = searcher.Search(new TermQuery(new Term("id", "33")));
dir.TweakBufferSizes();
Assert.AreEqual(1, hits.Length());
hits = searcher.Search(new TermQuery(aaa));
dir.TweakBufferSizes();
Assert.AreEqual(35, hits.Length());
searcher.Close();
reader.Close();
}
finally
{
_TestUtil.RmDir(indexDir);
}
}
Example 9: Search
private void Search()
{
try
{
SearchProgressBar.Maximum = 11;
ProgressLabel.Text = "Progress: Initialize Search ...";
Searcher searcher = new IndexSearcher(@"Canon\index");
Analyzer analyzer = new StandardAnalyzer();
ArrayList resultList = new ArrayList();
System.IO.StreamReader in_Renamed = new System.IO.StreamReader(new System.IO.StreamReader(System.Console.OpenStandardInput(), System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(System.Console.OpenStandardInput(), System.Text.Encoding.Default).CurrentEncoding); // note: never used in this method
String line = QueryInputBox.Text;
if (line.Length == 0)
return;
ProgressLabel.Text = "Progress: Parsing Query ...";
Query query = QueryParser.Parse(line, "contents", analyzer);
//int[] ix = qtm.GetTermFrequencies();
Hits hits = searcher.Search(query);
SearchProgressBar.Increment(1);
ProgressLabel.Text = "Progress: Searched. Analyzing results ...";
//QueryHighlightExtractor highlighter = new QueryHighlightExtractor(query, new WhitespaceAnalyzer(), "<B>", "</B>");
Highlighter highlighter = new Highlighter(new QueryScorer(query));
highlighter.SetTextFragmenter(new SimpleFragmenter(80));
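// SimpleFragmenter(80) splits the text into roughly 80-character fragments; GetBestFragments below returns the best-scoring one(s) per hit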
int maxNumFragmentsRequired = 1;
//int HITS_PER_PAGE = 10;
for (int i = 0; i < System.Math.Min(10, hits.Length()); i++) // show at most the top 10 hits
{
SearchProgressBar.Increment(1);
ProgressLabel.Text = "Progress: Analyzing hit " + (i+1).ToString();
// get the document from index
Document doc = hits.Doc(i);
//SegmentReader ir = new SegmentReader();
//Lucene.Net.Index.TermFreqVector tfv =
//tfv.GetTermFrequencies
string score = hits.Score(i).ToString();
//Box += "Hit no. " + i + " scored: " + score + " occ: " + /*highlighter.tokenFrequency */ " best fragment: \n";
ResultSet a = new ResultSet();
a.BookName = doc.Get("path").Replace(@"c:\cscd\temp\","");
a.Score = hits.Score(i);
a.numberOfHits = hits.Length();
// get the document filename
// we can't get the text from the index
//because we didn't store it there
//so get it from archive
string path = doc.Get("path");
string name = GetInternalName(path);
PaliReaderUtils.AalekhDecoder.UnzipFromZipLibrary(name);
path = System.IO.Directory.GetCurrentDirectory() + @"\Work\" + name + ".htm";
string plainText = "";
//load text from zip archive temporarily
using (StreamReader sr = new StreamReader(path, System.Text.Encoding.Default))
{
plainText = parseHtml(sr.ReadToEnd());
}
//-------------------------------Highlighter Code 1.4
TokenStream tokenStream = analyzer.TokenStream(new StringReader(plainText));
a.textFragment = highlighter.GetBestFragments(tokenStream, plainText, maxNumFragmentsRequired, "...");
if(File.Exists(path))
File.Delete(path);
//-------------------------------
resultList.Add(a);
}
SearchProgressBar.Value = 0;
searcher.Close();
ssr = new ShowSearchResults(/*Box*/resultList);
//this.Hide();
ssr.OpenBookEvent += new ShowSearchResults.OpenBook(this.TriggerOpenBook);
ssr.Closing += new System.ComponentModel.CancelEventHandler(this.Closing_ResultWindow);
this.Hide();
ssr.ShowDialog();
}
catch (System.Exception e)
{
MessageBox.Show(" caught a " + e.GetType() + "\n with message: " + e.Message);
}
}
Example 10: TestNPESpanQuery
public virtual void TestNPESpanQuery()
{
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(new System.Collections.Hashtable(0)), IndexWriter.MaxFieldLength.LIMITED);
// Add documents
AddDoc(writer, "1", "the big dogs went running to the market");
AddDoc(writer, "2", "the cat chased the mouse, then the cat ate the mouse quickly");
// Commit
writer.Close();
// Get searcher
IndexReader reader = IndexReader.Open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
// Control (make sure docs indexed)
Assert.AreEqual(2, HitCount(searcher, "the"));
Assert.AreEqual(1, HitCount(searcher, "cat"));
Assert.AreEqual(1, HitCount(searcher, "dogs"));
Assert.AreEqual(0, HitCount(searcher, "rabbit"));
// This throws exception (it shouldn't)
Assert.AreEqual(1, searcher.Search(CreateSpan(0, true, new SpanQuery[]{CreateSpan(4, false, "chased", "cat"), CreateSpan("ate")}), 10).totalHits);
reader.Close();
dir.Close();
}
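CreateSpan and HitCount are helper methods defined elsewhere in the test class and not shown on this page. The following is a plausible, clearly hypothetical reconstruction (the field name "field" is a guess), showing which SpanTermQuery/SpanNearQuery calls the overloads map onto:
// Hypothetical reconstruction of the helpers referenced above -- not part of the original snippet.
private SpanQuery CreateSpan(System.String value)
{
    return new SpanTermQuery(new Term("field", value));      // "field" is an assumed field name
}
private SpanQuery CreateSpan(int slop, bool ordered, SpanQuery[] clauses)
{
    return new SpanNearQuery(clauses, slop, ordered);         // near-query over the given clauses
}
private SpanQuery CreateSpan(int slop, bool ordered, params System.String[] terms)
{
    SpanQuery[] clauses = new SpanQuery[terms.Length];
    for (int i = 0; i < terms.Length; i++)
        clauses[i] = CreateSpan(terms[i]);
    return CreateSpan(slop, ordered, clauses);
}
private int HitCount(IndexSearcher searcher, System.String word)
{
    return searcher.Search(new TermQuery(new Term("field", word)), 10).totalHits;
}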
Example 11: TestGetValuesForIndexedDocument
public virtual void TestGetValuesForIndexedDocument()
{
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(MakeDocumentWithFields());
writer.Close();
Searcher searcher = new IndexSearcher(dir);
// search for something that does exists
Query query = new TermQuery(new Term("keyword", "test1"));
// ensure that queries return expected results without DateFilter first
ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
Assert.AreEqual(1, hits.Length);
DoAssert(searcher.Doc(hits[0].doc), true);
searcher.Close();
}
Example 12: DoTestCaching
// Test that values loaded for FieldScoreQuery are cached properly and consume the proper RAM resources.
private void DoTestCaching(System.String field, FieldScoreQuery.Type tp)
{
// prepare expected array types for comparison
System.Collections.Hashtable expectedArrayTypes = new System.Collections.Hashtable();
expectedArrayTypes[FieldScoreQuery.Type.BYTE] = new sbyte[0];
expectedArrayTypes[FieldScoreQuery.Type.SHORT] = new short[0];
expectedArrayTypes[FieldScoreQuery.Type.INT] = new int[0];
expectedArrayTypes[FieldScoreQuery.Type.FLOAT] = new float[0];
IndexSearcher s = new IndexSearcher(dir, true);
System.Object[] innerArray = new Object[s.IndexReader.GetSequentialSubReaders().Length];
bool warned = false; // print warning once.
for (int i = 0; i < 10; i++)
{
FieldScoreQuery q = new FieldScoreQuery(field, tp);
ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
Assert.AreEqual(N_DOCS, h.Length, "All docs should be matched!");
IndexReader[] readers = s.IndexReader.GetSequentialSubReaders();
for (int j = 0; j < readers.Length; j++)
{
IndexReader reader = readers[j];
try
{
if (i == 0)
{
innerArray[j] = q.valSrc.GetValues(reader).InnerArray;
Log(i + ". compare: " + innerArray[j].GetType() + " to " + expectedArrayTypes[tp].GetType());
Assert.AreEqual(innerArray[j].GetType(), expectedArrayTypes[tp].GetType(), "field values should be cached in the correct array type!");
}
else
{
Log(i + ". compare: " + innerArray[j] + " to " + q.valSrc.GetValues(reader).InnerArray);
Assert.AreSame(innerArray[j], q.valSrc.GetValues(reader).InnerArray, "field values should be cached and reused!");
}
}
catch (System.NotSupportedException)
{
if (!warned)
{
System.Console.Error.WriteLine("WARNING: " + TestName() + " cannot fully test values of " + q);
warned = true;
}
}
}
}
// verify new values are reloaded (not reused) for a new reader
s = new IndexSearcher(dir, true);
FieldScoreQuery q2 = new FieldScoreQuery(field, tp);
ScoreDoc[] h2 = s.Search(q2, null, 1000).ScoreDocs;
Assert.AreEqual(N_DOCS, h2.Length, "All docs should be matched!");
IndexReader[] readers2 = s.IndexReader.GetSequentialSubReaders();
for (int j = 0; j < readers2.Length; j++)
{
IndexReader reader = readers2[j];
try
{
Log("compare: " + innerArray + " to " + q2.valSrc.GetValues(reader).InnerArray);
Assert.AreNotSame(innerArray, q2.valSrc.GetValues(reader).InnerArray, "cached field values should not be reused if the reader has changed!");
}
catch (System.NotSupportedException)
{
if (!warned)
{
System.Console.Error.WriteLine("WARNING: " + TestName() + " cannot fully test values of " + q2);
warned = true;
}
}
}
}
Example 13: DoTestExactScore
// Test that queries based on reverse/ordFieldScore return docs with the expected score.
private void DoTestExactScore(System.String field, bool inOrder)
{
IndexSearcher s = new IndexSearcher(dir, true);
ValueSource vs;
if (inOrder)
{
vs = new OrdFieldSource(field);
}
else
{
vs = new ReverseOrdFieldSource(field);
}
Query q = new ValueSourceQuery(vs);
TopDocs td = s.Search(q, null, 1000);
Assert.AreEqual(N_DOCS, td.TotalHits, "All docs should be matched!");
ScoreDoc[] sd = td.ScoreDocs;
for (int i = 0; i < sd.Length; i++)
{
float score = sd[i].Score;
System.String id = s.IndexReader.Document(sd[i].Doc).Get(ID_FIELD);
Log("-------- " + i + ". Explain doc " + id);
Log(s.Explain(q, sd[i].Doc));
float expectedScore = N_DOCS - i;
Assert.AreEqual(expectedScore, score, TEST_SCORE_TOLERANCE_DELTA, "score of result " + i + " should be " + expectedScore + " != " + score);
System.String expectedId = inOrder ? Id2String(N_DOCS - i) : Id2String(i + 1); // reverse ==> smaller values first
Assert.IsTrue(expectedId.Equals(id), "id of result " + i + " should be " + expectedId + " != " + id);
}
}
Example 14: TestNegativePositions
public void TestNegativePositions()
{
SinkTokenizer tokens = new SinkTokenizer();
Token t = new Token();
t.SetTermText("a");
t.SetPositionIncrement(0);
tokens.Add(t);
t.SetTermText("b");
t.SetPositionIncrement(1);
tokens.Add(t);
t.SetTermText("c");
tokens.Add(t);
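// "a" was added with a position increment of 0; since positions start at -1, it ends up at position -1 (asserted via NextPosition() below)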
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
Document doc = new Document();
doc.Add(new Field("field", tokens));
w.AddDocument(doc);
w.Close();
IndexSearcher s = new IndexSearcher(dir);
PhraseQuery pq = new PhraseQuery();
pq.Add(new Term("field", "a"));
pq.Add(new Term("field", "b"));
pq.Add(new Term("field", "c"));
Hits hits = s.Search(pq);
Assert.AreEqual(1, hits.Length());
Query q = new SpanTermQuery(new Term("field", "a"));
hits = s.Search(q);
Assert.AreEqual(1, hits.Length());
TermPositions tps = s.GetIndexReader().TermPositions(new Term("field", "a"));
Assert.IsTrue(tps.Next());
Assert.AreEqual(1, tps.Freq());
Assert.AreEqual(-1, tps.NextPosition());
Assert.IsTrue(_TestUtil.CheckIndex(dir));
s.Close();
dir.Close();
}
Example 15: TestNullLockFactory
public virtual void TestNullLockFactory()
{
Directory dir = new MyRAMDirectory(this);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
for (int i = 0; i < 100; i++)
{
AddDoc(writer);
}
writer.Close();
Term searchTerm = new Term("content", "aaa");
IndexSearcher searcher = new IndexSearcher(dir);
Hits hits = searcher.Search(new TermQuery(searchTerm));
Assert.AreEqual(100, hits.Length(), "did not get right number of hits");
searcher.Close();
// Make sure we can open another writer on this same directory:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
writer.Close();
dir.Close();
}