This article collects typical usage examples of the C# Lucene.Net.Index.IndexWriter class. If you have been wondering how to use Lucene.Net.Index.IndexWriter in C#, or are looking for concrete examples of it in action, the hand-picked code samples below should help.
The IndexWriter class belongs to the Lucene.Net.Index namespace. Fifteen code examples of Lucene.Net.Index.IndexWriter are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code samples.
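Before the examples, here is a minimal, self-contained sketch of the most common IndexWriter workflow: open a writer over a directory, add a document, commit, and dispose the writer. It is not taken from any of the examples below; it assumes the classic Lucene.Net 3.0.3-style constructor (directory, analyzer, create flag, MaxFieldLength) that most of the examples use, and the field name "content" is just a placeholder.

using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;

class MinimalIndexWriterSketch
{
    static void Main()
    {
        // Hold the index in memory; an FSDirectory could be used instead to persist it on disk.
        Directory directory = new RAMDirectory();
        Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);

        // true = create a new index, overwriting any existing index in this directory.
        IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

        Document doc = new Document();
        doc.Add(new Field("content", "hello lucene", Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(doc);

        writer.Commit();   // flush and make the document visible to newly opened readers
        writer.Dispose();  // release the write lock and close the writer
    }
}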
Example 1: SetUp
public override void SetUp()
{
    base.SetUp();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < 5137; ++i)
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "meaninglessnames", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "tangfulin", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    for (int i = 5138; i < 11377; ++i)
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "meaninglessnames", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "tangfulin", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Close();
}
Example 2: TestAddSameDocTwice
public virtual void TestAddSameDocTwice()
{
    // LUCENE-5367: this was a problem with the previous code, making sure it
    // works with the new code.
    Directory indexDir = NewDirectory(), taxoDir = NewDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetsConfig facetsConfig = new FacetsConfig();
    Document doc = new Document();
    doc.Add(new FacetField("a", "b"));
    doc = facetsConfig.Build(taxoWriter, doc);
    // these two AddDocument() calls used to fail
    indexWriter.AddDocument(doc);
    indexWriter.AddDocument(doc);
    IOUtils.Close(indexWriter, taxoWriter);
    DirectoryReader indexReader = DirectoryReader.Open(indexDir);
    DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    IndexSearcher searcher = NewSearcher(indexReader);
    FacetsCollector fc = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), fc);
    Facets facets = GetTaxonomyFacetCounts(taxoReader, facetsConfig, fc);
    FacetResult res = facets.GetTopChildren(10, "a");
    Assert.AreEqual(1, res.LabelValues.Length);
    Assert.AreEqual(2, res.LabelValues[0].Value);
    IOUtils.Close(indexReader, taxoReader);
    IOUtils.Close(indexDir, taxoDir);
}
Example 3: TestCustomLockFactory
public virtual void TestCustomLockFactory()
{
    Directory dir = new RAMDirectory();
    MockLockFactory lf = new MockLockFactory(this);
    dir.SetLockFactory(lf);
    // Lock prefix should have been set:
    Assert.IsTrue(lf.lockPrefixSet, "lock prefix was not set by the RAMDirectory");
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    // add 100 documents (so that commit lock is used)
    for (int i = 0; i < 100; i++)
    {
        AddDoc(writer);
    }
    // Both write lock and commit lock should have been created:
    Assert.AreEqual(1, lf.locksCreated.Count, "# of unique locks created (after instantiating IndexWriter)");
    Assert.IsTrue(lf.makeLockCount >= 1, "# calls to makeLock is 0 (after instantiating IndexWriter)");
    for (System.Collections.IEnumerator e = lf.locksCreated.Keys.GetEnumerator(); e.MoveNext(); )
    {
        System.String lockName = (System.String) e.Current;
        MockLockFactory.MockLock lock_Renamed = (MockLockFactory.MockLock) lf.locksCreated[lockName];
        Assert.IsTrue(lock_Renamed.lockAttempts > 0, "# calls to Lock.obtain is 0 (after instantiating IndexWriter)");
    }
    writer.Close();
}
Example 4: TestMmapIndex
public virtual void TestMmapIndex()
{
    Assert.Ignore("Need to port tests, but we don't really support MMapDirectories anyway");
    FSDirectory storeDirectory;
    storeDirectory = new MMapDirectory(new System.IO.DirectoryInfo(storePathname), null);
    // plan to add a set of useful stopwords, consider changing some of the
    // interior filters.
    StandardAnalyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT, Support.Compatibility.SetFactory.CreateHashSet<string>());
    // TODO: something about lock timeouts and leftover locks.
    IndexWriter writer = new IndexWriter(storeDirectory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    IndexSearcher searcher = new IndexSearcher(storeDirectory, true);
    for (int dx = 0; dx < 1000; dx++)
    {
        System.String f = RandomField();
        Document doc = new Document();
        doc.Add(new Field("data", f, Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(doc);
    }
    searcher.Close();
    writer.Close();
    RmDir(new System.IO.FileInfo(storePathname));
}
Example 5: TestCustomLockFactory
public virtual void TestCustomLockFactory()
{
    Directory dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
    MockLockFactory lf = new MockLockFactory(this);
    dir.LockFactory = lf;
    // Lock prefix should have been set:
    Assert.IsTrue(lf.LockPrefixSet, "lock prefix was not set by the RAMDirectory");
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    // add 100 documents (so that commit lock is used)
    for (int i = 0; i < 100; i++)
    {
        AddDoc(writer);
    }
    // Both write lock and commit lock should have been created:
    Assert.AreEqual(1, lf.LocksCreated.Count, "# of unique locks created (after instantiating IndexWriter)");
    Assert.IsTrue(lf.MakeLockCount >= 1, "# calls to makeLock is 0 (after instantiating IndexWriter)");
    foreach (String lockName in lf.LocksCreated.Keys)
    {
        MockLockFactory.MockLock @lock = (MockLockFactory.MockLock)lf.LocksCreated[lockName];
        Assert.IsTrue(@lock.LockAttempts > 0, "# calls to Lock.obtain is 0 (after instantiating IndexWriter)");
    }
    writer.Dispose();
}
Example 6: SetUp
public virtual void SetUp()
{
    directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    Document doc = new Document();
    doc.Add(Field.Text("Field", "one two three four five"));
    doc.Add(Field.Text("sorter", "b"));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(Field.Text("Field", "one two three four"));
    doc.Add(Field.Text("sorter", "d"));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(Field.Text("Field", "one two three y"));
    doc.Add(Field.Text("sorter", "a"));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(Field.Text("Field", "one two x"));
    doc.Add(Field.Text("sorter", "c"));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Close();
    searcher = new IndexSearcher(directory);
    query = new TermQuery(new Term("Field", "three"));
    filter = new AnonymousClassFilter(this);
}
Example 7: TestNullOrSubScorer
public virtual void TestNullOrSubScorer()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.Add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
    w.AddDocument(doc);
    IndexReader r = w.GetReader();
    IndexSearcher s = new IndexSearcher(r);
    BooleanQuery q = new BooleanQuery();
    q.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
    // LUCENE-2617: make sure that a term not in the index still contributes to the score via coord factor
    float score = s.Search(q, 10).MaxScore;
    Query subQuery = new TermQuery(new Term("field", "not_in_index"));
    subQuery.Boost = 0;
    q.Add(subQuery, Occur.SHOULD);
    float score2 = s.Search(q, 10).MaxScore;
    Assert.AreEqual(score * .5, score2, 1e-6);
    // LUCENE-2617: make sure that a clause not in the index still contributes to the score via coord factor
    BooleanQuery qq = (BooleanQuery)q.Clone();
    PhraseQuery phrase = new PhraseQuery();
    phrase.Add(new Term("field", "not_in_index"));
    phrase.Add(new Term("field", "another_not_in_index"));
    phrase.Boost = 0;
    qq.Add(phrase, Occur.SHOULD);
    score2 = s.Search(qq, 10).MaxScore;
    Assert.AreEqual(score * (1.0 / 3), score2, 1e-6);
    // now test BooleanScorer2
    subQuery = new TermQuery(new Term("field", "b"));
    subQuery.Boost = 0;
    q.Add(subQuery, Occur.MUST);
    score2 = s.Search(q, 10).MaxScore;
    Assert.AreEqual(score * (2.0 / 3), score2, 1e-6);
    // PhraseQuery w/ no terms added returns a null scorer
    PhraseQuery pq = new PhraseQuery();
    q.Add(pq, Occur.SHOULD);
    Assert.AreEqual(1, s.Search(q, 10).TotalHits);
    // A required clause which returns a null scorer should return a null scorer to
    // IndexSearcher.
    q = new BooleanQuery();
    pq = new PhraseQuery();
    q.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
    q.Add(pq, Occur.MUST);
    Assert.AreEqual(0, s.Search(q, 10).TotalHits);
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
    dmq.Add(new TermQuery(new Term("field", "a")));
    dmq.Add(pq);
    Assert.AreEqual(1, s.Search(dmq, 10).TotalHits);
    r.Close();
    w.Close();
    dir.Close();
}
Example 8: TestMethod
public virtual void TestMethod()
{
    RAMDirectory directory = new RAMDirectory();
    System.String[] values = new System.String[]{"1", "2", "3", "4"};
    try
    {
        IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < values.Length; i++)
        {
            Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
            writer.AddDocument(doc);
        }
        writer.Close();
        BooleanQuery booleanQuery1 = new BooleanQuery();
        booleanQuery1.Add(new TermQuery(new Term(FIELD, "1")), BooleanClause.Occur.SHOULD);
        booleanQuery1.Add(new TermQuery(new Term(FIELD, "2")), BooleanClause.Occur.SHOULD);
        BooleanQuery query = new BooleanQuery();
        query.Add(booleanQuery1, BooleanClause.Occur.MUST);
        query.Add(new TermQuery(new Term(FIELD, "9")), BooleanClause.Occur.MUST_NOT);
        IndexSearcher indexSearcher = new IndexSearcher(directory);
        Hits hits = indexSearcher.Search(query);
        Assert.AreEqual(2, hits.Length(), "Number of matched documents");
    }
    catch (System.IO.IOException e)
    {
        Assert.Fail(e.Message);
    }
}
Example 9: Index
void Index()
{
    Lucene.Net.Index.IndexWriter wr = new Lucene.Net.Index.IndexWriter(dir, new Lucene.Net.Analysis.WhitespaceAnalyzer(), Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED);
    Lucene.Net.Documents.Document doc = null;
    Lucene.Net.Documents.Field f = null;
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "a b c d", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "a b a d", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "a b e f", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "x y z", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    wr.Dispose();
}
Example 10: TestDemo_Renamed_Method
public virtual void TestDemo_Renamed_Method()
{
    Analyzer analyzer = new StandardAnalyzer();
    // Store the index in memory:
    Directory directory = new RAMDirectory();
    // To store an index on disk, use this instead (note that the
    // parameter true will overwrite the index in that directory
    // if one exists):
    //Directory directory = FSDirectory.getDirectory("/tmp/testindex", true);
    IndexWriter iwriter = new IndexWriter(directory, analyzer, true);
    iwriter.SetMaxFieldLength(25000);
    Document doc = new Document();
    System.String text = "This is the text to be indexed.";
    doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.TOKENIZED));
    iwriter.AddDocument(doc);
    iwriter.Close();
    // Now search the index:
    IndexSearcher isearcher = new IndexSearcher(directory);
    // Parse a simple query that searches for "text":
    Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("fieldname", analyzer);
    Query query = parser.Parse("text");
    Hits hits = isearcher.Search(query);
    Assert.AreEqual(1, hits.Length());
    // Iterate through the results:
    for (int i = 0; i < hits.Length(); i++)
    {
        Document hitDoc = hits.Doc(i);
        Assert.AreEqual("This is the text to be indexed.", hitDoc.Get("fieldname"));
    }
    isearcher.Close();
    directory.Close();
}
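The commented-out line in this example uses the old FSDirectory.getDirectory helper; in later Lucene.Net releases (2.9/3.x) the usual way to open an on-disk index is FSDirectory.Open. A minimal sketch of that variant follows; it is not part of the original example, the path "/tmp/testindex" is a placeholder, and it assumes the 2.9/3.0-style constructor that takes a MaxFieldLength argument.

    // Open (or create) an index directory on disk instead of in RAM.
    Directory directory = FSDirectory.Open(new System.IO.DirectoryInfo("/tmp/testindex"));
    IndexWriter iwriter = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
    // ... add documents exactly as in the example above ...
    iwriter.Close();
    directory.Close();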
Example 11: TestRAMDirectoryNoLocking
public virtual void TestRAMDirectoryNoLocking()
{
    Directory dir = new RAMDirectory();
    dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
    Assert.IsTrue(typeof(NoLockFactory).IsInstanceOfType(dir.GetLockFactory()), "RAMDirectory.setLockFactory did not take");
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    // Create a 2nd IndexWriter. This is normally not allowed but it should run through since we're not
    // using any locks:
    IndexWriter writer2 = null;
    try
    {
        writer2 = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
    }
    catch (System.Exception e)
    {
        System.Console.Out.WriteLine(e.StackTrace);
        Assert.Fail("Should not have hit an IOException with no locking");
    }
    writer.Close();
    if (writer2 != null)
    {
        writer2.Close();
    }
}
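This example only succeeds because NoLockFactory disables the write lock entirely. With a directory's default lock factory, opening a second IndexWriter over the same directory fails once the lock-obtain timeout expires. The sketch below illustrates that contrasting behavior; it is not taken from the examples, and it assumes the same legacy constructor style and that the exception raised is Lucene.Net.Store.LockObtainFailedException, as in the classic 2.x/3.x API.

    Directory dir = new RAMDirectory();   // keeps its default lock factory
    IndexWriter first = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    try
    {
        // "first" still holds write.lock, so this second writer cannot acquire it.
        IndexWriter second = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
    }
    catch (Lucene.Net.Store.LockObtainFailedException)
    {
        // expected: only one IndexWriter may hold the write lock at a time
    }
    finally
    {
        first.Close();
    }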
Example 12: Build
private void Build()
{
    try
    {
        /* build an index */
        IndexWriter writer = new IndexWriter(index, new SimpleAnalyzer(), T);
        for (int d = minId; d <= maxId; d++)
        {
            Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(new Field("id", Pad(d), Field.Store.YES, Field.Index.UN_TOKENIZED));
            int r = rand.Next();
            if (maxR < r)
            {
                maxR = r;
            }
            if (r < minR)
            {
                minR = r;
            }
            doc.Add(new Field("rand", Pad(r), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.UN_TOKENIZED));
            writer.AddDocument(doc);
        }
        writer.Optimize();
        writer.Close();
    }
    catch (System.Exception e)
    {
        throw new System.Exception("can't build index", e);
    }
}
Example 13: CreateRandomTerms
public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
{
    int[] freq = new int[nTerms];
    for (int i = 0; i < nTerms; i++)
    {
        int f = (nTerms + 1) - i; // make first terms less frequent
        freq[i] = (int) System.Math.Ceiling(System.Math.Pow(f, power));
        terms[i] = new Term("f", System.Convert.ToString((char) ('A' + i)));
    }
    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    for (int i = 0; i < nDocs; i++)
    {
        Document d = new Document();
        for (int j = 0; j < nTerms; j++)
        {
            if (r.Next(freq[j]) == 0)
            {
                d.Add(new Field("f", terms[j].Text(), Field.Store.NO, Field.Index.UN_TOKENIZED));
                //System.out.println(d);
            }
        }
        iw.AddDocument(d);
    }
    iw.Optimize();
    iw.Close();
}
Example 14: SetUp
public override void SetUp()
{
    base.SetUp();
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    long theLong = System.Int64.MaxValue;
    double theDouble = System.Double.MaxValue;
    sbyte theByte = (sbyte) System.SByte.MaxValue;
    short theShort = System.Int16.MaxValue;
    int theInt = System.Int32.MaxValue;
    float theFloat = System.Single.MaxValue;
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Document doc = new Document();
        doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theDouble", (theDouble--).ToString("E16"), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theByte", System.Convert.ToString((sbyte) theByte--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theShort", System.Convert.ToString(theShort--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theInt", System.Convert.ToString(theInt--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theFloat", (theFloat--).ToString("E8"), Field.Store.NO, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Close();
    reader = IndexReader.Open(directory);
}
Example 15: SetUp
public virtual void SetUp()
{
    // Create an index writer.
    directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    // oldest doc:
    // Add the first document. text = "Document 1" dateTime = Oct 10 03:25:22 EDT 2007
    writer.AddDocument(CreateDocument("Document 1", 633275835220000000L));
    // Add the second document. text = "Document 2" dateTime = Oct 10 03:25:26 EDT 2007
    writer.AddDocument(CreateDocument("Document 2", 633275835260000000L));
    // Add the third document. text = "Document 3" dateTime = Oct 11 07:12:13 EDT 2007
    writer.AddDocument(CreateDocument("Document 3", 633276835330000000L));
    // Add the fourth document. text = "Document 4" dateTime = Oct 11 08:02:09 EDT 2007
    writer.AddDocument(CreateDocument("Document 4", 633276865290000000L));
    // latest doc:
    // Add the fifth document. text = "Document 5" dateTime = Oct 12 13:25:43 EDT 2007
    writer.AddDocument(CreateDocument("Document 5", 633277923430000000L));
    //// oldest doc:
    //// Add the first document. text = "Document 1" dateTime = Oct 10 03:25:22 EDT 2007
    //writer.AddDocument(CreateDocument("Document 1", 1192001122000L));
    //// Add the second document. text = "Document 2" dateTime = Oct 10 03:25:26 EDT 2007
    //writer.AddDocument(CreateDocument("Document 2", 1192001126000L));
    //// Add the third document. text = "Document 3" dateTime = Oct 11 07:12:13 EDT 2007
    //writer.AddDocument(CreateDocument("Document 3", 1192101133000L));
    //// Add the fourth document. text = "Document 4" dateTime = Oct 11 08:02:09 EDT 2007
    //writer.AddDocument(CreateDocument("Document 4", 1192104129000L));
    //// latest doc:
    //// Add the fifth document. text = "Document 5" dateTime = Oct 12 13:25:43 EDT 2007
    //writer.AddDocument(CreateDocument("Document 5", 1192209943000L));
    writer.Optimize();
    writer.Close();
}
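Note that the examples above target two generations of the API: most use the legacy constructors (analyzer, create flag, MaxFieldLength), while Examples 2 and 5 use the newer style in which the writer is configured through an IndexWriterConfig. For reference, the following sketch shows roughly what that newer construction looks like outside the LuceneTestCase helpers (NewIndexWriterConfig, MockAnalyzer, Random). It assumes the Lucene.NET 4.8.0-beta API, including the LuceneVersion enum and the StringField/TextField document types; the field names and values are placeholders, so treat it as an illustration rather than code from any of the examples.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

class IndexWriterConfigSketch
{
    static void Main()
    {
        const LuceneVersion version = LuceneVersion.LUCENE_48;

        using (Directory directory = new RAMDirectory())
        using (var analyzer = new StandardAnalyzer(version))
        {
            // All writer settings (open mode, merge policy, RAM buffer, ...) now live on the config object.
            var config = new IndexWriterConfig(version, analyzer);

            using (var writer = new IndexWriter(directory, config))
            {
                var doc = new Document();
                doc.Add(new StringField("id", "1", Field.Store.YES));                 // not analyzed, stored
                doc.Add(new TextField("content", "hello lucene", Field.Store.YES));   // analyzed, stored
                writer.AddDocument(doc);
                writer.Commit();  // make the document visible to newly opened readers
            }
        }
    }
}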