This article collects typical usage examples of the C# method Lucene.Net.Index.IndexWriter.AddDocument. If you are wondering how to call IndexWriter.AddDocument in C#, or what real-world uses of it look like, the curated examples below should help. They also give a feel for how the containing class, Lucene.Net.Index.IndexWriter, is used.
Fifteen code examples of Lucene.Net.Index.IndexWriter.AddDocument are shown below, ordered by popularity by default.
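As a quick orientation before the examples, here is a minimal sketch of the usual call sequence, assuming a Lucene.Net 2.9/3.0-style API; the directory, analyzer, field name, and field value used here are illustrative only and are not taken from the examples below.
// Minimal sketch (assumed Lucene.Net 2.9/3.0-style API; names are illustrative).
// Requires the Lucene.Net.Analysis, Lucene.Net.Documents, Lucene.Net.Index and Lucene.Net.Store namespaces.
Directory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.Add(new Field("content", "hello lucene", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);   // hand the populated Document to the writer
writer.Close();            // flushes and commits; most examples below end with Optimize()/Close()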
Example 1: TestAddSameDocTwice
public virtual void TestAddSameDocTwice()
{
    // LUCENE-5367: this was a problem with the previous code, making sure it
    // works with the new code.
    Directory indexDir = NewDirectory(), taxoDir = NewDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetsConfig facetsConfig = new FacetsConfig();
    Document doc = new Document();
    doc.Add(new FacetField("a", "b"));
    doc = facetsConfig.Build(taxoWriter, doc);
    // these two addDocument() used to fail
    indexWriter.AddDocument(doc);
    indexWriter.AddDocument(doc);
    IOUtils.Close(indexWriter, taxoWriter);
    DirectoryReader indexReader = DirectoryReader.Open(indexDir);
    DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    IndexSearcher searcher = NewSearcher(indexReader);
    FacetsCollector fc = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), fc);
    Facets facets = GetTaxonomyFacetCounts(taxoReader, facetsConfig, fc);
    FacetResult res = facets.GetTopChildren(10, "a");
    Assert.AreEqual(1, res.LabelValues.Length);
    Assert.AreEqual(2, res.LabelValues[0].value);
    IOUtils.Close(indexReader, taxoReader);
    IOUtils.Close(indexDir, taxoDir);
}
Example 2: SetUp
public virtual void SetUp()
{
    // Create an index writer.
    directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    // oldest doc:
    // Add the first document. text = "Document 1" dateTime = Oct 10 03:25:22 EDT 2007
    writer.AddDocument(CreateDocument("Document 1", 633275835220000000L));
    // Add the second document. text = "Document 2" dateTime = Oct 10 03:25:26 EDT 2007
    writer.AddDocument(CreateDocument("Document 2", 633275835260000000L));
    // Add the third document. text = "Document 3" dateTime = Oct 11 07:12:13 EDT 2007
    writer.AddDocument(CreateDocument("Document 3", 633276835330000000L));
    // Add the fourth document. text = "Document 4" dateTime = Oct 11 08:02:09 EDT 2007
    writer.AddDocument(CreateDocument("Document 4", 633276865290000000L));
    // latest doc:
    // Add the fifth document. text = "Document 5" dateTime = Oct 12 13:25:43 EDT 2007
    writer.AddDocument(CreateDocument("Document 5", 633277923430000000L));
    //// oldest doc:
    //// Add the first document. text = "Document 1" dateTime = Oct 10 03:25:22 EDT 2007
    //writer.AddDocument(CreateDocument("Document 1", 1192001122000L));
    //// Add the second document. text = "Document 2" dateTime = Oct 10 03:25:26 EDT 2007
    //writer.AddDocument(CreateDocument("Document 2", 1192001126000L));
    //// Add the third document. text = "Document 3" dateTime = Oct 11 07:12:13 EDT 2007
    //writer.AddDocument(CreateDocument("Document 3", 1192101133000L));
    //// Add the fourth document. text = "Document 4" dateTime = Oct 11 08:02:09 EDT 2007
    //writer.AddDocument(CreateDocument("Document 4", 1192104129000L));
    //// latest doc:
    //// Add the fifth document. text = "Document 5" dateTime = Oct 12 13:25:43 EDT 2007
    //writer.AddDocument(CreateDocument("Document 5", 1192209943000L));
    writer.Optimize();
    writer.Close();
}
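The long literals passed to CreateDocument above are .NET DateTime ticks (100-nanosecond intervals since 0001-01-01), not Java-style epoch milliseconds like the commented-out alternative values. A quick sanity check, assuming the test's CreateDocument helper simply stores the raw tick value:
// 633275835220000000 ticks == 2007-10-10 03:25:22, matching the comment on the first document.
System.Console.WriteLine(new System.DateTime(633275835220000000L));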
Example 3: Index
void Index()
{
    Lucene.Net.Index.IndexWriter wr = new Lucene.Net.Index.IndexWriter(dir, new Lucene.Net.Analysis.WhitespaceAnalyzer(), Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED);
    Lucene.Net.Documents.Document doc = null;
    Lucene.Net.Documents.Field f = null;
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "a b c d", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "a b a d", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "a b e f", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    doc = new Lucene.Net.Documents.Document();
    f = new Lucene.Net.Documents.Field("field", "x y z", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
    doc.Add(f);
    wr.AddDocument(doc);
    wr.Close();
}
Example 4: SetUp
public override void SetUp()
{
    base.SetUp();
    directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field("sorter", "b", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new Field("field", "one two three four", Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field("sorter", "d", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new Field("field", "one two three y", Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field("sorter", "a", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new Field("field", "one two x", Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field("sorter", "c", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Close();
    searcher = new IndexSearcher(directory, true);
    query = new TermQuery(new Term("field", "three"));
    filter = NewStaticFilterB();
}
Example 5: SetUp
public override void SetUp()
{
    base.SetUp();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < 5137; ++i)
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "meaninglessnames", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "tangfulin", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    for (int i = 5138; i < 11377; ++i)
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "meaninglessnames", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    {
        Document doc = new Document();
        doc.Add(new Field(FIELD, "tangfulin", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Close();
}
Example 6: TestPhrasePrefix
public virtual void TestPhrasePrefix()
{
    RAMDirectory indexStore = new RAMDirectory();
    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Document doc1 = new Document();
    Document doc2 = new Document();
    Document doc3 = new Document();
    Document doc4 = new Document();
    Document doc5 = new Document();
    doc1.Add(new Field("body", "blueberry pie", Field.Store.YES, Field.Index.ANALYZED));
    doc2.Add(new Field("body", "blueberry strudel", Field.Store.YES, Field.Index.ANALYZED));
    doc3.Add(new Field("body", "blueberry pizza", Field.Store.YES, Field.Index.ANALYZED));
    doc4.Add(new Field("body", "blueberry chewing gum", Field.Store.YES, Field.Index.ANALYZED));
    doc5.Add(new Field("body", "piccadilly circus", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc1);
    writer.AddDocument(doc2);
    writer.AddDocument(doc3);
    writer.AddDocument(doc4);
    writer.AddDocument(doc5);
    writer.Optimize();
    writer.Close();
    IndexSearcher searcher = new IndexSearcher(indexStore);
    //PhrasePrefixQuery query1 = new PhrasePrefixQuery();
    MultiPhraseQuery query1 = new MultiPhraseQuery();
    //PhrasePrefixQuery query2 = new PhrasePrefixQuery();
    MultiPhraseQuery query2 = new MultiPhraseQuery();
    query1.Add(new Term("body", "blueberry"));
    query2.Add(new Term("body", "strawberry"));
    System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
    IndexReader ir = IndexReader.Open(indexStore);
    // this TermEnum gives "piccadilly", "pie" and "pizza".
    System.String prefix = "pi";
    TermEnum te = ir.Terms(new Term("body", prefix + "*"));
    do
    {
        if (te.Term().Text().StartsWith(prefix))
        {
            termsWithPrefix.Add(te.Term());
        }
    }
    while (te.Next());
    query1.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
    query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
    ScoreDoc[] result;
    result = searcher.Search(query1, null, 1000).scoreDocs;
    Assert.AreEqual(2, result.Length);
    result = searcher.Search(query2, null, 1000).scoreDocs;
    Assert.AreEqual(0, result.Length);
}
Example 7: SetUp
public virtual void SetUp()
{
    directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    Document doc = new Document();
    doc.Add(Field.Text("Field", "one two three four five"));
    doc.Add(Field.Text("sorter", "b"));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(Field.Text("Field", "one two three four"));
    doc.Add(Field.Text("sorter", "d"));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(Field.Text("Field", "one two three y"));
    doc.Add(Field.Text("sorter", "a"));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(Field.Text("Field", "one two x"));
    doc.Add(Field.Text("sorter", "c"));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Close();
    searcher = new IndexSearcher(directory);
    query = new TermQuery(new Term("Field", "three"));
    filter = new AnonymousClassFilter(this);
}
Example 8: TestDemo_Renamed
public virtual void TestDemo_Renamed()
{
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
    // Store the index in memory:
    Directory directory = new RAMDirectory();
    // To store an index on disk, use this instead:
    //Directory directory = FSDirectory.open("/tmp/testindex");
    IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
    Document doc = new Document();
    System.String text = "This is the text to be indexed.";
    doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
    iwriter.AddDocument(doc);
    iwriter.Close();
    // Now search the index:
    IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
    // Parse a simple query that searches for "text":
    QueryParser parser = new QueryParser("fieldname", analyzer);
    Query query = parser.Parse("text");
    ScoreDoc[] hits = isearcher.Search(query, null, 1000).scoreDocs;
    Assert.AreEqual(1, hits.Length);
    // Iterate through the results:
    for (int i = 0; i < hits.Length; i++)
    {
        Document hitDoc = isearcher.Doc(hits[i].doc);
        Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
    }
    isearcher.Close();
    directory.Close();
}
Example 9: TestMethod
public virtual void TestMethod()
{
    RAMDirectory directory = new RAMDirectory();
    System.String[] values = new System.String[]{"1", "2", "3", "4"};
    try
    {
        IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < values.Length; i++)
        {
            Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
            writer.AddDocument(doc);
        }
        writer.Close();
        BooleanQuery booleanQuery1 = new BooleanQuery();
        booleanQuery1.Add(new TermQuery(new Term(FIELD, "1")), BooleanClause.Occur.SHOULD);
        booleanQuery1.Add(new TermQuery(new Term(FIELD, "2")), BooleanClause.Occur.SHOULD);
        BooleanQuery query = new BooleanQuery();
        query.Add(booleanQuery1, BooleanClause.Occur.MUST);
        query.Add(new TermQuery(new Term(FIELD, "9")), BooleanClause.Occur.MUST_NOT);
        IndexSearcher indexSearcher = new IndexSearcher(directory);
        Hits hits = indexSearcher.Search(query);
        Assert.AreEqual(2, hits.Length(), "Number of matched documents");
    }
    catch (System.IO.IOException e)
    {
        Assert.Fail(e.Message);
    }
}
Example 10: SearchFiltered
public void SearchFiltered(IndexWriter writer, Directory directory, Filter filter, bool optimize)
{
    try
    {
        for (int i = 0; i < 60; i++)
        { //Simple docs
            Document doc = new Document();
            doc.Add(new Field(FIELD, i.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);
        }
        if (optimize)
            writer.Optimize();
        writer.Close();
        BooleanQuery booleanQuery = new BooleanQuery();
        booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), Occur.SHOULD);
        IndexSearcher indexSearcher = new IndexSearcher(directory);
        ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).ScoreDocs;
        Assert.AreEqual(1, hits.Length, "Number of matched documents");
    }
    catch (System.IO.IOException e)
    {
        Assert.Fail(e.Message);
    }
}
Example 11: TestFilterWorks
public virtual void TestFilterWorks()
{
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < 500; i++)
    {
        Document document = new Document();
        document.Add(new Field("field", English.IntToEnglish(i) + " equals " + English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(document);
    }
    writer.Close();
    IndexReader reader = IndexReader.Open(dir, true);
    SpanTermQuery query = new SpanTermQuery(new Term("field", English.IntToEnglish(10).Trim()));
    SpanQueryFilter filter = new SpanQueryFilter(query);
    SpanFilterResult result = filter.BitSpans(reader);
    DocIdSet docIdSet = result.DocIdSet;
    Assert.IsTrue(docIdSet != null, "docIdSet is null and it shouldn't be");
    AssertContainsDocId("docIdSet doesn't contain docId 10", docIdSet, 10);
    var spans = result.Positions;
    Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
    int size = GetDocIdSetSize(docIdSet);
    Assert.IsTrue(spans.Count == size, "spans Size: " + spans.Count + " is not: " + size);
    for (System.Collections.IEnumerator iterator = spans.GetEnumerator(); iterator.MoveNext(); )
    {
        SpanFilterResult.PositionInfo info = (SpanFilterResult.PositionInfo) iterator.Current;
        Assert.IsTrue(info != null, "info is null and it shouldn't be");
        //The doc should indicate the bit is on
        AssertContainsDocId("docIdSet doesn't contain docId " + info.Doc, docIdSet, info.Doc);
        //There should be two positions in each
        Assert.IsTrue(info.Positions.Count == 2, "info.getPositions() Size: " + info.Positions.Count + " is not: " + 2);
    }
    reader.Close();
}
Example 12: TestNullOrSubScorer
public virtual void TestNullOrSubScorer()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.Add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
    w.AddDocument(doc);
    IndexReader r = w.GetReader();
    IndexSearcher s = new IndexSearcher(r);
    BooleanQuery q = new BooleanQuery();
    q.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
    // LUCENE-2617: make sure that a term not in the index still contributes to the score via coord factor
    float score = s.Search(q, 10).MaxScore;
    Query subQuery = new TermQuery(new Term("field", "not_in_index"));
    subQuery.Boost = 0;
    q.Add(subQuery, Occur.SHOULD);
    float score2 = s.Search(q, 10).MaxScore;
    Assert.AreEqual(score * .5, score2, 1e-6);
    // LUCENE-2617: make sure that a clause not in the index still contributes to the score via coord factor
    BooleanQuery qq = (BooleanQuery)q.Clone();
    PhraseQuery phrase = new PhraseQuery();
    phrase.Add(new Term("field", "not_in_index"));
    phrase.Add(new Term("field", "another_not_in_index"));
    phrase.Boost = 0;
    qq.Add(phrase, Occur.SHOULD);
    score2 = s.Search(qq, 10).MaxScore;
    Assert.AreEqual(score * (1.0 / 3), score2, 1e-6);
    // now test BooleanScorer2
    subQuery = new TermQuery(new Term("field", "b"));
    subQuery.Boost = 0;
    q.Add(subQuery, Occur.MUST);
    score2 = s.Search(q, 10).MaxScore;
    Assert.AreEqual(score * (2.0 / 3), score2, 1e-6);
    // PhraseQuery w/ no terms added returns a null scorer
    PhraseQuery pq = new PhraseQuery();
    q.Add(pq, Occur.SHOULD);
    Assert.AreEqual(1, s.Search(q, 10).TotalHits);
    // A required clause which returns null scorer should return null scorer to
    // IndexSearcher.
    q = new BooleanQuery();
    pq = new PhraseQuery();
    q.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
    q.Add(pq, Occur.MUST);
    Assert.AreEqual(0, s.Search(q, 10).TotalHits);
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
    dmq.Add(new TermQuery(new Term("field", "a")));
    dmq.Add(pq);
    Assert.AreEqual(1, s.Search(dmq, 10).TotalHits);
    r.Close();
    w.Close();
    dir.Close();
}
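The score assertions in this test follow from Lucene's coord factor, which scales a BooleanQuery's score by the number of matching clauses divided by the number of non-prohibited clauses: adding one non-matching, zero-boost clause to the original single-clause query makes coord 1/2, a second non-matching clause makes it 1/3, and in the later three-clause variant two clauses match (one of them with zero boost, so it adds nothing to the raw score), giving coord 2/3 — hence the expected values score * .5, score * (1.0 / 3), and score * (2.0 / 3).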
Example 13: SetUp
public override void SetUp()
{
    base.SetUp();
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    long theLong = System.Int64.MaxValue;
    double theDouble = System.Double.MaxValue;
    sbyte theByte = (sbyte) System.SByte.MaxValue;
    short theShort = System.Int16.MaxValue;
    int theInt = System.Int32.MaxValue;
    float theFloat = System.Single.MaxValue;
    for (int i = 0; i < NUM_DOCS; i++)
    {
        Document doc = new Document();
        doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theDouble", (theDouble--).ToString("E16"), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theByte", System.Convert.ToString((sbyte) theByte--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theShort", System.Convert.ToString(theShort--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theInt", System.Convert.ToString(theInt--), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("theFloat", (theFloat--).ToString("E8"), Field.Store.NO, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Close();
    reader = IndexReader.Open(directory);
}
Example 14: TestNullOrSubScorer
public virtual void TestNullOrSubScorer()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.Add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
    w.AddDocument(doc);
    IndexReader r = w.GetReader();
    IndexSearcher s = new IndexSearcher(r);
    BooleanQuery q = new BooleanQuery();
    q.Add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
    // PhraseQuery w/ no terms added returns a null scorer
    PhraseQuery pq = new PhraseQuery();
    q.Add(pq, BooleanClause.Occur.SHOULD);
    Assert.AreEqual(1, s.Search(q, 10).TotalHits);
    // A required clause which returns null scorer should return null scorer to
    // IndexSearcher.
    q = new BooleanQuery();
    pq = new PhraseQuery();
    q.Add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
    q.Add(pq, BooleanClause.Occur.MUST);
    Assert.AreEqual(0, s.Search(q, 10).TotalHits);
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
    dmq.Add(new TermQuery(new Term("field", "a")));
    dmq.Add(pq);
    Assert.AreEqual(1, s.Search(dmq, 10).TotalHits);
    r.Close();
    w.Close();
    dir.Close();
}
Example 15: CreateRandomTerms
public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
{
    int[] freq = new int[nTerms];
    for (int i = 0; i < nTerms; i++)
    {
        int f = (nTerms + 1) - i; // make first terms less frequent
        freq[i] = (int) System.Math.Ceiling(System.Math.Pow(f, power));
        terms[i] = new Term("f", System.Convert.ToString((char) ('A' + i)));
    }
    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    for (int i = 0; i < nDocs; i++)
    {
        Document d = new Document();
        for (int j = 0; j < nTerms; j++)
        {
            if (r.Next(freq[j]) == 0)
            {
                d.Add(new Field("f", terms[j].Text(), Field.Store.NO, Field.Index.UN_TOKENIZED));
                //System.out.println(d);
            }
        }
        iw.AddDocument(d);
    }
    iw.Optimize();
    iw.Close();
}
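Because a term j is only added to a document when r.Next(freq[j]) returns 0, each term appears with probability roughly 1 / freq[j]. With freq[i] = Ceiling(((nTerms + 1) - i) ^ power), lower term indexes get the largest freq values, so, as the inline comment notes, the first terms end up the least frequent in the resulting index.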