This article collects typical usage examples of the C# method Lucene.Net.Documents.Field.SetTokenStream. If you have been wondering what Lucene.Net.Documents.Field.SetTokenStream does, how to call it, or what a working call looks like, the curated code sample below should help. You can also look further into the containing class, Lucene.Net.Documents.Field, for related usage.
One code example of Lucene.Net.Documents.Field.SetTokenStream is shown below.
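For orientation, here is a minimal sketch of the pattern the example relies on: a Field stores one value, while SetTokenStream supplies a separate, pre-built TokenStream that determines what actually gets indexed. The field name "content" and the sample strings are illustrative placeholders, not taken from the example below.

    Document doc = new Document();

    // Store the literal string "stored value" for later retrieval...
    Field f = new Field("content", "stored value", Field.Store.YES, Field.Index.ANALYZED);

    // ...but index the whitespace-separated tokens of a different string instead.
    f.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("indexed tokens instead")));
    doc.Add(f);

    // After the document is added via an IndexWriter, term queries match
    // "indexed", "tokens" and "instead", while a stored-field lookup on
    // "content" still returns "stored value".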
Example 1: TestIndexStoreCombos
public virtual void TestIndexStoreCombos()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
    byte[] b = new byte[50];
    for (int i = 0; i < 50; i++)
        b[i] = (byte) (i + 77);

    Document doc = new Document();
    // Store 17 bytes of b starting at offset 10, but index the tokens supplied below.
    Field f = new Field("binary", b, 10, 17, Field.Store.YES);
    f.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("doc1field1")));

    Field f2 = new Field("string", "value", Field.Store.YES, Field.Index.ANALYZED);
    f2.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("doc1field2")));
    doc.Add(f);
    doc.Add(f2);
    w.AddDocument(doc);

    // add 2 docs to test in-memory merging
    f.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("doc2field1")));
    f2.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("doc2field2")));
    w.AddDocument(doc);

    // force segment flush so we can force a segment merge with doc3 later.
    w.Commit();

    f.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("doc3field1")));
    f2.SetTokenStream(new WhitespaceTokenizer(new System.IO.StringReader("doc3field2")));
    w.AddDocument(doc);
    w.Commit();
    w.Optimize(); // force segment merge.

    IndexReader ir = IndexReader.Open(dir);
    doc = ir.Document(0);
    f = doc.GetField("binary");
    b = f.GetBinaryValue();
    Assert.IsTrue(b != null);
    Assert.AreEqual(17, b.Length);
    Assert.AreEqual(87, b[0]); // first stored byte is the original b[10] = 10 + 77 = 87

    Assert.IsTrue(ir.Document(0).GetFieldable("binary").IsBinary());
    Assert.IsTrue(ir.Document(1).GetFieldable("binary").IsBinary());
    Assert.IsTrue(ir.Document(2).GetFieldable("binary").IsBinary());

    Assert.AreEqual("value", ir.Document(0).Get("string"));
    Assert.AreEqual("value", ir.Document(1).Get("string"));
    Assert.AreEqual("value", ir.Document(2).Get("string"));

    // test that the terms were indexed.
    Assert.IsTrue(ir.TermDocs(new Term("binary", "doc1field1")).Next());
    Assert.IsTrue(ir.TermDocs(new Term("binary", "doc2field1")).Next());
    Assert.IsTrue(ir.TermDocs(new Term("binary", "doc3field1")).Next());
    Assert.IsTrue(ir.TermDocs(new Term("string", "doc1field2")).Next());
    Assert.IsTrue(ir.TermDocs(new Term("string", "doc2field2")).Next());
    Assert.IsTrue(ir.TermDocs(new Term("string", "doc3field2")).Next());

    ir.Close();
    dir.Close();
}
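As the assertions show, the stored content survives unchanged for all three documents (the 17 binary bytes and the string "value"), while the indexed terms come entirely from the token streams passed to SetTokenStream ("doc1field1" through "doc3field2"). Once a token stream is set on a field, it alone determines what is indexed; the stored value and the indexed terms are independent of each other.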