This article collects typical usage examples of the C# method Documents.Document.GetBinaryValue. If you have been wondering what Documents.Document.GetBinaryValue does, how to call it, or where to find real examples of it in use, the curated samples below should help. You can also explore further usage examples of the containing class, Documents.Document.
Three code examples of Documents.Document.GetBinaryValue are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code samples.
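Before the examples, a minimal sketch of the basic round trip may help: store raw bytes with a StoredField, then read them back as a BytesRef via GetBinaryValue. The field name "payload" and the sample string are illustrative assumptions, not taken from the examples below (Lucene.NET 4.x API):

// Minimal sketch; the field name "payload" is an assumption for illustration.
var doc = new Documents.Document();
byte[] payload = Encoding.UTF8.GetBytes("hello");   // illustrative content
doc.Add(new StoredField("payload", payload));       // bytes are stored, not indexed

BytesRef bytes = doc.GetBinaryValue("payload");     // first binary value, or null if absent
if (bytes != null)
{
    string text = bytes.Utf8ToString();             // "hello"
}

Note that GetBinaryValue returns only the first value stored under a name; GetBinaryValues returns all of them, as Example 1 below demonstrates.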
Example 1: TestBinaryField
public virtual void TestBinaryField()
{
    Documents.Document doc = new Documents.Document();
    FieldType ft = new FieldType();
    ft.Stored = true;
    IndexableField stringFld = new Field("string", BinaryVal, ft);
    IndexableField binaryFld = new StoredField("binary", BinaryVal.GetBytes(Encoding.UTF8));
    IndexableField binaryFld2 = new StoredField("binary", BinaryVal2.GetBytes(Encoding.UTF8));

    doc.Add(stringFld);
    doc.Add(binaryFld);
    Assert.AreEqual(2, doc.Fields.Count);

    // a StoredField is stored but not indexed
    Assert.IsTrue(binaryFld.BinaryValue != null);
    Assert.IsTrue(binaryFld.FieldType.Stored);
    Assert.IsFalse(binaryFld.FieldType.Indexed);

    // GetBinaryValue returns the first binary value added under the field name
    string binaryTest = doc.GetBinaryValue("binary").Utf8ToString();
    Assert.IsTrue(binaryTest.Equals(BinaryVal));
    string stringTest = doc.Get("string");
    Assert.IsTrue(binaryTest.Equals(stringTest));

    // a second value under the same name is kept alongside the first
    doc.Add(binaryFld2);
    Assert.AreEqual(3, doc.Fields.Count);

    // GetBinaryValues returns all binary values for the field, in insertion order
    BytesRef[] binaryTests = doc.GetBinaryValues("binary");
    Assert.AreEqual(2, binaryTests.Length);
    binaryTest = binaryTests[0].Utf8ToString();
    string binaryTest2 = binaryTests[1].Utf8ToString();
    Assert.IsFalse(binaryTest.Equals(binaryTest2));
    Assert.IsTrue(binaryTest.Equals(BinaryVal));
    Assert.IsTrue(binaryTest2.Equals(BinaryVal2));

    doc.RemoveField("string");
    Assert.AreEqual(2, doc.Fields.Count);
    doc.RemoveFields("binary");
    Assert.AreEqual(0, doc.Fields.Count);
}
Example 2: TestWriteReadMerge
public virtual void TestWriteReadMerge()
{
    // get another codec, other than the default: so we are merging segments across different codecs
    // (the SimpleTextCodec fallback from the original test is commented out here,
    // so Lucene46Codec is always chosen)
    Codec otherCodec = new Lucene46Codec();
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());

    int docCount = AtLeast(200);
    var data = new byte[docCount][][];
    for (int i = 0; i < docCount; ++i)
    {
        int fieldCount = Rarely() ? RandomInts.NextIntBetween(Random(), 1, 500) : RandomInts.NextIntBetween(Random(), 1, 5);
        data[i] = new byte[fieldCount][];
        for (int j = 0; j < fieldCount; ++j)
        {
            int length = Rarely() ? Random().Next(1000) : Random().Next(10);
            int max = Rarely() ? 256 : 2;
            data[i][j] = RandomByteArray(length, max);
        }
    }

    FieldType type = new FieldType(StringField.TYPE_STORED);
    type.Indexed = false;
    type.Freeze();
    IntField id = new IntField("id", 0, Field.Store.YES);
    for (int i = 0; i < data.Length; ++i)
    {
        Document doc = new Document();
        doc.Add(id);
        id.IntValue = i;
        for (int j = 0; j < data[i].Length; ++j)
        {
            Field f = new Field("bytes" + j, data[i][j], type);
            doc.Add(f);
        }
        iw.w.AddDocument(doc);
        if (Random().NextBoolean() && (i % (data.Length / 10) == 0))
        {
            iw.w.Dispose();
            // test merging against a non-compressing codec
            if (iwConf.Codec == otherCodec)
            {
                iwConf.SetCodec(Codec.Default);
            }
            else
            {
                iwConf.SetCodec(otherCodec);
            }
            iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
        }
    }
    // delete some random id ranges before merging
    for (int i = 0; i < 10; ++i)
    {
        int min = Random().Next(data.Length);
        int max = min + Random().Next(20);
        iw.DeleteDocuments(NumericRangeQuery.NewIntRange("id", min, max, true, false));
    }
    iw.ForceMerge(2); // force merges with deletions
    iw.Commit();

    DirectoryReader ir = DirectoryReader.Open(dir);
    Assert.IsTrue(ir.NumDocs > 0);
    int numDocs = 0;
    for (int i = 0; i < ir.MaxDoc; ++i)
    {
        Document doc = ir.Document(i);
        if (doc == null)
        {
            continue;
        }
        ++numDocs;
        int docId = (int)doc.GetField("id").NumericValue;
        Assert.AreEqual(data[docId].Length + 1, doc.Fields.Count);
        for (int j = 0; j < data[docId].Length; ++j)
        {
            // copy out the BytesRef slice and compare it to the original bytes
            var arr = data[docId][j];
            BytesRef arr2Ref = doc.GetBinaryValue("bytes" + j);
            var arr2 = Arrays.CopyOfRange(arr2Ref.Bytes, arr2Ref.Offset, arr2Ref.Offset + arr2Ref.Length);
            Assert.AreEqual(arr, arr2);
        }
    }
    Assert.IsTrue(ir.NumDocs <= numDocs);

    ir.Dispose();
    iw.DeleteAll();
    iw.Commit();
    iw.ForceMerge(1);
    //......... the rest of this example is omitted .........
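One detail in Example 2 deserves emphasis: the BytesRef returned by GetBinaryValue may be a slice into a larger shared buffer, so a copy must honor its Offset and Length rather than taking the Bytes array wholesale. A minimal sketch in plain .NET, assuming a document with a field named "bytes0":

// "bytes0" is an assumed field name for illustration.
BytesRef byteRef = doc.GetBinaryValue("bytes0");    // may point into a shared buffer
if (byteRef != null)
{
    var copy = new byte[byteRef.Length];
    // copy only the valid slice [Offset, Offset + Length)
    System.Array.Copy(byteRef.Bytes, byteRef.Offset, copy, 0, byteRef.Length);
}

Example 2 achieves the same with the Arrays.CopyOfRange helper before comparing against the original data.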
Example 3: TestTransitionAPI
public virtual void TestTransitionAPI()
{
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

    Documents.Document doc = new Documents.Document();
    doc.Add(new Field("stored", "abc", Field.Store.YES, Field.Index.NO));
    doc.Add(new Field("stored_indexed", "abc xyz", Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field("stored_tokenized", "abc xyz", Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field("indexed", "abc xyz", Field.Store.NO, Field.Index.NOT_ANALYZED));
    doc.Add(new Field("tokenized", "abc xyz", Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field("tokenized_reader", new StringReader("abc xyz")));
    doc.Add(new Field("tokenized_tokenstream", w.w.Analyzer.TokenStream("tokenized_tokenstream", new StringReader("abc xyz"))));
    doc.Add(new Field("binary", new byte[10]));
    doc.Add(new Field("tv", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
    doc.Add(new Field("tv_pos", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
    doc.Add(new Field("tv_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
    doc.Add(new Field("tv_pos_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
    w.AddDocument(doc);

    IndexReader r = w.Reader;
    w.Dispose();

    doc = r.Document(0);
    // 4 stored fields
    Assert.AreEqual(4, doc.Fields.Count);
    Assert.AreEqual("abc", doc.Get("stored"));
    Assert.AreEqual("abc xyz", doc.Get("stored_indexed"));
    Assert.AreEqual("abc xyz", doc.Get("stored_tokenized"));

    // the stored binary field comes back as a BytesRef
    BytesRef br = doc.GetBinaryValue("binary");
    Assert.IsNotNull(br);
    Assert.AreEqual(10, br.Length);

    IndexSearcher s = new IndexSearcher(r);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_indexed", "abc xyz")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_tokenized", "abc")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_tokenized", "xyz")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("indexed", "abc xyz")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized", "abc")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized", "xyz")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_reader", "abc")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_reader", "xyz")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_tokenstream", "abc")), 1).TotalHits);
    Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_tokenstream", "xyz")), 1).TotalHits);

    foreach (string field in new string[] { "tv", "tv_pos", "tv_off", "tv_pos_off" })
    {
        Fields tvFields = r.GetTermVectors(0);
        Terms tvs = tvFields.Terms(field);
        Assert.IsNotNull(tvs);
        Assert.AreEqual(2, tvs.Size());
        TermsEnum tvsEnum = tvs.Iterator(null);
        Assert.AreEqual(new BytesRef("abc"), tvsEnum.Next());
        DocsAndPositionsEnum dpEnum = tvsEnum.DocsAndPositions(null, null);
        if (field.Equals("tv"))
        {
            // plain term vectors carry no positions or offsets
            Assert.IsNull(dpEnum);
        }
        else
        {
            Assert.IsNotNull(dpEnum);
        }
        Assert.AreEqual(new BytesRef("xyz"), tvsEnum.Next());
        Assert.IsNull(tvsEnum.Next());
    }

    r.Dispose();
    dir.Dispose();
}