This article collects typical usage examples of the C# method Lucene.Net.Documents.FieldType.Freeze. If you have been wondering how to use FieldType.Freeze in C#, what it is for, or what calling it looks like in practice, the hand-picked code samples below may help. You can also browse further usage examples for the containing class, Lucene.Net.Documents.FieldType.
Twelve code examples of FieldType.Freeze are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code samples.
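Before diving into the examples, here is a minimal sketch of what Freeze does. It is not taken from any example below; it simply illustrates, against the same Lucene.Net 4.x-style API the examples use, that Freeze makes a FieldType immutable, so an instance can be shared safely across many Field objects. The exact exception thrown on later mutation is port-dependent (assumed here to be InvalidOperationException):

// Minimal sketch, not from the examples below; the exception type is an assumption.
FieldType ft = new FieldType();
ft.Indexed = true;           // configure while the type is still mutable
ft.Stored = true;
ft.Freeze();                 // lock the configuration

// The frozen type can now be shared across fields without risk of drift.
Field title = new Field("title", "hello", ft);
Field body = new Field("body", "world", ft);

try
{
    ft.Stored = false;       // any further mutation is rejected...
}
catch (InvalidOperationException)
{
    // ...typically with an InvalidOperationException (varies by port).
}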
Example 1: Test
public virtual void Test()
{
    IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    try
    {
        FieldType ft = new FieldType();
        ft.Indexed = true;
        ft.Stored = Random().NextBoolean();
        ft.Freeze();
        Document doc = new Document();
        if (Random().NextBoolean())
        {
            // totally ok short field value
            doc.Add(new Field(TestUtil.RandomSimpleString(Random(), 1, 10), TestUtil.RandomSimpleString(Random(), 1, 10), ft));
        }
        // problematic field
        string name = TestUtil.RandomSimpleString(Random(), 1, 50);
        string value = TestUtil.RandomSimpleString(Random(), MinTestTermLength, MaxTestTermLegnth);
        Field f = new Field(name, value, ft);
        if (Random().NextBoolean())
        {
            // totally ok short field value
            doc.Add(new Field(TestUtil.RandomSimpleString(Random(), 1, 10), TestUtil.RandomSimpleString(Random(), 1, 10), ft));
        }
        doc.Add(f);
        try
        {
            w.AddDocument(doc);
            Assert.Fail("Did not get an exception from adding a monster term");
        }
        catch (System.ArgumentException e)
        {
            string maxLengthMsg = Convert.ToString(IndexWriter.MAX_TERM_LENGTH);
            string msg = e.Message;
            Assert.IsTrue(msg.Contains("immense term"), "IllegalArgumentException didn't mention 'immense term': " + msg);
            Assert.IsTrue(msg.Contains(maxLengthMsg), "IllegalArgumentException didn't mention max length (" + maxLengthMsg + "): " + msg);
            Assert.IsTrue(msg.Contains(name), "IllegalArgumentException didn't mention field name (" + name + "): " + msg);
        }
    }
    finally
    {
        w.Dispose();
    }
}
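This test freezes an indexed FieldType, then verifies that IndexWriter.AddDocument rejects a document whose term exceeds IndexWriter.MAX_TERM_LENGTH, and that the ArgumentException message mentions "immense term", the length limit, and the offending field name.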
Example 2: BeforeClass
public void BeforeClass()
{
    NoDocs = AtLeast(4096);
    Distance = (1 << 30) / NoDocs;
    Directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000))
            .SetMergePolicy(NewLogMergePolicy()));
    FieldType storedInt = new FieldType(IntField.TYPE_NOT_STORED);
    storedInt.Stored = true;
    storedInt.Freeze();
    FieldType storedInt8 = new FieldType(storedInt);
    storedInt8.NumericPrecisionStep = 8;
    FieldType storedInt4 = new FieldType(storedInt);
    storedInt4.NumericPrecisionStep = 4;
    FieldType storedInt2 = new FieldType(storedInt);
    storedInt2.NumericPrecisionStep = 2;
    FieldType storedIntNone = new FieldType(storedInt);
    storedIntNone.NumericPrecisionStep = int.MaxValue;
    FieldType unstoredInt = IntField.TYPE_NOT_STORED;
    FieldType unstoredInt8 = new FieldType(unstoredInt);
    unstoredInt8.NumericPrecisionStep = 8;
    FieldType unstoredInt4 = new FieldType(unstoredInt);
    unstoredInt4.NumericPrecisionStep = 4;
    FieldType unstoredInt2 = new FieldType(unstoredInt);
    unstoredInt2.NumericPrecisionStep = 2;
    IntField field8 = new IntField("field8", 0, storedInt8),
        field4 = new IntField("field4", 0, storedInt4),
        field2 = new IntField("field2", 0, storedInt2),
        fieldNoTrie = new IntField("field" + int.MaxValue, 0, storedIntNone),
        ascfield8 = new IntField("ascfield8", 0, unstoredInt8),
        ascfield4 = new IntField("ascfield4", 0, unstoredInt4),
        ascfield2 = new IntField("ascfield2", 0, unstoredInt2);
    Document doc = new Document();
    // add fields that have a distance, to test general functionality
    doc.Add(field8);
    doc.Add(field4);
    doc.Add(field2);
    doc.Add(fieldNoTrie);
    // add ascending fields with a distance of 1, beginning at -noDocs/2, to test the correct splitting of range and inclusive/exclusive
    doc.Add(ascfield8);
    doc.Add(ascfield4);
    doc.Add(ascfield2);
    // Add a series of noDocs docs with increasing int values
    for (int l = 0; l < NoDocs; l++)
    {
        int val = Distance * l + StartOffset;
        field8.IntValue = val;
        field4.IntValue = val;
        field2.IntValue = val;
        fieldNoTrie.IntValue = val;
        val = l - (NoDocs / 2);
        ascfield8.IntValue = val;
        ascfield4.IntValue = val;
        ascfield2.IntValue = val;
        writer.AddDocument(doc);
    }
    Reader = writer.Reader;
    Searcher = NewSearcher(Reader);
    writer.Dispose();
}
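Notice the pattern above: storedInt is frozen immediately, and the precision-step variants are derived through the FieldType copy constructor, whose copies start out mutable again. A minimal sketch of that copy-then-modify idiom (the names here are illustrative):

// Illustrative sketch of deriving a variant from a frozen FieldType.
FieldType baseType = new FieldType(IntField.TYPE_NOT_STORED);
baseType.Stored = true;
baseType.Freeze();                           // baseType can no longer change

FieldType variant = new FieldType(baseType); // the copy is mutable
variant.NumericPrecisionStep = 8;            // tweak the copy, not the original
variant.Freeze();                            // optionally freeze the variant too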
Example 3: FieldType
protected internal virtual FieldType FieldType(Options options)
{
    var wrapper = new OptionsWrapper(options);
    var ft = new FieldType(TextField.TYPE_NOT_STORED)
    {
        StoreTermVectors = true,
        StoreTermVectorPositions = wrapper.positions,
        StoreTermVectorOffsets = wrapper.offsets,
        StoreTermVectorPayloads = wrapper.payloads
    };
    ft.Freeze();
    return ft;
}
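This factory method returns a frozen, term-vector-enabled FieldType for each Options value, so callers always receive a ready-made immutable configuration.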
Example 4: TestOmitTFAndNorms
public virtual void TestOmitTFAndNorms()
{
    Directory dir = NewDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
    Document doc = new Document();
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
    ft.OmitNorms = true;
    ft.Freeze();
    Field f = NewField("foo", "bar", ft);
    doc.Add(f);
    iw.AddDocument(doc);
    IndexReader ir = iw.Reader;
    iw.Dispose();
    IndexSearcher @is = NewSearcher(ir);
    foreach (Similarity sim in Sims)
    {
        @is.Similarity = sim;
        BooleanQuery query = new BooleanQuery(true);
        query.Add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.SHOULD);
        Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
    }
    ir.Dispose();
    dir.Dispose();
}
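With IndexOptions.DOCS_ONLY and OmitNorms = true, the frozen type records only document IDs, so neither term frequencies nor norms are available for scoring; the loop confirms that every Similarity under test still finds the single matching document.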
Example 5: AddRandomFields
protected internal override void AddRandomFields(Document doc)
{
    foreach (FieldInfo.IndexOptions opts in Enum.GetValues(typeof(FieldInfo.IndexOptions)))
    {
        string field = "f_" + opts;
        string pf = TestUtil.GetPostingsFormat(Codec.Default, field);
        if (opts == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS && DoesntSupportOffsets.Contains(pf))
        {
            continue;
        }
        var ft = new FieldType { IndexOptions = opts, Indexed = true, OmitNorms = true };
        ft.Freeze();
        int numFields = Random().Next(5);
        for (int j = 0; j < numFields; ++j)
        {
            doc.Add(new Field(field, TestUtil.RandomSimpleString(Random(), 2), ft));
        }
    }
}
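For each IndexOptions value (skipping offsets when the postings format does not support them), this override freezes a matching FieldType and adds up to four short random fields with it.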
Example 6: TestWriteReadMerge
public virtual void TestWriteReadMerge()
{
    // get another codec, other than the default: so we are merging segments across different codecs
    Codec otherCodec;
    /*if ("SimpleText".Equals(Codec.Default.Name))
    {*/
    otherCodec = new Lucene46Codec();
    /*}
    else
    {
        otherCodec = new SimpleTextCodec();
    }*/
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
    int docCount = AtLeast(200);
    var data = new byte[docCount][][];
    for (int i = 0; i < docCount; ++i)
    {
        int fieldCount = Rarely() ? RandomInts.NextIntBetween(Random(), 1, 500) : RandomInts.NextIntBetween(Random(), 1, 5);
        data[i] = new byte[fieldCount][];
        for (int j = 0; j < fieldCount; ++j)
        {
            int length = Rarely() ? Random().Next(1000) : Random().Next(10);
            int max = Rarely() ? 256 : 2;
            data[i][j] = RandomByteArray(length, max);
        }
    }
    FieldType type = new FieldType(StringField.TYPE_STORED);
    type.Indexed = false;
    type.Freeze();
    IntField id = new IntField("id", 0, Field.Store.YES);
    for (int i = 0; i < data.Length; ++i)
    {
        Document doc = new Document();
        doc.Add(id);
        id.IntValue = i;
        for (int j = 0; j < data[i].Length; ++j)
        {
            Field f = new Field("bytes" + j, data[i][j], type);
            doc.Add(f);
        }
        iw.w.AddDocument(doc);
        if (Random().NextBoolean() && (i % (data.Length / 10) == 0))
        {
            iw.w.Dispose();
            // test merging against a non-compressing codec
            if (iwConf.Codec == otherCodec)
            {
                iwConf.SetCodec(Codec.Default);
            }
            else
            {
                iwConf.SetCodec(otherCodec);
            }
            iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
        }
    }
    for (int i = 0; i < 10; ++i)
    {
        int min = Random().Next(data.Length);
        int max = min + Random().Next(20);
        iw.DeleteDocuments(NumericRangeQuery.NewIntRange("id", min, max, true, false));
    }
    iw.ForceMerge(2); // force merges with deletions
    iw.Commit();
    DirectoryReader ir = DirectoryReader.Open(dir);
    Assert.IsTrue(ir.NumDocs > 0);
    int numDocs = 0;
    for (int i = 0; i < ir.MaxDoc; ++i)
    {
        Document doc = ir.Document(i);
        if (doc == null)
        {
            continue;
        }
        ++numDocs;
        int docId = (int)doc.GetField("id").NumericValue;
        Assert.AreEqual(data[docId].Length + 1, doc.Fields.Count);
        for (int j = 0; j < data[docId].Length; ++j)
        {
            var arr = data[docId][j];
            BytesRef arr2Ref = doc.GetBinaryValue("bytes" + j);
            var arr2 = Arrays.CopyOfRange(arr2Ref.Bytes, arr2Ref.Offset, arr2Ref.Offset + arr2Ref.Length);
            Assert.AreEqual(arr, arr2);
        }
    }
    Assert.IsTrue(ir.NumDocs <= numDocs);
    ir.Dispose();
    iw.DeleteAll();
    iw.Commit();
    iw.ForceMerge(1);
//......... remainder of this method omitted .........
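Even truncated, the point of this example is visible: a stored-only frozen type (Indexed = false) carries binary payloads through merges between a compressing and a non-compressing codec, and after random deletions and ForceMerge(2), every surviving document's bytes still match the original data arrays.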
Example 7: TestReadSkip
public void TestReadSkip()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);
    FieldType ft = new FieldType();
    ft.Stored = true;
    ft.Freeze();
    string @string = TestUtil.RandomSimpleString(Random(), 50);
    var bytes = @string.GetBytes(IOUtils.CHARSET_UTF_8);
    long l = Random().NextBoolean() ? Random().Next(42) : Random().NextLong();
    int i = Random().NextBoolean() ? Random().Next(42) : Random().Next();
    float f = Random().NextFloat();
    double d = Random().NextDouble();
    IList<Field> fields = Arrays.AsList(
        new Field("bytes", bytes, ft),
        new Field("string", @string, ft),
        new LongField("long", l, Field.Store.YES),
        new IntField("int", i, Field.Store.YES),
        new FloatField("float", f, Field.Store.YES),
        new DoubleField("double", d, Field.Store.YES));
    for (int k = 0; k < 100; ++k)
    {
        Document doc = new Document();
        foreach (Field fld in fields)
        {
            doc.Add(fld);
        }
        iw.w.AddDocument(doc);
    }
    iw.Commit();
    DirectoryReader reader = DirectoryReader.Open(dir);
    int docID = Random().Next(100);
    foreach (Field fld in fields)
    {
        string fldName = fld.Name();
        Document sDoc = reader.Document(docID, Collections.Singleton(fldName));
        IndexableField sField = sDoc.GetField(fldName);
        if (typeof(Field) == fld.GetType())
        {
            Assert.AreEqual(fld.BinaryValue(), sField.BinaryValue());
            Assert.AreEqual(fld.StringValue, sField.StringValue);
        }
        else
        {
            Assert.AreEqual(fld.NumericValue, sField.NumericValue);
        }
    }
    reader.Dispose();
    iw.Dispose();
    dir.Dispose();
}
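Here the frozen type is stored-only (Stored = true on an otherwise default FieldType); the test then round-trips binary, string, and numeric values through single-field Document(docID, fieldsToLoad) lookups.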
Example 8: TranslateFieldType
public static FieldType TranslateFieldType(Store store, Index index, TermVector termVector)
{
    FieldType ft = new FieldType();
    ft.Stored = store == Store.YES;
    switch (index)
    {
        case Index.ANALYZED:
            ft.Indexed = true;
            ft.Tokenized = true;
            break;
        case Index.ANALYZED_NO_NORMS:
            ft.Indexed = true;
            ft.Tokenized = true;
            ft.OmitNorms = true;
            break;
        case Index.NOT_ANALYZED:
            ft.Indexed = true;
            ft.Tokenized = false;
            break;
        case Index.NOT_ANALYZED_NO_NORMS:
            ft.Indexed = true;
            ft.Tokenized = false;
            ft.OmitNorms = true;
            break;
        case Index.NO:
            break;
    }
    switch (termVector)
    {
        case TermVector.NO:
            break;
        case TermVector.YES:
            ft.StoreTermVectors = true;
            break;
        case TermVector.WITH_POSITIONS:
            ft.StoreTermVectors = true;
            ft.StoreTermVectorPositions = true;
            break;
        case TermVector.WITH_OFFSETS:
            ft.StoreTermVectors = true;
            ft.StoreTermVectorOffsets = true;
            break;
        case TermVector.WITH_POSITIONS_OFFSETS:
            ft.StoreTermVectors = true;
            ft.StoreTermVectorPositions = true;
            ft.StoreTermVectorOffsets = true;
            break;
    }
    ft.Freeze();
    return ft;
}
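A hypothetical call site for this translator, using the same Store/Index/TermVector enum values the switches above handle:

// Hypothetical usage; the enum values come from the switch statements above.
FieldType ft = TranslateFieldType(Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS);
// The returned type is already frozen, so it can be cached and reused:
Field body = new Field("body", "some analyzed text", ft);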
Example 9: StoredField
static StoredField()
{
    TYPE = new FieldType();
    TYPE.Stored = true;
    TYPE.Freeze();
}
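Freezing the shared static TYPE in the type initializer ensures that no caller can reconfigure the template that every StoredField instance depends on.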
Example 10: Test
public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
{
    MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new MMapDirectory(CreateTempDir("4GBStoredFields")));
    dir.Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
    var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
        .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .SetRAMBufferSizeMB(256.0)
        .SetMergeScheduler(scheduler)
        .SetMergePolicy(NewLogMergePolicy(false, 10))
        .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE);
    IndexWriter w = new IndexWriter(dir, config);
    MergePolicy mp = w.Config.MergePolicy;
    if (mp is LogByteSizeMergePolicy)
    {
        // 1 petabyte:
        ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
    }
    Document doc = new Document();
    FieldType ft = new FieldType();
    ft.Indexed = false;
    ft.Stored = true;
    ft.Freeze();
    int valueLength = RandomInts.NextIntBetween(Random(), 1 << 13, 1 << 20);
    var value = new byte[valueLength];
    for (int i = 0; i < valueLength; ++i)
    {
        // random so that even compressing codecs can't compress it
        value[i] = (byte)Random().Next(256);
    }
    Field f = new Field("fld", value, ft);
    doc.Add(f);
    int numDocs = (int)((1L << 32) / valueLength + 100);
    for (int i = 0; i < numDocs; ++i)
    {
        w.AddDocument(doc);
        if (VERBOSE && i % (numDocs / 10) == 0)
        {
            Console.WriteLine(i + " of " + numDocs + "...");
        }
    }
    w.ForceMerge(1);
    w.Dispose();
    if (VERBOSE)
    {
        bool found = false;
        foreach (string file in dir.ListAll())
        {
            if (file.EndsWith(".fdt"))
            {
                long fileLength = dir.FileLength(file);
                if (fileLength >= 1L << 32)
                {
                    found = true;
                }
                Console.WriteLine("File length of " + file + " : " + fileLength);
            }
        }
        if (!found)
        {
            Console.WriteLine("No .fdt file larger than 4GB, test bug?");
        }
    }
    DirectoryReader rd = DirectoryReader.Open(dir);
    Document sd = rd.Document(numDocs - 1);
    Assert.IsNotNull(sd);
    Assert.AreEqual(1, sd.Fields.Count);
    BytesRef valueRef = sd.GetBinaryValue("fld");
    Assert.IsNotNull(valueRef);
    Assert.AreEqual(new BytesRef(value), valueRef);
    rd.Dispose();
    dir.Dispose();
}
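The frozen stored-only type here carries incompressible random bytes; the test writes enough documents to push the .fdt stored-fields file past 4 GB, then reads the last document back and checks its binary value.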
Example 11: TestStats
public virtual void TestStats()
{
    Directory dir = NewDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    Document doc = new Document();
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
    ft.Freeze();
    Field f = NewField("foo", "bar", ft);
    doc.Add(f);
    iw.AddDocument(doc);
    IndexReader ir = iw.Reader;
    iw.Dispose();
    Assert.AreEqual(-1, ir.TotalTermFreq(new Term("foo", new BytesRef("bar"))));
    Assert.AreEqual(-1, ir.GetSumTotalTermFreq("foo"));
    ir.Dispose();
    dir.Dispose();
}
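Because the frozen type uses IndexOptions.DOCS_ONLY, term frequencies are never written, so TotalTermFreq and GetSumTotalTermFreq return -1 rather than real counts, which is exactly what the assertions expect.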
Example 12: BeforeClass
//......... beginning of this method omitted .........
//NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE);
//NUMBER_FORMAT.setMaximumFractionDigits((Random().nextInt() & 20) + 1);
//NUMBER_FORMAT.setMinimumFractionDigits((Random().nextInt() & 20) + 1);
//NUMBER_FORMAT.setMaximumIntegerDigits((Random().nextInt() & 20) + 1);
//NUMBER_FORMAT.setMinimumIntegerDigits((Random().nextInt() & 20) + 1);
NUMBER_FORMAT = new NumberFormat(LOCALE);
double randomDouble;
long randomLong;
int randomInt;
float randomFloat;
while ((randomLong = Convert.ToInt64(NormalizeNumber(Math.Abs(Random().nextLong())))) == 0L)
    ;
while ((randomDouble = Convert.ToDouble(NormalizeNumber(Math.Abs(Random().NextDouble())))) == 0.0)
    ;
while ((randomFloat = Convert.ToSingle(NormalizeNumber(Math.Abs(Random().nextFloat())))) == 0.0f)
    ;
while ((randomInt = Convert.ToInt32(NormalizeNumber(Math.Abs(Random().nextInt())))) == 0)
    ;
randomNumberMap.Put(FieldType.NumericType.LONG.ToString(), randomLong);
randomNumberMap.Put(FieldType.NumericType.INT.ToString(), randomInt);
randomNumberMap.Put(FieldType.NumericType.FLOAT.ToString(), randomFloat);
randomNumberMap.Put(FieldType.NumericType.DOUBLE.ToString(), randomDouble);
randomNumberMap.Put(DATE_FIELD_NAME, randomDate);
RANDOM_NUMBER_MAP = Collections.UnmodifiableMap(randomNumberMap);
directory = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random(), directory,
    NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
        .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000))
        .SetMergePolicy(NewLogMergePolicy()));
Document doc = new Document();
HashMap<String, NumericConfig> numericConfigMap = new HashMap<String, NumericConfig>();
HashMap<String, Field> numericFieldMap = new HashMap<String, Field>();
qp.NumericConfigMap = numericConfigMap;
foreach (FieldType.NumericType type in Enum.GetValues(typeof(FieldType.NumericType)))
{
    numericConfigMap.Put(type.ToString(), new NumericConfig(PRECISION_STEP, NUMBER_FORMAT, type));
    FieldType ft2 = new FieldType(IntField.TYPE_NOT_STORED);
    ft2.NumericTypeValue = type;
    ft2.Stored = true;
    ft2.NumericPrecisionStep = PRECISION_STEP;
    ft2.Freeze();
    Field field;
    switch (type)
    {
        case FieldType.NumericType.INT:
            field = new IntField(type.ToString(), 0, ft2);
            break;
        case FieldType.NumericType.FLOAT:
            field = new FloatField(type.ToString(), 0.0f, ft2);
            break;
        case FieldType.NumericType.LONG:
            field = new LongField(type.ToString(), 0L, ft2);
            break;
        case FieldType.NumericType.DOUBLE:
            field = new DoubleField(type.ToString(), 0.0, ft2);
            break;
        default:
            fail();
            field = null;
            break;
    }
    numericFieldMap.Put(type.ToString(), field);
    doc.Add(field);
}
numericConfigMap.Put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP, DATE_FORMAT, FieldType.NumericType.LONG));
FieldType ft = new FieldType(LongField.TYPE_NOT_STORED);
ft.Stored = true;
ft.NumericPrecisionStep = PRECISION_STEP;
LongField dateField = new LongField(DATE_FIELD_NAME, 0L, ft);
numericFieldMap.Put(DATE_FIELD_NAME, dateField);
doc.Add(dateField);
foreach (NumberType numberType in Enum.GetValues(typeof(NumberType)))
{
    setFieldValues(numberType, numericFieldMap);
    if (VERBOSE) Console.WriteLine("Indexing document: " + doc);
    writer.AddDocument(doc);
}
reader = writer.Reader;
searcher = NewSearcher(reader);
writer.Dispose();