

C# Lucene.Net.Store.RAMDirectory.List Method Code Examples

This article collects typical usage examples of the Lucene.Net.Store.RAMDirectory.List method in C#. If you are wondering how Lucene.Net.Store.RAMDirectory.List is used in practice, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples of the containing class, Lucene.Net.Store.RAMDirectory.


Six code examples of the Lucene.Net.Store.RAMDirectory.List method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
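Before the full examples, here is a minimal sketch of the method itself: in this legacy Lucene.Net 2.x-era API, RAMDirectory.List() returns a System.String[] with the names of every file currently stored in the in-memory directory. The snippet below is a hypothetical, self-contained demo written against the same API surface the examples use (IndexWriter, WhitespaceAnalyzer, Field.Index.TOKENIZED, FileLength), not code taken from any of the projects cited below:

		using Lucene.Net.Analysis;
		using Lucene.Net.Documents;
		using Lucene.Net.Index;
		using Lucene.Net.Store;

		public class RAMDirectoryListDemo
		{
			public static void Main()
			{
				// Build a small in-memory index with a single document.
				RAMDirectory dir = new RAMDirectory();
				IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
				Document doc = new Document();
				doc.Add(new Field("content", "aaa bbb ccc", Field.Store.YES, Field.Index.TOKENIZED));
				writer.AddDocument(doc);
				writer.Close();

				// List() returns the names of all files the RAMDirectory currently holds.
				System.String[] files = dir.List();
				for (int i = 0; i < files.Length; i++)
				{
					System.Console.Out.WriteLine(files[i] + " (" + dir.FileLength(files[i]) + " bytes)");
				}

				dir.Close();
			}
		}

This is the pattern the tests below rely on: comparing files.Length before and after an operation, or scanning the returned names, is a cheap way to assert which index files an IndexWriter has created or deleted.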

Example 1: TestKeepAllDeletionPolicy

		public virtual void  TestKeepAllDeletionPolicy()
		{
			
			for (int pass = 0; pass < 4; pass++)
			{
				
				bool autoCommit = pass < 2;
				bool useCompoundFile = (pass % 2) > 0;
				
				KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(this);
				
				Directory dir = new RAMDirectory();
				
				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
				writer.SetMaxBufferedDocs(10);
				writer.SetUseCompoundFile(useCompoundFile);
				for (int i = 0; i < 107; i++)
				{
					AddDoc(writer);
				}
				writer.Close();
				
				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
				writer.SetUseCompoundFile(useCompoundFile);
				writer.Optimize();
				writer.Close();
				
				Assert.AreEqual(2, policy.numOnInit);
				if (autoCommit)
				{
					Assert.IsTrue(policy.numOnCommit > 2);
				}
				else
				{
					// If we are not auto committing then there should
					// be exactly 2 commits (one per close above):
					Assert.AreEqual(2, policy.numOnCommit);
				}
				
				// Simplistic check: just verify all segments_N's still
				// exist, and, I can open a reader on each:
				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
				while (gen > 0)
				{
					IndexReader reader = IndexReader.Open(dir);
					reader.Close();
					dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
					gen--;
					
					if (gen > 0)
					{
						// Removing that commit point should have
						// orphaned at least one index file. Open &
						// close a writer and assert that it
						// actually removed something:
						int preCount = dir.List().Length;
						writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false, policy);
						writer.Close();
						int postCount = dir.List().Length;
						Assert.IsTrue(postCount < preCount);
					}
				}
				
				dir.Close();
			}
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 67, Source: TestDeletionPolicy.cs

Example 2: TestDeleteLeftoverFiles

		public virtual void  TestDeleteLeftoverFiles()
		{
			
			Directory dir = new RAMDirectory();
			
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetMaxBufferedDocs(10);
			int i;
			for (i = 0; i < 35; i++)
			{
				AddDoc(writer, i);
			}
			writer.SetUseCompoundFile(false);
			for (; i < 45; i++)
			{
				AddDoc(writer, i);
			}
			writer.Close();
			
			// Delete one doc so we get a .del file:
			IndexReader reader = IndexReader.Open(dir);
			Term searchTerm = new Term("id", "7");
			int delCount = reader.DeleteDocuments(searchTerm);
			Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
			
			// Set one norm so we get a .s0 file:
			reader.SetNorm(21, "content", (float) 1.5);
			reader.Close();
			
			// Now, artificially create an extra .del file & extra
			// .s0 file:
			System.String[] files = dir.List();
			
			/*
			for (int k = 0; k < files.Length; k++)
			{
				System.Console.Out.WriteLine(k + ": " + files[k]);
			}
			*/
			
			// The numbering of fields can vary depending on which
			// JRE is in use.  On some JREs we see content bound to
			// field 0; on others, field 1.  So, here we have to
			// figure out which field number corresponds to
			// "content", and then set our expected file names below
			// accordingly:
			CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
			FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
			int contentFieldIndex = - 1;
			for (i = 0; i < fieldInfos.Size(); i++)
			{
				FieldInfo fi = fieldInfos.FieldInfo(i);
				if (fi.Name_ForNUnitTest.Equals("content"))
				{
					contentFieldIndex = i;
					break;
				}
			}
			cfsReader.Close();
			Assert.IsTrue(contentFieldIndex != - 1, "could not locate the 'content' field number in the _2.cfs segment");
			
			System.String normSuffix = "s" + contentFieldIndex;
			
			// Create a bogus separate norms file for a
			// segment/field that actually has a separate norms file
			// already:
			CopyFile(dir, "_2_1." + normSuffix, "_2_2." + normSuffix);
			
			// Create a bogus separate norms file for a
			// segment/field that actually has a separate norms file
			// already, using the "not compound file" extension:
			CopyFile(dir, "_2_1." + normSuffix, "_2_2.f" + contentFieldIndex);
			
			// Create a bogus separate norms file for a
			// segment/field that does not have a separate norms
			// file already:
			CopyFile(dir, "_2_1." + normSuffix, "_1_1." + normSuffix);
			
			// Create a bogus separate norms file for a
			// segment/field that does not have a separate norms
			// file already using the "not compound file" extension:
			CopyFile(dir, "_2_1." + normSuffix, "_1_1.f" + contentFieldIndex);
			
			// Create a bogus separate del file for a
			// segment that already has a separate del file: 
			CopyFile(dir, "_0_1.del", "_0_2.del");
			
			// Create a bogus separate del file for a
			// segment that does not yet have a separate del file:
			CopyFile(dir, "_0_1.del", "_1_1.del");
			
			// Create a bogus separate del file for a
			// non-existent segment:
			CopyFile(dir, "_0_1.del", "_188_1.del");
			
			// Create a bogus segment file:
			CopyFile(dir, "_0.cfs", "_188.cfs");
			
			// Create a bogus fnm file when the CFS already exists:
			CopyFile(dir, "_0.cfs", "_0.fnm");
			
//......... some of the code is omitted here .........
Developer: vikasraz, Project: indexsearchutils, Lines: 101, Source: TestIndexFileDeleter.cs

Example 3: TestAddIndexOnDiskFull

		public virtual void  TestAddIndexOnDiskFull()
		{
			int START_COUNT = 57;
			int NUM_DIR = 50;
			int END_COUNT = START_COUNT + NUM_DIR * 25;
			
			bool debug = false;
			
			// Build up a bunch of dirs that have indexes which we
			// will then merge together by calling addIndexes(*):
			Directory[] dirs = new Directory[NUM_DIR];
			long inputDiskUsage = 0;
			for (int i = 0; i < NUM_DIR; i++)
			{
				dirs[i] = new RAMDirectory();
				IndexWriter writer = new IndexWriter(dirs[i], new WhitespaceAnalyzer(), true);
				for (int j = 0; j < 25; j++)
				{
					AddDocWithIndex(writer, 25 * i + j);
				}
				writer.Close();
				System.String[] files = dirs[i].List();
				for (int j = 0; j < files.Length; j++)
				{
					inputDiskUsage += dirs[i].FileLength(files[j]);
				}
			}
			
			// Now, build a starting index that has START_COUNT docs.  We
			// will then try to addIndexes into a copy of this:
			RAMDirectory startDir = new RAMDirectory();
			IndexWriter writer2 = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
			for (int j = 0; j < START_COUNT; j++)
			{
				AddDocWithIndex(writer2, j);
			}
			writer2.Close();
			
			// Make sure starting index seems to be working properly:
			Term searchTerm = new Term("content", "aaa");
			IndexReader reader = IndexReader.Open(startDir);
			Assert.AreEqual(57, reader.DocFreq(searchTerm), "first docFreq");
			
			IndexSearcher searcher = new IndexSearcher(reader);
			Hits hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(57, hits.Length(), "first number of hits");
			searcher.Close();
			reader.Close();
			
			// Iterate with larger and larger amounts of free
			// disk space.  With little free disk space,
			// addIndexes will certainly run out of space &
			// fail.  Verify that when this happens, index is
			// not corrupt and index in fact has added no
			// documents.  Then, we increase disk space by 2000
			// bytes each iteration.  At some point there is
			// enough free disk space and addIndexes should
			// succeed and index should show all documents were
			// added.
			
			// String[] files = startDir.list();
			long diskUsage = startDir.SizeInBytes();
			
			long startDiskUsage = 0;
			System.String[] files2 = startDir.List();
			for (int i = 0; i < files2.Length; i++)
			{
				startDiskUsage += startDir.FileLength(files2[i]);
			}
			
			for (int iter = 0; iter < 6; iter++)
			{
				
				if (debug)
					System.Console.Out.WriteLine("TEST: iter=" + iter);
				
				// Start with 100 bytes more than we are currently using:
				long diskFree = diskUsage + 100;
				
				bool autoCommit = iter % 2 == 0;
				int method = iter / 2;
				
				bool success = false;
				bool done = false;
				
				System.String methodName;
				if (0 == method)
				{
					methodName = "addIndexes(Directory[])";
				}
				else if (1 == method)
				{
					methodName = "addIndexes(IndexReader[])";
				}
				else
				{
					methodName = "addIndexesNoOptimize(Directory[])";
				}
				
				while (!done)
//......... some of the code is omitted here .........
Developer: vikasraz, Project: indexsearchutils, Lines: 101, Source: TestIndexWriter.cs

Example 4: TestChangingRAMBuffer2

		public virtual void  TestChangingRAMBuffer2()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetMaxBufferedDocs(10);
			writer.SetMaxBufferedDeleteTerms(10);
			writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
			
			for (int j = 1; j < 52; j++)
			{
				Document doc = new Document();
				doc.Add(new Field("field", "aaa" + j, Field.Store.YES, Field.Index.TOKENIZED));
				writer.AddDocument(doc);
			}
			
			long lastGen = - 1;
			for (int j = 1; j < 52; j++)
			{
				writer.DeleteDocuments(new Term("field", "aaa" + j));
				_TestUtil.SyncConcurrentMerges(writer);
				long gen = SegmentInfos.GenerationFromSegmentsFileName(SegmentInfos.GetCurrentSegmentFileName(dir.List()));
				if (j == 1)
					lastGen = gen;
				else if (j < 10)
				{
					// No new files should be created
					Assert.AreEqual(gen, lastGen);
				}
				else if (10 == j)
				{
					Assert.IsTrue(gen > lastGen);
					lastGen = gen;
					writer.SetRAMBufferSizeMB(0.000001);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (j < 20)
				{
					Assert.IsTrue(gen > lastGen);
					lastGen = gen;
				}
				else if (20 == j)
				{
					writer.SetRAMBufferSizeMB(16);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
					lastGen = gen;
				}
				else if (j < 30)
				{
					Assert.AreEqual(gen, lastGen);
				}
				else if (30 == j)
				{
					writer.SetRAMBufferSizeMB(0.000001);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (j < 40)
				{
					Assert.IsTrue(gen > lastGen);
					lastGen = gen;
				}
				else if (40 == j)
				{
					writer.SetMaxBufferedDeleteTerms(10);
					writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
					lastGen = gen;
				}
				else if (j < 50)
				{
					Assert.AreEqual(gen, lastGen);
					writer.SetMaxBufferedDeleteTerms(10);
					writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (50 == j)
				{
					Assert.IsTrue(gen > lastGen);
				}
			}
			writer.Close();
			dir.Close();
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 80, Source: TestIndexWriter.cs

Example 5: TestSmallRAMBuffer

		public virtual void  TestSmallRAMBuffer()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetRAMBufferSizeMB(0.000001);
			int lastNumFile = dir.List().Length;
			for (int j = 0; j < 9; j++)
			{
				Document doc = new Document();
				doc.Add(new Field("field", "aaa" + j, Field.Store.YES, Field.Index.TOKENIZED));
				writer.AddDocument(doc);
				int numFile = dir.List().Length;
				// Verify that with a tiny RAM buffer we see new
				// segment after every doc
				Assert.IsTrue(numFile > lastNumFile);
				lastNumFile = numFile;
			}
			writer.Close();
			dir.Close();
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 20, Source: TestIndexWriter.cs

Example 6: TestSimulatedCorruptIndex2

		public virtual void  TestSimulatedCorruptIndex2()
		{
			Directory dir = new RAMDirectory();
			
			IndexWriter writer = null;
			
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			
			// add 100 documents
			for (int i = 0; i < 100; i++)
			{
				AddDoc(writer);
			}
			
			// close
			writer.Close();
			
			long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
			Assert.IsTrue(gen > 1, "segment generation should be > 1 but got " + gen);
			
			System.String[] files = dir.List();
			for (int i = 0; i < files.Length; i++)
			{
				if (files[i].EndsWith(".cfs"))
				{
					dir.DeleteFile(files[i]);
					break;
				}
			}
			
			IndexReader reader = null;
			try
			{
				reader = IndexReader.Open(dir);
				Assert.Fail("reader did not hit IOException on opening a corrupt index");
			}
			catch (System.Exception)
			{
			}
			if (reader != null)
			{
				reader.Close();
			}
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 44, Source: TestIndexWriter.cs


Note: The Lucene.Net.Store.RAMDirectory.List method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their respective authors, and the copyright of the source code remains with those authors. Please consult the corresponding project's License before distributing or using it, and do not republish without permission.