

C# Lucene.Net.Store.RAMDirectory.Close Method Code Examples

This article collects typical usage examples of the C# Lucene.Net.Store.RAMDirectory.Close method. If you are unsure what Lucene.Net.Store.RAMDirectory.Close does or how to call it, the selected examples below should help. You can also explore further usage examples of the enclosing Lucene.Net.Store.RAMDirectory class.


The following presents 15 code examples of the Lucene.Net.Store.RAMDirectory.Close method, sorted by popularity by default.
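
Before the examples, here is a minimal sketch of the lifecycle these tests follow: create an in-memory RAMDirectory, index through an IndexWriter, then release resources with Close. It assumes the Lucene.Net 2.9-era API that most of the examples below use (the class, field name, and text are illustrative only); note that in current Lucene.NET releases Close is superseded by Dispose.

		using Lucene.Net.Analysis.Standard;
		using Lucene.Net.Documents;
		using Lucene.Net.Index;
		using Lucene.Net.Store;

		public static class RamDirectoryCloseSketch
		{
			public static void Run()
			{
				// An in-memory directory; nothing touches the file system.
				Directory directory = new RAMDirectory();

				IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(),
				                                     true, IndexWriter.MaxFieldLength.LIMITED);
				Document doc = new Document();
				// Illustrative field name and content.
				doc.Add(new Field("fieldname", "some text to index", Field.Store.YES, Field.Index.ANALYZED));
				writer.AddDocument(doc);
				writer.Close();

				// Close releases the in-memory buffers held by the RAMDirectory.
				directory.Close();
			}
		}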

Example 1: TestDemo_Renamed_Method

		public virtual void  TestDemo_Renamed_Method()
		{
			
			Analyzer analyzer = new StandardAnalyzer();
			
			// Store the index in memory:
			Directory directory = new RAMDirectory();
			// To store an index on disk, use this instead (note that the 
			// parameter true will overwrite the index in that directory
			// if one exists):
			//Directory directory = FSDirectory.getDirectory("/tmp/testindex", true);
			IndexWriter iwriter = new IndexWriter(directory, analyzer, true);
			iwriter.SetMaxFieldLength(25000);
			Document doc = new Document();
			System.String text = "This is the text to be indexed.";
			doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.TOKENIZED));
			iwriter.AddDocument(doc);
			iwriter.Close();
			
			// Now search the index:
			IndexSearcher isearcher = new IndexSearcher(directory);
			// Parse a simple query that searches for "text":
			Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("fieldname", analyzer);
			Query query = parser.Parse("text");
			Hits hits = isearcher.Search(query);
			Assert.AreEqual(1, hits.Length());
			// Iterate through the results:
			for (int i = 0; i < hits.Length(); i++)
			{
				Document hitDoc = hits.Doc(i);
				Assert.AreEqual("This is the text to be indexed.", hitDoc.Get("fieldname"));
			}
			isearcher.Close();
			directory.Close();
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 35, Source: TestDemo.cs

Example 2: TestDemo_Renamed

		public virtual void  TestDemo_Renamed()
		{
			
			Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
			
			// Store the index in memory:
			Directory directory = new RAMDirectory();
			// To store an index on disk, use this instead:
			//Directory directory = FSDirectory.open("/tmp/testindex");
			IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
			Document doc = new Document();
			System.String text = "This is the text to be indexed.";
			doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
			iwriter.AddDocument(doc);
			iwriter.Close();
			
			// Now search the index:
			IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
			// Parse a simple query that searches for "text":
			QueryParser parser = new QueryParser("fieldname", analyzer);
			Query query = parser.Parse("text");
			ScoreDoc[] hits = isearcher.Search(query, null, 1000).scoreDocs;
			Assert.AreEqual(1, hits.Length);
			// Iterate through the results:
			for (int i = 0; i < hits.Length; i++)
			{
				Document hitDoc = isearcher.Doc(hits[i].doc);
				Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
			}
			isearcher.Close();
			directory.Close();
		}
Developer: Rationalle, Project: ravendb, Lines: 32, Source: TestDemo.cs

Example 3: TestMultiValuedNRQ

		public virtual void  TestMultiValuedNRQ()
		{
			System.Random rnd = NewRandom();
			
			RAMDirectory directory = new RAMDirectory();
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
			
			//DecimalFormat format = new DecimalFormat("00000000000", new System.Globalization.CultureInfo("en-US").NumberFormat);
			
			for (int l = 0; l < 5000; l++)
			{
				Document doc = new Document();
				for (int m = 0, c = rnd.Next(10); m <= c; m++)
				{
					int value_Renamed = rnd.Next(System.Int32.MaxValue);
                    doc.Add(new Field("asc", value_Renamed.ToString().PadLeft(11, '0'), Field.Store.NO, Field.Index.NOT_ANALYZED));
					doc.Add(new NumericField("trie", Field.Store.NO, true).SetIntValue(value_Renamed));
				}
				writer.AddDocument(doc);
			}
			writer.Close();
			
			Searcher searcher = new IndexSearcher(directory, true);
			for (int i = 0; i < 50; i++)
			{
				int lower = rnd.Next(System.Int32.MaxValue);
				int upper = rnd.Next(System.Int32.MaxValue);
				if (lower > upper)
				{
					int a = lower; lower = upper; upper = a;
				}
				TermRangeQuery cq = new TermRangeQuery("asc", lower.ToString().PadLeft(11, '0'),  upper.ToString().PadLeft(11, '0'), true, true);
				System.Int32 tempAux = (System.Int32) lower;
				System.Int32 tempAux2 = (System.Int32) upper;
				NumericRangeQuery tq = NumericRangeQuery.NewIntRange("trie", tempAux, tempAux2, true, true);
				TopDocs trTopDocs = searcher.Search(cq, 1);
				TopDocs nrTopDocs = searcher.Search(tq, 1);
				Assert.AreEqual(trTopDocs.totalHits, nrTopDocs.totalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
			}
			searcher.Close();
			
			directory.Close();
		}
Developer: Rationalle, Project: ravendb, Lines: 43, Source: TestMultiValuedNumericRangeQuery.cs

Example 4: Test

		public virtual void  Test()
		{
			//Positive test of FieldInfos
			Assert.IsTrue(testDoc != null);
			FieldInfos fieldInfos = new FieldInfos();
			fieldInfos.Add(testDoc);
			//Since the complement is stored as well in the fields map
			Assert.IsTrue(fieldInfos.Size() == 7); //this is 7 b/c we are using the no-arg constructor
			RAMDirectory dir = new RAMDirectory();
			System.String name = "testFile";
			OutputStream output = dir.CreateFile(name);
			Assert.IsTrue(output != null);
			//Use a RAMOutputStream
			
			try
			{
				fieldInfos.Write(output);
				output.Close();
				Assert.IsTrue(output.Length() > 0);
				FieldInfos readIn = new FieldInfos(dir, name);
				Assert.IsTrue(fieldInfos.Size() == readIn.Size());
				FieldInfo info = readIn.FieldInfo("textField1");
				Assert.IsTrue(info != null);
				Assert.IsTrue(info.storeTermVector == false);
				
				info = readIn.FieldInfo("textField2");
				Assert.IsTrue(info != null);
				Assert.IsTrue(info.storeTermVector == true);
				
				dir.Close();
			}
			catch (System.IO.IOException e)
			{
				Assert.IsTrue(false);
			}
		}
Developer: runefs, Project: Marvin, Lines: 36, Source: TestFieldInfos.cs

Example 5: TestFieldSetValue

		public virtual void  TestFieldSetValue()
		{
			
			Field field = new Field("id", "id1", Field.Store.YES, Field.Index.NOT_ANALYZED);
			Document doc = new Document();
			doc.Add(field);
			doc.Add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
			
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			writer.AddDocument(doc);
			field.SetValue("id2");
			writer.AddDocument(doc);
			field.SetValue("id3");
			writer.AddDocument(doc);
			writer.Close();
			
			Searcher searcher = new IndexSearcher(dir);
			
			Query query = new TermQuery(new Term("keyword", "test"));
			
			// ensure that queries return expected results without DateFilter first
			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
			Assert.AreEqual(3, hits.Length);
			int result = 0;
			for (int i = 0; i < 3; i++)
			{
				Document doc2 = searcher.Doc(hits[i].doc);
				Field f = doc2.GetField("id");
				if (f.StringValue().Equals("id1"))
					result |= 1;
				else if (f.StringValue().Equals("id2"))
					result |= 2;
				else if (f.StringValue().Equals("id3"))
					result |= 4;
				else
					Assert.Fail("unexpected id field");
			}
			searcher.Close();
			dir.Close();
			Assert.AreEqual(7, result, "did not see all IDs");
		}
Developer: VirtueMe, Project: ravendb, Lines: 42, Source: TestDocument.cs

Example 6: TestPhraseQueryInConjunctionScorer

		public virtual void  TestPhraseQueryInConjunctionScorer()
		{
			RAMDirectory directory = new RAMDirectory();
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
			
			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
			doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.TOKENIZED));
			writer.AddDocument(doc);
			
			doc = new Lucene.Net.Documents.Document();
			doc.Add(new Field("contents", "foobar", Field.Store.YES, Field.Index.TOKENIZED));
			doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.TOKENIZED));
			writer.AddDocument(doc);
			
			writer.Optimize();
			writer.Close();
			
			IndexSearcher searcher = new IndexSearcher(directory);
			
			PhraseQuery phraseQuery = new PhraseQuery();
			phraseQuery.Add(new Term("source", "marketing"));
			phraseQuery.Add(new Term("source", "info"));
			Hits hits = searcher.Search(phraseQuery);
			Assert.AreEqual(2, hits.Length());
			QueryUtils.Check(phraseQuery, searcher);

			
			TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
			BooleanQuery booleanQuery = new BooleanQuery();
			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
			hits = searcher.Search(booleanQuery);
			Assert.AreEqual(1, hits.Length());
			QueryUtils.Check(termQuery, searcher);

			
			searcher.Close();
			
			writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
			doc = new Lucene.Net.Documents.Document();
			doc.Add(new Field("contents", "map entry woo", Field.Store.YES, Field.Index.TOKENIZED));
			writer.AddDocument(doc);
			
			doc = new Lucene.Net.Documents.Document();
			doc.Add(new Field("contents", "woo map entry", Field.Store.YES, Field.Index.TOKENIZED));
			writer.AddDocument(doc);
			
			doc = new Lucene.Net.Documents.Document();
			doc.Add(new Field("contents", "map foobarword entry woo", Field.Store.YES, Field.Index.TOKENIZED));
			writer.AddDocument(doc);
			
			writer.Optimize();
			writer.Close();
			
			searcher = new IndexSearcher(directory);
			
			termQuery = new TermQuery(new Term("contents", "woo"));
			phraseQuery = new PhraseQuery();
			phraseQuery.Add(new Term("contents", "map"));
			phraseQuery.Add(new Term("contents", "entry"));
			
			hits = searcher.Search(termQuery);
			Assert.AreEqual(3, hits.Length());
			hits = searcher.Search(phraseQuery);
			Assert.AreEqual(2, hits.Length());

			
			booleanQuery = new BooleanQuery();
			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
			hits = searcher.Search(booleanQuery);
			Assert.AreEqual(2, hits.Length());
			
			booleanQuery = new BooleanQuery();
			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
			hits = searcher.Search(booleanQuery);
			Assert.AreEqual(2, hits.Length());
			QueryUtils.Check(booleanQuery, searcher);

			
			searcher.Close();
			directory.Close();
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 84, Source: TestPhraseQuery.cs

Example 7: TestKeepLastNDeletionPolicy

		public virtual void  TestKeepLastNDeletionPolicy()
		{
			
			int N = 5;
			
			for (int pass = 0; pass < 4; pass++)
			{
				
				bool autoCommit = pass < 2;
				bool useCompoundFile = (pass % 2) > 0;
				
				Directory dir = new RAMDirectory();
				
				KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
				
				for (int j = 0; j < N + 1; j++)
				{
					IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
					writer.SetMaxBufferedDocs(10);
					writer.SetUseCompoundFile(useCompoundFile);
					for (int i = 0; i < 17; i++)
					{
						AddDoc(writer);
					}
					writer.Optimize();
					writer.Close();
				}
				
				Assert.IsTrue(policy.numDelete > 0);
				Assert.AreEqual(N + 1, policy.numOnInit);
				if (autoCommit)
				{
					Assert.IsTrue(policy.numOnCommit > 1);
				}
				else
				{
					Assert.AreEqual(N + 1, policy.numOnCommit);
				}
				
				// Simplistic check: just verify only the past N segments_N's still
				// exist, and, I can open a reader on each:
				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
				for (int i = 0; i < N + 1; i++)
				{
					try
					{
						IndexReader reader = IndexReader.Open(dir);
						reader.Close();
						if (i == N)
						{
							Assert.Fail("should have failed on commits prior to last " + N);
						}
					}
					catch (System.IO.IOException e)
					{
						if (i != N)
						{
							throw e;
						}
					}
					if (i < N)
					{
						dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
					}
					gen--;
				}
				
				dir.Close();
			}
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 71, Source: TestDeletionPolicy.cs

Example 8: TestKeepAllDeletionPolicy

		public virtual void  TestKeepAllDeletionPolicy()
		{
			
			for (int pass = 0; pass < 4; pass++)
			{
				
				bool autoCommit = pass < 2;
				bool useCompoundFile = (pass % 2) > 0;
				
				KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(this);
				
				Directory dir = new RAMDirectory();
				
				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
				writer.SetMaxBufferedDocs(10);
				writer.SetUseCompoundFile(useCompoundFile);
				for (int i = 0; i < 107; i++)
				{
					AddDoc(writer);
				}
				writer.Close();
				
				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
				writer.SetUseCompoundFile(useCompoundFile);
				writer.Optimize();
				writer.Close();
				
				Assert.AreEqual(2, policy.numOnInit);
				if (autoCommit)
				{
					Assert.IsTrue(policy.numOnCommit > 2);
				}
				else
				{
					// If we are not auto committing then there should
					// be exactly 2 commits (one per close above):
					Assert.AreEqual(2, policy.numOnCommit);
				}
				
				// Simplistic check: just verify all segments_N's still
				// exist, and, I can open a reader on each:
				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
				while (gen > 0)
				{
					IndexReader reader = IndexReader.Open(dir);
					reader.Close();
					dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
					gen--;
					
					if (gen > 0)
					{
						// Now that we've removed a commit point, which
						// should have orphan'd at least one index file.
						// Open & close a writer and assert that it
						// actually removed something:
						int preCount = dir.List().Length;
						writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false, policy);
						writer.Close();
						int postCount = dir.List().Length;
						Assert.IsTrue(postCount < preCount);
					}
				}
				
				dir.Close();
			}
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 67, Source: TestDeletionPolicy.cs

Example 9: TestQuery

		public virtual void  TestQuery()
		{
			
			RAMDirectory dir = new RAMDirectory();
			IndexWriter iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
			iw.SetMaxBufferedDocs(2); // force multi-segment
			AddDoc("one", iw, 1f);
			AddDoc("two", iw, 20f);
			AddDoc("three four", iw, 300f);
			iw.Close();
			
			IndexReader ir = IndexReader.Open(dir);
			IndexSearcher is_Renamed = new IndexSearcher(ir);
			ScoreDoc[] hits;
			
			// assert with norms scoring turned off
			
			hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
			Assert.AreEqual(3, hits.Length);
			Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "one");
			Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "two");
			Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "three four");
			
			// assert with norms scoring turned on
			
			MatchAllDocsQuery normsQuery = new MatchAllDocsQuery("key");
			hits = is_Renamed.Search(normsQuery, null, 1000).ScoreDocs;
			Assert.AreEqual(3, hits.Length);
			
			Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "three four");
			Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "two");
			Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "one");
			
			// change norm & retest
			ir.SetNorm(0, "key", 400f);
			normsQuery = new MatchAllDocsQuery("key");
			hits = is_Renamed.Search(normsQuery, null, 1000).ScoreDocs;
			Assert.AreEqual(3, hits.Length);
			
			Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "one");
			Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "three four");
			Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "two");
			
			// some artificial queries to trigger the use of skipTo():
			
			BooleanQuery bq = new BooleanQuery();
			bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
			bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
			hits = is_Renamed.Search(bq, null, 1000).ScoreDocs;
			Assert.AreEqual(3, hits.Length);
			
			bq = new BooleanQuery();
			bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
			bq.Add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
			hits = is_Renamed.Search(bq, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			
			// delete a document:
			is_Renamed.GetIndexReader().DeleteDocument(0);
			hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
			Assert.AreEqual(2, hits.Length);
			
			// test parsable toString()
			QueryParser qp = new QueryParser("key", analyzer);
			hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000).ScoreDocs;
			Assert.AreEqual(2, hits.Length);
			
			// test parsable toString() with non default boost
			Query maq = new MatchAllDocsQuery();
			maq.SetBoost(2.3f);
			Query pq = qp.Parse(maq.ToString());
			hits = is_Renamed.Search(pq, null, 1000).ScoreDocs;
			Assert.AreEqual(2, hits.Length);
			
			is_Renamed.Close();
			ir.Close();
			dir.Close();
		}
Developer: kstenson, Project: NHibernate.Search, Lines: 78, Source: TestMatchAllDocsQuery.cs

Example 10: TestManyFields

		public virtual void  TestManyFields()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetMaxBufferedDocs(10);
			for (int j = 0; j < 100; j++)
			{
				Document doc = new Document();
				doc.Add(new Field("a" + j, "aaa" + j, Field.Store.YES, Field.Index.TOKENIZED));
				doc.Add(new Field("b" + j, "aaa" + j, Field.Store.YES, Field.Index.TOKENIZED));
				doc.Add(new Field("c" + j, "aaa" + j, Field.Store.YES, Field.Index.TOKENIZED));
				doc.Add(new Field("d" + j, "aaa", Field.Store.YES, Field.Index.TOKENIZED));
				doc.Add(new Field("e" + j, "aaa", Field.Store.YES, Field.Index.TOKENIZED));
				doc.Add(new Field("f" + j, "aaa", Field.Store.YES, Field.Index.TOKENIZED));
				writer.AddDocument(doc);
			}
			writer.Close();
			
			IndexReader reader = IndexReader.Open(dir);
			Assert.AreEqual(100, reader.MaxDoc());
			Assert.AreEqual(100, reader.NumDocs());
			for (int j = 0; j < 100; j++)
			{
				Assert.AreEqual(1, reader.DocFreq(new Term("a" + j, "aaa" + j)));
				Assert.AreEqual(1, reader.DocFreq(new Term("b" + j, "aaa" + j)));
				Assert.AreEqual(1, reader.DocFreq(new Term("c" + j, "aaa" + j)));
				Assert.AreEqual(1, reader.DocFreq(new Term("d" + j, "aaa")));
				Assert.AreEqual(1, reader.DocFreq(new Term("e" + j, "aaa")));
				Assert.AreEqual(1, reader.DocFreq(new Term("f" + j, "aaa")));
			}
			reader.Close();
			dir.Close();
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 33, Source: TestIndexWriter.cs

Example 11: TestCommitOnCloseAbort

		public virtual void  TestCommitOnCloseAbort()
		{
			Directory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetMaxBufferedDocs(10);
			for (int i = 0; i < 14; i++)
			{
				AddDoc(writer);
			}
			writer.Close();
			
			Term searchTerm = new Term("content", "aaa");
			IndexSearcher searcher = new IndexSearcher(dir);
			Hits hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(14, hits.Length(), "first number of hits");
			searcher.Close();
			
			writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false);
			writer.SetMaxBufferedDocs(10);
			for (int j = 0; j < 17; j++)
			{
				AddDoc(writer);
			}
			// Delete all docs:
			writer.DeleteDocuments(searchTerm);
			
			searcher = new IndexSearcher(dir);
			hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(14, hits.Length(), "reader incorrectly sees changes from writer with autoCommit disabled");
			searcher.Close();
			
			// Now, close the writer:
			writer.Abort();
			
			AssertNoUnreferencedFiles(dir, "unreferenced files remain after abort()");
			
			searcher = new IndexSearcher(dir);
			hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(14, hits.Length(), "saw changes after writer.abort");
			searcher.Close();
			
			// Now make sure we can re-open the index, add docs,
			// and all is good:
			writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false);
			writer.SetMaxBufferedDocs(10);
			for (int i = 0; i < 12; i++)
			{
				for (int j = 0; j < 17; j++)
				{
					AddDoc(writer);
				}
				searcher = new IndexSearcher(dir);
				hits = searcher.Search(new TermQuery(searchTerm));
				Assert.AreEqual(14, hits.Length(), "reader incorrectly sees changes from writer with autoCommit disabled");
				searcher.Close();
			}
			
			writer.Close();
			searcher = new IndexSearcher(dir);
			hits = searcher.Search(new TermQuery(searchTerm));
			Assert.AreEqual(218, hits.Length(), "didn't see changes after close");
			searcher.Close();
			
			dir.Close();
		}
Developer: vikasraz, Project: indexsearchutils, Lines: 65, Source: TestIndexWriter.cs

Example 12: TestFuzziness

        public virtual void TestFuzziness()
        {
            RAMDirectory directory = new RAMDirectory();
            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
                                                 IndexWriter.MaxFieldLength.LIMITED);
            AddDoc("aaaaa", writer);
            AddDoc("aaaab", writer);
            AddDoc("aaabb", writer);
            AddDoc("aabbb", writer);
            AddDoc("abbbb", writer);
            AddDoc("bbbbb", writer);
            AddDoc("ddddd", writer);
            writer.Optimize();
            writer.Close();
            IndexSearcher searcher = new IndexSearcher(directory, true);

            FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length);

            // same with prefix
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 1);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length);
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 2);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length);
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 3);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length);
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 4);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(2, hits.Length);
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 5);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 6);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);

            // test scoring
            query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMinSimilarity, 0);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length, "3 documents should match");
            List<String> order = new List<string>(new[] {"bbbbb", "abbbb", "aabbb"});
            for (int i = 0; i < hits.Length; i++)
            {
                String term = searcher.Doc(hits[i].Doc).Get("field");
                //System.out.println(hits[i].score);
                Assert.AreEqual(order[i], term);
            }

            // test BooleanQuery.maxClauseCount
            int savedClauseCount = BooleanQuery.MaxClauseCount;
            try
            {
                BooleanQuery.MaxClauseCount = 2;
                // This query would normally return 3 documents, because 3 terms match (see above):
                query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMinSimilarity, 0);
                hits = searcher.Search(query, null, 1000).ScoreDocs;
                Assert.AreEqual(2, hits.Length, "only 2 documents should match");
                order = new List<string>(new[] {"bbbbb", "abbbb"});
                for (int i = 0; i < hits.Length; i++)
                {
                    String term = searcher.Doc(hits[i].Doc).Get("field");
                    //System.out.println(hits[i].score);
                    Assert.AreEqual(order[i], term);
                }
            }
            finally
            {
                BooleanQuery.MaxClauseCount = savedClauseCount;
            }

            // not similar enough:
            query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(0, hits.Length);
            query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMinSimilarity, 0);
                // edit distance to "aaaaa" = 3
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(0, hits.Length);

            // query identical to a word in the index:
            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length);
            Assert.AreEqual(searcher.Doc(hits[0].Doc).Get("field"), ("aaaaa"));
            // default allows for up to two edits:
            Assert.AreEqual(searcher.Doc(hits[1].Doc).Get("field"), ("aaaab"));
            Assert.AreEqual(searcher.Doc(hits[2].Doc).Get("field"), ("aaabb"));

            // query similar to a word in the index:
            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 0);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length);
            Assert.AreEqual(searcher.Doc(hits[0].Doc).Get("field"), ("aaaaa"));
            Assert.AreEqual(searcher.Doc(hits[1].Doc).Get("field"), ("aaaab"));
            Assert.AreEqual(searcher.Doc(hits[2].Doc).Get("field"), ("aaabb"));

//......... the rest of this example is omitted here .........
Developer: synhershko, Project: lucene.net, Lines: 101, Source: TestFuzzyQuery.cs

Example 13: TestFuzzinessLong

		public virtual void  TestFuzzinessLong()
		{
			RAMDirectory directory = new RAMDirectory();
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			AddDoc("aaaaaaa", writer);
			AddDoc("segment", writer);
			writer.Optimize();
			writer.Close();
	        IndexSearcher searcher = new IndexSearcher(directory, true);
			
			FuzzyQuery query;
			// not similar enough:
			query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);
			// edit distance to "aaaaaaa" = 3, this matches because the string is longer than
			// in testDefaultFuzziness so a bigger difference is allowed:
			query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 0);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			Assert.AreEqual(searcher.Doc(hits[0].Doc).Get("field"), ("aaaaaaa"));
			
			// now with prefix
			query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 1);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			Assert.AreEqual(searcher.Doc(hits[0].Doc).Get("field"), ("aaaaaaa"));
			query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 4);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			Assert.AreEqual(searcher.Doc(hits[0].Doc).Get("field"), ("aaaaaaa"));
			query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 5);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);
			
			// no match, more than half of the characters is wrong:
			query = new FuzzyQuery(new Term("field", "aaacccc"), FuzzyQuery.defaultMinSimilarity, 0);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);
			
			// now with prefix
			query = new FuzzyQuery(new Term("field", "aaacccc"), FuzzyQuery.defaultMinSimilarity, 2);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);
			
			// "student" and "stellent" are indeed similar to "segment" by default:
			query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 0);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 0);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			
			// now with prefix
			query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 1);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 1);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length);
			query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 2);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);
			query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 2);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);
			
			// "student" doesn't match anymore thanks to increased minimum similarity:
			query = new FuzzyQuery(new Term("field", "student"), 0.6f, 0);
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(0, hits.Length);

	        Assert.Throws<ArgumentException>(() => new FuzzyQuery(new Term("field", "student"), 1.1f),
	                                         "Expected ArgumentException");
	        Assert.Throws<ArgumentException>(() => new FuzzyQuery(new Term("field", "student"), -0.1f),
	                                         "Expected ArgumentException");
			
			searcher.Close();
			directory.Close();
		}
Developer: synhershko, Project: lucene.net, Lines: 80, Source: TestFuzzyQuery.cs

Example 14: TestPhraseQueryInConjunctionScorer

		public virtual void  TestPhraseQueryInConjunctionScorer()
		{
			RAMDirectory directory = new RAMDirectory();
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
			
			Document doc = new Document();
			doc.Add(new Field("source", "marketing info", true, true, true));
			writer.AddDocument(doc);
			
			doc = new Document();
			doc.Add(new Field("contents", "foobar", true, true, true));
			doc.Add(new Field("source", "marketing info", true, true, true));
			writer.AddDocument(doc);
			
			writer.Optimize();
			writer.Close();
			
			IndexSearcher searcher = new IndexSearcher(directory);
			
			PhraseQuery phraseQuery = new PhraseQuery();
			phraseQuery.Add(new Term("source", "marketing"));
			phraseQuery.Add(new Term("source", "info"));
			Hits hits = searcher.Search(phraseQuery);
			Assert.AreEqual(2, hits.Length());
			
			TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
			BooleanQuery booleanQuery = new BooleanQuery();
			booleanQuery.Add(termQuery, true, false);
			booleanQuery.Add(phraseQuery, true, false);
			hits = searcher.Search(booleanQuery);
			Assert.AreEqual(1, hits.Length());
			
			searcher.Close();
			
			writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
			doc = new Document();
			doc.Add(new Field("contents", "map entry woo", true, true, true));
			writer.AddDocument(doc);
			
			doc = new Document();
			doc.Add(new Field("contents", "woo map entry", true, true, true));
			writer.AddDocument(doc);
			
			doc = new Document();
			doc.Add(new Field("contents", "map foobarword entry woo", true, true, true));
			writer.AddDocument(doc);
			
			writer.Optimize();
			writer.Close();
			
			searcher = new IndexSearcher(directory);
			
			termQuery = new TermQuery(new Term("contents", "woo"));
			phraseQuery = new PhraseQuery();
			phraseQuery.Add(new Term("contents", "map"));
			phraseQuery.Add(new Term("contents", "entry"));
			
			hits = searcher.Search(termQuery);
			Assert.AreEqual(3, hits.Length());
			hits = searcher.Search(phraseQuery);
			Assert.AreEqual(2, hits.Length());
			
			booleanQuery = new BooleanQuery();
			booleanQuery.Add(termQuery, true, false);
			booleanQuery.Add(phraseQuery, true, false);
			hits = searcher.Search(booleanQuery);
			Assert.AreEqual(2, hits.Length());
			
			booleanQuery = new BooleanQuery();
			booleanQuery.Add(phraseQuery, true, false);
			booleanQuery.Add(termQuery, true, false);
			hits = searcher.Search(booleanQuery);
			Assert.AreEqual(2, hits.Length());
			
			searcher.Close();
			directory.Close();
		}
Developer: runefs, Project: Marvin, Lines: 77, Source: TestPhraseQuery.cs

Example 15: TestWithPendingDeletes3

		public virtual void  TestWithPendingDeletes3()
		{
			// main directory
			Directory dir = new RAMDirectory();
			// auxiliary directory
			Directory aux = new RAMDirectory();
			
			SetUpDirs(dir, aux);
			IndexWriter writer = NewWriter(dir, false);
			
			// Adds 10 docs, then replaces them with another 10
			// docs, so 10 pending deletes:
			for (int i = 0; i < 20; i++)
			{
				Document doc = new Document();
				doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
				doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
				writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
			}
			
			// Deletes one of the 10 added docs, leaving 9:
			PhraseQuery q = new PhraseQuery();
			q.Add(new Term("content", "bbb"));
			q.Add(new Term("content", "14"));
			writer.DeleteDocuments(q);
			
			writer.AddIndexesNoOptimize(new Directory[]{aux});
			
			writer.Optimize();
		    writer.Commit();
			
			VerifyNumDocs(dir, 1039);
			VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
			VerifyTermDocs(dir, new Term("content", "bbb"), 9);
			
			writer.Close();
			dir.Close();
			aux.Close();
		}
Developer: synhershko, Project: lucene.net, Lines: 39, Source: TestAddIndexesNoOptimize.cs


Note: The Lucene.Net.Store.RAMDirectory.Close method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets come from open-source projects contributed by their respective authors, and the source code copyright remains with those authors. Consult each project's license before distributing or using the code; do not reproduce this article without permission.