C# IndexWriter.AddDocument Method Code Examples

This article collects typical usage examples of the IndexWriter.AddDocument method in C#. If you are wondering what IndexWriter.AddDocument does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of the IndexWriter class that this method belongs to.


The following presents 15 code examples of the IndexWriter.AddDocument method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
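
Before diving into the examples, here is a minimal, self-contained sketch of the typical AddDocument call pattern: open a Directory, create an IndexWriter, build a Document from fields, then call AddDocument. This sketch is not taken from the examples below; it assumes Lucene.Net 4.8-style public APIs (LuceneVersion, StandardAnalyzer, RAMDirectory), and exact type or constant names may differ between Lucene.Net releases.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

class AddDocumentSketch
{
    static void Main()
    {
        // In-memory directory for illustration; use FSDirectory.Open(path) for an on-disk index.
        using (Directory dir = new RAMDirectory())
        {
            var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
            var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer);
            using (var writer = new IndexWriter(dir, config))
            {
                var doc = new Document();
                doc.Add(new StringField("id", "1", Field.Store.YES));               // exact-match, stored field
                doc.Add(new TextField("content", "hello lucene", Field.Store.NO));  // analyzed, not stored
                writer.AddDocument(doc); // index the document
                writer.Commit();         // make the change visible to new readers
            }
        }
    }
}

In the test-derived examples that follow, the same pattern appears behind test-framework helpers such as NewDirectory(), NewIndexWriterConfig(...), and NewTextField(...), which wrap these constructors for randomized testing.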

Example 1: TestAddSameDocTwice

        public virtual void TestAddSameDocTwice()
        {
            // LUCENE-5367: this was a problem with the previous code, making sure it
            // works with the new code.
            Directory indexDir = NewDirectory(), taxoDir = NewDirectory();
            IndexWriter indexWriter = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
            FacetsConfig facetsConfig = new FacetsConfig();
            Document doc = new Document();
            doc.Add(new FacetField("a", "b"));
            doc = facetsConfig.Build(taxoWriter, doc);
            // these two addDocument() used to fail
            indexWriter.AddDocument(doc);
            indexWriter.AddDocument(doc);
            IOUtils.Close(indexWriter, taxoWriter);

            DirectoryReader indexReader = DirectoryReader.Open(indexDir);
            DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
            IndexSearcher searcher = NewSearcher(indexReader);
            FacetsCollector fc = new FacetsCollector();
            searcher.Search(new MatchAllDocsQuery(), fc);

            Facets facets = GetTaxonomyFacetCounts(taxoReader, facetsConfig, fc);
            FacetResult res = facets.GetTopChildren(10, "a");
            Assert.AreEqual(1, res.LabelValues.Length);
            Assert.AreEqual(2, res.LabelValues[0].Value);
            IOUtils.Close(indexReader, taxoReader);

            IOUtils.Close(indexDir, taxoDir);
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 30, Source: TestFacetsConfig.cs

Example 2: CreateRandomTerms

        public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
        {
            int[] freq = new int[nTerms];
            Terms = new Term[nTerms];
            for (int i = 0; i < nTerms; i++)
            {
                int f = (nTerms + 1) - i; // make first terms less frequent
                freq[i] = (int)Math.Ceiling(Math.Pow(f, power));
                Terms[i] = new Term("f", char.ToString((char)('A' + i)));
            }

            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
            for (int i = 0; i < nDocs; i++)
            {
                Document d = new Document();
                for (int j = 0; j < nTerms; j++)
                {
                    if (Random().Next(freq[j]) == 0)
                    {
                        d.Add(NewStringField("f", Terms[j].Text(), Field.Store.NO));
                        //System.out.println(d);
                    }
                }
                iw.AddDocument(d);
            }
            iw.ForceMerge(1);
            iw.Dispose();
        }
Developer: joyanta, Project: lucene.net, Lines: 28, Source: TestScorerPerf.cs

Example 3: AddDocs2

 private void AddDocs2(IndexWriter writer, int numDocs)
 {
     for (int i = 0; i < numDocs; i++)
     {
         Document doc = new Document();
         doc.Add(NewTextField("content", "bbb", Field.Store.NO));
         writer.AddDocument(doc);
     }
 }
Developer: joyanta, Project: lucene.net, Lines: 9, Source: TestPerFieldPostingsFormat2.cs

Example 4: CreateDummySearcher

 // TODO: this should be setUp()....
 public virtual void CreateDummySearcher()
 {
     // Create a dummy index with nothing in it.
     // this could possibly fail if Lucene starts checking for docid ranges...
     d = NewDirectory();
     IndexWriter iw = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
     iw.AddDocument(new Document());
     iw.Dispose();
     r = DirectoryReader.Open(d);
     s = NewSearcher(r);
 }
Developer: paulirwin, Project: lucene.net, Lines: 12, Source: TestScorerPerf.cs

Example 5: SetUp

 public override void SetUp()
 {
     base.SetUp();
     Directory = NewDirectory();
     IndexWriter writer = new IndexWriter(Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
     //writer.setNoCFSRatio(0.0);
     //writer.infoStream = System.out;
     FieldType customType = new FieldType(TextField.TYPE_STORED);
     customType.Tokenized = false;
     customType.StoreTermVectors = true;
     for (int i = 0; i < NumDocs; i++)
     {
         Documents.Document doc = new Documents.Document();
         Field fld = NewField("field", English.IntToEnglish(i), customType);
         doc.Add(fld);
         writer.AddDocument(doc);
     }
     writer.Dispose();
 }
Developer: paulirwin, Project: lucene.net, Lines: 19, Source: TestMultiThreadTermVectors.cs

Example 6: SetUp

        public override void SetUp()
        {
            base.SetUp();
            //IndexDir = CreateTempDir("RAMDirIndex");
            string tempDir = Path.GetTempPath();
            if (tempDir == null)
                throw new IOException("java.io.tmpdir undefined, cannot run test");
            IndexDir = new DirectoryInfo(Path.Combine(tempDir, "RAMDirIndex"));

            Directory dir = NewFSDirectory(IndexDir);
            IndexWriter writer = new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE));
            // add some documents
            Document doc = null;
            for (int i = 0; i < DocsToAdd; i++)
            {
                doc = new Document();
                doc.Add(NewStringField("content", English.IntToEnglish(i).Trim(), Field.Store.YES));
                writer.AddDocument(doc);
            }
            Assert.AreEqual(DocsToAdd, writer.MaxDoc);
            writer.Dispose();
            dir.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 23, Source: TestRAMDirectory.cs

Example 7: AddDocs3

 private void AddDocs3(IndexWriter writer, int numDocs)
 {
     for (int i = 0; i < numDocs; i++)
     {
         Document doc = new Document();
         doc.Add(NewTextField("content", "ccc", Field.Store.NO));
         doc.Add(NewStringField("id", "" + i, Field.Store.YES));
         writer.AddDocument(doc);
     }
 }
Developer: joyanta, Project: lucene.net, Lines: 10, Source: TestPerFieldPostingsFormat2.cs

Example 8: MakeEmptyIndex

 private static IndexReader MakeEmptyIndex(Random random, int numDocs)
 {
     Debug.Assert(numDocs > 0);
     Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
     IndexWriter w = new IndexWriter(d, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < numDocs; i++)
     {
         w.AddDocument(new Document());
     }
     w.ForceMerge(1);
     w.Commit();
     w.Dispose();
     DirectoryReader reader = DirectoryReader.Open(d);
     return new AllDeletedFilterReader(LuceneTestCase.GetOnlySegmentReader(reader));
 }
Developer: ChristopherHaws, Project: lucenenet, Lines: 15, Source: QueryUtils.cs

Example 9: InsertDoc

        private void InsertDoc(IndexWriter writer, string content)
        {
            Document doc = new Document();

            doc.Add(NewStringField("id", "id" + DocCount, Field.Store.YES));
            doc.Add(NewTextField("content", content, Field.Store.NO));

            writer.AddDocument(doc);
            DocCount++;
        }
Developer: paulirwin, Project: lucene.net, Lines: 10, Source: TestTermRangeQuery.cs

Example 10: TestSubclassConcurrentMergeScheduler

        public void TestSubclassConcurrentMergeScheduler()
        {
            MockDirectoryWrapper dir = NewMockDirectory();
            dir.FailOn(new FailOnlyOnMerge());

            Document doc = new Document();
            Field idField = NewStringField("id", "", Field.Store.YES);
            doc.Add(idField);

            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new MyMergeScheduler(this)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()));
            LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy;
            logMP.MergeFactor = 10;
            for (int i = 0; i < 20; i++)
            {
                writer.AddDocument(doc);
            }

            ((MyMergeScheduler)writer.Config.MergeScheduler).Sync();
            writer.Dispose();

            Assert.IsTrue(MergeThreadCreated);
            Assert.IsTrue(MergeCalled);
            Assert.IsTrue(ExcCalled);
            dir.Dispose();
        }
Developer: joyanta, Project: lucene.net, Lines: 25, Source: TestMergeSchedulerExternal.cs

Example 11: TestCollationKeySort

        // Test using various international locales with accented characters (which
        // sort differently depending on locale)
        //
        // Copied (and slightly modified) from
        // Lucene.Net.Search.TestSort.testInternationalSort()
        //
        // TODO: this test is really fragile. there are already 3 different cases,
        // depending upon unicode version.
        public virtual void TestCollationKeySort(Analyzer usAnalyzer, Analyzer franceAnalyzer, Analyzer swedenAnalyzer, Analyzer denmarkAnalyzer, string usResult, string frResult, string svResult, string dkResult)
        {
            Directory indexStore = NewDirectory();
            IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));

            // document data:
            // the tracer field is used to determine which document was hit
            string[][] sortData = new string[][] { new string[] { "A", "x", "p\u00EAche", "p\u00EAche", "p\u00EAche", "p\u00EAche" }, new string[] { "B", "y", "HAT", "HAT", "HAT", "HAT" }, new string[] { "C", "x", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9" }, new string[] { "D", "y", "HUT", "HUT", "HUT", "HUT" }, new string[] { "E", "x", "peach", "peach", "peach", "peach" }, new string[] { "F", "y", "H\u00C5T", "H\u00C5T", "H\u00C5T", "H\u00C5T" }, new string[] { "G", "x", "sin", "sin", "sin", "sin" }, new string[] { "H", "y", "H\u00D8T", "H\u00D8T", "H\u00D8T", "H\u00D8T" }, new string[] { "I", "x", "s\u00EDn", "s\u00EDn", "s\u00EDn", "s\u00EDn" }, new string[] { "J", "y", "HOT", "HOT", "HOT", "HOT" } };

            FieldType customType = new FieldType();
            customType.Stored = true;

            for (int i = 0; i < sortData.Length; ++i)
            {
                Document doc = new Document();
                doc.Add(new Field("tracer", sortData[i][0], customType));
                doc.Add(new TextField("contents", sortData[i][1], Field.Store.NO));
                if (sortData[i][2] != null)
                {
                    doc.Add(new TextField("US", usAnalyzer.TokenStream("US", new StringReader(sortData[i][2]))));
                }
                if (sortData[i][3] != null)
                {
                    doc.Add(new TextField("France", franceAnalyzer.TokenStream("France", new StringReader(sortData[i][3]))));
                }
                if (sortData[i][4] != null)
                {
                    doc.Add(new TextField("Sweden", swedenAnalyzer.TokenStream("Sweden", new StringReader(sortData[i][4]))));
                }
                if (sortData[i][5] != null)
                {
                    doc.Add(new TextField("Denmark", denmarkAnalyzer.TokenStream("Denmark", new StringReader(sortData[i][5]))));
                }
                writer.AddDocument(doc);
            }
            writer.ForceMerge(1);
            writer.Dispose();
            IndexReader reader = DirectoryReader.Open(indexStore);
            IndexSearcher searcher = new IndexSearcher(reader);

            Sort sort = new Sort();
            Query queryX = new TermQuery(new Term("contents", "x"));
            Query queryY = new TermQuery(new Term("contents", "y"));

            sort.SetSort(new SortField("US", SortField.Type_e.STRING));
            AssertMatches(searcher, queryY, sort, usResult);

            sort.SetSort(new SortField("France", SortField.Type_e.STRING));
            AssertMatches(searcher, queryX, sort, frResult);

            sort.SetSort(new SortField("Sweden", SortField.Type_e.STRING));
            AssertMatches(searcher, queryY, sort, svResult);

            sort.SetSort(new SortField("Denmark", SortField.Type_e.STRING));
            AssertMatches(searcher, queryY, sort, dkResult);
            reader.Dispose();
            indexStore.Dispose();
        }
Developer: joyanta, Project: lucene.net, Lines: 66, Source: CollationTestBase.cs

Example 12: AddDoc

 private void AddDoc(IndexWriter writer)
 {
     Document doc = new Document();
     doc.Add(NewTextField("content", "aaa", Field.Store.NO));
     writer.AddDocument(doc);
 }
Developer: joyanta, Project: lucene.net, Lines: 6, Source: TestLockFactory.cs

Example 13: IndexDocsWithFacetsNoTerms

 private static void IndexDocsWithFacetsNoTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
 {
     Random random = Random();
     int numDocs = AtLeast(random, 2);
     FacetsConfig config = Config;
     for (int i = 0; i < numDocs; i++)
     {
         Document doc = new Document();
         AddFacets(doc, config, false);
         indexWriter.AddDocument(config.Build(taxoWriter, doc));
     }
     indexWriter.Commit(); // flush a segment
 }
Developer: paulirwin, Project: lucene.net, Lines: 13, Source: TestTaxonomyFacetCounts2.cs

Example 14: IndexDictionary

		/// <summary> Index a Dictionary</summary>
		/// <param name="dict">the dictionary to index</param>
		/// <param name="mergeFactor">mergeFactor to use when indexing</param>
		/// <param name="ramMB">the max amount or memory in MB to use</param>
		/// <throws>  IOException </throws>
		/// <throws>AlreadyClosedException if the Spellchecker is already closed</throws>
		public virtual void IndexDictionary(IDictionary dict, int mergeFactor, int ramMB, CancellationToken token)
		{
			lock (modifyCurrentIndexLock)
			{
				EnsureOpen();
				Directory dir = this.spellindex;
				IndexWriter writer = new IndexWriter(spellindex, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
				writer.MergeFactor = mergeFactor;
				writer.SetMaxBufferedDocs(ramMB);

				System.Collections.IEnumerator iter = dict.GetWordsIterator();
				while (iter.MoveNext())
				{
					token.ThrowIfCancellationRequested();

					System.String word = (System.String)iter.Current;

					int len = word.Length;
					if (len < 3)
					{
						continue; // too short, we bail; but "too long" is fine...
					}

					if (this.Exist(word))
					{
						// if the word already exists in the gram index
						continue;
					}

					// ok index the word
					Document doc = CreateDocument(word, GetMin(len), GetMax(len));
					writer.AddDocument(doc);
				}
				// close writer
				writer.Commit();
				writer.Dispose();
				// also re-open the spell index to see our own changes when the next suggestion
				// is fetched:
				SwapSearcher(dir);
			}
		}
Developer: GorelH, Project: ravendb, Lines: 47, Source: SpellChecker.cs

Example 15: TestReplaceTaxonomyDirectory

        public virtual void TestReplaceTaxonomyDirectory()
        {
            Store.Directory indexDir = NewDirectory();
            Store.Directory taxoDir = NewDirectory();
            IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            var tw = new DirectoryTaxonomyWriter(taxoDir);
            w.Commit();
            tw.Commit();

            Store.Directory taxoDir2 = NewDirectory();
            var tw2 = new DirectoryTaxonomyWriter(taxoDir2);
            tw2.AddCategory(new FacetLabel("a", "b"));
            tw2.Dispose();

            var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                Assert.AreEqual(1, pair.taxonomyReader.Size);
            }
            finally
            {
                mgr.Release(pair);
            }

            w.AddDocument(new Document());
            tw.ReplaceTaxonomy(taxoDir2);
            taxoDir2.Dispose();
            w.Commit();
            tw.Commit();

            mgr.MaybeRefresh();
            pair = mgr.Acquire();
            try
            {
                Assert.AreEqual(3, pair.taxonomyReader.Size);
            }
            finally
            {
                mgr.Release(pair);
            }

            IOUtils.Close(mgr, tw, w, taxoDir, indexDir);
        }
Developer: Cefa68000, Project: lucenenet, Lines: 44, Source: TestSearcherTaxonomyManager.cs


Note: The IndexWriter.AddDocument examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors, and copyright remains with the original authors. When distributing or using them, please follow the corresponding project's license; do not republish without permission.