This article collects typical usage examples of the C# method Lucene.Net.Index.IndexWriter.Dispose. If you are wondering what IndexWriter.Dispose does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples of the containing class, Lucene.Net.Index.IndexWriter.
The sections below present 15 code examples of the IndexWriter.Dispose method, sorted by popularity by default.
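Before the individual examples, here is a minimal sketch of the pattern most of them share, assuming the Lucene.Net 3.0.3 API that the examples below use: open an IndexWriter, add documents, and rely on a using block so that Dispose (which flushes buffered documents, commits, and releases the index write lock) runs even if indexing throws. The index path and the field names "id" and "body" are illustrative placeholders, not taken from any of the examples that follow.

using System.IO;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Version = Lucene.Net.Util.Version;

public static class DisposeSketch
{
    // indexPath and the field names are placeholders for illustration only.
    public static void WriteOneDocument(string indexPath)
    {
        var directory = FSDirectory.Open(new DirectoryInfo(indexPath));
        var analyzer = new StandardAnalyzer(Version.LUCENE_30);

        // The using statement calls writer.Dispose() on exit, even if AddDocument throws.
        using (var writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
        {
            var doc = new Document();
            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("body", "hello lucene", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
        }

        directory.Dispose();
    }
}

The same effect can be achieved with an explicit try/finally around writer.Dispose(), which is what several of the examples below do.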
Example 1: CreateIndexCreative
public static void CreateIndexCreative(Creative entity, string IndexPath)
{
    var document = new Document();
    document.Add(new Field("CreativeId", entity.Creativeid.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    document.Add(new Field("Title", entity.Title, Field.Store.YES, Field.Index.ANALYZED));
    if (!string.IsNullOrEmpty(entity.About))
    {
        document.Add(new Field("About", entity.About, Field.Store.YES, Field.Index.ANALYZED));
    }
    Directory directory = FSDirectory.Open(new DirectoryInfo(IndexPath));
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
    var writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    try
    {
        writer.AddDocument(document);
        writer.Optimize();
    }
    finally
    {
        // Dispose once, in the finally block, so the writer is closed
        // even if AddDocument or Optimize throws.
        writer.Dispose();
    }
}
Example 2: ClearIndex
public void ClearIndex()
{
    if (System.IO.Directory.GetFiles(this.index.Directory.FullName).Any())
    {
        try
        {
            var analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
            using (var writer = new IndexWriter(this.index, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
            {
                // remove older index entries
                writer.DeleteAll();
                // close handles
                analyzer.Close();
                writer.Dispose();
            }
            ForceUnlockIndex();
        }
        catch (Exception)
        {
            throw;
        }
    }
}
Example 3: GenerateLuceneIndex
private void GenerateLuceneIndex(List<Entry> entries)
{
    Microsoft.WindowsAzure.Storage.CloudStorageAccount storageAccount = Microsoft.WindowsAzure.Storage.CloudStorageAccount.Parse("DefaultEndpointsProtocol=https;AccountName=diningsearchstorage;AccountKey=xeYMzXThFxrU7SsAMGbSWLdy9psLFRMk5NI8x0bx24xtEg9MPIstf/xwPdjvDm6HpHZaCPxxVFCv/7DDd5wymA==");
    AzureDirectory azureDirectory = new AzureDirectory(storageAccount, "diningsearchindex");
    Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    IndexWriter indexWriter = new IndexWriter(azureDirectory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);

    int c = 0;
    foreach (var entry in entries)
    {
        c++;
        var item = new Document();
        item.Add(new Field("Id", c.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
        item.Add(new Field("Dish", entry.DishName, Field.Store.YES, Field.Index.ANALYZED));
        item.Add(new Field("Cafe/Restaurant", string.Format("{0}/{1}", entry.CafeName, entry.RestaurantName), Field.Store.YES, Field.Index.ANALYZED));
        item.Add(new Field("URL", entry.CafeUrl, Field.Store.YES, Field.Index.NOT_ANALYZED));
        item.Add(new Field("Description", entry.Description ?? string.Empty, Field.Store.YES, Field.Index.ANALYZED));
        item.Add(new Field("Price", entry.Price, Field.Store.YES, Field.Index.NOT_ANALYZED));
        indexWriter.AddDocument(item);
    }

    indexWriter.Dispose();
    azureDirectory.Dispose();
}
Example 4: SetUp
public override void SetUp()
{
    base.SetUp();
    store = NewDirectory();
    IndexWriter writer = new IndexWriter(store, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));
    Document doc;
    doc = new Document();
    doc.Add(NewTextField("aaa", "foo", Field.Store.YES));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(NewTextField("aaa", "foo", Field.Store.YES));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(NewTextField("contents", "Tom", Field.Store.YES));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(NewTextField("contents", "Jerry", Field.Store.YES));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(NewTextField("zzz", "bar", Field.Store.YES));
    writer.AddDocument(doc);
    writer.ForceMerge(1);
    writer.Dispose();
}
Example 5: TestGetFilterHandleNumericParseError
public void TestGetFilterHandleNumericParseError()
{
    NumericRangeFilterBuilder filterBuilder = new NumericRangeFilterBuilder();
    filterBuilder.SetStrictMode(false);
    String xml = "<NumericRangeFilter fieldName='AGE' type='int' lowerTerm='-1' upperTerm='NaN'/>";
    XmlDocument doc = GetDocumentFromString(xml);
    Filter filter = filterBuilder.GetFilter(doc.DocumentElement);
    Store.Directory ramDir = NewDirectory();
    IndexWriter writer = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
    writer.Commit();
    try
    {
        AtomicReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(ramDir));
        try
        {
            assertNull(filter.GetDocIdSet(reader.AtomicContext, reader.LiveDocs));
        }
        finally
        {
            reader.Dispose();
        }
    }
    finally
    {
        writer.Commit();
        writer.Dispose();
        ramDir.Dispose();
    }
}
Example 6: SetUp
public override void SetUp()
{
    base.SetUp();
    Document doc;
    Rd1 = NewDirectory();
    IndexWriter iw1 = new IndexWriter(Rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    doc = new Document();
    doc.Add(NewTextField("field1", "the quick brown fox jumps", Field.Store.YES));
    doc.Add(NewTextField("field2", "the quick brown fox jumps", Field.Store.YES));
    iw1.AddDocument(doc);
    iw1.Dispose();
    Rd2 = NewDirectory();
    IndexWriter iw2 = new IndexWriter(Rd2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    doc = new Document();
    doc.Add(NewTextField("field1", "the fox jumps over the lazy dog", Field.Store.YES));
    doc.Add(NewTextField("field3", "the fox jumps over the lazy dog", Field.Store.YES));
    iw2.AddDocument(doc);
    iw2.Dispose();
    this.Ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Rd1));
    this.Ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Rd2));
}
Example 7: TestLucene
public virtual void TestLucene()
{
    int num = 100;
    Directory indexA = NewDirectory();
    Directory indexB = NewDirectory();
    FillIndex(Random(), indexA, 0, num);
    bool fail = VerifyIndex(indexA, 0);
    if (fail)
    {
        Assert.Fail("Index a is invalid");
    }
    FillIndex(Random(), indexB, num, num);
    fail = VerifyIndex(indexB, num);
    if (fail)
    {
        Assert.Fail("Index b is invalid");
    }
    Directory merged = NewDirectory();
    IndexWriter writer = new IndexWriter(merged, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
    writer.AddIndexes(indexA, indexB);
    writer.ForceMerge(1);
    writer.Dispose();
    fail = VerifyIndex(merged, 0);
    Assert.IsFalse(fail, "The merged index is invalid");
    indexA.Dispose();
    indexB.Dispose();
    merged.Dispose();
}
Example 8: TestByteSizeLimit
public virtual void TestByteSizeLimit()
{
    // tests that the max merge size constraint is applied during forceMerge.
    Directory dir = new RAMDirectory();
    // Prepare an index w/ several small segments and a large one.
    IndexWriterConfig conf = NewWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);
    const int numSegments = 15;
    for (int i = 0; i < numSegments; i++)
    {
        int numDocs = i == 7 ? 30 : 1;
        AddDocs(writer, numDocs);
    }
    writer.Dispose();
    SegmentInfos sis = new SegmentInfos();
    sis.Read(dir);
    double min = sis.Info(0).SizeInBytes();
    conf = NewWriterConfig();
    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
    lmp.MaxMergeMBForForcedMerge = (min + 1) / (1 << 20);
    conf.SetMergePolicy(lmp);
    writer = new IndexWriter(dir, conf);
    writer.ForceMerge(1);
    writer.Dispose();
    // Should only be 3 segments in the index, because one of them exceeds the size limit
    sis = new SegmentInfos();
    sis.Read(dir);
    Assert.AreEqual(3, sis.Size());
}
Example 9: AddUpdateLuceneIndex
// add/update/clear search index data
public static void AddUpdateLuceneIndex()
{
    using (var context = new CreativeNetworkEntities())
    {
        // init lucene
        var analyzer = new StandardAnalyzer(Version.LUCENE_30);
        using (var writer = new IndexWriter(_directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
        {
            // add data to lucene search index (replaces older entries if any)
            foreach (var creative in context.Creative)
            {
                _addToLuceneIndex(creative, writer);
                foreach (var chapter in creative.Chapter)
                {
                    _addToLuceneIndex(chapter, writer);
                }
            }
            // close handles
            analyzer.Close();
            writer.Dispose();
        }
    }
}
Example 10: CreateIndex
public static IndexWriter CreateIndex(Content[] contents)
{
    var v = Lucene.Net.Util.Version.LUCENE_30;
    var l = Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED;
    var d = FSDirectory.Open(new DirectoryInfo(IndexPath));
    IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(v), l);
    try
    {
        foreach (var item in contents)
        {
            Document doc = new Document();
            Field id = new Field("id", item.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED);
            Field title = new Field("title", item.Title, Field.Store.YES, Field.Index.ANALYZED);
            Field username = new Field("username", item.User.UserName, Field.Store.YES, Field.Index.ANALYZED);
            doc.Add(id);
            doc.Add(title);
            doc.Add(username);
            writer.AddDocument(doc);
        }
        writer.Optimize();
        writer.Dispose();
    }
    catch (System.Exception)
    {
        // exceptions are swallowed; note that the writer returned below has already been disposed
    }
    return writer;
}
Example 11: TestPrevTermAtEnd
public virtual void TestPrevTermAtEnd()
{
    IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
    AddDoc(writer, "aaa bbb");
    writer.Dispose();
    SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(Dir));
    TermsEnum terms = reader.Fields.Terms("content").Iterator(null);
    Assert.IsNotNull(terms.Next());
    Assert.AreEqual("aaa", terms.Term().Utf8ToString());
    Assert.IsNotNull(terms.Next());
    long ordB;
    try
    {
        ordB = terms.Ord();
    }
    catch (System.NotSupportedException uoe)
    {
        // ok -- codec is not required to support ord
        reader.Dispose();
        return;
    }
    Assert.AreEqual("bbb", terms.Term().Utf8ToString());
    Assert.IsNull(terms.Next());
    terms.SeekExact(ordB);
    Assert.AreEqual("bbb", terms.Term().Utf8ToString());
    reader.Dispose();
}
Example 12: Main
static void Main(string[] args)
{
    // Path to index file.
    Directory indexDirectory = FSDirectory.Open(@"/* PASTE THE PATH WHERE YOUR INDEX WILL BE SAVED */");
    // Creating Analyzer to make index searchable.
    Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    // Creating IndexWriter
    IndexWriter.MaxFieldLength mfl = new IndexWriter.MaxFieldLength(100000);
    IndexWriter writer = new IndexWriter(indexDirectory, analyzer, mfl);
    // Full path to input .txt files.
    string[] filesList = System.IO.Directory.GetFiles(@"/* PASTE THE PATH TO YOUR INPUT FILE(S) */", "*.txt");
    /* INDEX FIELDS:
    ** id & body are the fields to my Lucene Index,
    ** you can change those fields accordingly to your
    ** needs
    */
    int idNumber = 0;
    string body;
    foreach (string file in filesList)
    {
        body = System.IO.File.ReadAllText(file);
        AddToIndex(idNumber, body, writer);
        idNumber++;
    }
    writer.Dispose();
}
Example 13: CreateIndex
public void CreateIndex()
{
    Analyzer analyzer = new MockAnalyzer(Random());
    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
    try
    {
        for (int docid = 0; docid < NUM_DOCS; docid++)
        {
            Document d = new Document();
            d.Add(NewStringField("docid", "" + docid, Field.Store.YES));
            d.Add(NewStringField("never_load", "fail", Field.Store.YES));
            foreach (string f in FIELDS)
            {
                for (int val = 0; val < NUM_VALUES; val++)
                {
                    d.Add(NewStringField(f, docid + "_" + f + "_" + val, Field.Store.YES));
                }
            }
            d.Add(NewStringField("load_later", "yes", Field.Store.YES));
            writer.AddDocument(d);
        }
    }
    finally
    {
        writer.Dispose();
    }
}
Example 14: CreateIndex
public void CreateIndex(Analyzer analyzer)
{
    FSDirectory fsDir = new SimpleFSDirectory(new DirectoryInfo(_indexerFolder));
    IndexWriter indexWriter = new IndexWriter(fsDir, analyzer, true, Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED);
    string[] files = System.IO.Directory.GetFiles(_textFilesFolder, Config.FileSearchPattern, SearchOption.AllDirectories);
    foreach (string file in files)
    {
        string name = new FileInfo(file).Name;
        string content = File.ReadAllText(file);
        Document doc = new Document();
        doc.Add(new Field(Config.Field_Path, file, Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.Add(new Field(Config.Field_Name, name, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field(Config.Field_Content, content, Field.Store.NO, Field.Index.ANALYZED));
        indexWriter.AddDocument(doc);
        Console.WriteLine("{0} - {1}", file, name);
    }
    indexWriter.Optimize();
    indexWriter.Dispose();
    Console.WriteLine("File count: {0}", files.Length);
}
Example 15: SetUp
/// <summary>
/// Set up a new index in RAM with three test phrases and the supplied Analyzer.
/// </summary>
/// <exception cref="Exception"> if an error occurs with index writer or searcher </exception>
public override void SetUp()
{
    base.SetUp();
    analyzer = new ShingleAnalyzerWrapper(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false), 2);
    directory = NewDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
    Document doc;
    doc = new Document();
    doc.Add(new TextField("content", "please divide this sentence into shingles", Field.Store.YES));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new TextField("content", "just another test sentence", Field.Store.YES));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new TextField("content", "a sentence which contains no test", Field.Store.YES));
    writer.AddDocument(doc);
    writer.Dispose();
    reader = DirectoryReader.Open(directory);
    searcher = NewSearcher(reader);
}