本文整理汇总了C#中Lucene.Net.Index.IndexWriter.Optimize方法的典型用法代码示例。如果您正苦于以下问题:C# IndexWriter.Optimize方法的具体用法?C# IndexWriter.Optimize怎么用?C# IndexWriter.Optimize使用的例子?那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 Lucene.Net.Index.IndexWriter 的用法示例。
在下文中一共展示了IndexWriter.Optimize方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: SetUp
public void SetUp()
{
    // Populate the shared `store` with five single-field documents, then
    // optimize and close the writer so searches run against a merged index.
    var writer = new IndexWriter(store, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    var entries = new[]
    {
        new { Name = "aaa", Value = "foo" },
        new { Name = "aaa", Value = "foo" },
        new { Name = "contents", Value = "Tom" },
        new { Name = "contents", Value = "Jerry" },
        new { Name = "zzz", Value = "bar" },
    };
    foreach (var entry in entries)
    {
        var document = new Document();
        document.Add(new Field(entry.Name, entry.Value, Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(document);
    }
    writer.Optimize();
    writer.Close();
}
示例2: Process
public ActionResult Process()
{
    // Rebuilds the full-text index for every course and its nodes, then
    // redirects back to the index page.
    GetCoursesMessage message = new GetCoursesMessage();
    MvcContrib.Bus.Send(message);
    List<Course> courses = message.Result.Data as List<Course>;
    if (courses == null)
        return RedirectToAction("Index");
    Directory directory = FSDirectory.Open(new System.IO.DirectoryInfo(Server.MapPath("~/Data/Index")));
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
    // `true` recreates the index from scratch on every run.
    IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
    try
    {
        foreach (Course course in courses)
        {
            var document = new Document();
            document.Add(new Field("Type", "Course", Field.Store.YES, Field.Index.NO));
            document.Add(new Field("ID", course.Id.ToString(), Field.Store.YES, Field.Index.NO));
            document.Add(new Field("Name", course.Name, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
            document.Add(new Field("Owner", course.Owner, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
            writer.AddDocument(document);
            GetNodesMessage courseMessage = new GetNodesMessage { Input = new GetNodesInput { CourseId = course.Id } };
            MvcContrib.Bus.Send(courseMessage);
            List<Node> nodes = courseMessage.Result.Data as List<Node>;
            foreach (Node node in nodes)
            {
                ProcessNode(writer, node);
            }
        }
    }
    finally
    {
        // The original duplicated Optimize/Close in both the catch block and
        // the success path, then rethrew with `throw e;`, which resets the
        // stack trace. A finally block performs the cleanup once and lets
        // any exception propagate with its original trace intact.
        writer.Optimize();
        writer.Close();
    }
    return RedirectToAction("Index");
}
示例3: TestSpanRegex
public void TestSpanRegex()
{
    // Verifies that SpanFirstQuery over the regex "aut.*" with end = 1
    // matches only the document whose first token starts with "aut".
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    // "auto" is the first token: should match.
    doc.Add(new Field("field", "auto update", Field.Store.NO,
        Field.Index.ANALYZED));
    writer.AddDocument(doc);
    doc = new Document();
    // "auto" is the second token here: must NOT match with end = 1.
    doc.Add(new Field("field", "first auto update", Field.Store.NO,
        Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Close();
    IndexSearcher searcher = new IndexSearcher(directory, true);
    SpanRegexQuery srq = new SpanRegexQuery(new Term("field", "aut.*"));
    SpanFirstQuery sfq = new SpanFirstQuery(srq, 1);
    int numHits = searcher.Search(sfq, null, 1000).TotalHits;
    Assert.AreEqual(1, numHits);
    // Release the searcher and the in-memory directory; the original
    // leaked both (and carried commented-out dead code, removed here).
    searcher.Close();
    directory.Close();
}
示例4: WriteEndVersion
public override void WriteEndVersion(Process process, AbstractConnection input, Entity entity, bool force = false) {
    // Records a TflBatch bookkeeping document for this entity's run.
    // Skip unless rows actually changed, or the caller forces a write.
    if (entity.Updates + entity.Inserts <= 0 && !force)
        return;
    var versionType = entity.Version == null ? "string" : entity.Version.SimpleType;
    var end = entity.End ?? new DefaultFactory(Logger).Convert(entity.End, versionType);
    using (var dir = LuceneDirectoryFactory.Create(this, TflBatchEntity(entity.ProcessName))) {
        using (var writer = new IndexWriter(dir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED)) {
            // Local shorthands for the fully-qualified Lucene enum values.
            var stored = global::Lucene.Net.Documents.Field.Store.YES;
            var noNorms = global::Lucene.Net.Documents.Field.Index.NOT_ANALYZED_NO_NORMS;
            var batchDoc = new Document();
            batchDoc.Add(new NumericField("id", stored, true).SetIntValue(entity.TflBatchId));
            batchDoc.Add(new global::Lucene.Net.Documents.Field("process", entity.ProcessName, stored, noNorms));
            batchDoc.Add(new global::Lucene.Net.Documents.Field("connection", input.Name, stored, noNorms));
            batchDoc.Add(new global::Lucene.Net.Documents.Field("entity", entity.Alias, stored, noNorms));
            batchDoc.Add(new NumericField("updates", stored, true).SetLongValue(entity.Updates));
            batchDoc.Add(new NumericField("inserts", stored, true).SetLongValue(entity.Inserts));
            batchDoc.Add(new NumericField("deletes", stored, true).SetLongValue(entity.Deletes));
            batchDoc.Add(LuceneWriter.CreateField("version", versionType, new SearchType { Analyzer = "keyword" }, end));
            batchDoc.Add(new global::Lucene.Net.Documents.Field("version_type", versionType, stored, noNorms));
            batchDoc.Add(new NumericField("tflupdate", stored, true).SetLongValue(DateTime.UtcNow.Ticks));
            writer.AddDocument(batchDoc);
            writer.Commit();
            writer.Optimize();
        }
    }
}
示例5: HelloWorldTest
public void HelloWorldTest()
{
    // Indexes a single two-field document in RAM, then searches it back out.
    Directory directory = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexWriter writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
    var document = new Document();
    document.Add(new Field("id", "1", Field.Store.YES, Field.Index.NO));
    document.Add(new Field("postBody", "sample test", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(document);
    writer.Optimize();
    writer.Commit();
    writer.Close();
    // Parse the query with the same analyzer that was used at index time.
    QueryParser parser = new QueryParser(Version.LUCENE_29, "postBody", analyzer);
    Query query = parser.Parse("sample test");
    // Set up the searcher (read-only) and run the search.
    IndexSearcher searcher = new IndexSearcher(directory, true);
    var hits = searcher.Search(query, null, 10);
    for (int hitIndex = 0; hitIndex < hits.TotalHits; hitIndex++)
    {
        var scoreDoc = hits.ScoreDocs[hitIndex];
    }
    searcher.Close();
    directory.Close();
}
示例6: CreateIndex
public static IndexWriter CreateIndex(Content[] contents)
{
    // Rebuilds the index at IndexPath with one (id, title, username)
    // document per content item. NOTE(review): as in the original, the
    // returned writer has already been disposed — callers can only treat
    // it as a completion handle, not use it for further writes.
    var version = Lucene.Net.Util.Version.LUCENE_30;
    var maxLength = Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED;
    var directory = FSDirectory.Open(new DirectoryInfo(IndexPath));
    IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(version), maxLength);
    try
    {
        foreach (var item in contents)
        {
            Document doc = new Document();
            Field id = new Field("id", item.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED);
            Field title = new Field("title", item.Title, Field.Store.YES, Field.Index.ANALYZED);
            Field username = new Field("username", item.User.UserName, Field.Store.YES, Field.Index.ANALYZED);
            doc.Add(id);
            doc.Add(title);
            doc.Add(username);
            writer.AddDocument(doc);
        }
        writer.Optimize();
    }
    catch (System.Exception)
    {
        // Preserved from the original contract: indexing failures are
        // swallowed. TODO(review): log the exception so failures are visible.
    }
    finally
    {
        // Fix: the original disposed only on success, leaking the index
        // write lock when an exception was thrown mid-loop.
        writer.Dispose();
    }
    return writer;
}
示例7: CreateIndexCreative
public static void CreateIndexCreative(Creative entity, string IndexPath)
{
    // Indexes a single creative. The `true` create flag recreates the
    // index, so only this document remains afterwards.
    var document = new Document();
    document.Add(new Field("CreativeId", entity.Creativeid.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    document.Add(new Field("Title", entity.Title, Field.Store.YES, Field.Index.ANALYZED));
    if (!string.IsNullOrEmpty(entity.About))
    {
        document.Add(new Field("About", entity.About, Field.Store.YES, Field.Index.ANALYZED));
    }
    Directory directory = FSDirectory.Open(new DirectoryInfo(IndexPath));
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
    var writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    try
    {
        writer.AddDocument(document);
        writer.Optimize();
    }
    finally
    {
        // Fix: the original called Dispose() both inside the try body and
        // again in finally; dispose exactly once here.
        writer.Dispose();
    }
}
示例8: TestLucene
public virtual void TestLucene()
{
    // Builds two disjoint indexes, merges them into a third directory,
    // and verifies the merged result.
    const int docsPerIndex = 100;
    Directory indexA = new MockRAMDirectory();
    Directory indexB = new MockRAMDirectory();
    FillIndex(indexA, 0, docsPerIndex);
    Assert.IsFalse(VerifyIndex(indexA, 0), "Index a is invalid");
    FillIndex(indexB, docsPerIndex, docsPerIndex);
    Assert.IsFalse(VerifyIndex(indexB, docsPerIndex), "Index b is invalid");
    Directory merged = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    // MergeFactor = 2: merge segments in pairs.
    writer.MergeFactor = 2;
    writer.AddIndexesNoOptimize(new[] { indexA, indexB });
    writer.Optimize();
    writer.Close();
    var fail = VerifyIndex(merged, 0);
    merged.Close();
    Assert.IsFalse(fail, "The merged index is invalid");
}
示例9: CreateIndex
private void CreateIndex()
{
    // Indexes every row returned by ExecuteQuery(), then writes "ok" (or
    // the error message) to the HTTP response and ends it.
    string sresult = "";
    try
    {
        // Analyzer backed by the dictionary file configured in AnalyzerPath.
        StockFooAnalyzer analyzer = new StockFooAnalyzer(System.Configuration.ConfigurationManager.AppSettings["AnalyzerPath"].ToString());
        FSDirectory dy = FSDirectory.Open(new DirectoryInfo(Server.MapPath("IndexDirectory")));
        // Fix: `using` blocks replace the manual Close/Dispose calls so the
        // data reader and the writer's index lock are released even when
        // indexing throws (the original leaked both on the exception path).
        using (SqlDataReader myred = ExecuteQuery())
        using (IndexWriter writer = new IndexWriter(dy, analyzer, true, IndexWriter.MaxFieldLength.LIMITED))
        {
            while (myred.Read())
            {
                AddDocument(writer, myred["title"].ToString(), myred["url"].ToString(), myred["site"].ToString(), myred["body"].ToString(), myred["publish_time"].ToString());
            }
            writer.Optimize();
        }
        sresult = "ok";
    }
    catch (Exception ex)
    {
        sresult = ex.Message;
    }
    Response.Write(sresult);
    Response.Flush();
    Response.End();
}
示例10: TestSimpleSkip
public virtual void TestSimpleSkip()
{
    // Indexes 5000 identical single-term documents into one segment, then
    // wraps the freq stream in a counting stream and checks that SkipTo
    // reads no more bytes than the multi-level skip list should require.
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Term term = new Term("test", "a");
    // Every document carries the same term so term positions are dense
    // and the expected byte counts below are deterministic.
    for (int i = 0; i < 5000; i++)
    {
        Document d1 = new Document();
        d1.Add(new Field(term.Field(), term.Text(), Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(d1);
    }
    writer.Flush();
    writer.Optimize();
    writer.Close();
    IndexReader reader = SegmentReader.GetOnlySegmentReader(dir);
    SegmentTermPositions tp = (SegmentTermPositions) reader.TermPositions();
    // Swap in a wrapper that counts bytes read from the freq stream.
    tp.freqStream_ForNUnit = new CountingStream(this, tp.freqStream_ForNUnit);
    // Run twice to confirm the counts are stable across repeated seeks.
    for (int i = 0; i < 2; i++)
    {
        counter = 0;
        tp.Seek(term);
        CheckSkipTo(tp, 14, 185); // no skips
        CheckSkipTo(tp, 17, 190); // one skip on level 0
        CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0
        // this test would fail if we had only one skip level,
        // because than more bytes would be read from the freqStream
        CheckSkipTo(tp, 4800, 250); // one skip on level 2
    }
}
示例11: SetUp
public void SetUp()
{
    // Recreate the index in `store` (create flag = true) with five
    // single-field documents, then merge and close.
    IndexWriter writer = new IndexWriter(store, new WhitespaceAnalyzer(), true);
    var entries = new[]
    {
        new { Name = "aaa", Value = "foo" },
        new { Name = "aaa", Value = "foo" },
        new { Name = "contents", Value = "Tom" },
        new { Name = "contents", Value = "Jerry" },
        new { Name = "zzz", Value = "bar" },
    };
    foreach (var entry in entries)
    {
        var doc = new Document();
        doc.Add(new Field(entry.Name, entry.Value, Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Optimize();
    writer.Close();
}
示例12: AddLuceneIndex
/// <summary>
/// Creates one index document from the given field-name/value pairs and
/// commits it to the Lucene directory.
/// </summary>
/// <param name="dic">Field names mapped to their string values.</param>
public void AddLuceneIndex(Dictionary<string, string> dic) {
    var analyzer = GetAnalyzer();
    using (var directory = GetLuceneDirectory())
    using (var writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED)) {
        var doc = new Document();
        foreach (var pair in dic) {
            // Field.Store.YES keeps the original value so doc.Get(name)
            // can return it later. Fields listed in NotAnalyzeFields are
            // stored without tokenization; everything else is analyzed.
            // todo: boost
            var indexMode = NotAnalyzeFields.Exists(name => name == pair.Key)
                ? Field.Index.NOT_ANALYZED
                : Field.Index.ANALYZED;
            doc.Add(new Field(pair.Key, pair.Value, Field.Store.YES, indexMode));
        }
        writer.AddDocument(doc);
        writer.Commit();
        writer.Optimize();
        analyzer.Close();
    }
}
示例13: CreateIndex
public void CreateIndex(Analyzer analayer)
{
    // Rebuilds the index from every file under _textFilesFolder that
    // matches Config.FileSearchPattern, storing path/name and indexing
    // content. (Parameter name `analayer` [sic] is kept: renaming it
    // would break callers that use named arguments.)
    FSDirectory fsDir = new SimpleFSDirectory(new DirectoryInfo(_indexerFolder));
    string[] files = System.IO.Directory.GetFiles(_textFilesFolder, Config.FileSearchPattern, SearchOption.AllDirectories);
    // Fix: dispose via `using` so the index write lock is released even if
    // reading a file throws (the original leaked the writer on that path).
    using (IndexWriter indexWriter = new IndexWriter(fsDir, analayer, true, Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED))
    {
        foreach (string file in files)
        {
            string name = new FileInfo(file).Name;
            string content = File.ReadAllText(file);
            Document doc = new Document();
            doc.Add(new Field(Config.Field_Path, file, Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field(Config.Field_Name, name, Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field(Config.Field_Content, content, Field.Store.NO, Field.Index.ANALYZED));
            indexWriter.AddDocument(doc);
            Console.WriteLine("{0} - {1}", file, name);
        }
        indexWriter.Optimize();
    }
    Console.WriteLine("File count: {0}", files.Length);
}
示例14: CreateIndex
public void CreateIndex(List<ISearchEntity> CreateEntities)
{
    // Indexes each entity (assumed to be a ProductModel) as one document.
    Analyzer analyzer = new StandardAnalyzer();
    // Third ctor argument: true clears and rebuilds the index from scratch;
    // false would append incrementally to the existing index.
    IndexWriter writer = new IndexWriter(ConfigElement.IndexDirectory, analyzer, true);
    try
    {
        foreach (ISearchEntity IndexEntity in CreateEntities)
        {
            ProductModel product = (ProductModel)IndexEntity;
            Document doc = new Document();
            doc.Add(new Field("productid", Convert.ToString(product.EntityIdentity), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("productname", Convert.ToString(product.ProductName), Field.Store.YES, Field.Index.TOKENIZED));
            doc.Add(new Field("cateid", Convert.ToString(product.CategoryID), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("catepath", Convert.ToString(product.CategoryPath), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("keywords", Convert.ToString(product.Keywords), Field.Store.YES, Field.Index.TOKENIZED));
            doc.Add(new Field("description", Convert.ToString(product.Description), Field.Store.YES, Field.Index.TOKENIZED));
            doc.Add(new Field("price", Convert.ToString(product.Price), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("createtime", Convert.ToString(product.CreateTime), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("updatetime", Convert.ToString(product.UpdateTime), Field.Store.YES, Field.Index.UN_TOKENIZED));
            doc.Add(new Field("mainimage", Convert.ToString(product.ProductImage), Field.Store.YES, Field.Index.UN_TOKENIZED));
            writer.AddDocument(doc);
            Console.WriteLine("created index for {0}:{1}", product.EntityIdentity, product.ProductName);
        }
        writer.Optimize();
    }
    finally
    {
        // Fix: always release the index lock; the original leaked the
        // writer if a cast or property access threw mid-loop.
        writer.Close();
    }
}
示例15: Optimize
protected void Optimize()
{
    // Merge the index in LuceneDirectory down to an optimized form.
    var analyzer = new StandardAnalyzer(Version.LUCENE_30);
    using (var writer = new IndexWriter(LuceneDirectory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
    {
        writer.Optimize();
    }
}