本文整理汇总了C#中Lucene.Net.Index.IndexWriter.Commit方法的典型用法代码示例。如果您正苦于以下问题:C# IndexWriter.Commit方法的具体用法?C# IndexWriter.Commit怎么用?C# IndexWriter.Commit使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Index.IndexWriter的用法示例。
在下文中一共展示了IndexWriter.Commit方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestGetFilterHandleNumericParseError
/// <summary>
/// Verifies that a numeric range filter built in lenient (non-strict) mode
/// silently matches nothing when a boundary term fails numeric parsing ("NaN").
/// </summary>
public void TestGetFilterHandleNumericParseError()
{
    var builder = new NumericRangeFilterBuilder();
    builder.SetStrictMode(false);

    XmlDocument document = GetDocumentFromString(
        "<NumericRangeFilter fieldName='AGE' type='int' lowerTerm='-1' upperTerm='NaN'/>");
    Filter filter = builder.GetFilter(document.DocumentElement);

    Store.Directory ramDir = NewDirectory();
    IndexWriter writer = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
    writer.Commit();
    try
    {
        AtomicReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(ramDir));
        try
        {
            // In lenient mode a parse error yields a filter that matches no docs.
            assertNull(filter.GetDocIdSet(reader.AtomicContext, reader.LiveDocs));
        }
        finally
        {
            reader.Dispose();
        }
    }
    finally
    {
        writer.Commit();
        writer.Dispose();
        ramDir.Dispose();
    }
}
示例2: IndexAndCrashOnCreateOutputSegments2
/// <summary>
/// index 1 document and commit.
/// prepare for crashing.
/// index 1 more document, and upon commit, creation of segments_2 will crash.
/// </summary>
private void IndexAndCrashOnCreateOutputSegments2()
{
    Directory realDirectory = FSDirectory.Open(Path);
    CrashAfterCreateOutput crashAfterCreateOutput = new CrashAfterCreateOutput(realDirectory);

    // NOTE: cannot use RandomIndexWriter because it
    // sometimes commits:
    IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

    indexWriter.AddDocument(Document);
    // writes segments_1:
    indexWriter.Commit();

    // Arm the wrapper so creating the next segments file throws.
    crashAfterCreateOutput.GetCrashAfterCreateOutput = "segments_2";
    indexWriter.AddDocument(Document);
    try
    {
        // tries to write segments_2 but hits fake exc:
        indexWriter.Commit();
        Assert.Fail("should have hit CrashingException");
    }
    catch (CrashingException) // FIX: drop unused exception variable (was CS0168 warning)
    {
        // expected
    }
    // writes segments_3
    indexWriter.Dispose();
    Assert.IsFalse(SlowFileExists(realDirectory, "segments_2"));
    crashAfterCreateOutput.Dispose();
}
示例3: HelloWorldTest
/// <summary>
/// Smoke test: indexes one document into a RAMDirectory, searches it back,
/// then releases the searcher and directory.
/// </summary>
public void HelloWorldTest()
{
    Directory directory = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexWriter writer = new IndexWriter(directory,
                                         analyzer,
                                         IndexWriter.MaxFieldLength.UNLIMITED);

    Document doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NO));
    doc.Add(new Field("postBody", "sample test", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Commit();
    writer.Close();

    QueryParser parser = new QueryParser(Version.LUCENE_29, "postBody", analyzer);
    Query query = parser.Parse("sample test");

    //Setup searcher
    IndexSearcher searcher = new IndexSearcher(directory, true);
    //Do the search
    var hits = searcher.Search(query, null, 10);

    // FIX: bound the loop by ScoreDocs.Length, not TotalHits. TotalHits is the
    // total match count and can exceed the 10 docs actually returned, which
    // would overrun the ScoreDocs array.
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        var doc1 = hits.ScoreDocs[i];
    }

    searcher.Close();
    directory.Close();
}
示例4: WriteEndVersion
/// <summary>
/// Writes an end-of-version marker document for this entity's batch into its
/// Lucene "TflBatch" index, recording the batch id, connection, update/insert/
/// delete counts and the closing version value.
/// </summary>
public override void WriteEndVersion(Process process, AbstractConnection input, Entity entity, bool force = false) {
// Nothing changed and caller did not force it: skip writing a marker.
if (entity.Updates + entity.Inserts <= 0 && !force)
return;
// NOTE(review): a null entity.Version falls back to "string" — confirm intended.
var versionType = entity.Version == null ? "string" : entity.Version.SimpleType;
// NOTE(review): when entity.End is null, Convert receives null — presumably
// yields a typed default end value; verify against DefaultFactory.
var end = entity.End ?? new DefaultFactory(Logger).Convert(entity.End, versionType);
using (var dir = LuceneDirectoryFactory.Create(this, TflBatchEntity(entity.ProcessName))) {
// KeywordAnalyzer: all fields are stored/queried verbatim, no tokenization.
using (var writer = new IndexWriter(dir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED)) {
var doc = new Document();
// Identity of the batch record.
doc.Add(new NumericField("id", global::Lucene.Net.Documents.Field.Store.YES, true).SetIntValue(entity.TflBatchId));
doc.Add(new global::Lucene.Net.Documents.Field("process", entity.ProcessName, global::Lucene.Net.Documents.Field.Store.YES, global::Lucene.Net.Documents.Field.Index.NOT_ANALYZED_NO_NORMS));
doc.Add(new global::Lucene.Net.Documents.Field("connection", input.Name, global::Lucene.Net.Documents.Field.Store.YES, global::Lucene.Net.Documents.Field.Index.NOT_ANALYZED_NO_NORMS));
doc.Add(new global::Lucene.Net.Documents.Field("entity", entity.Alias, global::Lucene.Net.Documents.Field.Store.YES, global::Lucene.Net.Documents.Field.Index.NOT_ANALYZED_NO_NORMS));
// Row counts for this batch.
doc.Add(new NumericField("updates", global::Lucene.Net.Documents.Field.Store.YES, true).SetLongValue(entity.Updates))
doc.Add(new NumericField("inserts", global::Lucene.Net.Documents.Field.Store.YES, true).SetLongValue(entity.Inserts));
doc.Add(new NumericField("deletes", global::Lucene.Net.Documents.Field.Store.YES, true).SetLongValue(entity.Deletes));
// Closing version value plus its declared type, and a UTC timestamp (ticks).
doc.Add(LuceneWriter.CreateField("version", versionType, new SearchType { Analyzer = "keyword" }, end));
doc.Add(new global::Lucene.Net.Documents.Field("version_type", versionType, global::Lucene.Net.Documents.Field.Store.YES, global::Lucene.Net.Documents.Field.Index.NOT_ANALYZED_NO_NORMS));
doc.Add(new NumericField("tflupdate", global::Lucene.Net.Documents.Field.Store.YES, true).SetLongValue(DateTime.UtcNow.Ticks));
writer.AddDocument(doc);
writer.Commit();
writer.Optimize();
}
}
}
示例5: TestReadersWriters
/// <summary>
/// Verifies dispose semantics: a writer-derived reader cannot be reopened once
/// disposed, a disposed writer rejects new documents, and the directory only
/// reports closed after its own using block exits.
/// </summary>
public void TestReadersWriters()
{
Directory dir;
using(dir = new RAMDirectory())
{
Document doc;
IndexWriter writer;
IndexReader reader;
using (writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED))
{
// Index one trivial document and commit so the NRT reader has content.
Field field = new Field("name", "value", Field.Store.YES,Field.Index.ANALYZED);
doc = new Document();
doc.Add(field);
writer.AddDocument(doc);
writer.Commit();
using (reader = writer.GetReader())
{
// Reopen on a live reader is legal; the result is intentionally unused.
IndexReader r1 = reader.Reopen();
}
// The using block above disposed the reader, so Reopen must now throw.
Assert.Throws<AlreadyClosedException>(() => reader.Reopen(), "IndexReader shouldn't be open here");
}
// The writer's using block disposed it, so AddDocument must now throw.
Assert.Throws<AlreadyClosedException>(() => writer.AddDocument(doc), "IndexWriter shouldn't be open here");
// Disposing writer/reader must not close the directory itself.
Assert.IsTrue(dir.isOpen_ForNUnit, "RAMDirectory");
}
Assert.IsFalse(dir.isOpen_ForNUnit, "RAMDirectory");
}
示例6: TestExceptionDuringSave
/// <summary>
/// Injects a failure into PersistentSnapshotDeletionPolicy.Snapshot() and
/// verifies the failed snapshot is not recorded and the index still has
/// exactly one commit afterwards.
/// </summary>
public virtual void TestExceptionDuringSave()
{
    MockDirectoryWrapper dir = NewMockDirectory();
    dir.FailOn(new FailureAnonymousInnerClassHelper(this, dir));
    IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode_e.CREATE_OR_APPEND)));
    writer.AddDocument(new Document());
    writer.Commit();

    PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.DelPolicy;
    try
    {
        psdp.Snapshot();
    }
    catch (IOException ioe)
    {
        if (ioe.Message.Equals("now fail on purpose"))
        {
            // ok — this is the injected failure we expect
        }
        else
        {
            // FIX: rethrow with "throw;" rather than "throw ioe;" so the
            // original stack trace is preserved.
            throw;
        }
    }
    // The failed snapshot must not have been persisted.
    Assert.AreEqual(0, psdp.SnapshotCount);
    writer.Dispose();
    Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
    dir.Dispose();
}
示例7: TestMultiValueSource
/// <summary>
/// Indexes 17 documents, committing after each add to force multiple segments,
/// then checks that MultiValueSource reports the same per-document int values
/// as the underlying IntFieldSource across the composite reader.
/// </summary>
public virtual void TestMultiValueSource()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);

    Document doc = new Document();
    Field f = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
    doc.Add(f);

    // A commit per document guarantees more than one segment.
    for (int docNum = 0; docNum < 17; docNum++)
    {
        f.SetValue("" + docNum);
        w.AddDocument(doc);
        w.Commit();
    }

    IndexReader r = w.GetReader();
    w.Close();
    Assert.IsTrue(r.GetSequentialSubReaders().Length > 1);

    ValueSource s1 = new IntFieldSource("field");
    DocValues v1 = s1.GetValues(r);
    DocValues v2 = new MultiValueSource(s1).GetValues(r);
    for (int docId = 0; docId < r.MaxDoc(); docId++)
    {
        // Doc N was indexed with value N, so both sources must agree on it.
        Assert.AreEqual(v1.IntVal(docId), docId);
        Assert.AreEqual(v2.IntVal(docId), docId);
    }

    Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();
    r.Close();
    dir.Close();
}
示例8: TestSimpleSkip
/// <summary>
/// Indexes 5000 identical documents into a single optimized segment and checks
/// that term-position skipping uses the multi-level skip list, by counting the
/// bytes read from the freq stream for each skip target.
/// </summary>
public virtual void TestSimpleSkip()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Term term = new Term("test", "a");

    for (int docNum = 0; docNum < 5000; docNum++)
    {
        Document indexedDoc = new Document();
        indexedDoc.Add(new Field(term.Field, term.Text, Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(indexedDoc);
    }
    writer.Commit();
    writer.Optimize();
    writer.Close();

    IndexReader reader = SegmentReader.GetOnlySegmentReader(dir);
    SegmentTermPositions tp = (SegmentTermPositions) reader.TermPositions();
    // Wrap the freq stream so the test can count bytes actually read.
    tp.freqStream = new CountingStream(this, tp.freqStream);

    // Run twice to confirm the skip behavior is repeatable after re-seeking.
    for (int round = 0; round < 2; round++)
    {
        counter = 0;
        tp.Seek(term);
        CheckSkipTo(tp, 14, 185); // no skips
        CheckSkipTo(tp, 17, 190); // one skip on level 0
        CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0
        // this test would fail if we had only one skip level,
        // because than more bytes would be read from the freqStream
        CheckSkipTo(tp, 4800, 250); // one skip on level 2
    }
}
示例9: CreateIndex
/// <summary>
/// Rebuilds the Azure-hosted Lucene index from scratch (create mode), tracing
/// rather than propagating storage failures.
/// </summary>
public static void CreateIndex() {
    try
    {
        var cloudAccount = Azure.GetStorageAccount();
        // Stack the usings: RAM cache -> Azure directory -> analyzer -> writer,
        // disposed in reverse order exactly as the nested form would.
        using (var cacheDirectory = new RAMDirectory())
        using (var azureDirectory = new AzureDirectory(cloudAccount, Azure.StorageContainerName, cacheDirectory))
        using (Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30))
        using (var indexWriter = new IndexWriter(azureDirectory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
        {
            AddDocuments(indexWriter);
            indexWriter.Commit();
        }
    }
    catch (StorageException ex)
    {
        Trace.TraceError(ex.Message);
    }
}
示例10: AddLuceneIndex
/// <summary>
/// Creates one index document from the given field name/value pairs, then
/// commits and optimizes the index.
/// </summary>
/// <param name="dic">Map of field name to field value; each pair becomes one Lucene field.</param>
public void AddLuceneIndex(Dictionary<string, string> dic) {
//var analyzer = new StandardAnalyzer(Version.LUCENE_30);
var analyzer = GetAnalyzer();
using (var directory = GetLuceneDirectory())
using (var writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED)) {
var doc = new Document();
foreach (KeyValuePair<string, string> pair in dic) {
// add new index entry
//Field.Store.YES: controls whether the original value is stored.
//Only values stored with Field.Store.YES can later be read back via doc.Get("number").
//Field.Index.NOT_ANALYZED: saved verbatim, without tokenization.
//todo:boost
if (NotAnalyzeFields.Exists(one => one == pair.Key)) {
doc.Add(new Field(pair.Key, pair.Value, Field.Store.YES, Field.Index.NOT_ANALYZED));
}
else {
doc.Add(new Field(pair.Key, pair.Value, Field.Store.YES, Field.Index.ANALYZED));
}
}
//doc.Boost
writer.AddDocument(doc);
writer.Commit();
writer.Optimize();
// NOTE(review): analyzer is closed while the writer's using block is still
// open — confirm the writer does not need the analyzer during Dispose.
analyzer.Close();
}
}
示例11: Init
/// <summary>
/// Fixture setup: builds a small RAM index of attribute documents, then wires
/// up the attributes facet handler and a BoboBrowser over the committed index.
/// </summary>
public void Init()
{
    facetHandlers = new List<IFacetHandler>();
    directory = new RAMDirectory();
    analyzer = new WhitespaceAnalyzer();
    selectionProperties = new Dictionary<string, string>();

    IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.AddDocument(Doc("prop1=val1", "prop2=val1", "prop5=val1"));
    writer.AddDocument(Doc("prop1=val2", "prop3=val1", "prop7=val7"));
    writer.AddDocument(Doc("prop1=val2", "prop3=val2", "prop3=val3"));
    writer.AddDocument(Doc("prop1=val1", "prop2=val1"));
    writer.AddDocument(Doc("prop1=val1", "prop2=val1"));
    writer.AddDocument(Doc("prop1=val1", "prop2=val1", "prop4=val2", "prop4=val3"));
    writer.Commit();
    // FIX: dispose the writer once the commit is done — it was previously
    // leaked, keeping the index write lock held for the fixture's lifetime.
    writer.Dispose();

    attributesFacetHandler = new AttributesFacetHandler(AttributeHandlerName, AttributeHandlerName, null, null,
        new Dictionary<string, string>());
    facetHandlers.Add(attributesFacetHandler);
    IndexReader reader = IndexReader.Open(directory, true);
    boboReader = BoboIndexReader.GetInstance(reader, facetHandlers);
    attributesFacetHandler.LoadFacetData(boboReader);
    browser = new BoboBrowser(boboReader);
}
示例12: InitIndex
/// <summary>
/// Creates an IndexWriter over <paramref name="dir"/> with locking disabled and
/// merge exceptions suppressed, optionally makes an initial empty commit, and
/// buffers 157 identical documents. The caller owns and must dispose the writer.
/// </summary>
private IndexWriter InitIndex(IConcurrentMergeScheduler scheduler, Random random, MockDirectoryWrapper dir, bool initialCommit)
{
    dir.LockFactory = NoLockFactory.DoNoLockFactory;
    scheduler.SetSuppressExceptions();

    var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .SetMaxBufferedDocs(10)
        .SetMergeScheduler(scheduler);
    IndexWriter writer = new IndexWriter(dir, config);

    if (initialCommit)
    {
        // Start from an explicit, empty commit point.
        writer.Commit();
    }

    Document doc = new Document();
    doc.Add(NewTextField("content", "aaa", Field.Store.NO));
    doc.Add(NewTextField("id", "0", Field.Store.NO));
    for (int added = 0; added < 157; added++)
    {
        writer.AddDocument(doc);
    }
    return writer;
}
示例13: Test_IndexReader_IsCurrent
/// <summary>
/// Shows that a near-real-time reader becomes non-current after the writer
/// deletes/commits, keeps serving its old snapshot, and only reflects the
/// deletion after Reopen().
/// </summary>
public void Test_IndexReader_IsCurrent()
{
RAMDirectory ramDir = new RAMDirectory();
IndexWriter writer = new IndexWriter(ramDir, new KeywordAnalyzer(), true, new IndexWriter.MaxFieldLength(1000));
Field field = new Field("TEST", "mytest", Field.Store.YES, Field.Index.ANALYZED);
Document doc = new Document();
doc.Add(field);
writer.AddDocument(doc);
// NRT reader taken before the delete below.
IndexReader reader = writer.GetReader();
writer.DeleteDocuments(new Lucene.Net.Index.Term("TEST", "mytest"));
// A pending (uncommitted) delete already makes the reader non-current...
Assert.IsFalse(reader.IsCurrent());
// ...but its snapshot still contains the document.
int resCount1 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
Assert.AreEqual(1, resCount1);
writer.Commit();
// Still non-current and still serving the pre-delete snapshot after commit.
Assert.IsFalse(reader.IsCurrent());
int resCount2 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
Assert.AreEqual(1, resCount2, "Reopen not invoked yet, resultCount must still be 1.");
// Reopen picks up the committed deletion.
reader = reader.Reopen();
Assert.IsTrue(reader.IsCurrent());
int resCount3 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")), 100).TotalHits;
Assert.AreEqual(0, resCount3, "After reopen, resultCount must be 0.");
reader.Close();
writer.Dispose();
}
示例14: RollBackLast
/// <summary>
/// Rolls the index back to the last commit whose "index" user-data entry ends
/// with "-{id}", by opening a writer on that commit with a rollback deletion
/// policy and committing fresh user data.
/// </summary>
/// <exception cref="System.SystemException">No commit point with the given id exists.</exception>
private void RollBackLast(int id)
{
    System.String ids = "-" + id;
    IndexCommit last = null;
    // FIX: idiomatic foreach over the typed commit list instead of a manual
    // non-generic IEnumerator loop with a cast.
    foreach (IndexCommit commit in IndexReader.ListCommits(dir))
    {
        System.Collections.Generic.IDictionary<string, string> ud = commit.GetUserData();
        // Keep the latest commit tagged with the requested id.
        if (ud.Count > 0 && ((System.String)ud["index"]).EndsWith(ids))
            last = commit;
    }
    if (last == null)
        throw new System.SystemException("Couldn't find commit point " + id);

    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), new RollbackDeletionPolicy(this, id), MaxFieldLength.UNLIMITED, last);
    System.Collections.Generic.IDictionary<string, string> data = new System.Collections.Generic.Dictionary<string, string>();
    data["index"] = "Rolled back to 1-" + id;
    w.Commit(data);
    w.Close();
}
示例15: BuildIndex
//END
//this method creates a document from a FileToIndex
/// <summary>
/// Indexes a single file: deletes any existing document with the same Id, adds
/// a freshly built replacement document, then commits and optimizes the index.
/// </summary>
public void BuildIndex(FileToIndex file)
{
    using (var analyzer = new Lucene.Net.Analysis.Ru.RussianAnalyzer(Version.LUCENE_30))
    using (IndexWriter idxw = new IndexWriter(_directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
    {
        // Remove any previously indexed version of this file.
        idxw.DeleteDocuments(new TermQuery(new Term("Id", file.Id.ToString())));

        // Build the replacement document; the analyzer splits analyzed fields into words.
        Document doc = new Document();
        doc.Add(new Field("Id", file.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("Title", file.Title, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("Description", file.Description, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("Authors", file.Authors, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("Text", file.Text, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("Hashtags", file.Hashtags, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("Discipline", file.Discipline, Field.Store.YES, Field.Index.ANALYZED));

        idxw.AddDocument(doc);

        // Persist and compact before the writer is disposed.
        idxw.Commit();
        idxw.Optimize();
    }
}