This article collects typical usage examples of the C# method Raven.Database.Indexing.WorkContext.AddError. If you are unsure what WorkContext.AddError does, how to call it, or want to see it used in real code, the curated examples below may help. You can also explore further usage examples of its declaring class, Raven.Database.Indexing.WorkContext.
The following 15 code examples of the WorkContext.AddError method are shown, ordered by popularity by default.
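Before the full examples, here is a minimal sketch of the call pattern they share. It is a hypothetical fragment assembled from the examples below, not verbatim RavenDB source: the enclosing index class, the name field, the doc variable, and the IndexSingleDocument helper are assumed for illustration, while TryGetDocKey mirrors the helper used in the examples.
try
{
    IndexSingleDocument(doc); // hypothetical indexing step that may throw
}
catch (Exception e)
{
    // Most examples use the three-argument form: index name, document key (may be null), error message.
    context.AddError(name, TryGetDocKey(doc), e.Message);
}
// Example 5 additionally uses a four-argument overload that also names the failing component, e.g.:
// context.AddError(name, key, exception.Message, "OnIndexEntryDeleted Trigger");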
Example 1: MoveNext
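// Advances the indexing enumerator while tracking attempt/failure counters; on an exception it
// reports the failure to the WorkContext via AddError(name, docKey, message) and logs a warning.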
private bool? MoveNext(IEnumerator en, StatefulEnumerableWrapper<object> innerEnumerator, WorkContext context,
DocumentStorageActions actions)
{
try
{
actions.IncrementIndexingAttempt();
var moveNext = en.MoveNext();
if (moveNext == false)
actions.DecrementIndexingAttempt();
return moveNext;
}
catch (Exception e)
{
actions.IncrementIndexingFailure();
context.AddError(name,
TryGetDocKey(innerEnumerator.Current),
e.Message
);
log.WarnFormat(e, "Failed to execute indexing function on {0} on {1}", name,
GetDocId(innerEnumerator));
}
return null;
}
Example 2: Remove
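// Deletes the given keys from the Lucene index; AddError records failures raised by the
// OnIndexEntryDeleted trigger batchers and by disposing them.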
public override void Remove(string[] keys, WorkContext context)
{
Write(context, (writer, analyzer, stats) =>
{
stats.Operation = IndexingWorkStats.Status.Ignore;
logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), name));
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
keys.Apply(
key => batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, key),
exception);
context.AddError(name, key, exception.Message);
},
trigger => trigger.OnIndexEntryDeleted(key)));
writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
batchers.ApplyAndIgnoreAllErrors(
e =>
{
logIndexing.WarnException("Failed to dispose on index update trigger", e);
context.AddError(name, null, e.Message);
},
batcher => batcher.Dispose());
return keys.Length;
});
}
Example 3: Write
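// Shared write wrapper around the IndexWriter; AddError reports analyzer-creation failures
// ("Creating Analyzer") and any exception thrown by the supplied write action before rethrowing.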
protected void Write(WorkContext context, Func<IndexWriter, Analyzer, IndexingWorkStats, int> action)
{
if (disposed)
throw new ObjectDisposedException("Index " + name + " has been disposed");
LastIndexTime = SystemTime.UtcNow;
lock (writeLock)
{
bool shouldRecreateSearcher;
var toDispose = new List<Action>();
Analyzer searchAnalyzer = null;
try
{
waitReason = "Write";
try
{
searchAnalyzer = CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose);
}
catch (Exception e)
{
context.AddError(name, "Creating Analyzer", e.ToString());
throw;
}
if (indexWriter == null)
{
indexWriter = CreateIndexWriter(directory);
}
var locker = directory.MakeLock("writing-to-index.lock");
try
{
var stats = new IndexingWorkStats();
try
{
var changedDocs = action(indexWriter, searchAnalyzer, stats);
docCountSinceLastOptimization += changedDocs;
shouldRecreateSearcher = changedDocs > 0;
foreach (IIndexExtension indexExtension in indexExtensions.Values)
{
indexExtension.OnDocumentsIndexed(currentlyIndexDocuments);
}
}
catch (Exception e)
{
context.AddError(name, null, e.ToString());
throw;
}
UpdateIndexingStats(context, stats);
WriteTempIndexToDiskIfNeeded(context);
Flush(); // just make sure changes are flushed to disk
}
finally
{
locker.Release();
}
}
finally
{
currentlyIndexDocuments.Clear();
if (searchAnalyzer != null)
searchAnalyzer.Close();
foreach (Action dispose in toDispose)
{
dispose();
}
waitReason = null;
LastIndexTime = SystemTime.UtcNow;
}
if (shouldRecreateSearcher)
RecreateSearcher();
}
}
Example 4: RobustEnumerationIndex
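// Wraps the map functions in a RobustEnumerator; its OnError callback records each failing
// document via AddError and increments stats.IndexingErrors.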
protected IEnumerable<object> RobustEnumerationIndex(IEnumerable<object> input, IEnumerable<IndexingFunc> funcs,
IStorageActionsAccessor actions, WorkContext context, IndexingWorkStats stats)
{
return new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
{
BeforeMoveNext = () => stats.IndexingAttempts++,
CancelMoveNext = () => stats.IndexingAttempts--,
OnError = (exception, o) =>
{
context.AddError(name,
TryGetDocKey(o),
exception.Message
);
logIndexing.WarnException(
String.Format("Failed to execute indexing function on {0} on {1}", name,
TryGetDocKey(o)),
exception);
stats.IndexingErrors++;
}
}.RobustEnumeration(input, funcs);
}
Example 5: Remove
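// Same deletion path as Example 2, but uses the four-argument AddError overload to also name the
// failing component ("OnIndexEntryDeleted Trigger" / "Dispose Trigger") and returns an IndexedItemsInfo.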
public override void Remove(string[] keys, WorkContext context)
{
Write((writer, analyzer, stats) =>
{
stats.Operation = IndexingWorkStats.Status.Ignore;
logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), name));
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
keys.Apply(
key => batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, key),
exception);
context.AddError(name, key, exception.Message, "OnIndexEntryDeleted Trigger");
},
trigger => trigger.OnIndexEntryDeleted(key)));
writer.DeleteDocuments(keys.Select(k => new Term(Constants.DocumentIdFieldName, k.ToLowerInvariant())).ToArray());
batchers.ApplyAndIgnoreAllErrors(
e =>
{
logIndexing.WarnException("Failed to dispose on index update trigger", e);
context.AddError(name, null, e.Message, "Dispose Trigger");
},
batcher => batcher.Dispose());
IndexStats currentIndexStats = null;
context.TransactionalStorage.Batch(accessor => currentIndexStats = accessor.Indexing.GetIndexStats(name));
return new IndexedItemsInfo
{
ChangedDocs = keys.Length,
HighestETag = currentIndexStats.LastIndexedEtag,
DeletedKeys = keys
};
});
}
Example 6: IndexDocuments
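// Indexes a batch of documents for a simple map index; AddError records failures from the
// OnIndexEntryDeleted/OnIndexEntryCreated trigger batchers and from disposing them.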
public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
{
actions.Indexing.SetCurrentIndexStatsTo(name);
var count = 0;
Write(context, (indexWriter, analyzer) =>
{
bool madeChanges = false;
PropertyDescriptorCollection properties = null;
var processedKeys = new HashSet<string>();
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
var documentsWrapped = documents.Select((dynamic doc) =>
{
if (doc.__document_id == null)
throw new ArgumentException("Cannot index something which doesn't have a document id, but got: " + doc);
string documentId = doc.__document_id.ToString();
if (processedKeys.Add(documentId) == false)
return doc;
madeChanges = true;
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, documentId),
exception);
context.AddError(name,
documentId,
exception.Message
);
},
trigger => trigger.OnIndexEntryDeleted(name, documentId));
indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
return doc;
});
foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinition, actions, context))
{
count++;
IndexingResult indexingResult;
if (doc is DynamicJsonObject)
indexingResult = ExtractIndexDataFromDocument((DynamicJsonObject)doc);
else
indexingResult = ExtractIndexDataFromDocument(properties, doc);
if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
{
var luceneDoc = new Document();
luceneDoc.Add(new Field(Constants.DocumentIdFieldName, indexingResult.NewDocId.ToLowerInvariant(), Field.Store.YES,
Field.Index.NOT_ANALYZED_NO_NORMS));
madeChanges = true;
CopyFieldsToDocument(luceneDoc, indexingResult.Fields);
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.Warn(
string.Format( "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
name, indexingResult.NewDocId),
exception);
context.AddError(name,
indexingResult.NewDocId,
exception.Message
);
},
trigger => trigger.OnIndexEntryCreated(name, indexingResult.NewDocId, luceneDoc));
logIndexing.Debug("Index '{0}' resulted in: {1}", name, luceneDoc);
AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
}
actions.Indexing.IncrementSuccessIndexing();
}
batchers.ApplyAndIgnoreAllErrors(
e =>
{
logIndexing.Warn("Failed to dispose on index update trigger", e);
context.AddError(name, null, e.Message);
},
x => x.Dispose());
return madeChanges;
});
logIndexing.Debug("Indexed {0} documents for {1}", count, name);
}
Example 7: ReduceDocuments
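// Writes reduce results into the index; AddError records trigger failures keyed by the reduce key,
// plus any failure while disposing the batchers.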
public void ReduceDocuments(AbstractViewGenerator viewGenerator,
IEnumerable<object> mappedResults,
WorkContext context,
IStorageActionsAccessor actions,
string[] reduceKeys)
{
actions.Indexing.SetCurrentIndexStatsTo(name);
var count = 0;
Write(context, indexWriter =>
{
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
foreach (var reduceKey in reduceKeys)
{
var entryKey = reduceKey;
indexWriter.DeleteDocuments(new Term("__reduce_key", entryKey.ToLowerInvariant()));
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnFormat(exception,
"Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, entryKey);
context.AddError(name,
entryKey,
exception.Message
);
},
trigger => trigger.OnIndexEntryDeleted(name, entryKey));
}
PropertyDescriptorCollection properties = null;
foreach (var doc in RobustEnumeration(mappedResults, viewGenerator.ReduceDefinition, actions, context))
{
count++;
var fields = GetFields(doc, ref properties);
dynamic reduceKey = viewGenerator.GroupByExtraction(doc);
if (reduceKey == null)
{
throw new InvalidOperationException("Could not find reduce key for " + name + " in the result: " + doc);
}
string reduceKeyAsString = ReduceKeyToString(reduceKey);
var luceneDoc = new Document();
luceneDoc.Add(new Field("__reduce_key", reduceKeyAsString.ToLowerInvariant(), Field.Store.NO, Field.Index.NOT_ANALYZED));
foreach (var field in fields)
{
luceneDoc.Add(field);
}
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnFormat(exception,
"Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
name, reduceKeyAsString);
context.AddError(name,
reduceKeyAsString,
exception.Message
);
},
trigger => trigger.OnIndexEntryCreated(name, reduceKeyAsString, luceneDoc));
logIndexing.DebugFormat("Reduce key {0} result in index {1} gave document: {2}", reduceKeyAsString, name, luceneDoc);
indexWriter.AddDocument(luceneDoc);
actions.Indexing.IncrementSuccessIndexing();
}
batchers.ApplyAndIgnoreAllErrors(
e =>
{
logIndexing.Warn("Failed to dispose on index update trigger", e);
context.AddError(name, null, e.Message);
},
x => x.Dispose());
return true;
});
if (logIndexing.IsDebugEnabled)
{
logIndexing.DebugFormat("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, name, string.Join(", ", reduceKeys));
}
}
Example 8: RobustEnumerationReduce
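// Reduce-side robust enumeration; OnError records the failure via AddError and also guards the
// failure-counter increment itself, since that storage call can throw too.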
protected IEnumerable<object> RobustEnumerationReduce(IEnumerable<object> input, IndexingFunc func,
IStorageActionsAccessor actions, WorkContext context)
{
// not strictly accurate, but if we get that many errors, probably an error anyway.
return new RobustEnumerator(context.Configuration.MaxNumberOfItemsToIndexInSingleBatch)
{
BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
OnError = (exception, o) =>
{
context.AddError(name,
TryGetDocKey(o),
exception.Message
);
logIndexing.WarnException(
String.Format("Failed to execute indexing function on {0} on {1}", name,
TryGetDocKey(o)),
exception);
try
{
actions.Indexing.IncrementReduceIndexingFailure();
}
catch (Exception e)
{
// we don't care about error here, because it is an error on error problem
logIndexing.WarnException(
String.Format("Could not increment indexing failure rate for {0}", name),
e);
}
}
}.RobustEnumeration(input, func);
}
Example 9: Write
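// Older variant of the write wrapper; AddError reports analyzer-creation failures and exceptions
// thrown by the write action.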
protected void Write(WorkContext context, Func<IndexWriter, Analyzer, bool> action)
{
if (disposed)
throw new ObjectDisposedException("Index " + name + " has been disposed");
lock (writeLock)
{
bool shouldRecreateSearcher;
var toDispose = new List<Action>();
Analyzer analyzer = null;
try
{
try
{
analyzer = CreateAnalyzer(new LowerCaseAnalyzer(), toDispose);
}
catch (Exception e)
{
context.AddError(name, "Creating Analyzer", e.ToString());
throw;
}
if (indexWriter == null)
indexWriter = new IndexWriter(directory, new StopAnalyzer(Version.LUCENE_29), IndexWriter.MaxFieldLength.UNLIMITED);
try
{
shouldRecreateSearcher = action(indexWriter, analyzer);
foreach (IIndexExtension indexExtension in indexExtensions.Values)
{
indexExtension.OnDocumentsIndexed(currentlyIndexDocumented);
}
}
catch (Exception e)
{
context.AddError(name, null, e.ToString());
throw;
}
}
finally
{
currentlyIndexDocumented.Clear();
if (analyzer != null)
analyzer.Close();
foreach (Action dispose in toDispose)
{
dispose();
}
}
if (shouldRecreateSearcher)
RecreateSearcher();
}
}
Example 10: RobustEnumerationReduce
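// Older reduce-side robust enumeration without a batch-size limit; AddError records each failing item.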
protected IEnumerable<object> RobustEnumerationReduce(IEnumerable<object> input, IndexingFunc func,
IStorageActionsAccessor actions, WorkContext context)
{
return new RobustEnumerator
{
BeforeMoveNext = actions.Indexing.IncrementReduceIndexingAttempt,
CancelMoveNext = actions.Indexing.DecrementReduceIndexingAttempt,
OnError = (exception, o) =>
{
context.AddError(name,
TryGetDocKey(o),
exception.Message
);
logIndexing.WarnFormat(exception, "Failed to execute indexing function on {0} on {1}", name,
TryGetDocKey(o));
try
{
actions.Indexing.IncrementReduceIndexingFailure();
}
catch (Exception e)
{
// we don't care about error here, because it is an error on error problem
logIndexing.WarnFormat(e, "Could not increment indexing failure rate for {0}", name);
}
}
}.RobustEnumeration(input, func);
}
Example 11: RobustEnumeration
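// Map-side robust enumeration; OnError increments the storage-side failure counter and records
// the error via AddError.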
protected IEnumerable<object> RobustEnumeration(IEnumerable<object> input, IndexingFunc func, IStorageActionsAccessor actions, WorkContext context)
{
return new RobustEnumerator
{
BeforeMoveNext = actions.Indexing.IncrementIndexingAttempt,
CancelMoveNext = actions.Indexing.DecrementIndexingAttempt,
OnError = (exception, o) =>
{
actions.Indexing.IncrementIndexingFailure();
context.AddError(name,
TryGetDocKey(o),
exception.Message
);
logIndexing.WarnFormat(exception, "Failed to execute indexing function on {0} on {1}", name,
TryGetDocKey(o));
}
}.RobustEnumeration(input, func);
}
Example 12: Write
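// Minimal write wrapper that creates a fresh IndexWriter per call; AddError records exceptions
// thrown by the write action.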
protected void Write(WorkContext context, Func<IndexWriter, bool> action)
{
if (disposed)
throw new ObjectDisposedException("Index " + name + " has been disposed");
lock (writeLock)
{
bool shouldRecreateSearcher;
var toDispose = new List<Action>();
Analyzer analyzer = null;
try
{
analyzer = CreateAnalyzer(toDispose);
var indexWriter = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
try
{
shouldRecreateSearcher = action(indexWriter);
}
catch (Exception e)
{
context.AddError(name, null, e.ToString());
throw;
}
finally
{
indexWriter.Close();
}
}
finally
{
if (analyzer != null)
analyzer.Close();
foreach (var dispose in toDispose)
{
dispose();
}
}
if (shouldRecreateSearcher)
RecreateSearcher();
}
}
Example 13: ReduceDocuments
// This method may be called concurrently, by both the ReduceTask (for removal)
// and by the ReducingExecuter (for add/modify). This is okay with us, since the
// Write() call is already handling locking properly
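// AddError is used here for trigger failures on both deletion (keyed by the reduce key) and
// creation, and for failures while disposing the batchers.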
public void ReduceDocuments(AbstractViewGenerator viewGenerator,
IEnumerable<object> mappedResults,
WorkContext context,
IStorageActionsAccessor actions,
string[] reduceKeys)
{
var count = 0;
Write(context, (indexWriter, analyzer) =>
{
actions.Indexing.SetCurrentIndexStatsTo(name);
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
foreach (var reduceKey in reduceKeys)
{
var entryKey = reduceKey;
indexWriter.DeleteDocuments(new Term(Abstractions.Data.Constants.ReduceKeyFieldName, entryKey.ToLowerInvariant()));
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, entryKey),
exception);
context.AddError(name,
entryKey,
exception.Message
);
},
trigger => trigger.OnIndexEntryDeleted(name, entryKey));
}
PropertyDescriptorCollection properties = null;
foreach (var doc in RobustEnumerationReduce(mappedResults, viewGenerator.ReduceDefinition, actions, context))
{
count++;
var fields = GetFields(doc, ref properties).ToList();
string reduceKeyAsString = ExtractReduceKey(viewGenerator, doc);
var luceneDoc = new Document();
luceneDoc.Add(new Field(Abstractions.Data.Constants.ReduceKeyFieldName, reduceKeyAsString.ToLowerInvariant(),
Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
foreach (var field in fields)
{
luceneDoc.Add(field);
}
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
name, reduceKeyAsString),
exception);
context.AddError(name,
reduceKeyAsString,
exception.Message
);
},
trigger => trigger.OnIndexEntryCreated(name, reduceKeyAsString, luceneDoc));
logIndexing.Debug("Reduce key {0} result in index {1} gave document: {2}", reduceKeyAsString, name, luceneDoc);
AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
actions.Indexing.IncrementReduceSuccessIndexing();
}
batchers.ApplyAndIgnoreAllErrors(
e =>
{
logIndexing.Warn("Failed to dispose on index update trigger", e);
context.AddError(name, null, e.Message);
},
x => x.Dispose());
return true;
});
logIndexing.Debug(() => string.Format("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, name,
string.Join(", ", reduceKeys)));
}
Example 14: IndexDocuments
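// Map indexing batch that reuses a single Lucene Document and id Field; AddError records trigger
// failures for deleted and created entries and for batcher disposal.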
public override void IndexDocuments(AbstractViewGenerator viewGenerator, IEnumerable<object> documents, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
{
var count = 0;
Write(context, (indexWriter, analyzer, stats) =>
{
var processedKeys = new HashSet<string>();
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
var documentsWrapped = documents.Select((dynamic doc) =>
{
if (doc.__document_id == null)
throw new ArgumentException(string.Format("Cannot index something which doesn't have a document id, but got: '{0}'", doc));
count++;
string documentId = doc.__document_id.ToString();
if (processedKeys.Add(documentId) == false)
return doc;
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, documentId),
exception);
context.AddError(name,
documentId,
exception.Message
);
},
trigger => trigger.OnIndexEntryDeleted(documentId));
indexWriter.DeleteDocuments(new Term(Constants.DocumentIdFieldName, documentId.ToLowerInvariant()));
return doc;
});
var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
var luceneDoc = new Document();
var documentIdField = new Field(Constants.DocumentIdFieldName, "dummy", Field.Store.YES, Field.Index.ANALYZED_NO_NORMS);
foreach (var doc in RobustEnumerationIndex(documentsWrapped, viewGenerator.MapDefinitions, actions, context, stats))
{
count++;
float boost;
var indexingResult = GetIndexingResult(doc, anonymousObjectToLuceneDocumentConverter, out boost);
if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
{
count += 1;
luceneDoc.GetFields().Clear();
luceneDoc.SetBoost(boost);
documentIdField.SetValue(indexingResult.NewDocId.ToLowerInvariant());
luceneDoc.Add(documentIdField);
foreach (var field in indexingResult.Fields)
{
luceneDoc.Add(field);
}
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format( "Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
name, indexingResult.NewDocId),
exception);
context.AddError(name,
indexingResult.NewDocId,
exception.Message
);
},
trigger => trigger.OnIndexEntryCreated(indexingResult.NewDocId, luceneDoc));
LogIndexedDocument(indexingResult.NewDocId, luceneDoc);
AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
}
stats.IndexingSuccesses++;
}
batchers.ApplyAndIgnoreAllErrors(
e =>
{
logIndexing.WarnException("Failed to dispose on index update trigger", e);
context.AddError(name, null, e.Message);
},
x => x.Dispose());
return count;
});
logIndexing.Debug("Indexed {0} documents for {1}", count, name);
}
Example 15: IndexDocuments
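// Parallel map indexing over buffered partitions; AddError records trigger failures per document
// and, in the outer catch, errors raised while notifying the batchers (the example is truncated below).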
public override void IndexDocuments(AbstractViewGenerator viewGenerator, IndexingBatch batch, WorkContext context, IStorageActionsAccessor actions, DateTime minimumTimestamp)
{
var count = 0;
var sourceCount = 0;
var sw = Stopwatch.StartNew();
var start = SystemTime.UtcNow;
Write((indexWriter, analyzer, stats) =>
{
var processedKeys = new HashSet<string>();
var batchers = context.IndexUpdateTriggers.Select(x => x.CreateBatcher(name))
.Where(x => x != null)
.ToList();
try
{
var docIdTerm = new Term(Constants.DocumentIdFieldName);
var documentsWrapped = batch.Docs.Select((doc,i) =>
{
Interlocked.Increment(ref sourceCount);
if (doc.__document_id == null)
throw new ArgumentException(
string.Format("Cannot index something which doesn't have a document id, but got: '{0}'", doc));
string documentId = doc.__document_id.ToString();
if (processedKeys.Add(documentId) == false)
return doc;
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryDeleted trigger for index '{0}', key: '{1}'",
name, documentId),
exception);
context.AddError(name,
documentId,
exception.Message
);
},
trigger => trigger.OnIndexEntryDeleted(documentId));
if (batch.SkipDeleteFromIndex[i] == false ||
context.ShouldRemoveFromIndex(documentId)) // maybe it is recently deleted?
indexWriter.DeleteDocuments(docIdTerm.CreateTerm(documentId.ToLowerInvariant()));
return doc;
})
.Where(x => x is FilteredDocument == false)
.ToList();
BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, documentsWrapped, (partition) =>
{
var anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(indexDefinition);
var luceneDoc = new Document();
var documentIdField = new Field(Constants.DocumentIdFieldName, "dummy", Field.Store.YES,
Field.Index.NOT_ANALYZED_NO_NORMS);
foreach (var doc in RobustEnumerationIndex(partition, viewGenerator.MapDefinitions, actions, stats))
{
float boost;
var indexingResult = GetIndexingResult(doc, anonymousObjectToLuceneDocumentConverter, out boost);
if (indexingResult.NewDocId != null && indexingResult.ShouldSkip == false)
{
Interlocked.Increment(ref count);
luceneDoc.GetFields().Clear();
luceneDoc.Boost = boost;
documentIdField.SetValue(indexingResult.NewDocId.ToLowerInvariant());
luceneDoc.Add(documentIdField);
foreach (var field in indexingResult.Fields)
{
luceneDoc.Add(field);
}
batchers.ApplyAndIgnoreAllErrors(
exception =>
{
logIndexing.WarnException(
string.Format("Error when executed OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
name, indexingResult.NewDocId),
exception);
context.AddError(name,
indexingResult.NewDocId,
exception.Message
);
},
trigger => trigger.OnIndexEntryCreated(indexingResult.NewDocId, luceneDoc));
LogIndexedDocument(indexingResult.NewDocId, luceneDoc);
AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
}
Interlocked.Increment(ref stats.IndexingSuccesses);
}
});
}
catch (Exception e)
{
batchers.ApplyAndIgnoreAllErrors(
ex =>
{
logIndexing.WarnException("Failed to notify index update trigger batcher about an error", ex);
context.AddError(name, null, ex.Message);
},
//......... the rest of this example is omitted here .........