本文整理汇总了C#中Lucene.Net.Documents.Document.Add方法的典型用法代码示例。如果您正苦于以下问题:C# Document.Add方法的具体用法?C# Document.Add怎么用?C# Document.Add使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Documents.Document
的用法示例。
在下文中一共展示了Document.Add方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Engine
/// <summary>
/// Builds an in-memory index of 10,000 documents (fields "Id" and "Name"),
/// then runs a fuzzy query ("Name37~") against the "Name" field.
/// </summary>
public Engine()
{
    var directory = new RAMDirectory();
    var analyzer = new StandardAnalyzer(Version.LUCENE_30);
    using (var indexWriter = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED))
    {
        for (int i = 0; i < 10000; i++)
        {
            Console.Write(".");
            var document = new Document();
            // "Id" is a key: stored verbatim, no tokenization.
            document.Add(new Field("Id", i.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
            document.Add(new Field("Name", "Name" + i.ToString(), Field.Store.YES, Field.Index.ANALYZED));
            indexWriter.AddDocument(document);
        }
    }
    Console.ReadKey();
    var queryParser = new QueryParser(Version.LUCENE_30, "Name", analyzer);
    var query = queryParser.Parse("Name37~");
    // Fix: dispose reader and searcher so the directory's resources are released
    // (the original leaked both).
    using (IndexReader indexReader = IndexReader.Open(directory, true))
    using (var searcher = new IndexSearcher(indexReader))
    {
        TopDocs resultDocs = searcher.Search(query, indexReader.MaxDoc);
    }
}
示例2: AddSpecialFields
/// <summary>
/// Populates the built-in (special) index fields for a Sitecore item,
/// running each one through this crawler's removal-filter pipeline.
/// </summary>
/// <param name="document">The Lucene document being built; must not be null.</param>
/// <param name="item">The item being indexed; must not be null.</param>
protected override void AddSpecialFields(Document document, Item item)
{
Assert.ArgumentNotNull(document, "document");
Assert.ArgumentNotNull(item, "item");
// Name is written both as a tokenized text field and an untokenized data
// field — the same dual-field pattern used for Tags at the bottom.
document.Add(this.CreateTextField(BuiltinFields.Name, item.Name));
document.Add(this.CreateDataField(BuiltinFields.Name, item.Name));
// Each DetectRemoval* call checks the field's removal filter before indexing
// the value produced by the supplied accessor.
this.DetectRemovalFilterAndProcess(document, item, "DisplayName", BuiltinFields.Name, (itm) => item.Appearance.DisplayName);
this.DetectRemovalFilterValueField(document, item, "Icon", BuiltinFields.Icon, itm => itm.Appearance.Icon);
this.DetectRemovalFilterAndProcess(document, item, "Creator", BuiltinFields.Creator, itm => itm.Statistics.CreatedBy);
this.DetectRemovalFilterAndProcess(document, item, "Editor", BuiltinFields.Editor, itm => itm.Statistics.UpdatedBy);
this.DetectRemovalFilterAndProcess(document, item, "AllTemplates", BuiltinFields.AllTemplates, this.GetAllTemplates);
this.DetectRemovalFilterAndProcess(document, item, "TemplateName", BuiltinFields.TemplateName, itm => itm.TemplateName);
if (this.DetectRemoval("Hidden"))
{
// Hidden is only indexed (as "1") when the item actually is hidden.
if (this.IsHidden(item))
{
this.DetectRemovalFilterValueField(document, item, "Hidden", BuiltinFields.Hidden, itm => "1");
}
}
this.DetectRemovalFilterValueField(document, item, "Created", BuiltinFields.Created, itm => item[FieldIDs.Created]);
this.DetectRemovalFilterValueField(document, item, "Updated", BuiltinFields.Updated, itm => item[FieldIDs.Updated]);
this.DetectRemovalFilterAndProcess(document, item, "Path", BuiltinFields.Path, this.GetItemPath);
this.DetectRemovalFilterAndProcess(document, item, "Links", BuiltinFields.Links, this.GetItemLinks);
var tags = this.Tags;
// Tags get the same text + data dual-field treatment as Name above.
if (tags.Length > 0)
{
document.Add(this.CreateTextField(BuiltinFields.Tags, tags));
document.Add(this.CreateDataField(BuiltinFields.Tags, tags));
}
}
示例3: readTXT
/// <summary>
/// Wraps a text file as a Lucene document with a "Path" and a "Content" field.
/// </summary>
/// <param name="path">Path of the text file to index.</param>
/// <returns>A document holding the file path and its full text.</returns>
public static Document readTXT(string path)
{
    var document = new Document();
    // Both fields are stored and tokenized so they can be searched and displayed.
    document.Add(new Field("Path", path, Field.Store.YES, Field.Index.ANALYZED));
    document.Add(new Field("Content", readText(path), Field.Store.YES, Field.Index.ANALYZED));
    return document;
}
示例4: AddLuceneIndex
/// <summary>
/// 创建索引文档
/// </summary>
/// <param name="dic"></param>
/// <summary>
/// Adds one document to the index; each dictionary entry becomes one field.
/// </summary>
/// <param name="dic">Field name/value pairs for the new document.</param>
public void AddLuceneIndex(Dictionary<string, string> dic) {
    var analyzer = GetAnalyzer();
    using (var directory = GetLuceneDirectory())
    using (var writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED)) {
        var doc = new Document();
        foreach (KeyValuePair<string, string> pair in dic) {
            // Field.Store.YES keeps the original value so doc.Get(name) can
            // return it later. NOT_ANALYZED indexes the value verbatim (no
            // tokenization); ANALYZED runs it through the analyzer.
            // todo: boost
            // Fix: List<T>.Contains instead of Exists + equality lambda.
            if (NotAnalyzeFields.Contains(pair.Key)) {
                doc.Add(new Field(pair.Key, pair.Value, Field.Store.YES, Field.Index.NOT_ANALYZED));
            }
            else {
                doc.Add(new Field(pair.Key, pair.Value, Field.Store.YES, Field.Index.ANALYZED));
            }
        }
        //doc.Boost
        writer.AddDocument(doc);
        writer.Commit();
        // NOTE(review): Optimize() on every single add is expensive — confirm
        // this method is not called in a tight loop before keeping it here.
        writer.Optimize();
        analyzer.Close();
    }
}
示例5: AddTextToIndex
/// <summary>
/// Indexes one text snippet under the given numeric id.
/// </summary>
/// <param name="txts">Numeric id stored in the "id" field.</param>
/// <param name="text">Body text stored in the "postBody" field.</param>
/// <param name="writer">Writer the new document is added to.</param>
private static void AddTextToIndex(int txts, string text, IndexWriter writer)
{
    var document = new Document();
    // "id" is an exact-match key (UN_TOKENIZED); "postBody" is tokenized for full-text search.
    document.Add(new Field("id", txts.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
    document.Add(new Field("postBody", text, Field.Store.YES, Field.Index.TOKENIZED));
    writer.AddDocument(document);
}
示例6: TestRollbackIntegrityWithBufferFlush
/// <summary>
/// Verifies that Rollback() discards in-flight updates even when a small
/// buffer forces intermediate flushes: after committing 5 docs, 3 updates
/// are rolled back and the index must still contain exactly 5 docs.
/// </summary>
public void TestRollbackIntegrityWithBufferFlush()
{
Directory dir = new MockRAMDirectory();
// Phase 1: commit 5 baseline documents keyed by "pk".
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
for (int i = 0; i < 5; i++)
{
Document doc = new Document();
doc.Add(new Field("pk", i.ToString(), Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
w.AddDocument(doc);
}
w.Close();
// If buffer size is small enough to cause a flush, errors ensue...
// Phase 2: reopen, force flushes every 2 docs, apply updates, then roll back.
w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
w.SetMaxBufferedDocs(2);
Term pkTerm = new Term("pk", "");
for (int i = 0; i < 3; i++)
{
Document doc = new Document();
String value = i.ToString();
doc.Add(new Field("pk", value, Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
doc.Add(new Field("text", "foo", Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
// UpdateDocument deletes by term then adds — rolled back below.
w.UpdateDocument(pkTerm.CreateTerm(value), doc);
}
w.Rollback();
// The rollback must restore the committed 5-doc state.
IndexReader r = IndexReader.Open(dir, true);
Assert.AreEqual(5, r.NumDocs(), "index should contain same number of docs post rollback");
r.Close();
dir.Close();
}
示例7: MakeQualification
// ... has multiple qualifications
/// <summary>
/// Builds a document holding one qualification plus the year it was obtained.
/// </summary>
/// <param name="qualification">Qualification name (stored for retrieval).</param>
/// <param name="year">Year value, indexed numerically but not stored.</param>
/// <returns>The assembled document.</returns>
private Document MakeQualification(string qualification, int year)
{
    var doc = new Document();
    doc.Add(NewStringField("qualification", qualification, Field.Store.YES));
    doc.Add(new IntField("year", year, Field.Store.NO));
    return doc;
}
示例8: InitIndex
/// <summary>
/// Creates an IndexWriter over the given mock directory, optionally commits
/// an empty state, and adds 157 copies of a two-field document.
/// The writer is returned still open; callers are responsible for closing it.
/// </summary>
/// <param name="scheduler">Merge scheduler; exceptions are suppressed for the test.</param>
/// <param name="random">Randomness source for the mock analyzer.</param>
/// <param name="dir">Target directory; locking is disabled on it.</param>
/// <param name="initialCommit">When true, commit once before adding documents.</param>
/// <returns>The open writer containing 157 pending documents.</returns>
private IndexWriter InitIndex(IConcurrentMergeScheduler scheduler, Random random, MockDirectoryWrapper dir, bool initialCommit)
{
// Disable locking so the test can reopen the directory freely.
dir.LockFactory = NoLockFactory.DoNoLockFactory;
scheduler.SetSuppressExceptions();
IndexWriter writer = new IndexWriter(dir,
NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
.SetMaxBufferedDocs(10)
.SetMergeScheduler(scheduler));
if (initialCommit)
{
writer.Commit();
}
// The same Document instance is re-added 157 times — each AddDocument
// copies the fields, so this is safe.
Document doc = new Document();
doc.Add(NewTextField("content", "aaa", Field.Store.NO));
doc.Add(NewTextField("id", "0", Field.Store.NO));
for (int i = 0; i < 157; i++)
{
writer.AddDocument(doc);
}
return writer;
}
示例9: TestNGramPrefixGridLosAngeles
/// <summary>
/// Indexes a single point (Los Angeles) through a quad-prefix-tree spatial
/// strategy. The actual polygon query is left commented out pending a
/// non-polygon replacement (see TODO below).
/// </summary>
public virtual void TestNGramPrefixGridLosAngeles()
{
SpatialContext ctx = SpatialContext.GEO;
TermQueryPrefixTreeStrategy prefixGridStrategy = new TermQueryPrefixTreeStrategy(new QuadPrefixTree(ctx), "geo");
// lon/lat of downtown Los Angeles.
Spatial4n.Core.Shapes.IShape point = ctx.MakePoint(-118.243680, 34.052230);
Document losAngeles = new Document();
losAngeles.Add(new StringField("name", "Los Angeles", Field.Store.YES));
// The strategy expands the point into one or more indexable grid-cell fields.
foreach (IndexableField field in prefixGridStrategy.CreateIndexableFields(point))
{
losAngeles.Add(field);
}
losAngeles.Add(new StoredField(prefixGridStrategy.FieldName, point.toString()));//just for diagnostics
addDocumentsAndCommit(Arrays.AsList(losAngeles));
// This won't work with simple spatial context...
SpatialArgsParser spatialArgsParser = new SpatialArgsParser();
// TODO... use a non polygon query
// SpatialArgs spatialArgs = spatialArgsParser.parse(
// "Intersects(POLYGON((-127.00390625 39.8125,-112.765625 39.98828125,-111.53515625 31.375,-125.94921875 30.14453125,-127.00390625 39.8125)))",
// new SimpleSpatialContext());
// Query query = prefixGridStrategy.makeQuery(spatialArgs, fieldInfo);
// SearchResults searchResults = executeQuery(query, 1);
// assertEquals(1, searchResults.numFound);
}
示例10: CreateIndex
/// <summary>
/// Fills the shared directory with NUM_DOCS documents. Each document carries
/// a "docid", a "never_load" marker, NUM_VALUES values per field in FIELDS,
/// and a trailing "load_later" field.
/// </summary>
public void CreateIndex()
{
    Analyzer analyzer = new MockAnalyzer(Random());
    // using replaces the original try/finally — Dispose runs either way.
    using (var writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
    {
        for (int docid = 0; docid < NUM_DOCS; docid++)
        {
            var document = new Document();
            document.Add(NewStringField("docid", "" + docid, Field.Store.YES));
            document.Add(NewStringField("never_load", "fail", Field.Store.YES));
            foreach (string fieldName in FIELDS)
            {
                for (int val = 0; val < NUM_VALUES; val++)
                {
                    document.Add(NewStringField(fieldName, docid + "_" + fieldName + "_" + val, Field.Store.YES));
                }
            }
            document.Add(NewStringField("load_later", "yes", Field.Store.YES));
            writer.AddDocument(document);
        }
    }
}
示例11: Convert
/// <summary>
/// MicroblogEntity转换成<see cref="Lucene.Net.Documents.Document"/>
/// </summary>
/// <param name="microblog">微博实体</param>
/// <returns>Lucene.Net.Documents.Document</returns>
/// <summary>
/// Converts a MicroblogEntity into a <see cref="Lucene.Net.Documents.Document"/>.
/// </summary>
/// <param name="microblog">The microblog entity to index.</param>
/// <returns>Lucene.Net.Documents.Document</returns>
public static Document Convert(MicroblogEntity microblog)
{
    Document doc = new Document();
    // Basic microblog information.
    doc.Add(new Field(MicroblogIndexDocument.MicroblogId, microblog.MicroblogId.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    // Hoisted: the stripped, lowercased body was computed identically in both
    // branches of the original; for forwards, append the forwarded post's text.
    string body = HtmlUtility.StripHtml(microblog.Body, true, false).ToLower();
    if (microblog.OriginalMicroblog != null)
    {
        body += HtmlUtility.StripHtml(microblog.OriginalMicroblog.Body, true, false).ToLower();
    }
    doc.Add(new Field(MicroblogIndexDocument.Body, body, Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(MicroblogIndexDocument.DateCreated, DateTools.DateToString(microblog.DateCreated, DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
    // Boolean attributes are indexed as "1"/"0" flags.
    doc.Add(new Field(MicroblogIndexDocument.HasMusic, microblog.HasMusic ? "1" : "0", Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(MicroblogIndexDocument.HasPhoto, microblog.HasPhoto ? "1" : "0", Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(MicroblogIndexDocument.HasVideo, microblog.HasVideo ? "1" : "0", Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(MicroblogIndexDocument.IsOriginality, microblog.ForwardedMicroblogId == 0 ? "1" : "0", Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(MicroblogIndexDocument.TenantTypeId, microblog.TenantTypeId, Field.Store.YES, Field.Index.NOT_ANALYZED));
    // Index every topic tag attached to this microblog.
    TagService tagService = new TagService(TenantTypeIds.Instance().Microblog());
    IEnumerable<ItemInTag> itemInTags = tagService.GetItemInTagsOfItem(microblog.MicroblogId);
    foreach (ItemInTag itemInTag in itemInTags)
    {
        doc.Add(new Field(MicroblogIndexDocument.Topic, itemInTag.TagName.ToLower(), Field.Store.YES, Field.Index.ANALYZED));
    }
    return doc;
}
示例12: AddOrUpdateDocuments
/// <summary>
/// Re-indexes the given CMS documents: deletes any existing entries, then
/// writes one Lucene document per CmsDocument (id, title, tags, part values).
/// </summary>
/// <param name="documents">Documents to index; each must have a non-empty Id.</param>
/// <exception cref="ArgumentOutOfRangeException">A document has an empty (transient) Id.</exception>
public void AddOrUpdateDocuments(params CmsDocument[] documents)
{
    DeleteDocuments(documents);
    using (var writer = new IndexWriter(_Directory, _Analyzer, false, new IndexWriter.MaxFieldLength(1024 * 1024 * 4)))
    {
        foreach (var document in documents)
        {
            // Fix: the single-string ctor treats its argument as the parameter
            // *name*, so the message was lost; use the (paramName, message) overload.
            if (document.Id == Guid.Empty)
                throw new ArgumentOutOfRangeException(nameof(documents), "Attempt to index transient document: " + document.Title);
            var doc = new Document();
            // Id is the exact-match key ("b" = braced GUID format).
            doc.Add(new Field(CmsDocumentField.Id.ToString(), document.Id.ToString("b"), Field.Store.YES, Field.Index.NOT_ANALYZED));
            if (!String.IsNullOrEmpty(document.Title))
                doc.Add(new Field(CmsDocumentField.Title.ToString(), document.Title, Field.Store.YES, Field.Index.ANALYZED));
            foreach (var tag in document.Tags)
            {
                doc.Add(new Field(CmsDocumentField.Tag.ToString(), tag, Field.Store.YES, Field.Index.ANALYZED));
            }
            // Part values are searchable but not stored.
            foreach (var partValue in document.Parts.Select(p => p.Value))
            {
                if (!String.IsNullOrEmpty(partValue))
                    doc.Add(new Field(CmsDocumentField.Value.ToString(), partValue, Field.Store.NO, Field.Index.ANALYZED));
            }
            writer.AddDocument(doc);
        }
        writer.Flush(true, true, true);
    }
}
示例13: SetUp
/// <summary>
/// Test fixture setup: builds a 100-document index whose id/text/class fields
/// all store term vectors (with offsets and positions), then wraps it in a
/// SlowCompositeReaderWrapper for the tests to read.
/// </summary>
public override void SetUp()
{
base.SetUp();
_dir = NewDirectory();
_indexWriter = new RandomIndexWriter(Random(), _dir, new MockAnalyzer(Random()), Similarity, TimeZone);
// All three fields need full term-vector data for the classifier under test.
FieldType ft = new FieldType(TextField.TYPE_STORED);
ft.StoreTermVectors = true;
ft.StoreTermVectorOffsets = true;
ft.StoreTermVectorPositions = true;
Analyzer analyzer = new MockAnalyzer(Random());
Document doc;
// Field values are random strings; the exact content is irrelevant to the tests.
for (int i = 0; i < 100; i++)
{
doc = new Document();
doc.Add(new Field(_idFieldName, Random().toString(), ft));
doc.Add(new Field(_textFieldName, new StringBuilder(Random().toString()).append(Random().toString()).append(
Random().toString()).toString(), ft));
doc.Add(new Field(_classFieldName, Random().toString(), ft));
_indexWriter.AddDocument(doc, analyzer);
}
_indexWriter.Commit();
// Tests require a single atomic-reader view of the index.
_originalIndex = SlowCompositeReaderWrapper.Wrap(_indexWriter.Reader);
}
示例14: TestRollbackIntegrityWithBufferFlush
/// <summary>
/// Lucene 4.x variant: after committing 5 documents, 3 buffered updates
/// (flushed every 2 docs) are rolled back; the index must still report
/// exactly 5 documents.
/// </summary>
public virtual void TestRollbackIntegrityWithBufferFlush()
{
Directory dir = NewDirectory();
// Phase 1: commit 5 baseline documents keyed by "pk".
RandomIndexWriter rw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
for (int i = 0; i < 5; i++)
{
Document doc = new Document();
doc.Add(NewStringField("pk", Convert.ToString(i), Field.Store.YES));
rw.AddDocument(doc);
}
rw.Dispose();
// If buffer size is small enough to cause a flush, errors ensue...
// Phase 2: reopen in APPEND mode with a 2-doc buffer so updates flush mid-stream.
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND));
for (int i = 0; i < 3; i++)
{
Document doc = new Document();
string value = Convert.ToString(i);
doc.Add(NewStringField("pk", value, Field.Store.YES));
doc.Add(NewStringField("text", "foo", Field.Store.YES));
w.UpdateDocument(new Term("pk", value), doc);
}
w.Rollback();
// Rollback must restore the committed 5-doc state.
IndexReader r = DirectoryReader.Open(dir);
Assert.AreEqual(5, r.NumDocs, "index should contain same number of docs post rollback");
r.Dispose();
dir.Dispose();
}
示例15: AddDoc
/// <summary>
/// Adds a document with stored, analyzed "name" and "id" fields.
/// </summary>
/// <param name="writer">Writer the document is added to.</param>
/// <param name="name">Value for the "name" field.</param>
/// <param name="id">Value for the "id" field.</param>
private void AddDoc(IndexWriter writer, String name, String id)
{
    var document = new Document();
    document.Add(new Field("name", name, Field.Store.YES, Field.Index.ANALYZED));
    document.Add(new Field("id", id, Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(document);
}