This article collects typical usage examples of the Document.add method in C#. If you have been wondering how to use C# Document.add, what it is for, or what real calls look like, the hand-picked code samples below may help. You can also explore further usage examples of the Document class to which the method belongs.
The following presents 12 code examples of the Document.add method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code samples.
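Most of the snippets below call Document.add on a Lucene full-text index document (the others use the PDF Document class from iText; see the note after Example 2). As a quick orientation, here is a minimal, self-contained sketch of that indexing pattern using the official Lucene.NET 4.8 API. Note that Lucene.NET proper uses PascalCase names (Document.Add, IndexWriter.AddDocument), while several examples on this page come from java-style ports that keep the lowercase spelling. The field names and the in-memory RAMDirectory are placeholders chosen for illustration.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

// Build an in-memory index and add a single document with two fields.
using (var directory = new RAMDirectory())
using (var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48))
{
    var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer);
    using (var writer = new IndexWriter(directory, config))
    {
        var doc = new Document();
        doc.Add(new StringField("id", "1", Field.Store.YES));             // exact-match key, not analyzed
        doc.Add(new TextField("body", "hello lucene", Field.Store.YES));  // analyzed full-text content
        writer.AddDocument(doc);
        writer.Commit();
    }
}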
Example 1: GenerateInventarioEntryDetails
private void GenerateInventarioEntryDetails(Document doc, GISADataset.RelacaoHierarquicaRow rhRow, float CurrentIndentCm) {
string entry = "";
Paragraph p;
GisaDataSetHelper.GetFRDBaseDataAdapter(string.Format("WHERE IDNivel={0}", rhRow.ID), null, null).Fill(dataSet.FRDBase);
GisaDataSetHelper.GetSFRDDatasProducaoDataAdapter(string.Format("INNER JOIN FRDBase ON SFRDDatasProducao.IDFRDBase=FRDBase.ID WHERE IDNivel={0}", rhRow.ID), null, null).Fill(dataSet.SFRDDatasProducao);
GisaDataSetHelper.GetSFRDUFCotaDataAdapter(string.Format("INNER JOIN FRDBase ON SFRDUFCota.IDFRDBase=FRDBase.ID WHERE IDNivel={0}", rhRow.ID), null, null).Fill(dataSet.SFRDUFCota);
//PersistencyHelper.cleanDeletedRows()
foreach (GISADataset.FRDBaseRow frd in rhRow.NivelRowByNivelRelacaoHierarquica.GetFRDBaseRows()) {
if (frd.IDTipoFRDBase == (long)TipoFRDBase.FRDOIPublicacao) {
if (frd.GetSFRDDatasProducaoRows().Length > 0) {
if (!frd.GetSFRDDatasProducaoRows()[0].IsInicioTextoNull() && frd.GetSFRDDatasProducaoRows()[0].InicioTexto.Length > 0) {
entry += frd.GetSFRDDatasProducaoRows()[0].InicioTexto + ", ";
}
entry += GetInicioData(frd.GetSFRDDatasProducaoRows()[0]) + " - " + GetFimData(frd.GetSFRDDatasProducaoRows()[0]);
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(CurrentIndentCm + 0.5f));
doc.add(p);
}
entry = frd.GetSFRDUFCotaRows()[0].Cota;
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(CurrentIndentCm + 0.5f));
doc.add(p);
entry = Nivel.GetCodigoOfNivel(rhRow.NivelRowByNivelRelacaoHierarquica);
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(CurrentIndentCm + 0.5f));
doc.add(p);
}
}
}
Example 2: GenerateTitle
protected override void GenerateTitle(Document doc) {
Paragraph p = new Paragraph("Inventário", TitleFont);
p.setAlignment(ElementConst.ALIGN_CENTER);
doc.add(p);
p = new Paragraph("Listagem dos Documentos", SubTitleFont);
p.setAlignment(ElementConst.ALIGN_CENTER);
p.setLeading(CentimeterToPoint(1));
doc.add(p);
doc.add(new Paragraph(""));
}
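Examples 1, 2, 4, 6 and 7 call add on a different Document class: the PDF Document from iText, not the Lucene index document. For reference, here is a minimal sketch of the same title-writing idea using the standard iTextSharp 5.x C# API, where the equivalent calls are Document.Add, Paragraph.Alignment and Element.ALIGN_CENTER (the snippets above come from a java-style port that keeps lowercase names such as doc.add and p.setAlignment and an ElementConst helper). The output file name and fonts are placeholders, not values from the original project.

using System.IO;
using iTextSharp.text;
using iTextSharp.text.pdf;

// Write a centered title and subtitle to a new PDF document.
using (var stream = new FileStream("inventario.pdf", FileMode.Create))
{
    var doc = new Document(PageSize.A4);
    PdfWriter.GetInstance(doc, stream);
    doc.Open();

    var title = new Paragraph("Inventário", FontFactory.GetFont(FontFactory.HELVETICA_BOLD, 16f));
    title.Alignment = Element.ALIGN_CENTER;
    doc.Add(title);

    var subTitle = new Paragraph("Listagem dos Documentos", FontFactory.GetFont(FontFactory.HELVETICA, 12f));
    subTitle.Alignment = Element.ALIGN_CENTER;
    doc.Add(subTitle);

    doc.Close();
}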
Example 3: TestNegativeQueryBoost
public virtual void TestNegativeQueryBoost()
{
Query q = new TermQuery(new Term("foo", "bar"));
q.Boost = -42f;
Assert.AreEqual(-42f, q.Boost, 0.0f);
Directory directory = newDirectory();
try
{
Analyzer analyzer = new MockAnalyzer(random());
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriter writer = new IndexWriter(directory, conf);
try
{
Document d = new Document();
d.add(newTextField("foo", "bar", Field.Store.YES));
writer.addDocument(d);
}
finally
{
writer.close();
}
IndexReader reader = DirectoryReader.open(directory);
try
{
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
Assert.AreEqual(1, hits.Length);
Assert.IsTrue("score is not negative: " + hits[0].score, hits[0].score < 0);
Explanation explain = searcher.explain(q, hits[0].doc);
Assert.AreEqual("score doesn't match explanation", hits[0].score, explain.Value, 0.001f);
Assert.IsTrue("explain doesn't think doc is a match", explain.Match);
}
finally
{
reader.close();
}
}
finally
{
directory.close();
}
}
Example 4: GenerateInventarioEntry
protected override void GenerateInventarioEntry(Document doc, GISADataset.RelacaoHierarquicaRow rhRow, float CurrentIndentCm) {
GISADataset.NivelRow n = rhRow.NivelRowByNivelRelacaoHierarquica;
string entry = string.Format("{0}: {1} - {2}", rhRow.TipoNivelRelacionadoRow.Codigo, rhRow.NivelRowByNivelRelacaoHierarquica.Codigo, Nivel.GetDesignacao(n));
Paragraph p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(CurrentIndentCm));
doc.add(p);
DoRemovedEntries(1);
if (!rhRow.TipoNivelRelacionadoRow.TipoNivelRow.IsDocument) {
GenerateInventarioEntryChildren(doc, rhRow, CurrentIndentCm);
}
else {
GenerateInventarioEntryDetails(doc, rhRow, CurrentIndentCm);
}
}
Example 5: Save
private void Save(IndexWriter writer)
{
var query = new BooleanQuery();
query.add(new TermQuery(new Term(PreferencesDocumentField, PreferencesName)), BooleanClause.Occur.MUST);
writer.deleteDocuments(query);
var prefs = new FolderPreferences();
prefs.NextFolderId = nextFolderId;
prefs.FolderMapping = new List<FolderMap>();
foreach (var key in registeredFolders.Keys)
prefs.FolderMapping.Add(new FolderMap { Id = key, Path = registeredFolders[key] });
var json = JsonConvert.SerializeObject(prefs);
// Create and store the document
var doc = new Document();
doc.add(new StringField(PreferencesDocumentField, PreferencesName, Field.Store.YES));
doc.add(new StringField(RegisteredFoldersField, json, Field.Store.YES));
writer.addDocument(doc);
writer.commit();
}
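Example 5 stores the folder preferences as a single JSON blob inside one index document, keyed by a non-analyzed StringField so that the same document can later be found (and deleted) with an exact TermQuery. As a hedged illustration that is not part of the original project, the sketch below shows how such a singleton document could be read back with standard Lucene.NET 4.8 calls; the field and preference names are passed in as parameters because their actual values are not shown on this page, and FolderPreferences is the type used in Example 5.

using Lucene.Net.Index;
using Lucene.Net.Search;
using Newtonsoft.Json;

// Sketch: load the single preferences document written by Save(), if it exists.
static FolderPreferences LoadPreferences(IndexReader reader,
    string preferencesDocumentField, string preferencesName, string registeredFoldersField)
{
    var searcher = new IndexSearcher(reader);
    var query = new TermQuery(new Term(preferencesDocumentField, preferencesName));
    var hits = searcher.Search(query, 1).ScoreDocs;
    if (hits.Length == 0)
        return null;                                // nothing stored yet

    var doc = searcher.Doc(hits[0].Doc);
    var json = doc.Get(registeredFoldersField);     // the stored JSON blob
    return JsonConvert.DeserializeObject<FolderPreferences>(json);
}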
Example 6: GenerateTitle
protected override void GenerateTitle(Document doc) {
Paragraph p = new Paragraph("Catálogo", TitleFont);
p.setAlignment(ElementConst.ALIGN_CENTER);
doc.add(p);
}
Example 7: GenerateInventarioEntryDetails
private void GenerateInventarioEntryDetails(Document doc, GISADataset.RelacaoHierarquicaRow rhRow, float CurrentIndentCm) {
string entry = string.Empty;
Paragraph p;
GISADataset.NivelRow n = rhRow.NivelRowByNivelRelacaoHierarquica;
foreach (GISADataset.FRDBaseRow frd in rhRow.NivelRowByNivelRelacaoHierarquica.GetFRDBaseRows()) {
if (frd.IDTipoFRDBase == (long)TipoFRDBase.FRDOIPublicacao) {
entry = Nivel.GetCodigoOfNivel(n);
p = new Paragraph(CentimeterToPoint(0.5F), entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(0));
doc.add(p);
entry = string.Format("{0}: {1}", rhRow.TipoNivelRelacionadoRow.Codigo, Nivel.GetDesignacao(n));
p = new Paragraph(CentimeterToPoint(0.5F), entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(0.5F));
doc.add(p);
if (frd.GetSFRDDatasProducaoRows().Length > 0) {
entry = "";
if (!frd.GetSFRDDatasProducaoRows()[0].IsInicioTextoNull() && frd.GetSFRDDatasProducaoRows()[0].InicioTexto.Length > 0){
entry += frd.GetSFRDDatasProducaoRows()[0].InicioTexto + ", ";
}
entry += GetInicioData(frd.GetSFRDDatasProducaoRows()[0]) + " - " + GetFimData(frd.GetSFRDDatasProducaoRows()[0]);
p = new Paragraph(CentimeterToPoint(0.5F), entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1F));
doc.add(p);
}
entry = "**Dimensão e suporte**";
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1F));
doc.add(p);
}
entry = GetControloAutFormaAut(frd, new TipoNoticiaAut[] {TipoNoticiaAut.EntidadeProdutora});
//For Each idx As GISADataset.IndexFRDCARow In frd.GetIndexFRDCARows()
// If idx.ControloAutRow.IDTipoNoticiaAut = TipoNoticiaAut.EntidadeProdutora Then
// For Each cad As GISADataset.ControloAutDicionarioRow In idx.ControloAutRow.GetControloAutDicionarioRows
// If cad.IDTipoControloAutForma = TipoControloAutForma.FormaAutorizada Then
// If entry.Length > 0 Then entry += " / "
// entry += cad.DicionarioRow.Termo
// End If
// Next
// End If
//Next
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
if (frd.GetSFRDContextoRows().Length == 1){
entry = frd.GetSFRDContextoRows()[0].HistoriaAdministrativa;
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
entry = frd.GetSFRDContextoRows()[0].HistoriaCustodial;
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
entry = frd.GetSFRDContextoRows()[0].FonteImediataDeAquisicao;
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
}
entry = GetControloAutFormaAut(frd, new TipoNoticiaAut[] {TipoNoticiaAut.TipologiaInformacional});
//For Each idx As GISADataset.IndexFRDCARow In frd.GetIndexFRDCARows()
// If idx.ControloAutRow.IDTipoNoticiaAut = TipoNoticiaAut.TipologiaInformacional Then
// For Each cad As GISADataset.ControloAutDicionarioRow In idx.ControloAutRow.GetControloAutDicionarioRows
// If cad.IDTipoControloAutForma = TipoControloAutForma.FormaAutorizada Then
// If entry.Length > 0 Then entry += " / "
// entry += cad.DicionarioRow.Termo
// End If
// Next
// End If
//Next
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
if (frd.GetSFRDConteudoEEstruturaRows().Length == 1) {
entry = frd.GetSFRDConteudoEEstruturaRows()[0].ConteudoInformacional;
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
}
if (frd.GetSFRDCondicaoDeAcessoRows().Length == 1) {
entry = frd.GetSFRDCondicaoDeAcessoRows()[0].CondicaoDeReproducao;
if (entry.Length > 0) {
p = new Paragraph(entry, this.BodyFont);
p.setIndentationLeft(CentimeterToPoint(1));
doc.add(p);
}
}
entry = GetControloAutFormaAut(frd, new TipoNoticiaAut[] {TipoNoticiaAut.Ideografico, TipoNoticiaAut.Onomastico, TipoNoticiaAut.ToponimicoGeografico});
//For Each idx As GISADataset.IndexFRDCARow In frd.GetIndexFRDCARows()
//......... part of the code is omitted here .........
Example 8: Main
static void Main()
{
// default AzureDirectory stores cache in local temp folder
CloudStorageAccount cloudStorageAccount;
CloudStorageAccount.TryParse(CloudConfigurationManager.GetSetting("blobStorage"), out cloudStorageAccount);
//AzureDirectory azureDirectory = new AzureDirectory(cloudStorageAccount, "TestTest", new RAMDirectory());
//AzureDirectory azureDirectory = new AzureDirectory(cloudStorageAccount, "TestTest", FSDirectory.Open(@"c:\test"));
var azureDirectory = new AzureDirectory(cloudStorageAccount, "TestTest" /* default is FSDirectory.Open(@"%temp%/AzureDirectory/TestTest"); */ );
IndexWriter indexWriter = null;
while (indexWriter == null)
{
try
{
var config = new IndexWriterConfig(org.apache.lucene.util.Version.LUCENE_CURRENT, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
indexWriter = new IndexWriter(azureDirectory, config);
}
catch (LockObtainFailedException)
{
Console.WriteLine("Lock is taken, waiting for timeout...");
Thread.Sleep(1000);
}
}
Console.WriteLine("IndexWriter lock obtained, this process has exclusive write access to index");
//indexWriter.setRAMBufferSizeMB(10.0);
//indexWriter.SetUseCompoundFile(false);
//indexWriter.SetMaxMergeDocs(10000);
//indexWriter.SetMergeFactor(100);
for (int iDoc = 0; iDoc < 10000; iDoc++)
{
if (iDoc % 10 == 0)
Console.WriteLine(iDoc);
var doc = new Document();
doc.add(new TextField("id", DateTime.Now.ToFileTimeUtc().ToString(CultureInfo.InvariantCulture), Field.Store.YES));
doc.add(new TextField("Title", GeneratePhrase(10), Field.Store.YES));
doc.add(new TextField("Body", GeneratePhrase(40), Field.Store.YES));
indexWriter.addDocument(doc);
}
Console.WriteLine("Total docs is {0}", indexWriter.numDocs());
Console.Write("Flushing and disposing writer...");
// Potentially expensive: this ensures that all writes are committed to blob storage
indexWriter.commit();
indexWriter.close();
Console.WriteLine("done");
Console.WriteLine("Hit Key to search again");
Console.ReadKey();
IndexSearcher searcher;
using (new AutoStopWatch("Creating searcher"))
{
searcher = new IndexSearcher(DirectoryReader.open(azureDirectory));
}
SearchForPhrase(searcher, "dog");
SearchForPhrase(searcher, Random.Next(32768).ToString(CultureInfo.InvariantCulture));
SearchForPhrase(searcher, Random.Next(32768).ToString(CultureInfo.InvariantCulture));
Console.WriteLine("Hit a key to dispose and exit");
Console.ReadKey();
}
Example 9: DoTestSearch
private void DoTestSearch(Random random, PrintWriter @out, bool useCompoundFile)
{
Directory directory = newDirectory();
Analyzer analyzer = new MockAnalyzer(random);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
MergePolicy mp = conf.MergePolicy;
mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
IndexWriter writer = new IndexWriter(directory, conf);
string[] docs = new string[] {"a b c d e", "a b c d e a b c d e", "a b c d e f g h i j", "a c e", "e c a", "a c e a c e", "a c e a b c"};
for (int j = 0; j < docs.Length; j++)
{
Document d = new Document();
d.add(newTextField("contents", docs[j], Field.Store.YES));
d.add(newStringField("id", "" + j, Field.Store.NO));
writer.addDocument(d);
}
writer.close();
IndexReader reader = DirectoryReader.open(directory);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = null;
Sort sort = new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.Type.INT));
foreach (Query query in BuildQueries())
{
@out.println("Query: " + query.ToString("contents"));
if (VERBOSE)
{
Console.WriteLine("TEST: query=" + query);
}
hits = searcher.search(query, null, 1000, sort).scoreDocs;
@out.println(hits.Length + " total results");
for (int i = 0 ; i < hits.Length && i < 10; i++)
{
Document d = searcher.doc(hits[i].doc);
@out.println(i + " " + hits[i].score + " " + d.get("contents"));
}
}
reader.close();
directory.close();
}
Example 10: AddExifInfo
private void AddExifInfo(string filename, Document doc, JToken info)
{
string latitude = null;
string longitude = null;
string latRef = null;
string lonRef = null;
DateTime? createdDate = null;
foreach (var child in info.Children())
{
var prop = child as JProperty;
if (prop != null)
{
foreach (var grandchild in child.Children())
{
var obj = grandchild as JObject;
if (obj != null)
{
foreach (var kv in obj)
{
var val = ToString(kv.Value);
var path = string.Format("{0}.{1}", prop.Name, kv.Key);
// Remove leading & trailing spaces in keywords. I suspect a better way is to
// create a more sophisticated Analyzer, but I haven't worked that out correctly.
// It's important to keep single keyword phrases with spaces together: "mount rushmore"
if (path == "IPTC.Keywords" || path == "XMP.Subject")
{
var tokens = val.Split(',');
for (int idx = 0; idx < tokens.Length; ++idx)
{
tokens[idx] = tokens[idx].Trim();
}
val = String.Join(",", tokens);
if (path == "IPTC.Keywords" && tokens.Length > 0)
{
foreach (var k in tokens)
{
var trimmedKeyword = k.Trim();
if (!String.IsNullOrWhiteSpace(trimmedKeyword))
doc.add(new FacetField(FacetNames.Keywords, trimmedKeyword));
}
}
}
switch (path)
{
case "EXIF.GPSLatitudeRef":
latRef = val;
break;
case "EXIF.GPSLongitudeRef":
lonRef = val;
break;
case "EXIF.GPSLatitude":
latitude = val;
break;
case "EXIF.GPSLongitude":
longitude = val;
break;
case "EXIF.DateTimeOriginal":
case "EXIF.CreateDate":
if (createdDate == null)
{
DateTime temp;
if (DateTime.TryParseExact(val, "yyyy:MM:dd HH:mm:ss", new CultureInfo("en-US"), DateTimeStyles.None, out temp))
{
createdDate = temp;
}
}
break;
}
string docKey;
if (StoredValues.TryGetValue(path, out docKey))
{
doc.add(new TextField(docKey.ToLower(), val, Field.Store.YES));
}
}
}
}
}
}
if (createdDate == null)
{
createdDate = new FileInfo(filename).CreationTime;
}
doc.add(new TextField(FieldName.Date, createdDate.Value.ToString("yyyyMMdd"), Field.Store.NO));
doc.add(new TextField(FieldName.CreatedDate, createdDate.Value.ToString("o"), Field.Store.YES));
doc.add(new TextField(FieldName.Day, createdDate.Value.AllDayNames(), Field.Store.NO));
doc.add(new TextField(FieldName.Month, createdDate.Value.AllMonthNames(), Field.Store.NO));
var facetDate = new []
{
createdDate.Value.Year.ToString(),
createdDate.Value.Month.ToString(),
createdDate.Value.Day.ToString(),
};
//......... part of the code is omitted here .........
Example 11: ProcessFile
private bool ProcessFile(string filename, Document doc, string hashValue, JToken exifInfo)
{
var fileInfo = new FileInfo(filename);
var indexPath = indexPreferences.ToIndexPath(filename);
doc.add(new StringField(FieldName.SignatureIndexPath, indexPath, Field.Store.YES));
doc.add(new StringField(FieldName.SignatureFileLength, fileInfo.Length.ToString(), Field.Store.YES));
doc.add(new StringField(FieldName.SignatureLastWriteTicks, fileInfo.LastWriteTimeUtc.Ticks.ToString(), Field.Store.YES));
doc.add(new StringField(FieldName.SignatureHash, hashValue, Field.Store.YES));
doc.add(new StringField(FieldName.SignatureSchemaVersion, CurrentSchemaVersion.ToString(), Field.Store.YES));
var directoryName = Path.GetDirectoryName(indexPreferences.RelativeIndexPath(indexPath));
doc.add(new TextField(FieldName.ContainingFolder, directoryName, Field.Store.YES));
doc.add(new TextField(FieldName.Filename, Path.GetFileName(filename), Field.Store.YES));
var directoryTokens = directoryName.Split(DirectorySplitChars, StringSplitOptions.RemoveEmptyEntries);
if (directoryTokens != null && directoryTokens.Length > 0)
doc.add(new FacetField(FacetNames.Folder, directoryTokens));
if (exifInfo != null)
{
AddExifInfo(filename, doc, exifInfo);
}
return true;
}
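Example 11 writes several Signature* fields (index path, file length, last-write ticks, content hash and schema version), which look like the data an indexer would use to decide whether a file on disk still matches what is already indexed. Purely as an assumption-laden illustration, and not code from the original project, the sketch below shows how such a check could be phrased with standard Lucene.NET 4.8 calls; it reuses the FieldName constants from Example 11, and the up-to-date rule (same length and same last-write ticks) is a guess.

using System.IO;
using Lucene.Net.Index;
using Lucene.Net.Search;

// Sketch: return true if the index already holds a document whose stored
// signature matches the file's current length and last-write time.
static bool IsAlreadyIndexed(IndexSearcher searcher, string indexPath, FileInfo fileInfo)
{
    var query = new TermQuery(new Term(FieldName.SignatureIndexPath, indexPath));
    var hits = searcher.Search(query, 1).ScoreDocs;
    if (hits.Length == 0)
        return false;                               // never indexed before

    var doc = searcher.Doc(hits[0].Doc);
    return doc.Get(FieldName.SignatureFileLength) == fileInfo.Length.ToString()
        && doc.Get(FieldName.SignatureLastWriteTicks) == fileInfo.LastWriteTimeUtc.Ticks.ToString();
}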
Example 12: AddThumbnail
private bool AddThumbnail(string filename, string mimeType, string size)
{
var imageCodecInfo = ImageCodecInfo.GetImageEncoders().FirstOrDefault(e => e.MimeType == mimeType);
if (imageCodecInfo != null)
{
try
{
Interlocked.Increment(ref generatedThumbnails);
if ((generatedThumbnails % 100) == 0)
{
logger.Debug("Generated {0} thumbnails; processing {1} ({2} queued)", generatedThumbnails, filename, queuedFiles.Count);
}
// Remove the existing document/thumbnail, if any
var query = new BooleanQuery();
query.add(new TermQuery(new Term(FieldName.SignatureIndexPath, indexPreferences.ToIndexPath(filename))), BooleanClause.Occur.MUST);
writer.deleteDocuments(query);
using (var image = Image.FromFile(filename))
{
float ratio = (float) image.Width / (float) image.Height;
// First generate a thumbnail double the size desired, then do a high quality down-scale from there.
// For a large image (5184x3456), it adds about 5% more time, with a much better looking thumbnail
using (var doubleThumbSize = new Bitmap(image, (int) (ThumbnailHeight * 2 * ratio), ThumbnailHeight * 2))
{
using (var target = new Bitmap((int) (ThumbnailHeight * ratio), ThumbnailHeight))
{
using (var graphics = Graphics.FromImage(target))
{
graphics.CompositingQuality = System.Drawing.Drawing2D.CompositingQuality.HighQuality;
graphics.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
graphics.DrawImage(doubleThumbSize, 0, 0, target.Width, target.Height);
using (var memoryStream = new MemoryStream())
{
var encoderQuality = Encoder.Quality;
using (var parms = new EncoderParameters(1))
{
parms.Param[0] = new EncoderParameter(encoderQuality, ThumbnailQuality);
target.Save(memoryStream, imageCodecInfo, parms);
memoryStream.Close();
var doc = new Document();
doc.add(new StoredField(FieldName.Thumbnail, memoryStream.ToArray()));
doc.add(new StringField(FieldName.SignatureIndexPath, indexPreferences.ToIndexPath(filename), Field.Store.YES));
doc.add(new StringField(FieldName.MimeType, mimeType, Field.Store.YES));
doc.add(new StringField(FieldName.Size, size, Field.Store.YES));
writer.addDocument(doc);
MadeChange();
return true;
}
}
}
}
}
}
}
catch (Exception ex)
{
logger.Error("Error generating image for {0}: {1}", filename, ex);
}
}
return false;
}