本文整理汇总了C#中Lucene.Net.Store.Directory.ListAll方法的典型用法代码示例。如果您正苦于以下问题:C# Lucene.Net.Store.Directory.ListAll方法的具体用法?C# Lucene.Net.Store.Directory.ListAll怎么用?C# Lucene.Net.Store.Directory.ListAll使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Store.Directory的用法示例。
在下文中一共展示了Lucene.Net.Store.Directory.ListAll方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: NewDirectory
/// <summary>
/// Returns a new Directory instance chosen using the specified random,
/// with every file copied over from the provided directory. See
/// <seealso cref="#newDirectory()"/> for more information.
/// </summary>
public static BaseDirectoryWrapper NewDirectory(Random r, Directory d)
{
    Directory target = NewDirectoryImpl(r, TEST_DIRECTORY);
    string[] sourceFiles = d.ListAll();
    for (int i = 0; i < sourceFiles.Length; i++)
    {
        string name = sourceFiles[i];
        d.Copy(target, name, name, NewIOContext(r));
    }
    return WrapDirectory(r, target, Rarely(r));
}
示例2: IndexFileDeleter
/// <summary> Initialize the deleter: find all previous commits in
/// the Directory, incref the files they reference, call
/// the policy to let it delete commits. This will remove
/// any files not referenced by any of the commits.
/// </summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, System.IO.StreamWriter infoStream, DocumentsWriter docWriter, System.Collections.Generic.Dictionary<string, string> synced)
{
// Record collaborators for use by the rest of the deleter.
this.docWriter = docWriter;
this.infoStream = infoStream;
this.synced = synced;
if (infoStream != null)
{
Message("init: current segments file is \"" + segmentInfos.GetCurrentSegmentFileName() + "\"; deletionPolicy=" + policy);
}
this.policy = policy;
this.directory = directory;
// First pass: walk the files and initialize our ref
// counts:
long currentGen = segmentInfos.GetGeneration();
IndexFileNameFilter filter = IndexFileNameFilter.GetFilter();
System.String[] files = directory.ListAll();
CommitPoint currentCommitPoint = null;
for (int i = 0; i < files.Length; i++)
{
System.String fileName = files[i];
// Only index files are ref-counted; segments.gen is never tracked.
if (filter.Accept(null, fileName) && !fileName.Equals(IndexFileNames.SEGMENTS_GEN))
{
// Add this file to refCounts with initial count 0:
GetRefCount(fileName);
if (fileName.StartsWith(IndexFileNames.SEGMENTS))
{
// This is a commit (segments or segments_N), and
// it's valid (<= the max gen). Load it, then
// incref all files it refers to:
if (infoStream != null)
{
Message("init: load commit \"" + fileName + "\"");
}
SegmentInfos sis = new SegmentInfos();
try
{
sis.Read(directory, fileName);
}
catch (System.IO.FileNotFoundException e)
{
// LUCENE-948: on NFS (and maybe others), if
// you have writers switching back and forth
// between machines, it's very likely that the
// dir listing will be stale and will claim a
// file segments_X exists when in fact it
// doesn't. So, we catch this and handle it
// as if the file does not exist
if (infoStream != null)
{
Message("init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
}
sis = null;
}
catch (System.IO.IOException e)
{
// A read failure on a commit at or before the current generation is
// fatal; a failure on a newer ("future") generation is tolerated.
if (SegmentInfos.GenerationFromSegmentsFileName(fileName) <= currentGen)
{
throw e;
}
else
{
// Most likely we are opening an index that
// has an aborted "future" commit, so suppress
// exc in this case
sis = null;
}
}
if (sis != null)
{
// Register this commit point and incref every file it references.
CommitPoint commitPoint = new CommitPoint(this,commitsToDelete, directory, sis);
if (sis.GetGeneration() == segmentInfos.GetGeneration())
{
currentCommitPoint = commitPoint;
}
commits.Add(commitPoint);
IncRef(sis, true);
// Track the newest SegmentInfos seen so far.
if (lastSegmentInfos == null || sis.GetGeneration() > lastSegmentInfos.GetGeneration())
{
lastSegmentInfos = sis;
}
}
// NOTE(review): the remainder of this constructor is omitted in this excerpt.
//......... part of this code sample is omitted here .........
示例3: CheckHeaders
// Verifies that every file in the directory starts with a codec header,
// recursing into compound files.
private void CheckHeaders(Directory dir)
{
    foreach (string name in dir.ListAll())
    {
        // Neither the write lock nor segments.gen carries a codec header.
        if (name.Equals(IndexWriter.WRITE_LOCK_NAME) || name.Equals(IndexFileNames.SEGMENTS_GEN))
        {
            continue;
        }
        if (name.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION))
        {
            // Check the entries nested inside the compound file as well.
            CompoundFileDirectory nested = new CompoundFileDirectory(dir, name, NewIOContext(Random()), false);
            CheckHeaders(nested);
            nested.Dispose();
        }
        IndexInput input = null;
        bool ok = false;
        try
        {
            input = dir.OpenInput(name, NewIOContext(Random()));
            int magic = input.ReadInt();
            Assert.AreEqual(CodecUtil.CODEC_MAGIC, magic, name + " has no codec header, instead found: " + magic);
            ok = true;
        }
        finally
        {
            // On failure, close without letting a secondary exception mask the original.
            if (ok)
            {
                IOUtils.Close(input);
            }
            else
            {
                IOUtils.CloseWhileHandlingException(input);
            }
        }
    }
}
示例4: AssertNoNrm
// Asserts the directory contains no norms files (.nrm or .len).
private void AssertNoNrm(Directory dir)
{
    // TODO: this relies upon filenames
    foreach (string name in dir.ListAll())
    {
        bool looksLikeNorms = name.EndsWith(".nrm") || name.EndsWith(".len");
        Assert.IsFalse(looksLikeNorms);
    }
}
示例5: GetCurrentSegmentGeneration
/// <summary> Get the generation (N) of the current segments_N file
/// in the directory, or -1 if the directory does not exist.
///
/// </summary>
/// <param name="directory">-- directory to search for the latest segments_N file
/// </param>
public static long GetCurrentSegmentGeneration(Directory directory)
{
    try
    {
        return GetCurrentSegmentGeneration(directory.ListAll());
    }
    catch (NoSuchDirectoryException)
    {
        // A missing directory simply has no current generation.
        // (Exception variable dropped: it was unused and triggered CS0168.)
        return -1;
    }
}
示例6: AssertNoPrx
// Asserts the directory contains no proximity (.prx) files.
private void AssertNoPrx(Directory dir)
{
    foreach (string name in dir.ListAll())
    {
        Assert.IsFalse(name.EndsWith(".prx"));
    }
}
示例7: ListCommits
/// <seealso cref="Lucene.Net.Index.IndexReader.ListCommits">
/// </seealso>
public static new ICollection<IndexCommit> ListCommits(Directory dir)
{
    string[] files = dir.ListAll();
    var commits = new List<IndexCommit>();
    // The newest segments file defines the current generation.
    var latest = new SegmentInfos();
    latest.Read(dir);
    long currentGen = latest.Generation;
    commits.Add(new ReaderCommit(latest, dir));
    foreach (string fileName in files)
    {
        // Only older segments_N files (never segments.gen) are commit candidates.
        bool isOlderCommit = fileName.StartsWith(IndexFileNames.SEGMENTS)
            && !fileName.Equals(IndexFileNames.SEGMENTS_GEN)
            && SegmentInfos.GenerationFromSegmentsFileName(fileName) < currentGen;
        if (!isOlderCommit)
        {
            continue;
        }
        var sis = new SegmentInfos();
        try
        {
            // IOException allowed to throw there, in case
            // segments_N is corrupt
            sis.Read(dir, fileName);
        }
        catch (System.IO.FileNotFoundException)
        {
            // LUCENE-948: a stale NFS directory listing may name a
            // segments_X file that no longer exists; treat it as if
            // the file were absent and skip it.
            sis = null;
        }
        if (sis != null)
        {
            commits.Add(new ReaderCommit(sis, dir));
        }
    }
    return commits;
}
示例8: AssertNoUnreferencedFiles
// Fails if running the deletion policy removes (or adds) any file:
// i.e. if any file in the directory is not referenced by a commit.
public static void AssertNoUnreferencedFiles(Directory dir, System.String message)
{
    string[] before = dir.ListAll();
    SegmentInfos infos = new SegmentInfos();
    infos.Read(dir);
    // Constructing the deleter deletes anything no commit references.
    new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
    string[] after = dir.ListAll();
    System.Array.Sort(before);
    System.Array.Sort(after);
    if (!SupportClass.CollectionsHelper.Equals(before, after))
    {
        Assert.Fail(message + ": before delete:\n " + ArrayToString(before) + "\n after delete:\n " + ArrayToString(after));
    }
}
示例9: DirSize
// Returns the total size in bytes of all files in the directory.
internal static long DirSize(Directory d)
{
    long total = 0;
    string[] files = d.ListAll();
    for (int i = 0; i < files.Length; i++)
    {
        total += d.FileLength(files[i]);
    }
    return total;
}
示例10: BytesUsedByExtension
/// <summary>
/// Computes, per file-name extension, the total bytes used by files in
/// <paramref name="d"/>. Extensions listed by
/// <c>ExcludedExtensionsFromByteCounts()</c> are removed from the result.
/// Files with no extension are grouped under the empty string.
/// </summary>
private IDictionary<string, long> BytesUsedByExtension(Directory d)
{
    IDictionary<string, long> bytesUsedByExtension = new Dictionary<string, long>();
    foreach (string file in d.ListAll())
    {
        string ext = IndexFileNames.GetExtension(file) ?? string.Empty;
        // Single TryGetValue lookup instead of ContainsKey + indexer (double lookup).
        long previousLength;
        bytesUsedByExtension.TryGetValue(ext, out previousLength);
        bytesUsedByExtension[ext] = previousLength + d.FileLength(file);
    }
    foreach (string item in ExcludedExtensionsFromByteCounts())
    {
        bytesUsedByExtension.Remove(item);
    }
    return bytesUsedByExtension;
}
示例11: BuildIndex
//......... part of this code sample is omitted here .........
{
// NOTE(review): fragment begins mid-method; 'fieldEnt' appears to iterate
// field -> (term -> seed) pairs — confirm against the full source.
string field = fieldEnt.Key;
IDictionary<BytesRef, long> terms = fieldEnt.Value;
FieldInfo fieldInfo = newFieldInfos.FieldInfo(field);
FieldInfo.IndexOptions? indexOptions = fieldInfo.FieldIndexOptions;
if (VERBOSE)
{
Console.WriteLine("field=" + field + " indexOtions=" + indexOptions);
}
// The field's index options decide how much postings detail gets written.
bool doFreq = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS;
bool doPos = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
bool doPayloads = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS && allowPayloads;
bool doOffsets = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
TermsConsumer termsConsumer = fieldsConsumer.AddField(fieldInfo);
long sumTotalTF = 0;
long sumDF = 0;
FixedBitSet seenDocs = new FixedBitSet(MaxDoc);
foreach (KeyValuePair<BytesRef, long> termEnt in terms)
{
BytesRef term = termEnt.Key;
// Postings are regenerated deterministically from the per-term seed (termEnt.Value).
SeedPostings postings = GetSeedPostings(term.Utf8ToString(), termEnt.Value, false, maxAllowed);
if (VERBOSE)
{
Console.WriteLine(" term=" + field + ":" + term.Utf8ToString() + " docFreq=" + postings.DocFreq + " seed=" + termEnt.Value);
}
PostingsConsumer postingsConsumer = termsConsumer.StartTerm(term);
long totalTF = 0;
int docID = 0;
// Feed each document's freq/positions/payloads/offsets to the consumer.
while ((docID = postings.NextDoc()) != DocsEnum.NO_MORE_DOCS)
{
int freq = postings.Freq();
if (VERBOSE)
{
Console.WriteLine(" " + postings.Upto + ": docID=" + docID + " freq=" + postings.Freq_Renamed);
}
postingsConsumer.StartDoc(docID, doFreq ? postings.Freq_Renamed : -1);
seenDocs.Set(docID);
if (doPos)
{
totalTF += postings.Freq_Renamed;
for (int posUpto = 0; posUpto < freq; posUpto++)
{
int pos = postings.NextPosition();
BytesRef payload = postings.Payload;
if (VERBOSE)
{
if (doPayloads)
{
Console.WriteLine(" pos=" + pos + " payload=" + (payload == null ? "null" : payload.Length + " bytes"));
}
else
{
Console.WriteLine(" pos=" + pos);
}
}
postingsConsumer.AddPosition(pos, doPayloads ? payload : null, doOffsets ? postings.StartOffset() : -1, doOffsets ? postings.EndOffset() : -1);
}
}
else if (doFreq)
{
totalTF += freq;
}
else
{
totalTF++;
}
postingsConsumer.FinishDoc();
}
// Per-term stats: docFreq always; totalTF only when frequencies are indexed.
termsConsumer.FinishTerm(term, new TermStats(postings.DocFreq, doFreq ? totalTF : -1));
sumTotalTF += totalTF;
sumDF += postings.DocFreq;
}
termsConsumer.Finish(doFreq ? sumTotalTF : -1, sumDF, seenDocs.Cardinality());
}
fieldsConsumer.Dispose();
if (VERBOSE)
{
Console.WriteLine("TEST: after indexing: files=");
foreach (string file in dir.ListAll())
{
Console.WriteLine(" " + file + ": " + dir.FileLength(file) + " bytes");
}
}
CurrentFieldInfos = newFieldInfos;
// Open a FieldsProducer over the freshly written segment for the caller to read back.
SegmentReadState readState = new SegmentReadState(dir, segmentInfo, newFieldInfos, IOContext.READ, 1);
return codec.PostingsFormat().FieldsProducer(readState);
}
示例12: CheckHeaders
// Runs a full checksum validation over every file in the directory,
// recursing into compound files.
private void CheckHeaders(Directory dir)
{
    foreach (string name in dir.ListAll())
    {
        // write.lock has no footer, thats ok
        if (name.Equals(IndexWriter.WRITE_LOCK_NAME))
        {
            continue;
        }
        if (name.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION))
        {
            // Validate the entries nested inside the compound file as well.
            CompoundFileDirectory nested = new CompoundFileDirectory(dir, name, NewIOContext(Random()), false);
            CheckHeaders(nested);
            nested.Dispose();
        }
        IndexInput input = null;
        bool ok = false;
        try
        {
            input = dir.OpenInput(name, NewIOContext(Random()));
            CodecUtil.ChecksumEntireFile(input);
            ok = true;
        }
        finally
        {
            // On failure, close without letting a secondary exception mask the original.
            if (ok)
            {
                IOUtils.Close(input);
            }
            else
            {
                IOUtils.CloseWhileHandlingException(input);
            }
        }
    }
}