

C# Lucene.Net.Store.Directory.FileLength Method Code Examples

This article collects typical usage examples of the Lucene.Net.Store.Directory.FileLength method in C#. If you are wondering what C# Lucene.Net.Store.Directory.FileLength does, how to call it, or what real-world uses of it look like, the hand-picked code examples below should help. You can also browse further usage examples of Lucene.Net.Store.Directory, the class this method belongs to.


A total of 9 code examples of the Lucene.Net.Store.Directory.FileLength method are shown below, sorted by popularity by default.
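
Before diving into the examples, here is a minimal, self-contained sketch (written for this article, not taken from any of the projects below) of the typical call pattern: FileLength takes the name of a file that exists in a Directory and returns its size in bytes, which is commonly used to report or sum up index sizes. It assumes the Lucene.Net 3.x API (in 4.x, CreateOutput also takes an IOContext argument); the RAMDirectory instance and the file name "demo.bin" are made up for illustration.

 using System;
 using Lucene.Net.Store;

 public static class FileLengthDemo
 {
     public static void Main()
     {
         // Any Directory implementation works the same way; RAMDirectory keeps files in memory.
         Directory dir = new RAMDirectory();

         // Write a small file so there is something to measure.
         using (IndexOutput output = dir.CreateOutput("demo.bin"))
         {
             output.WriteBytes(new byte[128], 128);
         }

         // FileLength returns the size of the named file in bytes.
         long length = dir.FileLength("demo.bin");
         Console.WriteLine("demo.bin is {0} bytes", length);

         // Summing FileLength over ListAll() gives the total size of the directory,
         // the same pattern used by the DirSize helper in Example 5 below.
         long total = 0;
         foreach (string file in dir.ListAll())
         {
             total += dir.FileLength(file);
         }
         Console.WriteLine("directory total: {0} bytes", total);
     }
 }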

Example 1: ReadFile

 private void ReadFile(Directory dir, string name)
 {
     IndexInput input = dir.OpenInput(name, NewIOContext(Random()));
     try
     {
         long size = dir.FileLength(name);
         long bytesLeft = size;
         while (bytesLeft > 0)
         {
             int numToRead;
             if (bytesLeft < Buffer.Length)
             {
                 numToRead = (int)bytesLeft;
             }
             else
             {
                 numToRead = Buffer.Length;
             }
             input.ReadBytes(Buffer, 0, numToRead, false);
             bytesLeft -= numToRead;
         }
         // Don't do this in your real backups!  This is just
         // to force a backup to take a somewhat long time, to
         // make sure we are exercising the fact that the
         // IndexWriter should not delete this file even when I
         // take my time reading it.
         Thread.Sleep(1);
     }
     finally
     {
         input.Dispose();
     }
 }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 33, Source file: TestSnapshotDeletionPolicy.cs

Example 2: ReadFile

		private void  ReadFile(Directory dir, System.String name)
		{
			IndexInput input = dir.OpenInput(name);
			try
			{
				long size = dir.FileLength(name);
				long bytesLeft = size;
				while (bytesLeft > 0)
				{
					int numToRead;
					if (bytesLeft < buffer.Length)
						numToRead = (int) bytesLeft;
					else
						numToRead = buffer.Length;
					input.ReadBytes(buffer, 0, numToRead, false);
					bytesLeft -= numToRead;
				}
				// Don't do this in your real backups!  This is just
				// to force a backup to take a somewhat long time, to
				// make sure we are exercising the fact that the
				// IndexWriter should not delete this file even when I
				// take my time reading it.
				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1));
			}
			finally
			{
				input.Close();
			}
		}
Developer ID: VirtueMe, Project: ravendb, Lines of code: 29, Source file: TestSnapshotDeletionPolicy.cs

Example 3: TermInfosReader

        internal TermInfosReader(Directory dir, string seg, FieldInfos fis, IOContext context, int indexDivisor)
        {
            bool success = false;

            if (indexDivisor < 1 && indexDivisor != -1)
            {
                throw new System.ArgumentException("indexDivisor must be -1 (don't load terms index) or greater than 0: got " + indexDivisor);
            }

            try
            {
                Directory = dir;
                Segment = seg;
                FieldInfos = fis;

                OrigEnum = new SegmentTermEnum(Directory.OpenInput(IndexFileNames.SegmentFileName(Segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), context), FieldInfos, false);
                Size_Renamed = OrigEnum.Size;

                if (indexDivisor != -1)
                {
                    // Load terms index
                    TotalIndexInterval = OrigEnum.IndexInterval * indexDivisor;

                    string indexFileName = IndexFileNames.SegmentFileName(Segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
                    SegmentTermEnum indexEnum = new SegmentTermEnum(Directory.OpenInput(indexFileName, context), FieldInfos, true);

                    try
                    {
                        Index = new TermInfosReaderIndex(indexEnum, indexDivisor, dir.FileLength(indexFileName), TotalIndexInterval);
                        IndexLength = Index.Length();
                    }
                    finally
                    {
                        indexEnum.Dispose();
                    }
                }
                else
                {
                    // Do not load terms index:
                    TotalIndexInterval = -1;
                    Index = null;
                    IndexLength = -1;
                }
                success = true;
            }
            finally
            {
                // With lock-less commits, it's entirely possible (and
                // fine) to hit a FileNotFound exception above. In
                // this case, we want to explicitly close any subset
                // of things that were opened so that we don't have to
                // wait for a GC to do so.
                if (!success)
                {
                    Dispose();
                }
            }
        }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 58, Source file: TermInfosReader.cs

Example 4: CachedIndexInput

        public CachedIndexInput( ICloudProvider CloudProvider, Directory CacheDirectory, string Name )
        {
            this.name = Name;

            #if FULLDEBUG
            Debug.WriteLine( "Opening " + this.name );
            #endif
            this.fileMutex = BlobMutexManager.GrabMutex( this.name );
            this.fileMutex.WaitOne();
            try {

                bool fFileNeeded = false;
                FileMetadata cloudMetadata = CloudProvider.FileMetadata( this.name );
                if ( !cloudMetadata.Exists ) {
                    fFileNeeded = false;
                    // TODO: Delete local if it doesn't exist on cloud?
                    /*
                    if (CacheDirectory.FileExists(this.name)) {
                        CacheDirectory.DeleteFile(this.name);
                    }
                    */
                } else if ( !CacheDirectory.FileExists( this.name ) ) {
                    fFileNeeded = true;
                } else {
                    long cachedLength = CacheDirectory.FileLength( this.name );

                    long blobLength = cloudMetadata.Length;
                    DateTime blobLastModifiedUTC = cloudMetadata.LastModified.ToUniversalTime();

                    if ( !cloudMetadata.Exists || cachedLength != blobLength ) {
                        fFileNeeded = true;
                    } else {
                        // there seems to be an error of 1 tick which happens every once in a while
                        // for now we will say that if they are within 1 tick of each other and same length
                        DateTime cachedLastModifiedUTC = new DateTime( CacheDirectory.FileModified( this.name ), DateTimeKind.Local ).ToUniversalTime();
                        if ( cachedLastModifiedUTC < blobLastModifiedUTC ) {
                            TimeSpan timeSpan = blobLastModifiedUTC.Subtract( cachedLastModifiedUTC );
                            if ( timeSpan.TotalSeconds > 1 ) {
                                fFileNeeded = true;
                            } else {
            #if FULLDEBUG
                                Debug.WriteLine( "Using cache for " + this.name + ": " + timeSpan.TotalSeconds );
            #endif
                                // file not needed
                            }
                        }
                    }
                }

                // if the file does not exist
                // or if it exists and it is older than the last-modified time in the blob properties (which always comes from the blob storage)
                if ( fFileNeeded ) {
                    using ( StreamOutput fileStream = new StreamOutput( CacheDirectory.CreateOutput( this.name ) ) ) {

                        Stream blobStream = CloudProvider.Download( this.name );
                        blobStream.CopyTo( fileStream );

                        fileStream.Flush();
                        Debug.WriteLine( "GET {0} RETRIEVED {1} bytes", this.name, fileStream.Length );

                    }
                } else {
            #if FULLDEBUG
                    if ( !cloudMetadata.Exists ) {
                        Debug.WriteLine( "Cloud doesn't have " + this.name );
                    } else {
                        Debug.WriteLine( "Using cached file for " + this.name );
                    }
            #endif
                }

                // open the file in read only mode
                this.indexInput = CacheDirectory.OpenInput( this.name );
            } finally {
                this.fileMutex.ReleaseMutex();
            }
        }
Developer ID: robrich, Project: LuceneCloudDirectory, Lines of code: 77, Source file: CachedIndexInput.cs

Example 5: DirSize

 internal static long DirSize(Directory d)
 {
     long size = 0;
     foreach (string file in d.ListAll())
     {
         size += d.FileLength(file);
     }
     return size;
 }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 9, Source file: BaseCompressingDocValuesFormatTestCase.cs

Example 6: BeforeClass

        public static void BeforeClass()
        {
            // NOTE: turn off compound file, this test will open some index files directly.
            LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetUseCompoundFile(false);

            TermIndexInterval = config.TermIndexInterval;
            IndexDivisor = TestUtil.NextInt(Random(), 1, 10);
            NUMBER_OF_DOCUMENTS = AtLeast(100);
            NUMBER_OF_FIELDS = AtLeast(Math.Max(10, 3 * TermIndexInterval * IndexDivisor / NUMBER_OF_DOCUMENTS));

            Directory = NewDirectory();

            config.SetCodec(new PreFlexRWCodec());
            LogMergePolicy mp = NewLogMergePolicy();
            // NOTE: turn off compound file, this test will open some index files directly.
            mp.NoCFSRatio = 0.0;
            config.SetMergePolicy(mp);

            Populate(Directory, config);

            DirectoryReader r0 = IndexReader.Open(Directory);
            SegmentReader r = LuceneTestCase.GetOnlySegmentReader(r0);
            string segment = r.SegmentName;
            r.Dispose();

            FieldInfosReader infosReader = (new PreFlexRWCodec()).FieldInfosFormat().FieldInfosReader;
            FieldInfos fieldInfos = infosReader.Read(Directory, segment, "", IOContext.READONCE);
            string segmentFileName = IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
            long tiiFileLength = Directory.FileLength(segmentFileName);
            IndexInput input = Directory.OpenInput(segmentFileName, NewIOContext(Random()));
            TermEnum = new SegmentTermEnum(Directory.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), NewIOContext(Random())), fieldInfos, false);
            int totalIndexInterval = TermEnum.IndexInterval * IndexDivisor;

            SegmentTermEnum indexEnum = new SegmentTermEnum(input, fieldInfos, true);
            Index = new TermInfosReaderIndex(indexEnum, IndexDivisor, tiiFileLength, totalIndexInterval);
            indexEnum.Dispose();
            input.Dispose();

            Reader = IndexReader.Open(Directory);
            SampleTerms = Sample(Random(), Reader, 1000);
        }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 42, Source file: TestTermInfosReaderIndex.cs

Example 7: CreateCompoundFile

        /// <summary>
        /// NOTE: this method creates a compound file for all files returned by
        /// info.files(). While, generally, this may include separate norms and
        /// deletion files, this SegmentInfo must not reference such files when this
        /// method is called, because they are not allowed within a compound file.
        /// </summary>
        public static ICollection<string> CreateCompoundFile(InfoStream infoStream, Directory directory, CheckAbort checkAbort, SegmentInfo info, IOContext context)
        {
            string fileName = Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_EXTENSION);
            if (infoStream.IsEnabled("IW"))
            {
                infoStream.Message("IW", "create compound file " + fileName);
            }
            Debug.Assert(Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1);
            // Now merge all added files
            ICollection<string> files = info.Files;
            CompoundFileDirectory cfsDir = new CompoundFileDirectory(directory, fileName, context, true);
            IOException prior = null;
            try
            {
                foreach (string file in files)
                {
                    directory.Copy(cfsDir, file, file, context);
                    checkAbort.Work(directory.FileLength(file));
                }
            }
            catch (System.IO.IOException ex)
            {
                prior = ex;
            }
            finally
            {
                bool success = false;
                try
                {
                    IOUtils.CloseWhileHandlingException(prior, cfsDir);
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        try
                        {
                            directory.DeleteFile(fileName);
                        }
                        catch (Exception)
                        {
                        }
                        try
                        {
                            directory.DeleteFile(Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            // Replace all previous files with the CFS/CFE files:
            HashSet<string> siFiles = new HashSet<string>();
            siFiles.Add(fileName);
            siFiles.Add(Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
            info.Files = siFiles;

            return files;
        }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 68, Source file: IndexWriter.cs

Example 8: BytesUsedByExtension

 private IDictionary<string, long> BytesUsedByExtension(Directory d)
 {
     IDictionary<string, long> bytesUsedByExtension = new Dictionary<string, long>();
     foreach (string file in d.ListAll())
     {
         string ext = IndexFileNames.GetExtension(file) ?? string.Empty;
         long previousLength = bytesUsedByExtension.ContainsKey(ext) ? bytesUsedByExtension[ext] : 0;
         bytesUsedByExtension[ext] = previousLength + d.FileLength(file);
     }
     foreach (string item in ExcludedExtensionsFromByteCounts()) {
         bytesUsedByExtension.Remove(item);
     }
     return bytesUsedByExtension;
 }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 14, Source file: BaseIndexFileFormatTestCase.cs

Example 9: BuildIndex


//......... part of the code is omitted here .........
            {
                string field = fieldEnt.Key;
                IDictionary<BytesRef, long> terms = fieldEnt.Value;

                FieldInfo fieldInfo = newFieldInfos.FieldInfo(field);

                FieldInfo.IndexOptions? indexOptions = fieldInfo.FieldIndexOptions;

                if (VERBOSE)
                {
                    Console.WriteLine("field=" + field + " indexOptions=" + indexOptions);
                }

                bool doFreq = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS;
                bool doPos = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                bool doPayloads = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS && allowPayloads;
                bool doOffsets = indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;

                TermsConsumer termsConsumer = fieldsConsumer.AddField(fieldInfo);
                long sumTotalTF = 0;
                long sumDF = 0;
                FixedBitSet seenDocs = new FixedBitSet(MaxDoc);
                foreach (KeyValuePair<BytesRef, long> termEnt in terms)
                {
                    BytesRef term = termEnt.Key;
                    SeedPostings postings = GetSeedPostings(term.Utf8ToString(), termEnt.Value, false, maxAllowed);
                    if (VERBOSE)
                    {
                        Console.WriteLine("  term=" + field + ":" + term.Utf8ToString() + " docFreq=" + postings.DocFreq + " seed=" + termEnt.Value);
                    }

                    PostingsConsumer postingsConsumer = termsConsumer.StartTerm(term);
                    long totalTF = 0;
                    int docID = 0;
                    while ((docID = postings.NextDoc()) != DocsEnum.NO_MORE_DOCS)
                    {
                        int freq = postings.Freq();
                        if (VERBOSE)
                        {
                            Console.WriteLine("    " + postings.Upto + ": docID=" + docID + " freq=" + postings.Freq_Renamed);
                        }
                        postingsConsumer.StartDoc(docID, doFreq ? postings.Freq_Renamed : -1);
                        seenDocs.Set(docID);
                        if (doPos)
                        {
                            totalTF += postings.Freq_Renamed;
                            for (int posUpto = 0; posUpto < freq; posUpto++)
                            {
                                int pos = postings.NextPosition();
                                BytesRef payload = postings.Payload;

                                if (VERBOSE)
                                {
                                    if (doPayloads)
                                    {
                                        Console.WriteLine("      pos=" + pos + " payload=" + (payload == null ? "null" : payload.Length + " bytes"));
                                    }
                                    else
                                    {
                                        Console.WriteLine("      pos=" + pos);
                                    }
                                }
                                postingsConsumer.AddPosition(pos, doPayloads ? payload : null, doOffsets ? postings.StartOffset() : -1, doOffsets ? postings.EndOffset() : -1);
                            }
                        }
                        else if (doFreq)
                        {
                            totalTF += freq;
                        }
                        else
                        {
                            totalTF++;
                        }
                        postingsConsumer.FinishDoc();
                    }
                    termsConsumer.FinishTerm(term, new TermStats(postings.DocFreq, doFreq ? totalTF : -1));
                    sumTotalTF += totalTF;
                    sumDF += postings.DocFreq;
                }

                termsConsumer.Finish(doFreq ? sumTotalTF : -1, sumDF, seenDocs.Cardinality());
            }

            fieldsConsumer.Dispose();

            if (VERBOSE)
            {
                Console.WriteLine("TEST: after indexing: files=");
                foreach (string file in dir.ListAll())
                {
                    Console.WriteLine("  " + file + ": " + dir.FileLength(file) + " bytes");
                }
            }

            CurrentFieldInfos = newFieldInfos;

            SegmentReadState readState = new SegmentReadState(dir, segmentInfo, newFieldInfos, IOContext.READ, 1);

            return codec.PostingsFormat().FieldsProducer(readState);
        }
Developer ID: WakeflyCBass, Project: lucenenet, Lines of code: 101, Source file: BasePostingsFormatTestCase.cs


Note: The Lucene.Net.Store.Directory.FileLength method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by many developers, and the copyright of the source code belongs to the original authors. Please follow the corresponding project's license when distributing or using the code; do not reproduce this article without permission.