

C# Lucene.Net.Util.BytesRef.CopyBytes Method Code Examples

This article collects typical usage examples of the Lucene.Net.Util.BytesRef.CopyBytes method in C#. If you are wondering what BytesRef.CopyBytes does, how to call it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore further usage examples of the containing class, Lucene.Net.Util.BytesRef.


Three code examples of Lucene.Net.Util.BytesRef.CopyBytes are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# examples.
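
Before turning to the examples, here is a minimal, hypothetical usage sketch (not taken from any of the projects below; the class name CopyBytesSketch is ours). CopyBytes copies the bytes of another BytesRef into this instance, growing the backing array when needed, so the two instances no longer share storage.

using System;
using Lucene.Net.Util;

public static class CopyBytesSketch
{
    public static void Main()
    {
        // A source term and an initially empty target.
        BytesRef source = new BytesRef("lucene");
        BytesRef target = new BytesRef();

        // CopyBytes copies source's bytes into target's own (grown) array;
        // afterwards target does not alias source's buffer.
        target.CopyBytes(source);

        Console.WriteLine(target.Utf8ToString());                        // lucene
        Console.WriteLine(ReferenceEquals(source.Bytes, target.Bytes));  // False
    }
}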

Example 1: TestStressDeleteQueue

        public virtual void TestStressDeleteQueue()
        {
            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
            HashSet<Term> uniqueValues = new HashSet<Term>();
            int size = 10000 + Random().Next(500) * RANDOM_MULTIPLIER;
            int?[] ids = new int?[size];
            for (int i = 0; i < ids.Length; i++)
            {
                ids[i] = Random().Next();
                uniqueValues.Add(new Term("id", ids[i].ToString()));
            }
            CountDownLatch latch = new CountDownLatch(1);
            AtomicInteger index = new AtomicInteger(0);
            int numThreads = 2 + Random().Next(5);
            UpdateThread[] threads = new UpdateThread[numThreads];
            for (int i = 0; i < threads.Length; i++)
            {
                threads[i] = new UpdateThread(queue, index, ids, latch);
                threads[i].Start();
            }
            latch.countDown();
            for (int i = 0; i < threads.Length; i++)
            {
                threads[i].Join();
            }

            foreach (UpdateThread updateThread in threads)
            {
                DeleteSlice slice = updateThread.Slice;
                queue.UpdateSlice(slice);
                BufferedUpdates deletes = updateThread.Deletes;
                slice.Apply(deletes, BufferedUpdates.MAX_INT);
                assertEquals(uniqueValues, new HashSet<Term>(deletes.Terms.Keys));
            }
            queue.TryApplyGlobalSlice();
            HashSet<Term> frozenSet = new HashSet<Term>();
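            // Copy each frozen term's bytes into a fresh BytesRef so the Term
            // added to frozenSet owns its own buffer instead of aliasing the
            // frozen delete queue's storage.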
            foreach (Term t in queue.FreezeGlobalBuffer(null).TermsIterable())
            {
                BytesRef bytesRef = new BytesRef();
                bytesRef.CopyBytes(t.Bytes);
                frozenSet.Add(new Term(t.Field, bytesRef));
            }
            Assert.AreEqual(0, queue.NumGlobalTermDeletes(), "num deletes must be 0 after freeze");
            Assert.AreEqual(uniqueValues.Count, frozenSet.Count);
            assertEquals(uniqueValues, frozenSet);
        }
Developer ID: WakeflyCBass, Project: lucenenet, Lines: 46, Source: TestDocumentsWriterDeleteQueue.cs

Example 2: TestUpdateDelteSlices

 public virtual void TestUpdateDelteSlices()
 {
     DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
     int size = 200 + Random().Next(500) * RANDOM_MULTIPLIER;
     int?[] ids = new int?[size];
     for (int i = 0; i < ids.Length; i++)
     {
         ids[i] = Random().Next();
     }
     DeleteSlice slice1 = queue.NewSlice();
     DeleteSlice slice2 = queue.NewSlice();
     BufferedUpdates bd1 = new BufferedUpdates();
     BufferedUpdates bd2 = new BufferedUpdates();
     int last1 = 0;
     int last2 = 0;
     HashSet<Term> uniqueValues = new HashSet<Term>();
     for (int j = 0; j < ids.Length; j++)
     {
         int? i = ids[j];
         // create an array here since we compare identity below against tailItem
         Term[] term = new Term[] { new Term("id", i.ToString()) };
         uniqueValues.Add(term[0]);
         queue.AddDelete(term);
         if (Random().Next(20) == 0 || j == ids.Length - 1)
         {
             queue.UpdateSlice(slice1);
             Assert.IsTrue(slice1.IsTailItem(term));
             slice1.Apply(bd1, j);
             AssertAllBetween(last1, j, bd1, ids);
             last1 = j + 1;
         }
         if (Random().Next(10) == 5 || j == ids.Length - 1)
         {
             queue.UpdateSlice(slice2);
             Assert.IsTrue(slice2.IsTailItem(term));
             slice2.Apply(bd2, j);
             AssertAllBetween(last2, j, bd2, ids);
             last2 = j + 1;
         }
         Assert.AreEqual(j + 1, queue.NumGlobalTermDeletes());
     }
     assertEquals(uniqueValues, new HashSet<Term>(bd1.Terms.Keys));
     assertEquals(uniqueValues, new HashSet<Term>(bd2.Terms.Keys));
     var frozenSet = new HashSet<Term>();
     foreach (Term t in queue.FreezeGlobalBuffer(null).TermsIterable())
     {
         BytesRef bytesRef = new BytesRef();
         bytesRef.CopyBytes(t.Bytes);
         frozenSet.Add(new Term(t.Field, bytesRef));
     }
     assertEquals(uniqueValues, frozenSet);
     Assert.AreEqual(0, queue.NumGlobalTermDeletes(), "num deletes must be 0 after freeze");
 }
Developer ID: WakeflyCBass, Project: lucenenet, Lines: 53, Source: TestDocumentsWriterDeleteQueue.cs

Example 3: AddTermsDict

        /// <summary>
        /// expert: writes a value dictionary for a sorted/sortedset field </summary>
        protected internal virtual void AddTermsDict(FieldInfo field, IEnumerable<BytesRef> values)
        {
            // first check if its a "fixed-length" terms dict
            int minLength = int.MaxValue;
            int maxLength = int.MinValue;
            foreach (BytesRef v in values)
            {
                minLength = Math.Min(minLength, v.Length);
                maxLength = Math.Max(maxLength, v.Length);
            }
            if (minLength == maxLength)
            {
                // no index needed: direct addressing by mult
                AddBinaryField(field, values);
            }
            else
            {
                // header
                Meta.WriteVInt(field.Number);
                Meta.WriteByte((byte)Lucene45DocValuesFormat.BINARY);
                Meta.WriteVInt(BINARY_PREFIX_COMPRESSED);
                Meta.WriteLong(-1L);
                // now write the bytes: sharing prefixes within a block
                long startFP = Data.FilePointer;
                // currently, we have to store the delta from expected for every 1/nth term
                // we could avoid this, but its not much and less overall RAM than the previous approach!
                RAMOutputStream addressBuffer = new RAMOutputStream();
                MonotonicBlockPackedWriter termAddresses = new MonotonicBlockPackedWriter(addressBuffer, BLOCK_SIZE);
                BytesRef lastTerm = new BytesRef();
                long count = 0;
                foreach (BytesRef v in values)
                {
                    if (count % ADDRESS_INTERVAL == 0)
                    {
                        termAddresses.Add(Data.FilePointer - startFP);
                        // force the first term in a block to be abs-encoded
                        lastTerm.Length = 0;
                    }

                    // prefix-code
                    int sharedPrefix = StringHelper.BytesDifference(lastTerm, v);
                    Data.WriteVInt(sharedPrefix);
                    Data.WriteVInt(v.Length - sharedPrefix);
                    Data.WriteBytes(v.Bytes, v.Offset + sharedPrefix, v.Length - sharedPrefix);
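                    // Remember the current value by copy; the values enumerator may
                    // reuse its BytesRef instance, so aliasing it would corrupt the
                    // prefix comparison on the next iteration.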
                    lastTerm.CopyBytes(v);
                    count++;
                }
                long indexStartFP = Data.FilePointer;
                // write addresses of indexed terms
                termAddresses.Finish();
                addressBuffer.WriteTo(Data);
                addressBuffer = null;
                termAddresses = null;
                Meta.WriteVInt(minLength);
                Meta.WriteVInt(maxLength);
                Meta.WriteVLong(count);
                Meta.WriteLong(startFP);
                Meta.WriteVInt(ADDRESS_INTERVAL);
                Meta.WriteLong(indexStartFP);
                Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                Meta.WriteVInt(BLOCK_SIZE);
            }
        }
Developer ID: Cefa68000, Project: lucenenet, Lines: 65, Source: Lucene45DocValuesConsumer.cs
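
In Example 3, CopyBytes keeps lastTerm an independent snapshot of the previous value between iterations: assigning the reference directly would alias a BytesRef that the values enumerator may reuse. Below is a minimal, hypothetical sketch of that prefix-sharing pattern in isolation (the class and method names are ours, and console output stands in for the index output); it illustrates the bookkeeping under those assumptions rather than reproducing the project's code.

using System;
using System.Collections.Generic;
using Lucene.Net.Util;

public static class PrefixCodeSketch
{
    // For a sorted sequence of terms, report how many leading bytes each term
    // shares with its predecessor and how long the remaining suffix is; this is
    // the same bookkeeping AddTermsDict performs before writing to the index.
    public static void Encode(IEnumerable<BytesRef> sortedValues)
    {
        BytesRef lastTerm = new BytesRef();
        foreach (BytesRef v in sortedValues)
        {
            int sharedPrefix = StringHelper.BytesDifference(lastTerm, v);
            int suffixLength = v.Length - sharedPrefix;
            Console.WriteLine("shared={0}, suffix={1}", sharedPrefix, suffixLength);

            // Copy, don't alias: the enumerator may hand back the same BytesRef
            // instance on the next iteration.
            lastTerm.CopyBytes(v);
        }
    }
}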


Note: The Lucene.Net.Util.BytesRef.CopyBytes method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by developers across the community, and copyright of the source code belongs to its original authors. Please follow the corresponding project's license when distributing or using the code, and do not reproduce this article without permission.