当前位置: 首页>>代码示例>>C#>>正文


C# Token.Clone方法代码示例

本文整理汇总了C#中Lucene.Net.Analysis.Token.Clone方法的典型用法代码示例。如果您正苦于以下问题:C# Token.Clone方法的具体用法?C# Token.Clone怎么用?C# Token.Clone使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在Lucene.Net.Analysis.Token的用法示例。


在下文中一共展示了Token.Clone方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。

示例1: SingleTokenTokenStream

        /// <summary>
        /// Creates a stream that will emit exactly one token: a private clone of
        /// <paramref name="token"/> (cloned so later mutation of the caller's token
        /// cannot affect this stream).
        /// </summary>
        /// <param name="token">The token to emit; must not be null.</param>
        public SingleTokenTokenStream(Token token)
        {
            Debug.Assert(token != null, "Token was null!");
            _singleToken = (Token) token.Clone();

            // ReSharper disable DoNotCallOverridableMethodsInConstructor
            _tokenAtt = (AttributeImpl) AddAttribute(typeof (TermAttribute));
            // ReSharper restore DoNotCallOverridableMethodsInConstructor

            // Sanity-check the attribute implementation the factory handed back.
            string actualName = _tokenAtt.GetType().Name;
            string expectedName = typeof (TokenWrapper).Name;
            Debug.Assert(_tokenAtt is Token || actualName.Equals(expectedName),
                         "Token Attribute is the wrong type! Type was: " + actualName + " but expected " + expectedName);
        }
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:13,代码来源:SingleTokenTokenStream.cs

示例2: Add

 /// <summary>
 /// Caches a clone of the given token in the sink; a null token is silently
 /// ignored. Override this method to cache only certain tokens, or new tokens
 /// derived from the old ones.
 /// </summary>
 /// <param name="t">The <see cref="Lucene.Net.Analysis.Token"/> to add to the sink</param>
 public virtual void Add(Token t)
 {
     if (t != null)
     {
         // Clone so later reuse/mutation of t by the producer cannot corrupt the cache.
         lst.Add((Token) t.Clone());
     }
 }
开发者ID:cqm0609,项目名称:lucene-file-finder,代码行数:12,代码来源:SinkTokenizer.cs

示例3: SetToken

 /// <summary>
 /// Replaces the single token this stream emits with a private clone of
 /// <paramref name="token"/>.
 /// </summary>
 /// <param name="token">The token to clone; must not be null.</param>
 public void SetToken(Token token)
 {
     // Mirror the constructor's contract: fail fast with a clear message in
     // debug builds instead of a bare NullReferenceException from Clone().
     Debug.Assert(token != null, "Token was null!");
     _singleToken = (Token) token.Clone();
 }
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:4,代码来源:SingleTokenTokenStream.cs

示例4: Add

			/// <summary>
			/// Caches a clone of every modCount-th non-null token seen; the running
			/// token counter advances for every call, cached or not.
			/// </summary>
			public override void  Add(Token t)
			{
				bool cacheThisToken = t != null && count % modCount == 0;
				if (cacheThisToken)
				{
					lst.Add(t.Clone());
				}
				count++;
			}
开发者ID:vikasraz,项目名称:indexsearchutils,代码行数:8,代码来源:TeeSinkTokenTest.cs

示例5: TestPerformance

		/// <summary>
		/// Benchmarks analyzing a document twice (simulating two fields) against
		/// analyzing it once while teeing tokens into a sink and replaying the sink,
		/// across several token counts and modulo cache rates. Before timing, it
		/// verifies the tee/sink path caches the same tokens a plain
		/// ModuloTokenFilter pass would produce.
		/// </summary>
		public virtual void  TestPerformance()
		{
			// Document sizes (token counts) and cache rates (keep every modCount-th token) to sweep.
			int[] tokCount = new int[]{100, 500, 1000, 2000, 5000, 10000};
			int[] modCounts = new int[]{1, 2, 5, 10, 20, 50, 100, 200, 500};
			for (int k = 0; k < tokCount.Length; k++)
			{
				// Build a synthetic document: the first tokCount[k] integers spelled out in upper case.
				System.Text.StringBuilder buffer = new System.Text.StringBuilder();
				System.Console.Out.WriteLine("-----Tokens: " + tokCount[k] + "-----");
				for (int i = 0; i < tokCount[k]; i++)
				{
					buffer.Append(English.IntToEnglish(i).ToUpper()).Append(' ');
				}
				//make sure we produce the same tokens
				// Drain the tee'd stream so the sink caches every 100th token as a side effect.
				ModuloSinkTokenizer sink = new ModuloSinkTokenizer(this, tokCount[k], 100);
				Token next = new Token();
				TokenStream result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
				while ((next = result.Next(next)) != null)
				{
				}
				// Re-tokenize directly through a ModuloTokenFilter with the same rate and collect clones.
				result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
				next = new Token();
				System.Collections.IList tmp = new System.Collections.ArrayList();
				while ((next = result.Next(next)) != null)
				{
					tmp.Add(next.Clone());
				}
				// The sink's cache must match the direct filter pass token-for-token.
				System.Collections.IList sinkList = sink.GetTokens();
				Assert.IsTrue(tmp.Count == sinkList.Count, "tmp Size: " + tmp.Count + " is not: " + sinkList.Count);
				for (int i = 0; i < tmp.Count; i++)
				{
					Token tfTok = (Token) tmp[i];
					Token sinkTok = (Token) sinkList[i];
					Assert.IsTrue(tfTok.TermText().Equals(sinkTok.TermText()) == true, tfTok.TermText() + " is not equal to " + sinkTok.TermText() + " at token: " + i);
				}
				//simulate two fields, each being analyzed once, for 20 documents
				
				for (int j = 0; j < modCounts.Length; j++)
				{
					int tfPos = 0;
					// (Ticks - 621355968000000000) / 10000 converts .NET ticks to milliseconds
					// since the Unix epoch (the constant is the tick count at 1970-01-01).
					long start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
					for (int i = 0; i < 20; i++)
					{
						// Field 1: plain analysis of the whole document.
						next = new Token();
						result = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
						while ((next = result.Next(next)) != null)
						{
							tfPos += next.GetPositionIncrement();
						}
						// Field 2: re-analyze the same text through the modulo filter.
						next = new Token();
						result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
						while ((next = result.Next(next)) != null)
						{
							tfPos += next.GetPositionIncrement();
						}
					}
					long finish = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Two fields took " + (finish - start) + " ms");
					int sinkPos = 0;
					//simulate one field with one sink
					start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
					for (int i = 0; i < 20; i++)
					{
						// Single analysis pass that tees every modCounts[j]-th token into the sink.
						sink = new ModuloSinkTokenizer(this, tokCount[k], modCounts[j]);
						next = new Token();
						result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
						while ((next = result.Next(next)) != null)
						{
							sinkPos += next.GetPositionIncrement();
						}
						// Replay the tokens cached in the sink instead of re-analyzing the text.
						result = sink;
						while ((next = result.Next(next)) != null)
						{
							sinkPos += next.GetPositionIncrement();
						}
					}
					finish = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) + " ms");
					// Both strategies must have walked the same total position increments.
					Assert.IsTrue(sinkPos == tfPos, sinkPos + " does not equal: " + tfPos);
				}
				System.Console.Out.WriteLine("- End Tokens: " + tokCount[k] + "-----");
			}
		}
开发者ID:vikasraz,项目名称:indexsearchutils,代码行数:83,代码来源:TeeSinkTokenTest.cs


注:本文中的Lucene.Net.Analysis.Token.Clone方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。