

Java FlagsAttribute Class Code Examples

This article collects typical usage examples of the Java class org.apache.lucene.analysis.tokenattributes.FlagsAttribute. If you have been wondering what FlagsAttribute is for and how to use it in practice, the curated class examples below should help.


The FlagsAttribute class belongs to the org.apache.lucene.analysis.tokenattributes package. Fifteen code examples of the class are shown below, sorted by popularity by default.
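
Before the individual examples, here is a minimal, self-contained sketch of the access pattern they all share. This is a sketch only: it assumes a Lucene 5.x-or-later classpath (where WhitespaceAnalyzer has a no-argument constructor), and the field name "f" and sample text are placeholders. The attribute is registered on the TokenStream via addAttribute, and its int value is read with getFlags() after each incrementToken():

import java.io.IOException;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;

public class FlagsAttributeDemo {
  public static void main(String[] args) throws IOException {
    try (WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer()) {
      TokenStream ts = analyzer.tokenStream("f", "some sample text");
      CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        // WhitespaceAnalyzer never sets flags itself, so getFlags() is 0 here;
        // custom TokenFilters call setFlags(int) to pass hints to later filters
        System.out.println(termAtt + " flags=" + flagsAtt.getFlags());
      }
      ts.end();
      ts.close();
    }
  }
}

As the examples below show, the same pattern works inside a TokenFilter constructor (examples 1, 11, 13) and in tests that assert on specific flag values (examples 3, 5, 8).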

Example 1: PrefixAwareTokenFilter

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public PrefixAwareTokenFilter(TokenStream prefix, TokenStream suffix) {
  super(suffix);
  this.suffix = suffix;
  this.prefix = prefix;
  prefixExhausted = false;
  
  termAtt = addAttribute(CharTermAttribute.class);
  posIncrAtt = addAttribute(PositionIncrementAttribute.class);
  payloadAtt = addAttribute(PayloadAttribute.class);
  offsetAtt = addAttribute(OffsetAttribute.class);
  typeAtt = addAttribute(TypeAttribute.class);
  flagsAtt = addAttribute(FlagsAttribute.class);

  p_termAtt = prefix.addAttribute(CharTermAttribute.class);
  p_posIncrAtt = prefix.addAttribute(PositionIncrementAttribute.class);
  p_payloadAtt = prefix.addAttribute(PayloadAttribute.class);
  p_offsetAtt = prefix.addAttribute(OffsetAttribute.class);
  p_typeAtt = prefix.addAttribute(TypeAttribute.class);
  p_flagsAtt = prefix.addAttribute(FlagsAttribute.class);
}
 
Developer: lamsfoundation, Project: lams, Lines: 21, Source: PrefixAwareTokenFilter.java

Example 2: copyTo

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
@Override
public void copyTo(AttributeImpl target) {
  if (target instanceof Token) {
    final Token to = (Token) target;
    to.reinit(this);
    // reinit shares the payload, so clone it:
    if (payload != null) {
      to.payload = payload.clone();
    }
  } else {
    super.copyTo(target);
    ((OffsetAttribute) target).setOffset(startOffset, endOffset);
    ((PositionIncrementAttribute) target).setPositionIncrement(positionIncrement);
    ((PayloadAttribute) target).setPayload((payload == null) ? null : payload.clone());
    ((FlagsAttribute) target).setFlags(flags);
    ((TypeAttribute) target).setType(type);
  }
}
 
Developer: gncloud, Project: fastcatsearch3, Lines: 19, Source: Token.java

Example 3: testFilterTokens

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public void testFilterTokens() throws Exception {
  SnowballFilter filter = new SnowballFilter(new TestTokenStream(), "English");
  CharTermAttribute termAtt = filter.getAttribute(CharTermAttribute.class);
  OffsetAttribute offsetAtt = filter.getAttribute(OffsetAttribute.class);
  TypeAttribute typeAtt = filter.getAttribute(TypeAttribute.class);
  PayloadAttribute payloadAtt = filter.getAttribute(PayloadAttribute.class);
  PositionIncrementAttribute posIncAtt = filter.getAttribute(PositionIncrementAttribute.class);
  FlagsAttribute flagsAtt = filter.getAttribute(FlagsAttribute.class);
  
  filter.incrementToken();

  assertEquals("accent", termAtt.toString());
  assertEquals(2, offsetAtt.startOffset());
  assertEquals(7, offsetAtt.endOffset());
  assertEquals("wrd", typeAtt.type());
  assertEquals(3, posIncAtt.getPositionIncrement());
  assertEquals(77, flagsAtt.getFlags());
  assertEquals(new BytesRef(new byte[]{0,1,2,3}), payloadAtt.getPayload());
}
 
Developer: europeana, Project: search, Lines: 20, Source: TestSnowball.java

Example 4: reflectWith

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
@Override
public void reflectWith(AttributeReflector reflector) {
  super.reflectWith(reflector);
  reflector.reflect(OffsetAttribute.class, "startOffset", startOffset);
  reflector.reflect(OffsetAttribute.class, "endOffset", endOffset);
  reflector.reflect(PositionIncrementAttribute.class, "positionIncrement", positionIncrement);
  reflector.reflect(PayloadAttribute.class, "payload", payload);
  reflector.reflect(FlagsAttribute.class, "flags", flags);
  reflector.reflect(TypeAttribute.class, "type", type);
}
 
Developer: gncloud, Project: fastcatsearch3, Lines: 11, Source: Token.java

Example 5: testBoth

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public void testBoth() throws Exception {
  Set<String> untoks = new HashSet<>();
  untoks.add(WikipediaTokenizer.CATEGORY);
  untoks.add(WikipediaTokenizer.ITALICS);
  String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
  // should output all the individual tokens plus the untokenized tokens as well
  WikipediaTokenizer tf = new WikipediaTokenizer(newAttributeFactory(), new StringReader(test), WikipediaTokenizer.BOTH, untoks);
  assertTokenStreamContents(tf,
      new String[] { "a b c d", "a", "b", "c", "d", "e f g", "e", "f", "g",
        "link", "here", "link", "there", "italics here", "italics", "here",
        "something", "more italics", "more", "italics", "h   i   j", "h", "i", "j" },
      new int[] { 11, 11, 13, 15, 17, 32, 32, 34, 36, 42, 47, 56, 61, 71, 71, 79, 86, 98,  98,  103, 124, 124, 128, 132 },
      new int[] { 18, 12, 14, 16, 18, 37, 33, 35, 37, 46, 51, 60, 66, 83, 78, 83, 95, 110, 102, 110, 133, 125, 129, 133 },
      new int[] { 1,  0,  1,  1,  1,  1,  0,  1,  1,  1,  1,  1,  1,  1,  0,  1,  1,  1,   0,   1,   1,   0,   1,   1 }
     );
  
  // now check the flags, TODO: add way to check flags from BaseTokenStreamTestCase?
  tf = new WikipediaTokenizer(newAttributeFactory(), new StringReader(test), WikipediaTokenizer.BOTH, untoks);
  int expectedFlags[] = new int[] { UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, 
      0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0 };
  FlagsAttribute flagsAtt = tf.addAttribute(FlagsAttribute.class);
  tf.reset();
  for (int i = 0; i < expectedFlags.length; i++) {
    assertTrue(tf.incrementToken());
    assertEquals("flags " + i, expectedFlags[i], flagsAtt.getFlags());
  }
  assertFalse(tf.incrementToken());
  tf.close();
}
 
Developer: europeana, Project: search, Lines: 30, Source: WikipediaTokenizerTest.java

Example 6: testAttributeReuse

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public void testAttributeReuse() throws Exception {
  ThaiAnalyzer analyzer = new ThaiAnalyzer(Version.LUCENE_3_0);
  // just consume
  TokenStream ts = analyzer.tokenStream("dummy", "ภาษาไทย");
  assertTokenStreamContents(ts, new String[] { "ภาษา", "ไทย" });
  // this consumer adds flagsAtt, which this analyzer does not use. 
  ts = analyzer.tokenStream("dummy", "ภาษาไทย");
  ts.addAttribute(FlagsAttribute.class);
  assertTokenStreamContents(ts, new String[] { "ภาษา", "ไทย" });
}
 
Developer: europeana, Project: search, Lines: 11, Source: TestThaiAnalyzer.java

Example 7: convert

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
@Override
public Collection<Token> convert(String origQuery) {
  Collection<Token> result = new HashSet<>();
  WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
  
  TokenStream ts = null;
  try {
    ts = analyzer.tokenStream("", origQuery);
    // TODO: support custom attributes
    CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
    OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
    TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
    FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
    PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
    PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);

    ts.reset();

    while (ts.incrementToken()) {
      Token tok = new Token();
      tok.copyBuffer(termAtt.buffer(), 0, termAtt.length());
      tok.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
      tok.setFlags(flagsAtt.getFlags());
      tok.setPayload(payloadAtt.getPayload());
      tok.setPositionIncrement(posIncAtt.getPositionIncrement());
      tok.setType(typeAtt.type());
      result.add(tok);
    }
    ts.end();      
    return result;
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    IOUtils.closeWhileHandlingException(ts);
  }
}
 
Developer: europeana, Project: search, Lines: 37, Source: SimpleQueryConverter.java

Example 8: testBoth

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public void testBoth() throws Exception {
  Set<String> untoks = new HashSet<String>();
  untoks.add(WikipediaTokenizer.CATEGORY);
  untoks.add(WikipediaTokenizer.ITALICS);
  String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
  // should output all the individual tokens plus the untokenized tokens as well
  WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
  assertTokenStreamContents(tf,
      new String[] { "a b c d", "a", "b", "c", "d", "e f g", "e", "f", "g",
        "link", "here", "link", "there", "italics here", "italics", "here",
        "something", "more italics", "more", "italics", "h   i   j", "h", "i", "j" },
      new int[] { 11, 11, 13, 15, 17, 32, 32, 34, 36, 42, 47, 56, 61, 71, 71, 79, 86, 98,  98,  103, 124, 124, 128, 132 },
      new int[] { 18, 12, 14, 16, 18, 37, 33, 35, 37, 46, 51, 60, 66, 83, 78, 83, 95, 110, 102, 110, 133, 125, 129, 133 },
      new int[] { 1,  0,  1,  1,  1,  1,  0,  1,  1,  1,  1,  1,  1,  1,  0,  1,  1,  1,   0,   1,   1,   0,   1,   1 }
     );
  
  // now check the flags, TODO: add way to check flags from BaseTokenStreamTestCase?
  tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
  int expectedFlags[] = new int[] { UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, 
      0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0 };
  FlagsAttribute flagsAtt = tf.addAttribute(FlagsAttribute.class);
  tf.reset();
  for (int i = 0; i < expectedFlags.length; i++) {
    assertTrue(tf.incrementToken());
    assertEquals("flags " + i, expectedFlags[i], flagsAtt.getFlags());
  }
  assertFalse(tf.incrementToken());
  tf.close();
}
 
Developer: pkarmstr, Project: NYBC, Lines: 30, Source: WikipediaTokenizerTest.java

Example 9: testAttributeReuse

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public void testAttributeReuse() throws Exception {
  ThaiAnalyzer analyzer = new ThaiAnalyzer(Version.LUCENE_30);
  // just consume
  TokenStream ts = analyzer.tokenStream("dummy", new StringReader("ภาษาไทย"));
  assertTokenStreamContents(ts, new String[] { "ภาษา", "ไทย" });
  // this consumer adds flagsAtt, which this analyzer does not use. 
  ts = analyzer.tokenStream("dummy", new StringReader("ภาษาไทย"));
  ts.addAttribute(FlagsAttribute.class);
  assertTokenStreamContents(ts, new String[] { "ภาษา", "ไทย" });
}
 
Developer: pkarmstr, Project: NYBC, Lines: 11, Source: TestThaiAnalyzer.java

Example 10: convert

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
@Override
public Collection<Token> convert(String origQuery) {
  try {
    Collection<Token> result = new HashSet<Token>();
    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_40);
    TokenStream ts = analyzer.tokenStream("", new StringReader(origQuery));
    // TODO: support custom attributes
    CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
    OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
    TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
    FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
    PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
    PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);

    ts.reset();

    while (ts.incrementToken()) {
      Token tok = new Token();
      tok.copyBuffer(termAtt.buffer(), 0, termAtt.length());
      tok.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
      tok.setFlags(flagsAtt.getFlags());
      tok.setPayload(payloadAtt.getPayload());
      tok.setPositionIncrement(posIncAtt.getPositionIncrement());
      tok.setType(typeAtt.type());
      result.add(tok);
    }
    ts.end();
    ts.close();
    
    return result;
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
 
Developer: pkarmstr, Project: NYBC, Lines: 35, Source: SimpleQueryConverter.java

Example 11: NamedEntityPopulateFilter

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
protected NamedEntityPopulateFilter(ResultNamedEntityExtraction result,
		TokenStream input) {
	super(input);
	this.result = result;
	this.termAtt = (CharTermAttribute) addAttribute(CharTermAttribute.class);
	this.flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
	this.typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
}
 
Developer: jaeksoft, Project: opensearchserver, Lines: 9, Source: NamedEntityPopulateFilter.java

Example 12: TokenTerm

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public TokenTerm(final CharTermAttribute termAtt, final PositionIncrementAttribute posIncrAtt,
		final OffsetAttribute offsetAtt, final TypeAttribute typeAtt, final FlagsAttribute flagsAtt) {
	this.term = termAtt != null ? termAtt.toString() : null;
	this.start = offsetAtt != null ? offsetAtt.startOffset() : 0;
	this.end = offsetAtt != null ? offsetAtt.endOffset() : 0;
	this.increment = posIncrAtt != null ? posIncrAtt.getPositionIncrement() : 0;
	this.type = typeAtt != null ? typeAtt.type() : null;
	this.flags = flagsAtt != null ? flagsAtt.getFlags() : 0;
}
 
Developer: jaeksoft, Project: opensearchserver, Lines: 10, Source: TokenTerm.java

Example 13: AbstractTermFilter

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
protected AbstractTermFilter(TokenStream input) {
	super(input);
	termAtt = (CharTermAttribute) addAttribute(CharTermAttribute.class);
	posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
	offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
	typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
	flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
}
 
Developer: jaeksoft, Project: opensearchserver, Lines: 9, Source: AbstractTermFilter.java

Example 14: testAttributeReuse

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
public void testAttributeReuse() throws Exception {
  ThaiAnalyzer analyzer = new ThaiAnalyzer(Version.LUCENE_30);
  // just consume
  TokenStream ts = analyzer.tokenStream("dummy", "ภาษาไทย");
  assertTokenStreamContents(ts, new String[] { "ภาษา", "ไทย" });
  // this consumer adds flagsAtt, which this analyzer does not use. 
  ts = analyzer.tokenStream("dummy", "ภาษาไทย");
  ts.addAttribute(FlagsAttribute.class);
  assertTokenStreamContents(ts, new String[] { "ภาษา", "ไทย" });
}
 
Developer: jimaguere, Project: Maskana-Gestor-de-Conocimiento, Lines: 11, Source: TestThaiAnalyzer.java

Example 15: copyToWithoutPayloadClone

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; // import the required package/class
private void copyToWithoutPayloadClone(AttributeImpl target) {
  super.copyTo(target);
  ((FlagsAttribute) target).setFlags(flags);
  ((PayloadAttribute) target).setPayload(payload);
}
 
Developer: lamsfoundation, Project: lams, Lines: 6, Source: Token.java


Note: The org.apache.lucene.analysis.tokenattributes.FlagsAttribute examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and copyright remains with those authors; consult each project's license before distributing or reusing the code. Do not repost without permission.