

Java Key Class Code Examples

This article compiles typical usage examples of the Java class org.apache.hadoop.util.bloom.Key. If you are wondering what the Key class is for, how to use it, or what real-world usage looks like, the curated examples below may help.


The Key class belongs to the org.apache.hadoop.util.bloom package. Fifteen code examples of the class are shown below, ordered by popularity by default.
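
Most of the examples below follow the same pattern: wrap the raw bytes of a value in a Key, then add it to (or test it against) a Bloom filter from the same package. Here is a minimal, self-contained sketch of that pattern; the constructor arguments are borrowed from the test examples below, everything else is illustrative:

import org.apache.hadoop.util.bloom.BloomFilter;
import org.apache.hadoop.util.bloom.Key;
import org.apache.hadoop.util.hash.Hash;

public class KeyQuickStart {
    public static void main(String[] args) {
        // vector size 100, 5 hash functions, MurmurHash (same parameters as the tests below)
        BloomFilter filter = new BloomFilter(100, 5, Hash.MURMUR_HASH);

        // a Key simply wraps the bytes of the value being tracked
        filter.add(new Key("ABC".getBytes()));

        System.out.println(filter.membershipTest(new Key("ABC".getBytes()))); // true
        System.out.println(filter.membershipTest(new Key("XYZ".getBytes()))); // false, with high probability
    }
}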

Example 1: execute

import org.apache.hadoop.util.bloom.Key; // import the required package/class
public void execute(Tuple tuple) {
    if(tuple.getSourceComponent().equals(UPSTREAM_COMPONENT_ID)){
        // data stream: test each incoming word against the Bloom filter
        String word = tuple.getStringByField(UPSTREAM_FIEDLS);
        if(word.length() <= 0) {
            collector.ack(tuple);
            return;
        }
        collector.emit(Constraints.coinFileds, new Values(word));
        Key ky = new Key(word.getBytes());
        if(bf.membershipTest(ky))
            collector.emit(Constraints.hotFileds, tuple, new Values(word));
        else
            collector.emit(Constraints.nohotFileds, tuple, new Values(word));

    }else {
        // control stream: type 1 adds the word to the filter, type 0 removes it
        String key = tuple.getStringByField(Constraints.wordFileds);
        Integer type = tuple.getIntegerByField(Constraints.typeFileds);
        Key hk = new Key(key.getBytes());
        if(!bf.membershipTest(hk) && type.equals(1))
            bf.add(hk);
        if(bf.membershipTest(hk) && type.equals(0))
            bf.delete(hk);
    }
    collector.ack(tuple);
}
 
Developer: DStream-Storm, Project: DStream, Lines of code: 26, Source file: SplitterBolt.java

Example 2: shouldWriteAndReadFilter

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Test
public void shouldWriteAndReadFilter() throws IOException {
    // Given
    final BloomFilter filter = new BloomFilter(100, 5, Hash.MURMUR_HASH);
    filter.add(new Key("ABC".getBytes()));
    filter.add(new Key("DEF".getBytes()));
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final DataOutputStream out = new DataOutputStream(baos);
    filter.write(out);
    String x = new String(baos.toByteArray(), AccumuloStoreConstants.BLOOM_FILTER_CHARSET);
    final ByteArrayInputStream bais = new ByteArrayInputStream(x.getBytes(AccumuloStoreConstants.BLOOM_FILTER_CHARSET));

    // When
    final DataInputStream in = new DataInputStream(bais);
    final BloomFilter read = new BloomFilter();
    read.readFields(in);

    // Then
    assertTrue(read.membershipTest(new Key("ABC".getBytes())));
    assertTrue(read.membershipTest(new Key("DEF".getBytes())));
    assertFalse(read.membershipTest(new Key("lkjhgfdsa".getBytes())));
}
 
Developer: gchq, Project: Gaffer, Lines of code: 23, Source file: FilterWritabilityTest.java
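
The test above serializes the filter, detours through a String using a project-specific charset constant, and reads it back with readFields. A minimal sketch of the same Writable round trip staying on plain byte streams (it assumes nothing about AccumuloStoreConstants):

final BloomFilter original = new BloomFilter(100, 5, Hash.MURMUR_HASH);
original.add(new Key("ABC".getBytes()));

// Writable serialization: write(DataOutput) / readFields(DataInput)
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
original.write(new DataOutputStream(baos));

final BloomFilter copy = new BloomFilter();
copy.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));

assertTrue(copy.membershipTest(new Key("ABC".getBytes())));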

Example 3: aggregate

import org.apache.hadoop.util.bloom.Key; // import the required package/class
/** {@inheritDoc} */
@Override
public void aggregate(final String data, final String metadata) throws IOException, InterruptedException {
	// instantiate a bloom filter input key initialized by the data
	final Key key = new Key(data.getBytes());

	// if the key is already in the filter, forget about it
	if (this.filter.membershipTest(key))
		return;

	// add the key to the bloom filter
	this.filter.add(key);

	if (this.isCombining())
		this.collect(data);
	else
		this.total++;
}
 
Developer: boalang, Project: compiler, Lines of code: 19, Source file: UniqueAggregator.java

Example 4: aggregate

import org.apache.hadoop.util.bloom.Key; // import the required package/class
/** {@inheritDoc} */
@Override
public void aggregate(final String data, final String metadata) throws IOException, InterruptedException {
	// instantiate a bloom filter input key initialized by the data
	Key key = new Key(data.getBytes());

	// if the key is already in the filter, forget it
	if (this.filter.membershipTest(key))
		return;

	// add the key to the bloom filter
	this.filter.add(key);

	// and collect it
	this.collect(data);
}
 
Developer: boalang, Project: compiler, Lines of code: 17, Source file: DistinctAggregator.java
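
Examples 3 and 4 assume that this.filter was created elsewhere in the aggregator. A stripped-down sketch of the same test-then-add de-duplication with the filter created locally (the sizing parameters are placeholders, not values from the Boa source):

BloomFilter seen = new BloomFilter(1 << 20, 5, Hash.MURMUR_HASH); // placeholder sizing

long distinct = 0;
for (String data : new String[] { "a", "b", "a", "c" }) {
    Key key = new Key(data.getBytes());
    if (seen.membershipTest(key))
        continue;            // probably seen before, skip it
    seen.add(key);
    distinct++;              // first occurrence (up to the filter's false-positive rate)
}
System.out.println(distinct); // 3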

Example 5: exec

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
public Tuple exec(Tuple input) throws IOException {
    if (input == null || input.size() == 0) return null;

    // Strip off the initial level of bag
    DataBag values = (DataBag)input.get(0);
    Iterator<Tuple> it = values.iterator();
    Tuple t = it.next();

    // If the input tuple has only one field, then we'll extract
    // that field and serialize it into a key.  If it has multiple
    // fields, we'll serialize the whole tuple.
    byte[] b;
    if (t.size() == 1) b = DataType.toBytes(t.get(0));
    else b = DataType.toBytes(t, DataType.TUPLE);

    Key k = new Key(b);
    filter = new BloomFilter(vSize, numHash, hType);
    filter.add(k);

    return TupleFactory.getInstance().newTuple(bloomOut());
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines of code: 23, Source file: BuildBloom.java
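
BuildBloom above creates one small filter per input bag; an algebraic UDF like this typically merges those partial filters afterwards. Hadoop's filter API supports that directly via or(). The merge step below is an assumption about how the partial results could be combined, not code from the BuildBloom class:

BloomFilter partial1 = new BloomFilter(1024, 3, Hash.MURMUR_HASH);
partial1.add(new Key("alice".getBytes()));

BloomFilter partial2 = new BloomFilter(1024, 3, Hash.MURMUR_HASH);
partial2.add(new Key("bob".getBytes()));

// OR the bit vectors together; both filters must share vector size, hash count and hash type
partial1.or(partial2);

System.out.println(partial1.membershipTest(new Key("bob".getBytes()))); // true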

Example 6: map

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
public void map(Object key, Text value, Context context)
		throws IOException, InterruptedException {
	Map<String, String> parsed = transformXmlToMap(value.toString());

	String body = parsed.get("Text");
	if (isNullOrEmpty(body)) {
		return;
	}
	StringTokenizer tokenizer = new StringTokenizer(body);
	while (tokenizer.hasMoreTokens()) {
		String word = tokenizer.nextToken();
		if (filter.membershipTest(new Key(word.getBytes()))) {
			context.write(value, NullWritable.get());
			break;
		}
	}

}
 
Developer: geftimov, Project: hadoop-map-reduce-patterns, Lines of code: 20, Source file: BloomFilter.java
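
This mapper, like the join mappers in examples 7 and 9, assumes that filter has already been populated. A common approach is to deserialize a filter written by an earlier job inside setup(); a hedged sketch of such a loader follows, with the file location being a placeholder rather than anything from the original project:

// called from the mapper's setup(); the path would typically point at a distributed-cache file
static BloomFilter loadFilter(File file) throws IOException {
    BloomFilter filter = new BloomFilter();
    try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
        filter.readFields(in);
    }
    return filter;
}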

Example 7: map

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
protected void map(Text key, Text value, Context context)
    throws IOException, InterruptedException {
  System.out.println("K[" + key + "]");
  if(filter.membershipTest(new Key(key.toString().getBytes()))) {
    context.write(key, value);
  }
}
 
Developer: Hanmourang, Project: hiped2, Lines of code: 9, Source file: BloomJoin.java

Example 8: map

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
public void map(Text key, Text value,
                OutputCollector<NullWritable, BloomFilter> output,
                Reporter reporter) throws IOException {

  System.out.println("K[" + key + "]");

  int age = Integer.valueOf(value.toString());
  if (age > 30) {
    filter.add(new Key(key.toString().getBytes()));
  }
  collector = output;
}
 
Developer: Hanmourang, Project: hiped2, Lines of code: 14, Source file: BloomFilterCreator.java
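
The map method above only remembers the OutputCollector; the filter itself presumably gets emitted once when the task finishes. A hedged sketch of what such a close() might look like in the same mapper (not taken verbatim from BloomFilterCreator):

@Override
public void close() throws IOException {
  // emit the completed filter once, after every record has been added
  collector.collect(NullWritable.get(), filter);
}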

Example 9: map

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
protected void map(LongWritable offset, Text value, Context context)
    throws IOException, InterruptedException {
  String user = getUsername(value);
  if (filter.membershipTest(new Key(user.getBytes()))) {
    Tuple outputValue = new Tuple();
    outputValue.setInt(ValueFields.DATASET, getDataset());
    outputValue.setString(ValueFields.DATA, value.toString());

    context.write(new Text(user), outputValue);
  }
}
 
Developer: Hanmourang, Project: hiped2, Lines of code: 13, Source file: BloomJoin.java

Example 10: map

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
  User user = User.fromText(value);
  if ("CA".equals(user.getState())) {
    filter.add(new Key(user.getName().getBytes()));
  }
}
 
Developer: Hanmourang, Project: hiped2, Lines of code: 8, Source file: BloomFilterCreator.java

Example 11: shouldAcceptValidFilter

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Test
public void shouldAcceptValidFilter() {
    // Given
    final BloomFilter filter = new BloomFilter(100, 5, Hash.MURMUR_HASH);
    filter.add(new Key("ABC".getBytes()));
    filter.add(new Key("DEF".getBytes()));

    // Then
    assertTrue(filter.membershipTest(new Key("ABC".getBytes())));
    assertTrue(filter.membershipTest(new Key("DEF".getBytes())));
    assertFalse(filter.membershipTest(new Key("lkjhgfdsa".getBytes())));
}
 
Developer: gchq, Project: Gaffer, Lines of code: 13, Source file: FilterWritabilityTest.java

Example 12: addUriToBloomFilter

import org.apache.hadoop.util.bloom.Key; // import the required package/class
public void addUriToBloomFilter(String id, String uri) {
	KR2RMLBloomFilter bf = null;
	if(!idToBloomFilter.containsKey(id))
	{
		idToBloomFilter.putIfAbsent(id, new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH));
	}
	bf = idToBloomFilter.get(id);
	
	Key k = new Key(uri.getBytes(UTF8_CHARSET));
	bf.add(k);
	return;
}
 
Developer: therelaxist, Project: spring-usc, Lines of code: 13, Source file: KR2RMLBloomFilterManager.java

Example 13: exec

import org.apache.hadoop.util.bloom.Key; // import the required package/class
@Override
public Boolean exec(Tuple input) throws IOException {
    if (filter == null) {
        init();
    }
    byte[] b;
    if (input.size() == 1) b = DataType.toBytes(input.get(0));
    else b = DataType.toBytes(input, DataType.TUPLE);

    Key k = new Key(b);
    return filter.membershipTest(k);
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines of code: 13, Source file: Bloom.java

Example 14: CBMessageFilter

import org.apache.hadoop.util.bloom.Key; // import the required package/class
public CBMessageFilter(boolean filterEnabled, String filterPattern){
	this.filterEnabled = filterEnabled;

	if(this.filterEnabled){
		String[] filters = filterPattern.split(",");
		for(String key1 : filters){
			if(key1 != null && key1.trim().length() > 0){
				Key key = new Key(key1.getBytes());
				filter.add(key);
			}
		}
	}
}
 
Developer: paypal, Project: cbflume, Lines of code: 14, Source file: CBMessageFilter.java

Example 15: membershiptest

import org.apache.hadoop.util.bloom.Key; // import the required package/class
public boolean membershiptest(String key){
	// return everything if the filter is not enabled.
	if(!this.filterEnabled) return true;
	//System.out.println("Comparing key "+key);
	if(key==null) return false;
	return filter.membershipTest(new Key(key.getBytes()));
}
 
Developer: paypal, Project: cbflume, Lines of code: 8, Source file: CBMessageFilter.java
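
Taken together, examples 14 and 15 give a small string-based filter API. A hypothetical usage sketch, assuming the class initializes its internal filter field with sensible defaults:

CBMessageFilter messageFilter = new CBMessageFilter(true, "alpha,beta,gamma");

messageFilter.membershiptest("alpha");   // true: added from the filter pattern
messageFilter.membershiptest("delta");   // false, with high probability
messageFilter.membershiptest(null);      // false: null keys are rejected

// with filtering disabled, everything passes
CBMessageFilter passThrough = new CBMessageFilter(false, "");
passThrough.membershiptest("anything");  // true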


Note: the org.apache.hadoop.util.bloom.Key examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their authors, and copyright of the source code remains with the original authors; refer to each project's license before distributing or reusing the code. Do not reproduce this article without permission.