This article collects typical usage examples of the Java method org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria.addColumn. If you are wondering what exactly HBaseProjectionCriteria.addColumn does, how to call it, or where to find examples of its use, the curated code samples below should help. You can also read further about the enclosing class, org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria.
Two code examples of HBaseProjectionCriteria.addColumn are shown below, ordered by popularity by default.
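Before the project examples, here is a minimal standalone sketch (written for this article, not taken from either example below) of how a projection criteria is typically built: addColumn restricts a lookup to a single family/qualifier pair, while its companion method addColumnFamily pulls in every column of a family. The "meta" family name is purely illustrative:

import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;

public class ProjectionSketch {
    public static void main(String[] args) {
        HBaseProjectionCriteria criteria = new HBaseProjectionCriteria();
        // Fetch only the single cell cf:count ...
        criteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
        // ... plus every column of the (illustrative) "meta" family.
        criteria.addColumnFamily("meta");
    }
}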
Example 1: buildTopology
import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria; // import the class the method depends on

public static StormTopology buildTopology(String hbaseRoot) {
    // A cycling batch spout that keeps re-emitting the same four words.
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1),
            new Values("trident", 1),
            new Values("needs", 1),
            new Values("javadoc", 1)
    );
    spout.setCycle(true);

    // Maps Trident tuples to HBase: the "word" field is the row key,
    // and "count" is written as a counter column in family "cf".
    TridentHBaseMapper tridentHBaseMapper = new SimpleTridentHBaseMapper()
            .withColumnFamily("cf")
            .withColumnFields(new Fields("word"))
            .withCounterFields(new Fields("count"))
            .withRowKeyField("word");

    HBaseValueMapper rowToStormValueMapper = new WordCountValueMapper();

    // Project only the cf:count column when querying HBase.
    HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
    projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));

    HBaseState.Options options = new HBaseState.Options()
            .withConfigKey(hbaseRoot)
            .withDurability(Durability.SYNC_WAL)
            .withMapper(tridentHBaseMapper)
            .withProjectionCriteria(projectionCriteria)
            .withRowToStormValueMapper(rowToStormValueMapper)
            .withTableName("WordCount");
    StateFactory factory = new HBaseStateFactory(options);

    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    // Persist each batch to HBase, then query the same table back as static state.
    stream.partitionPersist(factory, fields, new HBaseUpdater(), new Fields());
    TridentState state = topology.newStaticState(factory);
    stream = stream.stateQuery(state, new Fields("word"), new HBaseQuery(),
            new Fields("columnName", "columnValue"));
    stream.each(new Fields("word", "columnValue"), new PrintFunction(), new Fields());
    return topology.build();
}
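The projected cf:count cells come back through WordCountValueMapper. The sketch below is not the actual storm-hbase source, but it approximates what such an HBaseValueMapper looks like: each cell that survives the projection becomes one ("columnName", "columnValue") tuple, with HBase counter values decoded as 8-byte longs:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.storm.hbase.bolt.mapper.HBaseValueMapper;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.ITuple;
import org.apache.storm.tuple.Values;

public class CountValueMapper implements HBaseValueMapper {
    @Override
    public List<Values> toValues(ITuple input, Result result) throws Exception {
        List<Values> values = new ArrayList<>();
        // listCells() returns null for an empty Result, so guard before iterating.
        if (result.listCells() != null) {
            for (Cell cell : result.listCells()) {
                values.add(new Values(
                        Bytes.toString(CellUtil.cloneQualifier(cell)),
                        Bytes.toLong(CellUtil.cloneValue(cell)))); // counters are 8-byte longs
            }
        }
        return values;
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("columnName", "columnValue"));
    }
}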
Example 2: main
import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria; // import the class the method depends on

public static void main(String[] args) throws Exception {
    Config config = new Config();
    // Pass the HBase root directory (if given) to the bolts via the topology config.
    Map<String, Object> hbConf = new HashMap<String, Object>();
    if (args.length > 0) {
        hbConf.put("hbase.rootdir", args[0]);
    }
    config.put("hbase.conf", hbConf);

    WordSpout spout = new WordSpout();
    TotalWordCounter totalBolt = new TotalWordCounter();

    // For lookups only the row key mapping matters; the columns to read
    // are chosen by the projection criteria below.
    SimpleHBaseMapper mapper = new SimpleHBaseMapper().withRowKeyField("word");
    HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
    projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
    WordCountValueMapper rowToTupleMapper = new WordCountValueMapper();

    HBaseLookupBolt hBaseLookupBolt = new HBaseLookupBolt("WordCount", mapper, rowToTupleMapper)
            .withConfigKey("hbase.conf")
            .withProjectionCriteria(projectionCriteria);

    // wordspout -> lookupbolt -> totalCountBolt
    // WORD_SPOUT, LOOKUP_BOLT and TOTAL_COUNT_BOLT are String constants on the enclosing class.
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(WORD_SPOUT, spout, 1);
    builder.setBolt(LOOKUP_BOLT, hBaseLookupBolt, 1).shuffleGrouping(WORD_SPOUT);
    builder.setBolt(TOTAL_COUNT_BOLT, totalBolt, 1).fieldsGrouping(LOOKUP_BOLT, new Fields("columnName"));

    if (args.length == 1) {
        // One argument: run for 30 seconds on an in-process local cluster.
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("test", config, builder.createTopology());
        Thread.sleep(30000);
        cluster.killTopology("test");
        cluster.shutdown();
        System.exit(0);
    } else if (args.length == 2) {
        // Two arguments: submit to a remote cluster under the given topology name.
        StormSubmitter.submitTopology(args[1], config, builder.createTopology());
    } else {
        System.out.println("Usage: LookupWordCount <hbase.rootdir>");
    }
}
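Under the hood, the lookup bolt turns each registered ColumnMetaData into the corresponding call on an HBase Get request. The fragment below is a hypothetical illustration of that translation (the row key "storm" and the GetSketch wrapper class are made up for this article; this is not storm-hbase source code):

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class GetSketch {
    // Roughly what a projected lookup for row "storm" boils down to.
    public static Get projectedGet() {
        Get get = new Get(Bytes.toBytes("storm")); // row key taken from the incoming tuple
        // One Get.addColumn call per ColumnMetaData registered via addColumn.
        get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("count"));
        return get;
    }
}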