This article collects typical usage examples of the Java method org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria.addColumn. If you are wondering what HBaseProjectionCriteria.addColumn does, how to call it, or what real uses of it look like, the curated examples below should help. You can also read further about the enclosing class, org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria.
Two code examples of HBaseProjectionCriteria.addColumn are shown below.
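Before the full examples, a minimal sketch of the method itself may help. An HBaseProjectionCriteria narrows an HBase lookup to specific columns or column families; addColumn takes an HBaseProjectionCriteria.ColumnMetaData built from a column family name and a column qualifier. The "cf" family and "count" qualifier below mirror the examples that follow; the "stats" family is a hypothetical illustration of the companion addColumnFamily method.

import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;

// Fetch only the single column cf:count, plus (hypothetically) every
// column in the "stats" family. The criteria is then handed to a lookup
// bolt or HBaseState via withProjectionCriteria, as the examples show.
HBaseProjectionCriteria criteria = new HBaseProjectionCriteria();
criteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
criteria.addColumnFamily("stats");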
Example 1: buildTopology
import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria; // class that declares addColumn

public static StormTopology buildTopology(String hbaseRoot) {
    Fields fields = new Fields("word", "count");
    // A fixed set of test tuples, replayed forever (setCycle(true)).
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1),
            new Values("trident", 1),
            new Values("needs", 1),
            new Values("javadoc", 1)
    );
    spout.setCycle(true);

    // Maps Trident tuples to HBase writes: row key and column name from
    // "word", counter column from "count", all under column family "cf".
    TridentHBaseMapper tridentHBaseMapper = new SimpleTridentHBaseMapper()
            .withColumnFamily("cf")
            .withColumnFields(new Fields("word"))
            .withCounterFields(new Fields("count"))
            .withRowKeyField("word");

    HBaseValueMapper rowToStormValueMapper = new WordCountValueMapper();

    // Restrict lookups to the single column cf:count.
    HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
    projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));

    HBaseState.Options options = new HBaseState.Options()
            .withConfigKey(hbaseRoot)
            .withDurability(Durability.SYNC_WAL)
            .withMapper(tridentHBaseMapper)
            .withProjectionCriteria(projectionCriteria)
            .withRowToStormValueMapper(rowToStormValueMapper)
            .withTableName("WordCount");

    StateFactory factory = new HBaseStateFactory(options);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);

    // Write counts to HBase, then query the same table as static state.
    stream.partitionPersist(factory, fields, new HBaseUpdater(), new Fields());
    TridentState state = topology.newStaticState(factory);
    stream = stream.stateQuery(state, new Fields("word"), new HBaseQuery(),
            new Fields("columnName", "columnValue"));
    stream.each(new Fields("word", "columnValue"), new PrintFunction(), new Fields());
    return topology.build();
}
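Note the pattern at the end of buildTopology: partitionPersist writes each batch of word counts through HBaseUpdater, while newStaticState exposes the same WordCount table as a read-only source that stateQuery consults per "word". A single HBaseStateFactory thus serves both the write path and the read path, and the projection criteria ensures each read fetches only cf:count rather than the whole row.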
Example 2: main
import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria; // class that declares addColumn

// Component ids; defined as class-level constants in the full source.
private static final String WORD_SPOUT = "WORD_SPOUT";
private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
private static final String TOTAL_COUNT_BOLT = "TOTAL_COUNT_BOLT";

public static void main(String[] args) throws Exception {
    Config config = new Config();
    Map<String, Object> hbConf = new HashMap<String, Object>();
    if (args.length > 0) {
        hbConf.put("hbase.rootdir", args[0]);
    }
    config.put("hbase.conf", hbConf);

    WordSpout spout = new WordSpout();
    TotalWordCounter totalBolt = new TotalWordCounter();

    // The row key for each lookup comes from the tuple's "word" field.
    SimpleHBaseMapper mapper = new SimpleHBaseMapper().withRowKeyField("word");

    // Fetch only cf:count on each lookup instead of the whole row.
    HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
    projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));

    WordCountValueMapper rowToTupleMapper = new WordCountValueMapper();

    HBaseLookupBolt hBaseLookupBolt = new HBaseLookupBolt("WordCount", mapper, rowToTupleMapper)
            .withConfigKey("hbase.conf")
            .withProjectionCriteria(projectionCriteria);

    // wordspout -> lookupbolt -> totalCountBolt
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(WORD_SPOUT, spout, 1);
    builder.setBolt(LOOKUP_BOLT, hBaseLookupBolt, 1).shuffleGrouping(WORD_SPOUT);
    builder.setBolt(TOTAL_COUNT_BOLT, totalBolt, 1).fieldsGrouping(LOOKUP_BOLT, new Fields("columnName"));

    if (args.length == 1) {
        // One argument: run in a LocalCluster for 30 seconds, then shut down.
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("test", config, builder.createTopology());
        Thread.sleep(30000);
        cluster.killTopology("test");
        cluster.shutdown();
        System.exit(0);
    } else if (args.length == 2) {
        // Two arguments: submit to a remote cluster under the given name.
        StormSubmitter.submitTopology(args[1], config, builder.createTopology());
    } else {
        System.out.println("Usage: LookupWordCount <hbase.rootdir>");
    }
}
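For reference, invocation might look like the following; the jar name and fully qualified class name are assumptions for illustration, not something the snippet specifies:

# One argument (hbase.rootdir): runs for ~30 s in a LocalCluster, then exits.
storm jar storm-hbase-examples.jar org.apache.storm.hbase.topology.LookupWordCount hdfs://localhost:9000/hbase

# Two arguments: the second names the topology submitted via StormSubmitter.
storm jar storm-hbase-examples.jar org.apache.storm.hbase.topology.LookupWordCount hdfs://localhost:9000/hbase lookup-word-count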