

Java Result.getColumn Method Code Examples

This article collects typical code examples of the Java method org.apache.hadoop.hbase.client.Result.getColumn. If you are asking what Result.getColumn does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also browse further usage examples of its enclosing class, org.apache.hadoop.hbase.client.Result.


Four code examples of the Result.getColumn method are shown below, sorted by popularity by default.
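Before the full examples, here is a minimal sketch of the basic pattern: fetch a row with a Get and read one column's versions via Result.getColumn. It assumes the older HBase client API used throughout this page, where getColumn(family, qualifier) returns a List<KeyValue>; the table name "myTable" and row key "row1" are placeholders for illustration, while the family "col" and qualifier "q" mirror the examples below.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class GetColumnSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "myTable");   // table name assumed for illustration
    try {
      Get get = new Get(Bytes.toBytes("row1"));   // row key assumed for illustration
      get.setMaxVersions();                       // request every stored version, not just the latest
      Result result = table.get(get);
      // getColumn returns all versions of column col:q contained in this Result
      List<KeyValue> kvs = result.getColumn(Bytes.toBytes("col"), Bytes.toBytes("q"));
      for (KeyValue kv : kvs) {
        System.out.println(kv.getTimestamp() + " -> " + Bytes.toString(kv.getValue()));
      }
    } finally {
      table.close();
    }
  }
}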

Example 1: scanAfterBulkLoad

import org.apache.hadoop.hbase.client.Result; // import the package/class this method depends on
private Result scanAfterBulkLoad(ResultScanner scanner, Result result, String expctedVal)
    throws IOException {
  while (result != null) {
    List<KeyValue> kvs = result.getColumn(Bytes.toBytes("col"), Bytes.toBytes("q"));
    for (KeyValue _kv : kvs) {
      if (Bytes.toString(_kv.getRow()).equals("row1")) {
        System.out.println(Bytes.toString(_kv.getRow()));
        System.out.println(Bytes.toString(_kv.getQualifier()));
        System.out.println(Bytes.toString(_kv.getValue()));
        Assert.assertEquals(expctedVal, Bytes.toString(_kv.getValue()));
      }
    }
    result = scanner.next();
  }
  return result;
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestScannerWithBulkload.java

Example 2: testBulkLoad

import org.apache.hadoop.hbase.client.Result; // import the package/class this method depends on
@Test
public void testBulkLoad() throws Exception {
  TableName tableName = TableName.valueOf("testBulkLoad");
  long l = System.currentTimeMillis();
  HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
  createTable(admin, tableName);
  Scan scan = createScan();
  final HTable table = init(admin, l, scan, tableName);
  // use bulkload
  final Path hfilePath = writeToHFile(l, "/temp/testBulkLoad/", "/temp/testBulkLoad/col/file",
    false);
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setBoolean("hbase.mapreduce.bulkload.assign.sequenceNumbers", true);
  final LoadIncrementalHFiles bulkload = new LoadIncrementalHFiles(conf);
  bulkload.doBulkLoad(hfilePath, table);
  ResultScanner scanner = table.getScanner(scan);
  Result result = scanner.next();
  result = scanAfterBulkLoad(scanner, result, "version2");
  Put put0 = new Put(Bytes.toBytes("row1"));
  put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l, Bytes
      .toBytes("version3")));
  table.put(put0);
  admin.flush(tableName);
  scanner = table.getScanner(scan);
  result = scanner.next();
  while (result != null) {
    List<KeyValue> kvs = result.getColumn(Bytes.toBytes("col"), Bytes.toBytes("q"));
    for (KeyValue _kv : kvs) {
      if (Bytes.toString(_kv.getRow()).equals("row1")) {
        System.out.println(Bytes.toString(_kv.getRow()));
        System.out.println(Bytes.toString(_kv.getQualifier()));
        System.out.println(Bytes.toString(_kv.getValue()));
        Assert.assertEquals("version3", Bytes.toString(_kv.getValue()));
      }
    }
    result = scanner.next();
  }
  scanner.close();
  table.close();
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 41, Source file: TestScannerWithBulkload.java

Example 3: testBulkLoadNativeHFile

import org.apache.hadoop.hbase.client.Result; // import the package/class this method depends on
@Test
public void testBulkLoadNativeHFile() throws Exception {
  TableName tableName = TableName.valueOf("testBulkLoadNativeHFile");
  long l = System.currentTimeMillis();
  HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
  createTable(admin, tableName);
  Scan scan = createScan();
  final HTable table = init(admin, l, scan, tableName);
  // use bulkload
  final Path hfilePath = writeToHFile(l, "/temp/testBulkLoadNativeHFile/",
    "/temp/testBulkLoadNativeHFile/col/file", true);
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setBoolean("hbase.mapreduce.bulkload.assign.sequenceNumbers", true);
  final LoadIncrementalHFiles bulkload = new LoadIncrementalHFiles(conf);
  bulkload.doBulkLoad(hfilePath, table);
  ResultScanner scanner = table.getScanner(scan);
  Result result = scanner.next();
  // We had 'version0', 'version1' for 'row1,col:q' in the table.
  // Bulk load added 'version2'; the scanner should be able to see 'version2'.
  result = scanAfterBulkLoad(scanner, result, "version2");
  Put put0 = new Put(Bytes.toBytes("row1"));
  put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l, Bytes
      .toBytes("version3")));
  table.put(put0);
  admin.flush(tableName);
  scanner = table.getScanner(scan);
  result = scanner.next();
  while (result != null) {
    List<KeyValue> kvs = result.getColumn(Bytes.toBytes("col"), Bytes.toBytes("q"));
    for (KeyValue _kv : kvs) {
      if (Bytes.toString(_kv.getRow()).equals("row1")) {
        System.out.println(Bytes.toString(_kv.getRow()));
        System.out.println(Bytes.toString(_kv.getQualifier()));
        System.out.println(Bytes.toString(_kv.getValue()));
        Assert.assertEquals("version3", Bytes.toString(_kv.getValue()));
      }
    }
    result = scanner.next();
  }
  scanner.close();
  table.close();
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 43, Source file: TestScannerWithBulkload.java

Example 4: println

import org.apache.hadoop.hbase.client.Result; // import the package/class this method depends on
void println(Result result) {
  StringBuilder sb = new StringBuilder();
  sb.append("row=" + Bytes.toString(result.getRow()));

  List<KeyValue> kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c1"));
  if (kv.size() != 0) {
    sb.append(", f:c1=" + Bytes.toInt(kv.get(0).getValue()));
  }

  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c2"));
  if (kv.size() != 0) {
    sb.append(", f:c2=" + Bytes.toString(kv.get(0).getValue()));
  }

  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c3"));
  if (kv.size() != 0) {
    sb.append(", f:c3=" + Bytes.toDouble(kv.get(0).getValue()));
  }

  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c4"));
  if (kv.size() != 0) {
    sb.append(", f:c4=" + Bytes.toString(kv.get(0).getValue()));
  }
  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c5"));
  if (kv.size() != 0) {
    sb.append(", f:c5=" + Bytes.toString(kv.get(0).getValue()));
  }

  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c6"));
  if (kv.size() != 0) {
    sb.append(", f:c6=" + Bytes.toString(kv.get(0).getValue()));
  }
  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c7"));
  if (kv.size() != 0) {
    sb.append(", f:c7=" + Bytes.toInt(kv.get(0).getValue()));
  }
  kv = result.getColumn(Bytes.toBytes("f"), Bytes.toBytes("c8"));
  if (kv.size() != 0) {
    sb.append(", f:c8=" + Bytes.toString(kv.get(0).getValue()));
  }
  System.out.println(sb.toString());
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 43, Source file: IndexTable.java
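For context, a helper like the println method above would typically be driven by a scan over the table it decodes. The sketch below is hypothetical and not taken from the IndexTable source: the table handle is assumed to be an HTable, and the scan is restricted to family "f" because that is the only family the helper reads.

import java.io.IOException;

import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical driver: print every row decoded by the println helper above.
void printAll(HTable table) throws IOException {
  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("f"));      // the println helper only reads columns in family "f"
  ResultScanner scanner = table.getScanner(scan);
  try {
    for (Result r : scanner) {
      println(r);
    }
  } finally {
    scanner.close();
  }
}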


Note: The org.apache.hadoop.hbase.client.Result.getColumn method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce this article without permission.