

Java KeyValue.parseColumn Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hbase.KeyValue.parseColumn. If you are wondering what KeyValue.parseColumn does, how to use it, or where to find examples of it, the hand-picked code samples below may help. You can also explore other usage examples of the containing class, org.apache.hadoop.hbase.KeyValue.


A total of 15 code examples of KeyValue.parseColumn are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
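Before the examples, here is a minimal, self-contained sketch of the method's contract, since every sample below branches on it: parseColumn splits a colon-divided "family:qualifier" byte array into its parts, and returns a one-element array when only a family is given. The class name ParseColumnDemo and the sample column names are ours for illustration; note also that newer HBase releases deprecate this helper in favor of CellUtil.parseColumn.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseColumnDemo {
  public static void main(String[] args) {
    // "family:qualifier" splits on the colon into a two-element array.
    byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes("cf:col1"));
    System.out.println(Bytes.toString(famAndQf[0])); // prints: cf
    System.out.println(Bytes.toString(famAndQf[1])); // prints: col1

    // A bare family name (no colon) yields a one-element array,
    // which is why the examples below check famAndQf.length.
    byte[][] famOnly = KeyValue.parseColumn(Bytes.toBytes("cf"));
    System.out.println(famOnly.length); // prints: 1
  }
}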

Example 1: deleteAllTs

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public void deleteAllTs(ByteBuffer tableName,
                        ByteBuffer row,
                        ByteBuffer column,
    long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Delete delete = new Delete(getBytes(row));
    addAttributes(delete, attributes);
    byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
    if (famAndQf.length == 1) {
      delete.deleteFamily(famAndQf[0], timestamp);
    } else {
      delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
    }
    table.delete(delete);

  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 26, Source: ThriftServerRunner.java

Example 2: appendFromThrift

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * From a {@link TAppend} create an {@link Append}.
 * @param tappend the Thrift version of an append.
 * @return an {@link Append} that the {@link TAppend} represented.
 */
public static Append appendFromThrift(TAppend tappend) {
  Append append = new Append(tappend.getRow());
  List<ByteBuffer> columns = tappend.getColumns();
  List<ByteBuffer> values = tappend.getValues();

  if (columns.size() != values.size()) {
    throw new IllegalArgumentException(
        "Sizes of columns and values in tappend object are not matching");
  }

  int length = columns.size();

  for (int i = 0; i < length; i++) {
    byte[][] famAndQf = KeyValue.parseColumn(getBytes(columns.get(i)));
    append.add(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
  }
  return append;
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 24, Source: ThriftUtilities.java
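As a hedged usage sketch (not part of the ditb sources): the converter above is typically fed a TAppend assembled by a Thrift client, with each column given in the same colon-divided "family:qualifier" notation that KeyValue.parseColumn splits. The class name, row, column, and value literals below are ours for illustration, and an already-opened Table is assumed.

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.thrift.ThriftUtilities;
import org.apache.hadoop.hbase.thrift.generated.TAppend;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendFromThriftDemo {
  // Appends " world" to cf:greeting of row1, going through the Thrift type.
  static void demo(Table table) throws IOException {
    TAppend tappend = new TAppend();
    tappend.setRow(Bytes.toBytes("row1"));
    tappend.addToColumns(ByteBuffer.wrap(Bytes.toBytes("cf:greeting")));
    tappend.addToValues(ByteBuffer.wrap(Bytes.toBytes(" world")));

    Append append = ThriftUtilities.appendFromThrift(tappend);
    table.append(append);
  }
}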

Example 3: insertData

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
private static int insertData(TableName tableName, String column, double prob) throws IOException {
  byte[] k = new byte[3];
  byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));

  List<Put> puts = new ArrayList<>();
  for (int i = 0; i < 9; i++) {
    Put put = new Put(Bytes.toBytes("row" + i));
    put.setDurability(Durability.SKIP_WAL);
    put.add(famAndQf[0], famAndQf[1], k);
    put.setCellVisibility(new CellVisibility("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!"
        + TOPSECRET));
    puts.add(put);
  }
  try (Table table = new HTable(TEST_UTIL.getConfiguration(), tableName)) {
    table.put(puts);
  }
  return puts.size();
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 19, Source: TestScannersWithLabels.java

Example 4: buildResultFromModel

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
protected Result[] buildResultFromModel(final CellSetModel model) {
  List<Result> results = new ArrayList<Result>();
  for (RowModel row: model.getRows()) {
    List<Cell> kvs = new ArrayList<Cell>();
    for (CellModel cell: row.getCells()) {
      byte[][] split = KeyValue.parseColumn(cell.getColumn());
      byte[] column = split[0];
      byte[] qualifier = null;
      if (split.length == 1) {
        qualifier = HConstants.EMPTY_BYTE_ARRAY;
      } else if (split.length == 2) {
        qualifier = split[1];
      } else {
        throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
      }
      kvs.add(new KeyValue(row.getKey(), column, qualifier,
        cell.getTimestamp(), cell.getValue()));
    }
    results.add(Result.create(kvs));
  }
  return results.toArray(new Result[results.size()]);
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 23, Source: RemoteHTable.java

Example 5: run

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
public void run() {
  try {
    Get get = new Get(rowkey);
    get.setFilter(ftlist);
    get.setCacheBlocks(false);

    if (resultColumns != null && resultColumns.length != 0) {
      for (byte[] column : resultColumns) {
        byte[][] tmp = KeyValue.parseColumn(column);

        if (tmp.length == 1) {
          get.addFamily(tmp[0]);
        } else {
          get.addColumn(tmp[0], tmp[1]);
        }
      }
    }

    rsnew = table.get(get);
    table.close();
  } catch (Exception e) {
    rsnew = null;
    exception = e;
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 26, Source: IndexResultScanner.java

Example 6: insertData

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
static int insertData(Configuration conf, TableName tableName, String column, double prob)
    throws IOException {
  Random rng = new Random();
  byte[] k = new byte[3];
  byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
  List<Put> puts = new ArrayList<>();
  for (byte b1 = 'a'; b1 < 'z'; b1++) {
    for (byte b2 = 'a'; b2 < 'z'; b2++) {
      for (byte b3 = 'a'; b3 < 'z'; b3++) {
        if (rng.nextDouble() < prob) {
          k[0] = b1;
          k[1] = b2;
          k[2] = b3;
          Put put = new Put(k);
          put.setDurability(Durability.SKIP_WAL);
          put.add(famAndQf[0], famAndQf[1], k);
          puts.add(put);
        }
      }
    }
  }
  try (Connection conn = ConnectionFactory.createConnection(conf);
      Table table = conn.getTable(tableName)) {
    table.put(puts);
  }
  return puts.size();
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 28, Source: TestScannerResource.java

Example 7: get

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TCell> get(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if (famAndQf.length == 1) {
    return get(tableName, row, famAndQf[0], null, attributes);
  }
  if (famAndQf.length == 2) {
    return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
  }
  throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 15, Source: ThriftServerRunner.java

Example 8: getVer

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if(famAndQf.length == 1) {
    return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
  }
  if (famAndQf.length == 2) {
    return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
  }
  throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 14, Source: ThriftServerRunner.java

Example 9: getVerTs

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if (famAndQf.length == 1) {
    return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
  }
  if (famAndQf.length == 2) {
    return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
      attributes);
  }
  throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 14, Source: ThriftServerRunner.java

Example 10: getRowsWithColumnsTs

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TRowResult> getRowsWithColumnsTs(ByteBuffer tableName,
                                             List<ByteBuffer> rows,
    List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {

  Table table = null;
  try {
    List<Get> gets = new ArrayList<Get>(rows.size());
    table = getTable(tableName);
    if (metrics != null) {
      metrics.incNumRowKeysInBatchGet(rows.size());
    }
    for (ByteBuffer row : rows) {
      Get get = new Get(getBytes(row));
      addAttributes(get, attributes);
      if (columns != null) {
        for(ByteBuffer column : columns) {
          byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
          if (famAndQf.length == 1) {
            get.addFamily(famAndQf[0]);
          } else {
            get.addColumn(famAndQf[0], famAndQf[1]);
          }
        }
      }
      get.setTimeRange(0, timestamp);
      gets.add(get);
    }
    Result[] result = table.get(gets);
    return ThriftUtilities.rowResultFromHBase(result);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 40, Source: ThriftServerRunner.java

Example 11: atomicIncrement

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public long atomicIncrement(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
        throws IOError, IllegalArgument, TException {
  byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if(famAndQf.length == 1) {
    return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
  }
  return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 11, Source: ThriftServerRunner.java

Example 12: scannerOpenWithStop

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
    ByteBuffer stopRow, List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, TException {

  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
    addAttributes(scan, attributes);
    if(columns != null && columns.size() != 0) {
      for(ByteBuffer column : columns) {
        byte [][] famQf = KeyValue.parseColumn(getBytes(column));
        if(famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 30, Source: ThriftServerRunner.java

Example 13: scannerOpenWithPrefix

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public int scannerOpenWithPrefix(ByteBuffer tableName,
                                 ByteBuffer startAndPrefix,
                                 List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, TException {

  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startAndPrefix));
    addAttributes(scan, attributes);
    Filter f = new WhileMatchFilter(
        new PrefixFilter(getBytes(startAndPrefix)));
    scan.setFilter(f);
    if (columns != null && columns.size() != 0) {
      for(ByteBuffer column : columns) {
        byte [][] famQf = KeyValue.parseColumn(getBytes(column));
        if(famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 34, Source: ThriftServerRunner.java

Example 14: RowResultGenerator

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
public RowResultGenerator(final String tableName, final RowSpec rowspec,
    final Filter filter, final boolean cacheBlocks)
    throws IllegalArgumentException, IOException {
  Table table = RESTServlet.getInstance().getTable(tableName);
  try {
    Get get = new Get(rowspec.getRow());
    if (rowspec.hasColumns()) {
      for (byte[] col: rowspec.getColumns()) {
        byte[][] split = KeyValue.parseColumn(col);
        if (split.length == 1) {
          get.addFamily(split[0]);
        } else if (split.length == 2) {
          get.addColumn(split[0], split[1]);
        } else {
          throw new IllegalArgumentException("Invalid column specifier.");
        }
      }
    }
    get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
    get.setMaxVersions(rowspec.getMaxVersions());
    if (filter != null) {
      get.setFilter(filter);
    }
    get.setCacheBlocks(cacheBlocks);
    Result result = table.get(get);
    if (result != null && !result.isEmpty()) {
      valuesI = result.listCells().iterator();
    }
  } catch (DoNotRetryIOException | NeedUnmanagedConnectionException e) {
    // Warn here because Stargate will return 404 in the case where multiple
    // column families were specified but one did not exist -- currently
    // HBase will fail the whole Get.
    // Specifying multiple columns in a URI should be uncommon usage, but
    // leaving a record of what happened here in the log helps to avoid
    // confusion.
    LOG.warn(StringUtils.stringifyException(e));
  } finally {
    table.close();
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 41, Source: RowResultGenerator.java

Example 15: addColumn

import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Parses a combined family and qualifier and adds either both or just the
 * family in case there is no qualifier. This assumes the older colon
 * divided notation, e.g. "family:qualifier".
 *
 * @param scan The Scan to update.
 * @param familyAndQualifier family and qualifier
 * @throws IllegalArgumentException When familyAndQualifier is invalid.
 */
private static void addColumn(Scan scan, byte[] familyAndQualifier) {
  byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
  if (fq.length == 1) {
    scan.addFamily(fq[0]);
  } else if (fq.length == 2) {
    scan.addColumn(fq[0], fq[1]);
  } else {
    throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines: 20, Source: TableInputFormat.java


Note: The org.apache.hadoop.hbase.KeyValue.parseColumn method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and distribution and use are subject to the license of the corresponding project. Do not reproduce without permission.