

Java Append.addColumn Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hbase.client.Append.addColumn. If you are wondering what Append.addColumn does, how to call it, or what real-world uses of it look like, the hand-picked code samples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.client.Append.


Twelve code examples of Append.addColumn are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
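
Before the examples drawn from the HBase code base, here is a minimal sketch of the basic call pattern, assuming an HBase cluster reachable through the default client configuration and an existing table. The table name "my_table", column family "cf", qualifier "q", and row key "row1" are placeholders chosen for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendAddColumnSketch {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("my_table"))) {
      Append append = new Append(Bytes.toBytes("row1"));
      // addColumn(family, qualifier, value): the value is appended to whatever is
      // already stored in the cell (or written as-is if the cell does not exist yet).
      append.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));
      Result result = table.append(append); // the Result carries the updated cell value(s)
      System.out.println(Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"))));
    }
  }
}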

Example 1: testAppendHook

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testAppendHook() throws IOException {
  final TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + "." + name.getMethodName());
  Table table = util.createTable(tableName, new byte[][] { A, B, C });
  try {
    Append app = new Append(Bytes.toBytes(0));
    app.addColumn(A, A, A);

    verifyMethodResult(SimpleRegionObserver.class,
      new String[] { "hadPreAppend", "hadPostAppend", "hadPreAppendAfterRowLock" }, tableName,
      new Boolean[] { false, false, false });

    table.append(app);

    verifyMethodResult(SimpleRegionObserver.class,
      new String[] { "hadPreAppend", "hadPostAppend", "hadPreAppendAfterRowLock" }, tableName,
      new Boolean[] { true, true, true });
  } finally {
    util.deleteTable(tableName);
    table.close();
  }
}
 
Developer: apache, Project: hbase, Lines: 23, Source: TestRegionObserverInterface.java

Example 2: appendFromThrift

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
public static Append appendFromThrift(TAppend append) throws IOException {
  Append out = new Append(append.getRow());
  for (TColumnValue column : append.getColumns()) {
    out.addColumn(column.getFamily(), column.getQualifier(), column.getValue());
  }

  if (append.isSetAttributes()) {
    addAttributes(out, append.getAttributes());
  }

  if (append.isSetDurability()) {
    out.setDurability(durabilityFromThrift(append.getDurability()));
  }

  if (append.getCellVisibility() != null) {
    out.setCellVisibility(new CellVisibility(append.getCellVisibility().getExpression()));
  }

  return out;
}
 
Developer: apache, Project: hbase, Lines: 21, Source: ThriftUtilities.java

Example 3: appendFromThrift

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
/**
 * From a {@link TAppend} create an {@link Append}.
 * @param tappend the Thrift version of an append.
 * @return an Append that the {@link TAppend} represented.
 */
public static Append appendFromThrift(TAppend tappend) {
  Append append = new Append(tappend.getRow());
  List<ByteBuffer> columns = tappend.getColumns();
  List<ByteBuffer> values = tappend.getValues();

  if (columns.size() != values.size()) {
    throw new IllegalArgumentException(
        "Sizes of columns and values in tappend object are not matching");
  }

  int length = columns.size();

  for (int i = 0; i < length; i++) {
    byte[][] famAndQf = CellUtil.parseColumn(getBytes(columns.get(i)));
    append.addColumn(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
  }
  return append;
}
 
Developer: apache, Project: hbase, Lines: 24, Source: ThriftUtilities.java

Example 4: testAppend

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test (timeout=180000)
public void testAppend() throws Exception {

  AccessTestAction appendAction = new AccessTestAction() {
    @Override
    public Object run() throws Exception {
      byte[] row = TEST_ROW;
      byte[] qualifier = TEST_QUALIFIER;
      Put put = new Put(row);
      put.addColumn(TEST_FAMILY, qualifier, Bytes.toBytes(1));
      Append append = new Append(row);
      append.addColumn(TEST_FAMILY, qualifier, Bytes.toBytes(2));
      try(Connection conn = ConnectionFactory.createConnection(conf);
          Table t = conn.getTable(TEST_TABLE)) {
        t.put(put);
        t.append(append);
      }
      return null;
    }
  };

  verifyAllowed(appendAction, SUPERUSER, USER_ADMIN, USER_OWNER, USER_CREATE, USER_RW,
    USER_GROUP_WRITE);
  verifyDenied(appendAction, USER_RO, USER_NONE, USER_GROUP_CREATE, USER_GROUP_READ,
    USER_GROUP_ADMIN);
}
 
Developer: apache, Project: hbase, Lines: 27, Source: TestAccessController.java

Example 5: testAppendWithReadOnlyTable

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testAppendWithReadOnlyTable() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily"));
  boolean exceptionCaught = false;
  Append append = new Append(Bytes.toBytes("somerow"));
  append.setDurability(Durability.SKIP_WAL);
  append.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
      Bytes.toBytes("somevalue"));
  try {
    region.append(append);
  } catch (IOException e) {
    exceptionCaught = true;
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
  assertTrue(exceptionCaught);
}
 
Developer: apache, Project: hbase, Lines: 20, Source: TestHRegion.java

Example 6: testNoInsertsWithAppend

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testNoInsertsWithAppend() throws Exception {
  Append a = new Append(Bytes.toBytes("to_reject"));
  a.addColumn(
      Bytes.toBytes(SpaceQuotaHelperForTests.F1), Bytes.toBytes("to"), Bytes.toBytes("reject"));
  writeUntilViolationAndVerifyViolation(SpaceViolationPolicy.NO_INSERTS, a);
}
 
Developer: apache, Project: hbase, Lines: 8, Source: TestSpaceQuotas.java

Example 7: testNoWritesWithAppend

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testNoWritesWithAppend() throws Exception {
  Append a = new Append(Bytes.toBytes("to_reject"));
  a.addColumn(
      Bytes.toBytes(SpaceQuotaHelperForTests.F1), Bytes.toBytes("to"), Bytes.toBytes("reject"));
  writeUntilViolationAndVerifyViolation(SpaceViolationPolicy.NO_WRITES, a);
}
 
Developer: apache, Project: hbase, Lines: 8, Source: TestSpaceQuotas.java

Example 8: run

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Override
public void run() {
  int count = 0;
  while (count < appendCounter) {
    Append app = new Append(appendRow);
    app.addColumn(family, qualifier, CHAR);
    count++;
    try {
      region.append(app);
    } catch (IOException e) {
      LOG.info("Count=" + count + ", max=" + appendCounter + ", " + e);
      break;
    }
  }
}
 
Developer: apache, Project: hbase, Lines: 16, Source: TestHRegion.java

Example 9: testAppendTimestampsAreMonotonic

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testAppendTimestampsAreMonotonic() throws IOException {
  HRegion region = initHRegion(tableName, method, CONF, fam1);
  ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
  EnvironmentEdgeManager.injectEdge(edge);

  edge.setValue(10);
  Append a = new Append(row);
  a.setDurability(Durability.SKIP_WAL);
  a.addColumn(fam1, qual1, qual1);
  region.append(a);

  Result result = region.get(new Get(row));
  Cell c = result.getColumnLatestCell(fam1, qual1);
  assertNotNull(c);
  assertEquals(10L, c.getTimestamp());

  edge.setValue(1); // clock goes back
  region.append(a);
  result = region.get(new Get(row));
  c = result.getColumnLatestCell(fam1, qual1);
  assertEquals(11L, c.getTimestamp());

  byte[] expected = new byte[qual1.length*2];
  System.arraycopy(qual1, 0, expected, 0, qual1.length);
  System.arraycopy(qual1, 0, expected, qual1.length, qual1.length);

  assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
    expected, 0, expected.length));
}
 
Developer: apache, Project: hbase, Lines: 31, Source: TestHRegion.java

Example 10: testAppend

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testAppend() throws Exception {
  doNPuts(1, false);

  for(int count = 0; count< 73; count++) {
    Append append = new Append(row);
    append.addColumn(cf, qualifier, Bytes.toBytes(",Test"));
    table.append(append);
  }

  metricsRegionServer.getRegionServerWrapper().forceRecompute();
  assertCounter("appendNumOps", 73);
}
 
Developer: apache, Project: hbase, Lines: 14, Source: TestRegionServerMetrics.java

Example 11: testRow

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Override
void testRow(final int i) throws IOException {
  byte [] bytes = format(i);
  Append append = new Append(bytes);
  append.addColumn(FAMILY_NAME, getQualifier(), bytes);
  updateValueSize(this.table.append(append));
}
 
Developer: apache, Project: hbase, Lines: 8, Source: PerformanceEvaluation.java

Example 12: testMutation

import org.apache.hadoop.hbase.client.Append; // import the package/class this method depends on
@Test
public void testMutation() throws Exception {

  Put put = new Put(ROW);
  put.addColumn(FAMILY, QUALIFIER, VALUE);
  table.put(put);
  byte[] value = VALUE;
  assertResult(table.get(new Get(ROW)), value, value);
  assertObserverHasExecuted();

  Increment inc = new Increment(ROW);
  inc.addColumn(FAMILY, QUALIFIER, 10L);
  table.increment(inc);
  // QUALIFIER -> 10 (put) + 10 (increment)
  // QUALIFIER_FROM_CP -> 10 (from cp's put) + 10 (from cp's increment)
  value = Bytes.toBytes(20L);
  assertResult(table.get(new Get(ROW)), value, value);
  assertObserverHasExecuted();

  Append append = new Append(ROW);
  append.addColumn(FAMILY, QUALIFIER, APPEND_VALUE);
  table.append(append);
  // 10L + "MB"
  value = ByteBuffer.wrap(new byte[value.length + APPEND_VALUE.length])
    .put(value)
    .put(APPEND_VALUE)
    .array();
  assertResult(table.get(new Get(ROW)), value, value);
  assertObserverHasExecuted();

  Delete delete = new Delete(ROW);
  delete.addColumns(FAMILY, QUALIFIER);
  table.delete(delete);
  assertTrue(Arrays.asList(table.get(new Get(ROW)).rawCells()).toString(),
    table.get(new Get(ROW)).isEmpty());
  assertObserverHasExecuted();

  assertTrue(table.checkAndPut(ROW, FAMILY, QUALIFIER, null, put));
  assertObserverHasExecuted();

  assertTrue(table.checkAndDelete(ROW, FAMILY, QUALIFIER, VALUE, delete));
  assertObserverHasExecuted();

  assertTrue(table.get(new Get(ROW)).isEmpty());
}
 
Developer: apache, Project: hbase, Lines: 46, Source: TestPassCustomCellViaRegionObserver.java


Note: The org.apache.hadoop.hbase.client.Append.addColumn examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers; the source code copyright belongs to the original authors, and distribution and use are governed by each project's license. Do not reproduce without permission.