

Java ProtobufUtil Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.protobuf.ProtobufUtil. If you are wondering what the ProtobufUtil class does, how to use it, or where to find working examples, the curated code samples below should help.


The ProtobufUtil class belongs to the org.apache.hadoop.hbase.protobuf package. A total of 15 code examples of the ProtobufUtil class are shown below, sorted by popularity by default.

Example 1: parseFrom

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * @param pbBytes A pb serialized {@link ValueFilter} instance
 * @return An instance of {@link ValueFilter} made from <code>bytes</code>
 * @throws DeserializationException
 * @see #toByteArray
 */
public static ValueFilter parseFrom(final byte [] pbBytes)
throws DeserializationException {
  FilterProtos.ValueFilter proto;
  try {
    proto = FilterProtos.ValueFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException e) {
    throw new DeserializationException(e);
  }
  final CompareOp valueCompareOp =
    CompareOp.valueOf(proto.getCompareFilter().getCompareOp().name());
  ByteArrayComparable valueComparator = null;
  try {
    if (proto.getCompareFilter().hasComparator()) {
      valueComparator = ProtobufUtil.toComparator(proto.getCompareFilter().getComparator());
    }
  } catch (IOException ioe) {
    throw new DeserializationException(ioe);
  }
  return new ValueFilter(valueCompareOp, valueComparator);
}
 
Developer: fengchen8086, Project: ditb, Lines: 27, Source: ValueFilter.java
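
A minimal round-trip sketch of how parseFrom is typically exercised, assuming the HBase 1.x client API used above (class name and values are illustrative): serialize a ValueFilter with toByteArray() and rebuild it with parseFrom().

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ValueFilterRoundTrip {
  public static void main(String[] args) throws Exception {
    ValueFilter original =
        new ValueFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("v1")));
    byte[] pbBytes = original.toByteArray();                // pb-serialized form
    ValueFilter restored = ValueFilter.parseFrom(pbBytes);  // the method shown above
    System.out.println(original.getOperator() == restored.getOperator()); // true
  }
}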

Example 2: parseFrom

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * @param pbBytes A pb serialized {@link FamilyFilter} instance
 * @return An instance of {@link FamilyFilter} made from <code>bytes</code>
 * @throws DeserializationException
 * @see #toByteArray
 */
public static FamilyFilter parseFrom(final byte [] pbBytes)
throws DeserializationException {
  FilterProtos.FamilyFilter proto;
  try {
    proto = FilterProtos.FamilyFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException e) {
    throw new DeserializationException(e);
  }
  final CompareOp valueCompareOp =
    CompareOp.valueOf(proto.getCompareFilter().getCompareOp().name());
  ByteArrayComparable valueComparator = null;
  try {
    if (proto.getCompareFilter().hasComparator()) {
      valueComparator = ProtobufUtil.toComparator(proto.getCompareFilter().getComparator());
    }
  } catch (IOException ioe) {
    throw new DeserializationException(ioe);
  }
  return new FamilyFilter(valueCompareOp, valueComparator);
}
 
Developer: fengchen8086, Project: ditb, Lines: 27, Source: FamilyFilter.java
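
Both of the examples above delegate the comparator field to ProtobufUtil.toComparator, which converts in both directions between a ByteArrayComparable and its ComparatorProtos.Comparator message. A small illustrative sketch, assuming the same HBase 1.x API:

import java.io.IOException;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayComparable original = new BinaryComparator(Bytes.toBytes("family"));
    ComparatorProtos.Comparator proto = ProtobufUtil.toComparator(original); // to protobuf
    ByteArrayComparable restored = ProtobufUtil.toComparator(proto);         // back to Java
    System.out.println(Bytes.toString(restored.getValue()));                 // family
  }
}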

Example 3: listTableNamesByNamespace

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * Get list of table names by namespace
 * @param name namespace name
 * @return The list of table names in the namespace
 * @throws IOException
 */
@Override
public TableName[] listTableNamesByNamespace(final String name) throws IOException {
  return
      executeCallable(new MasterCallable<TableName[]>(getConnection()) {
        @Override
        public TableName[] call(int callTimeout) throws Exception {
          PayloadCarryingRpcController controller = rpcControllerFactory.newController();
          controller.setCallTimeout(callTimeout);
          List<HBaseProtos.TableName> tableNames =
            master.listTableNamesByNamespace(controller, ListTableNamesByNamespaceRequest.
              newBuilder().setNamespaceName(name).build())
              .getTableNameList();
          TableName[] result = new TableName[tableNames.size()];
          for (int i = 0; i < tableNames.size(); i++) {
            result[i] = ProtobufUtil.toTableName(tableNames.get(i));
          }
          return result;
        }
      });
}
 
Developer: fengchen8086, Project: ditb, Lines: 27, Source: HBaseAdmin.java
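
A hedged usage sketch of the method above from the client side, assuming an HBase 1.x cluster whose connection settings come from hbase-site.xml on the classpath (the namespace name is illustrative):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ListTablesInNamespace {
  public static void main(String[] args) throws Exception {
    try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = connection.getAdmin()) {
      // "default" is the built-in namespace; any existing namespace name works here.
      for (TableName tn : admin.listTableNamesByNamespace("default")) {
        System.out.println(tn.getNameAsString());
      }
    }
  }
}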

Example 4: testEndpoint

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
@Test
public void testEndpoint() throws Exception {
  final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
  final ServerRpcController controller = new ServerRpcController();
  final BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse> rpcCallback =
      new BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>();
  DummyRegionServerEndpointProtos.DummyService service =
      ProtobufUtil.newServiceStub(DummyRegionServerEndpointProtos.DummyService.class,
          TEST_UTIL.getHBaseAdmin().coprocessorService(serverName));
  service.dummyCall(controller,
      DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(), rpcCallback);
  assertEquals(DUMMY_VALUE, rpcCallback.get().getValue());
  if (controller.failedOnException()) {
    throw controller.getFailedOn();
  }
}
 
Developer: fengchen8086, Project: ditb, Lines: 17, Source: TestRegionServerCoprocessorEndpoint.java
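
The key call here is ProtobufUtil.newServiceStub, which builds the asynchronous (callback-based) stub of a generated protobuf Service. A rough sketch of the same pattern, assuming an HBase 1.x cluster with the AccessController coprocessor installed; it uses a table-scoped channel rather than the region-server-scoped one in the test:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.security.access.AccessControlLists;

public class NewServiceStubSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table acl = conn.getTable(AccessControlLists.ACL_TABLE_NAME)) {
      CoprocessorRpcChannel channel = acl.coprocessorService(HConstants.EMPTY_START_ROW);
      // newServiceStub reflectively calls the generated Service.newStub(channel), giving
      // an asynchronous stub just like the DummyService stub in the test above.
      AccessControlService stub =
          ProtobufUtil.newServiceStub(AccessControlService.class, channel);
      System.out.println(stub != null);
    }
  }
}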

Example 5: revokeGlobal

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * Revoke permissions globally from the given user. Will wait until all active
 * AccessController instances have updated their permissions caches or will
 * throw an exception upon timeout (10 seconds).
 */
public static void revokeGlobal(final HBaseTestingUtility util, final String user,
    final Permission.Action... actions) throws Exception {
  SecureTestUtil.updateACLs(util, new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) {
        try (Table acl = connection.getTable(AccessControlLists.ACL_TABLE_NAME)) {
          BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW);
          AccessControlService.BlockingInterface protocol =
              AccessControlService.newBlockingStub(service);
          ProtobufUtil.revoke(null, protocol, user, actions);
        }
      }
      return null;
    }
  });
}
 
Developer: fengchen8086, Project: ditb, Lines: 23, Source: SecureTestUtil.java
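
A hedged usage sketch of how a security test might call this helper, assuming SecureTestUtil (the class the helper comes from, per the source note) is on the test classpath and a mini-cluster with the AccessController coprocessor is already running; the user name and action list are illustrative:

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.SecureTestUtil;

public class GlobalAclCleanup {
  // Blocks until every active AccessController has refreshed its permission cache,
  // or fails after the 10 second timeout mentioned in the javadoc above.
  static void dropGlobalRights(HBaseTestingUtility util) throws Exception {
    SecureTestUtil.revokeGlobal(util, "bob", Permission.Action.CREATE, Permission.Action.ADMIN);
  }
}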

Example 6: testAttributesSerialization

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
@Test
public void testAttributesSerialization() throws IOException {
  Scan scan = new Scan();
  scan.setAttribute("attribute1", Bytes.toBytes("value1"));
  scan.setAttribute("attribute2", Bytes.toBytes("value2"));
  scan.setAttribute("attribute3", Bytes.toBytes("value3"));

  ClientProtos.Scan scanProto = ProtobufUtil.toScan(scan);

  Scan scan2 = ProtobufUtil.toScan(scanProto);

  Assert.assertNull(scan2.getAttribute("absent"));
  Assert.assertTrue(Arrays.equals(Bytes.toBytes("value1"), scan2.getAttribute("attribute1")));
  Assert.assertTrue(Arrays.equals(Bytes.toBytes("value2"), scan2.getAttribute("attribute2")));
  Assert.assertTrue(Arrays.equals(Bytes.toBytes("value3"), scan2.getAttribute("attribute3")));
  Assert.assertEquals(3, scan2.getAttributesMap().size());
}
 
Developer: fengchen8086, Project: ditb, Lines: 18, Source: TestScan.java
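
A minimal sketch extending the test above, assuming the same HBase 1.x API: the protobuf Scan can also be flattened to bytes (for example to pass it through a job configuration) and parsed back before converting it to a client Scan again.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanWireFormat {
  public static void main(String[] args) throws Exception {
    Scan scan = new Scan();
    scan.setAttribute("attribute1", Bytes.toBytes("value1"));

    byte[] wire = ProtobufUtil.toScan(scan).toByteArray();                   // Scan -> pb bytes
    Scan restored = ProtobufUtil.toScan(ClientProtos.Scan.parseFrom(wire));  // bytes -> Scan
    System.out.println(Bytes.toString(restored.getAttribute("attribute1"))); // value1
  }
}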

Example 7: testTokenAuth

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
private void testTokenAuth(Class<? extends RpcClient> rpcImplClass) throws IOException,
    ServiceException {
  TEST_UTIL.getConfiguration().set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY,
    rpcImplClass.getName());
  try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
      Table table = conn.getTable(TableName.META_TABLE_NAME)) {
    CoprocessorRpcChannel rpcChannel = table.coprocessorService(HConstants.EMPTY_START_ROW);
    AuthenticationProtos.AuthenticationService.BlockingInterface service =
        AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
    WhoAmIResponse response = service.whoAmI(null, WhoAmIRequest.getDefaultInstance());
    assertEquals(USERNAME, response.getUsername());
    assertEquals(AuthenticationMethod.TOKEN.name(), response.getAuthMethod());
    try {
      service.getAuthenticationToken(null, GetAuthenticationTokenRequest.getDefaultInstance());
    } catch (ServiceException e) {
      AccessDeniedException exc = (AccessDeniedException) ProtobufUtil.getRemoteException(e);
      assertTrue(exc.getMessage().contains(
        "Token generation only allowed for Kerberos authenticated clients"));
    }
  }
}
 
Developer: fengchen8086, Project: ditb, Lines: 22, Source: TestGenerateDelegationToken.java
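
The ProtobufUtil.getRemoteException(...) call in the catch block is the standard way to unwrap a protobuf ServiceException into the IOException the server actually threw. A minimal sketch of that pattern; MasterCall is a hypothetical interface introduced here only for illustration:

import java.io.IOException;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;

public class UnwrapRemoteException {
  // Hypothetical callback type, introduced only to show the unwrapping pattern.
  interface MasterCall<T> {
    T run() throws ServiceException;
  }

  static <T> T call(MasterCall<T> call) throws IOException {
    try {
      return call.run();
    } catch (ServiceException se) {
      // Re-throws the server-side cause (e.g. an AccessDeniedException) as an IOException.
      throw ProtobufUtil.getRemoteException(se);
    }
  }
}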

Example 8: createNamespace

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * Create a new namespace
 * @param descriptor descriptor which describes the new namespace
 * @throws IOException
 */
@Override
public void createNamespace(final NamespaceDescriptor descriptor) throws IOException {
  executeCallable(new MasterCallable<Void>(getConnection()) {
    @Override
    public Void call(int callTimeout) throws Exception {
      PayloadCarryingRpcController controller = rpcControllerFactory.newController();
      controller.setCallTimeout(callTimeout);
      // TODO: set priority based on NS?
      master.createNamespace(controller,
        CreateNamespaceRequest.newBuilder()
          .setNamespaceDescriptor(ProtobufUtil
            .toProtoNamespaceDescriptor(descriptor)).build()
      );
      return null;
    }
  });
}
 
Developer: fengchen8086, Project: ditb, Lines: 23, Source: HBaseAdmin.java
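
A hedged client-side usage sketch of the call above through the public Admin API, assuming an HBase 1.x cluster configured via hbase-site.xml (the namespace name and configuration values are illustrative):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CreateNamespaceUsage {
  public static void main(String[] args) throws Exception {
    try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = connection.getAdmin()) {
      NamespaceDescriptor ns = NamespaceDescriptor.create("analytics")  // illustrative name
          .addConfiguration("owner", "data-team")                       // optional ns config
          .build();
      admin.createNamespace(ns);
    }
  }
}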

Example 9: createTableLock

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
private InterProcessLock createTableLock() {
  String tableLockZNode = ZKUtil.joinZNode(zkWatcher.tableLockZNode,
      tableName.getNameAsString());

  ZooKeeperProtos.TableLock data = ZooKeeperProtos.TableLock.newBuilder()
    .setTableName(ProtobufUtil.toProtoTableName(tableName))
    .setLockOwner(ProtobufUtil.toServerName(serverName))
    .setThreadId(Thread.currentThread().getId())
    .setPurpose(purpose)
    .setIsShared(isShared)
    .setCreateTime(EnvironmentEdgeManager.currentTime()).build();
  byte[] lockMetadata = toBytes(data);

  InterProcessReadWriteLock lock = new ZKInterProcessReadWriteLock(zkWatcher, tableLockZNode,
    METADATA_HANDLER);
  return isShared ? lock.readLock(lockMetadata) : lock.writeLock(lockMetadata);
}
 
Developer: fengchen8086, Project: ditb, Lines: 18, Source: TableLockManager.java
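
A small illustrative sketch of the ProtobufUtil.toServerName conversion used for the lock owner above, assuming the same HBase 1.x API: a ServerName round-trips through its HBaseProtos.ServerName message (host, port and startcode are made up).

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

public class ServerNameConversion {
  public static void main(String[] args) {
    ServerName owner = ServerName.valueOf("rs1.example.com", 16020, 1234567890L);
    HBaseProtos.ServerName proto = ProtobufUtil.toServerName(owner); // Java -> protobuf
    ServerName restored = ProtobufUtil.toServerName(proto);          // protobuf -> Java
    System.out.println(restored.equals(owner));                      // true
  }
}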

Example 10: getCompactionStateForRegion

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * {@inheritDoc}
 */
@Override
public CompactionState getCompactionStateForRegion(final byte[] regionName)
throws IOException {
  try {
    Pair<HRegionInfo, ServerName> regionServerPair = getRegion(regionName);
    if (regionServerPair == null) {
      throw new IllegalArgumentException("Invalid region: " + Bytes.toStringBinary(regionName));
    }
    if (regionServerPair.getSecond() == null) {
      throw new NoServerForRegionException(Bytes.toStringBinary(regionName));
    }
    ServerName sn = regionServerPair.getSecond();
    AdminService.BlockingInterface admin = this.connection.getAdmin(sn);
    GetRegionInfoRequest request = RequestConverter.buildGetRegionInfoRequest(
      regionServerPair.getFirst().getRegionName(), true);
    PayloadCarryingRpcController controller = rpcControllerFactory.newController();
    // TODO: this does not do retries, it should. Set priority and timeout in controller
    GetRegionInfoResponse response = admin.getRegionInfo(controller, request);
    return response.getCompactionState();
  } catch (ServiceException se) {
    throw ProtobufUtil.getRemoteException(se);
  }
}
 
Developer: fengchen8086, Project: ditb, Lines: 27, Source: HBaseAdmin.java
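
A hedged usage sketch of the same check through the public Admin API, assuming HBase 1.x where the return type is AdminProtos.GetRegionInfoResponse.CompactionState; the table name is illustrative:

import java.util.List;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;

public class CompactionStateCheck {
  public static void main(String[] args) throws Exception {
    TableName table = TableName.valueOf("t1");  // illustrative table name
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin();
         RegionLocator locator = conn.getRegionLocator(table)) {
      List<HRegionLocation> locations = locator.getAllRegionLocations();
      byte[] regionName = locations.get(0).getRegionInfo().getRegionName();
      CompactionState state = admin.getCompactionStateForRegion(regionName);
      System.out.println(state);  // NONE, MINOR, MAJOR or MAJOR_AND_MINOR
    }
  }
}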

Example 11: regionSequenceIdsToByteArray

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * @param regionLastFlushedSequenceId the flushed sequence id of a region which is the min of its
 *          store max seq ids
 * @param storeSequenceIds column family to sequence Id map
 * @return Serialized protobuf of <code>RegionSequenceIds</code> with pb magic prefix prepended
 *         suitable for use to filter wal edits in distributedLogReplay mode
 */
public static byte[] regionSequenceIdsToByteArray(final Long regionLastFlushedSequenceId,
    final Map<byte[], Long> storeSequenceIds) {
  ClusterStatusProtos.RegionStoreSequenceIds.Builder regionSequenceIdsBuilder =
      ClusterStatusProtos.RegionStoreSequenceIds.newBuilder();
  ClusterStatusProtos.StoreSequenceId.Builder storeSequenceIdBuilder =
      ClusterStatusProtos.StoreSequenceId.newBuilder();
  if (storeSequenceIds != null) {
    for (Map.Entry<byte[], Long> e : storeSequenceIds.entrySet()){
      byte[] columnFamilyName = e.getKey();
      Long curSeqId = e.getValue();
      storeSequenceIdBuilder.setFamilyName(ByteStringer.wrap(columnFamilyName));
      storeSequenceIdBuilder.setSequenceId(curSeqId);
      regionSequenceIdsBuilder.addStoreSequenceId(storeSequenceIdBuilder.build());
      storeSequenceIdBuilder.clear();
    }
  }
  regionSequenceIdsBuilder.setLastFlushedSequenceId(regionLastFlushedSequenceId);
  byte[] result = regionSequenceIdsBuilder.build().toByteArray();
  return ProtobufUtil.prependPBMagic(result);
}
 
Developer: fengchen8086, Project: ditb, Lines: 28, Source: ZKUtil.java
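
A minimal sketch of how such a pb-magic-prefixed payload is read back, assuming the same HBase 1.x API as the helper above: check the magic prefix, strip it, then parse the protobuf body (the column family and sequence ids are illustrative).

import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;

public class RegionSequenceIdsRoundTrip {
  public static void main(String[] args) throws Exception {
    Map<byte[], Long> storeSeqIds = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
    storeSeqIds.put(Bytes.toBytes("cf1"), 42L);                          // illustrative values
    byte[] data = ZKUtil.regionSequenceIdsToByteArray(40L, storeSeqIds); // helper shown above

    if (ProtobufUtil.isPBMagicPrefix(data)) {
      int pblen = ProtobufUtil.lengthOfPBMagic();
      ClusterStatusProtos.RegionStoreSequenceIds ids =
          ClusterStatusProtos.RegionStoreSequenceIds.parseFrom(
              Arrays.copyOfRange(data, pblen, data.length));             // strip magic, parse
      System.out.println(ids.getLastFlushedSequenceId());                // 40
    }
  }
}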

Example 12: sendRegionsMerge

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * Sends a MERGE REGIONS RPC to the specified server to merge the specified
 * regions.
 * <p>
 * A region server could reject the request if it does not host the specified
 * regions.
 * @param server server to merge regions
 * @param region_a region to merge
 * @param region_b region to merge
 * @param forcible true to force the merge; otherwise only two adjacent regions
 *          will be merged
 * @throws IOException
 */
public void sendRegionsMerge(ServerName server, HRegionInfo region_a,
    HRegionInfo region_b, boolean forcible) throws IOException {
  if (server == null)
    throw new NullPointerException("Passed server is null");
  if (region_a == null || region_b == null)
    throw new NullPointerException("Passed region is null");
  AdminService.BlockingInterface admin = getRsAdmin(server);
  if (admin == null) {
    throw new IOException("Attempting to send MERGE REGIONS RPC to server "
        + server.toString() + " for region "
        + region_a.getRegionNameAsString() + ","
        + region_b.getRegionNameAsString()
        + " failed because no RPC connection found to this server");
  }
  PayloadCarryingRpcController controller = newRpcController();
  ProtobufUtil.mergeRegions(controller, admin, region_a, region_b, forcible);
}
 
Developer: fengchen8086, Project: ditb, Lines: 31, Source: ServerManager.java
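
A hedged client-side counterpart, assuming the HBase 1.x Admin API: a merge is normally requested through Admin.mergeRegions, which goes to the master rather than calling a region-server helper like the one above directly (the table name is illustrative).

import java.util.List;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class MergeAdjacentRegions {
  public static void main(String[] args) throws Exception {
    TableName table = TableName.valueOf("t1");  // illustrative table name
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      List<HRegionInfo> regions = admin.getTableRegions(table);
      if (regions.size() >= 2) {
        // false = only merge the regions if they are adjacent
        admin.mergeRegions(regions.get(0).getEncodedNameAsBytes(),
            regions.get(1).getEncodedNameAsBytes(), false);
      }
    }
  }
}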

Example 13: serializeStateData

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
@Override
public void serializeStateData(final OutputStream stream) throws IOException {
  super.serializeStateData(stream);

  MasterProcedureProtos.TruncateTableStateData.Builder state =
    MasterProcedureProtos.TruncateTableStateData.newBuilder()
      .setUserInfo(MasterProcedureUtil.toProtoUserInfo(this.user))
      .setPreserveSplits(preserveSplits);
  if (hTableDescriptor != null) {
    state.setTableSchema(hTableDescriptor.convert());
  } else {
    state.setTableName(ProtobufUtil.toProtoTableName(tableName));
  }
  if (regions != null) {
    for (HRegionInfo hri: regions) {
      state.addRegionInfo(HRegionInfo.convert(hri));
    }
  }
  state.build().writeDelimitedTo(stream);
}
 
Developer: fengchen8086, Project: ditb, Lines: 21, Source: TruncateTableProcedure.java

Example 14: deserializeStateData

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
@Override
public void deserializeStateData(final InputStream stream) throws IOException {
  super.deserializeStateData(stream);

  MasterProcedureProtos.TruncateTableStateData state =
    MasterProcedureProtos.TruncateTableStateData.parseDelimitedFrom(stream);
  user = MasterProcedureUtil.toUserInfo(state.getUserInfo());
  if (state.hasTableSchema()) {
    hTableDescriptor = HTableDescriptor.convert(state.getTableSchema());
    tableName = hTableDescriptor.getTableName();
  } else {
    tableName = ProtobufUtil.toTableName(state.getTableName());
  }
  preserveSplits = state.getPreserveSplits();
  if (state.getRegionInfoCount() == 0) {
    regions = null;
  } else {
    regions = new ArrayList<HRegionInfo>(state.getRegionInfoCount());
    for (HBaseProtos.RegionInfo hri: state.getRegionInfoList()) {
      regions.add(HRegionInfo.convert(hri));
    }
  }
}
 
Developer: fengchen8086, Project: ditb, Lines: 24, Source: TruncateTableProcedure.java
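
Examples 13 and 14 rely on the protobuf delimited-stream pattern: each message is written with writeDelimitedTo(...) and read back with parseDelimitedFrom(...), so several messages can share one stream. A minimal illustrative sketch using a plain HBaseProtos.TableName instead of the full procedure state:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

public class DelimitedProtoRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ProtobufUtil.toProtoTableName(TableName.valueOf("ns", "t1")).writeDelimitedTo(out);

    HBaseProtos.TableName proto = HBaseProtos.TableName.parseDelimitedFrom(
        new ByteArrayInputStream(out.toByteArray()));
    System.out.println(ProtobufUtil.toTableName(proto));  // ns:t1
  }
}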

Example 15: revokeFromNamespace

import org.apache.hadoop.hbase.protobuf.ProtobufUtil; // import the required package/class
/**
 * Revoke permissions on a namespace from the given user. Will wait until all active
 * AccessController instances have updated their permissions caches or will
 * throw an exception upon timeout (10 seconds).
 */
public static void revokeFromNamespace(final HBaseTestingUtility util, final String user,
    final String namespace, final Permission.Action... actions) throws Exception {
  SecureTestUtil.updateACLs(util, new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) {
        try (Table acl = connection.getTable(AccessControlLists.ACL_TABLE_NAME)) {
          BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW);
          AccessControlService.BlockingInterface protocol =
              AccessControlService.newBlockingStub(service);
          ProtobufUtil.revoke(null, protocol, user, namespace, actions);
        }
      }
      return null;
    }
  });
}
 
Developer: fengchen8086, Project: ditb, Lines: 23, Source: SecureTestUtil.java


Note: the org.apache.hadoop.hbase.protobuf.ProtobufUtil examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other open-source code and documentation platforms. The snippets are selected from community-contributed open-source projects; copyright in the source code belongs to the original authors, and any redistribution or use should follow the corresponding project's License. Do not repost without permission.