当前位置: 首页>>代码示例>>Java>>正文


Java GetAdditionalDatanodeRequestProto类代码示例

本文整理汇总了Java中org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto的典型用法代码示例。如果您正苦于以下问题:Java GetAdditionalDatanodeRequestProto类的具体用法?Java GetAdditionalDatanodeRequestProto怎么用?Java GetAdditionalDatanodeRequestProto使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


GetAdditionalDatanodeRequestProto类属于org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos包,在下文中一共展示了GetAdditionalDatanodeRequestProto类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Server-side translation of the getAdditionalDatanode RPC: unpacks the
 * protobuf request, delegates to the NameNode-side implementation, and
 * wraps the chosen block location in a response proto.
 *
 * @throws ServiceException wrapping any IOException from the server call
 */
@Override
public GetAdditionalDatanodeResponseProto getAdditionalDatanode(
    RpcController controller, GetAdditionalDatanodeRequestProto req)
    throws ServiceException {
  try {
    List<DatanodeInfoProto> existingProtos = req.getExistingsList();
    List<String> storageUuids = req.getExistingStorageUuidsList();
    List<DatanodeInfoProto> excludeProtos = req.getExcludesList();
    // Materialize the repeated proto fields as the array forms the
    // server API expects before delegating.
    DatanodeInfo[] existingNodes = PBHelper.convert(
        existingProtos.toArray(new DatanodeInfoProto[existingProtos.size()]));
    String[] storageIds =
        storageUuids.toArray(new String[storageUuids.size()]);
    DatanodeInfo[] excludedNodes = PBHelper.convert(
        excludeProtos.toArray(new DatanodeInfoProto[excludeProtos.size()]));
    LocatedBlock located = server.getAdditionalDatanode(req.getSrc(),
        req.getFileId(), PBHelper.convert(req.getBlk()),
        existingNodes, storageIds, excludedNodes,
        req.getNumAdditionalNodes(), req.getClientName());
    return GetAdditionalDatanodeResponseProto.newBuilder()
        .setBlock(PBHelper.convert(located))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:25,代码来源:ClientNamenodeProtocolServerSideTranslatorPB.java

示例2: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Client-side translation of getAdditionalDatanode: packages the caller's
 * arguments into a GetAdditionalDatanodeRequestProto, issues the RPC, and
 * converts the returned block proto back into a LocatedBlock.
 */
@Override
public LocatedBlock getAdditionalDatanode(String src, long fileId,
    ExtendedBlock blk, DatanodeInfo[] existings, String[] existingStorageIDs,
    DatanodeInfo[] excludes,
    int numAdditionalNodes, String clientName) throws AccessControlException,
    FileNotFoundException, SafeModeException, UnresolvedLinkException,
    IOException {
  // Assemble the request via an explicit builder variable rather than one
  // long chained expression; semantics are identical.
  GetAdditionalDatanodeRequestProto.Builder builder =
      GetAdditionalDatanodeRequestProto.newBuilder();
  builder.setSrc(src)
      .setFileId(fileId)
      .setBlk(PBHelper.convert(blk))
      .addAllExistings(PBHelper.convert(existings))
      .addAllExistingStorageUuids(Arrays.asList(existingStorageIDs))
      .addAllExcludes(PBHelper.convert(excludes))
      .setNumAdditionalNodes(numAdditionalNodes)
      .setClientName(clientName);
  try {
    // ServiceException from the proxy is unwrapped to the remote IOException.
    return PBHelper.convert(
        rpcProxy.getAdditionalDatanode(null, builder.build()).getBlock());
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:26,代码来源:ClientNamenodeProtocolTranslatorPB.java

示例3: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Client-side translation of getAdditionalDatanode using PBHelperClient:
 * builds the protobuf request, performs the RPC, and converts the returned
 * LocatedBlockProto back into a LocatedBlock.
 */
@Override
public LocatedBlock getAdditionalDatanode(String src, long fileId,
    ExtendedBlock blk, DatanodeInfo[] existings, String[] existingStorageIDs,
    DatanodeInfo[] excludes, int numAdditionalNodes, String clientName)
    throws IOException {
  // Build the request with a named builder instead of a single chain.
  GetAdditionalDatanodeRequestProto.Builder builder =
      GetAdditionalDatanodeRequestProto.newBuilder();
  builder.setSrc(src)
      .setFileId(fileId)
      .setBlk(PBHelperClient.convert(blk))
      .addAllExistings(PBHelperClient.convert(existings))
      .addAllExistingStorageUuids(Arrays.asList(existingStorageIDs))
      .addAllExcludes(PBHelperClient.convert(excludes))
      .setNumAdditionalNodes(numAdditionalNodes)
      .setClientName(clientName);
  try {
    GetAdditionalDatanodeResponseProto resp =
        rpcProxy.getAdditionalDatanode(null, builder.build());
    return PBHelperClient.convertLocatedBlockProto(resp.getBlock());
  } catch (ServiceException e) {
    // Surface the remote-side IOException to the caller.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:24,代码来源:ClientNamenodeProtocolTranslatorPB.java

示例4: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Server-side translation of getAdditionalDatanode (PBHelperClient
 * variant): unpacks the request proto, delegates to the server, and wraps
 * the result in a GetAdditionalDatanodeResponseProto.
 *
 * @throws ServiceException wrapping any IOException from the server call
 */
@Override
public GetAdditionalDatanodeResponseProto getAdditionalDatanode(
    RpcController controller, GetAdditionalDatanodeRequestProto req)
    throws ServiceException {
  try {
    List<DatanodeInfoProto> existingProtos = req.getExistingsList();
    List<String> storageUuids = req.getExistingStorageUuidsList();
    List<DatanodeInfoProto> excludeProtos = req.getExcludesList();
    // Repeated proto fields are converted to arrays for the server API.
    DatanodeInfo[] existingNodes = PBHelperClient.convert(
        existingProtos.toArray(new DatanodeInfoProto[existingProtos.size()]));
    String[] storageIds =
        storageUuids.toArray(new String[storageUuids.size()]);
    DatanodeInfo[] excludedNodes = PBHelperClient.convert(
        excludeProtos.toArray(new DatanodeInfoProto[excludeProtos.size()]));
    LocatedBlock located = server.getAdditionalDatanode(req.getSrc(),
        req.getFileId(), PBHelperClient.convert(req.getBlk()),
        existingNodes, storageIds, excludedNodes,
        req.getNumAdditionalNodes(), req.getClientName());
    return GetAdditionalDatanodeResponseProto.newBuilder()
        .setBlock(PBHelperClient.convertLocatedBlock(located))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:25,代码来源:ClientNamenodeProtocolServerSideTranslatorPB.java

示例5: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Server-side translation of getAdditionalDatanode (variant without file
 * id or storage UUIDs): unpacks the request proto, delegates to the
 * server, and wraps the resulting block in a response proto.
 *
 * @throws ServiceException wrapping any IOException from the server call
 */
@Override
public GetAdditionalDatanodeResponseProto getAdditionalDatanode(
    RpcController controller, GetAdditionalDatanodeRequestProto req)
    throws ServiceException {
  try {
    List<DatanodeInfoProto> existingProtos = req.getExistingsList();
    List<DatanodeInfoProto> excludeProtos = req.getExcludesList();
    // Convert the repeated proto fields to arrays up front.
    DatanodeInfo[] existingNodes = PBHelper.convert(
        existingProtos.toArray(new DatanodeInfoProto[existingProtos.size()]));
    DatanodeInfo[] excludedNodes = PBHelper.convert(
        excludeProtos.toArray(new DatanodeInfoProto[excludeProtos.size()]));
    LocatedBlock located = server.getAdditionalDatanode(
        req.getSrc(), PBHelper.convert(req.getBlk()),
        existingNodes, excludedNodes,
        req.getNumAdditionalNodes(), req.getClientName());
    return GetAdditionalDatanodeResponseProto.newBuilder()
        .setBlock(PBHelper.convert(located))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:22,代码来源:ClientNamenodeProtocolServerSideTranslatorPB.java

示例6: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Client-side translation of getAdditionalDatanode (variant without file
 * id or storage UUIDs): builds the request proto, issues the RPC, and
 * converts the returned block proto back into a LocatedBlock.
 */
@Override
public LocatedBlock getAdditionalDatanode(String src, ExtendedBlock blk,
    DatanodeInfo[] existings, DatanodeInfo[] excludes,
    int numAdditionalNodes, String clientName) throws AccessControlException,
    FileNotFoundException, SafeModeException, UnresolvedLinkException,
    IOException {
  // Named builder instead of one long chained expression.
  GetAdditionalDatanodeRequestProto.Builder builder =
      GetAdditionalDatanodeRequestProto.newBuilder();
  builder.setSrc(src)
      .setBlk(PBHelper.convert(blk))
      .addAllExistings(PBHelper.convert(existings))
      .addAllExcludes(PBHelper.convert(excludes))
      .setNumAdditionalNodes(numAdditionalNodes)
      .setClientName(clientName);
  try {
    GetAdditionalDatanodeResponseProto resp =
        rpcProxy.getAdditionalDatanode(null, builder.build());
    return PBHelper.convert(resp.getBlock());
  } catch (ServiceException e) {
    // Unwrap the remote exception carried inside the ServiceException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:23,代码来源:ClientNamenodeProtocolTranslatorPB.java

示例7: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Server-side translation of getAdditionalDatanode: converts the request
 * proto's fields, delegates to the server implementation, and returns the
 * located block wrapped in a response proto.
 *
 * @throws ServiceException wrapping any IOException from the server call
 */
@Override
public GetAdditionalDatanodeResponseProto getAdditionalDatanode(
    RpcController controller, GetAdditionalDatanodeRequestProto req)
    throws ServiceException {
  try {
    List<DatanodeInfoProto> existingProtos = req.getExistingsList();
    List<DatanodeInfoProto> excludeProtos = req.getExcludesList();
    // Array conversions pulled out of the call for readability.
    DatanodeInfo[] existingNodes = PBHelper.convert(
        existingProtos.toArray(new DatanodeInfoProto[existingProtos.size()]));
    DatanodeInfo[] excludedNodes = PBHelper.convert(
        excludeProtos.toArray(new DatanodeInfoProto[excludeProtos.size()]));
    LocatedBlock located = server.getAdditionalDatanode(
        req.getSrc(), PBHelper.convert(req.getBlk()),
        existingNodes, excludedNodes,
        req.getNumAdditionalNodes(), req.getClientName());
    return GetAdditionalDatanodeResponseProto.newBuilder()
        .setBlock(PBHelper.convert(located))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:21,代码来源:ClientNamenodeProtocolServerSideTranslatorPB.java

示例8: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Client-side translation of getAdditionalDatanode: packages the
 * arguments into a request proto, performs the RPC, and converts the
 * returned block proto back into a LocatedBlock.
 */
@Override
public LocatedBlock getAdditionalDatanode(String src, ExtendedBlock blk,
    DatanodeInfo[] existings, DatanodeInfo[] excludes, int numAdditionalNodes,
    String clientName)
    throws AccessControlException, FileNotFoundException, SafeModeException,
    UnresolvedLinkException, IOException {
  // Build the request step by step on a named builder.
  GetAdditionalDatanodeRequestProto.Builder builder =
      GetAdditionalDatanodeRequestProto.newBuilder();
  builder.setSrc(src);
  builder.setBlk(PBHelper.convert(blk));
  builder.addAllExistings(PBHelper.convert(existings));
  builder.addAllExcludes(PBHelper.convert(excludes));
  builder.setNumAdditionalNodes(numAdditionalNodes);
  builder.setClientName(clientName);
  try {
    GetAdditionalDatanodeResponseProto resp =
        rpcProxy.getAdditionalDatanode(null, builder.build());
    return PBHelper.convert(resp.getBlock());
  } catch (ServiceException e) {
    // Re-throw the remote IOException carried by the ServiceException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:21,代码来源:ClientNamenodeProtocolTranslatorPB.java

示例9: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Server-side translation of getAdditionalDatanode (variant with storage
 * UUIDs but no file id): unpacks the request proto, delegates to the
 * server, and wraps the chosen block in a response proto.
 *
 * @throws ServiceException wrapping any IOException from the server call
 */
@Override
public GetAdditionalDatanodeResponseProto getAdditionalDatanode(
    RpcController controller, GetAdditionalDatanodeRequestProto req)
    throws ServiceException {
  try {
    List<DatanodeInfoProto> existingProtos = req.getExistingsList();
    List<String> storageUuids = req.getExistingStorageUuidsList();
    List<DatanodeInfoProto> excludeProtos = req.getExcludesList();
    // Convert repeated fields to the array forms the server API takes.
    DatanodeInfo[] existingNodes = PBHelper.convert(
        existingProtos.toArray(new DatanodeInfoProto[existingProtos.size()]));
    String[] storageIds =
        storageUuids.toArray(new String[storageUuids.size()]);
    DatanodeInfo[] excludedNodes = PBHelper.convert(
        excludeProtos.toArray(new DatanodeInfoProto[excludeProtos.size()]));
    LocatedBlock located = server.getAdditionalDatanode(req.getSrc(),
        PBHelper.convert(req.getBlk()),
        existingNodes, storageIds, excludedNodes,
        req.getNumAdditionalNodes(), req.getClientName());
    return GetAdditionalDatanodeResponseProto.newBuilder()
        .setBlock(PBHelper.convert(located))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre2,代码行数:25,代码来源:ClientNamenodeProtocolServerSideTranslatorPB.java

示例10: getAdditionalDatanode

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto; //导入依赖的package包/类
/**
 * Client-side translation of getAdditionalDatanode (variant with storage
 * UUIDs but no file id): builds the request proto, issues the RPC, and
 * converts the returned block proto back into a LocatedBlock.
 */
@Override
public LocatedBlock getAdditionalDatanode(String src, ExtendedBlock blk,
    DatanodeInfo[] existings, String[] existingStorageIDs,
    DatanodeInfo[] excludes,
    int numAdditionalNodes, String clientName) throws AccessControlException,
    FileNotFoundException, SafeModeException, UnresolvedLinkException,
    IOException {
  // Assemble the request on a named builder rather than a single chain.
  GetAdditionalDatanodeRequestProto.Builder builder =
      GetAdditionalDatanodeRequestProto.newBuilder();
  builder.setSrc(src)
      .setBlk(PBHelper.convert(blk))
      .addAllExistings(PBHelper.convert(existings))
      .addAllExistingStorageUuids(Arrays.asList(existingStorageIDs))
      .addAllExcludes(PBHelper.convert(excludes))
      .setNumAdditionalNodes(numAdditionalNodes)
      .setClientName(clientName);
  try {
    GetAdditionalDatanodeResponseProto resp =
        rpcProxy.getAdditionalDatanode(null, builder.build());
    return PBHelper.convert(resp.getBlock());
  } catch (ServiceException e) {
    // Unwrap and rethrow the remote-side IOException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre2,代码行数:25,代码来源:ClientNamenodeProtocolTranslatorPB.java


注:本文中的org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。