This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto. If you are wondering what GetFileInfoRequestProto is, what it is used for, and how to use it, the curated examples below should help.
GetFileInfoRequestProto belongs to the package org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos. Six code examples using the class are shown below, ordered by popularity by default.
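Before the examples, here is a minimal, self-contained sketch of the request/response round trip they all implement: the client wraps a path into a GetFileInfoRequestProto, the server unwraps it with getSrc(), and the optional fs field of the response signals whether the path exists. The path string is hypothetical and only for illustration; the sketch assumes the generated protobuf classes from ClientNamenodeProtocolProtos are on the classpath.

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoResponseProto;

public class GetFileInfoProtoSketch {
  public static void main(String[] args) {
    // Client side: wrap the path to query into a GetFileInfoRequestProto.
    GetFileInfoRequestProto req = GetFileInfoRequestProto.newBuilder()
        .setSrc("/user/example/file.txt") // hypothetical path, for illustration only
        .build();

    // Server side: read the path back out, as req.getSrc() does in the examples below.
    System.out.println("requested path: " + req.getSrc());

    // The fs field of the response is optional: a response built without it (the
    // VOID_GETFILEINFO_RESPONSE in the server-side examples) means the path does not exist,
    // which is why the client-side translators return null when hasFs() is false.
    GetFileInfoResponseProto emptyRes = GetFileInfoResponseProto.newBuilder().build();
    System.out.println("path exists: " + emptyRes.hasFs());
  }
}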
Example 1: getFileInfo
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto; // import of the dependent package/class

// Server-side translator: unwrap the protobuf request, query the NameNode, and wrap the
// HdfsFileStatus in a response, or return the void response when the path does not exist.
@Override
public GetFileInfoResponseProto getFileInfo(RpcController controller,
    GetFileInfoRequestProto req) throws ServiceException {
  try {
    HdfsFileStatus result = server.getFileInfo(req.getSrc());
    if (result != null) {
      return GetFileInfoResponseProto.newBuilder().setFs(
          PBHelper.convert(result)).build();
    }
    return VOID_GETFILEINFO_RESPONSE;
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
Example 2: getFileInfo
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto; // import of the dependent package/class

// Client-side translator: wrap the path in a GetFileInfoRequestProto, issue the RPC, and map a
// response without the optional fs field to null (path not found).
@Override
public HdfsFileStatus getFileInfo(String src) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  GetFileInfoRequestProto req = GetFileInfoRequestProto.newBuilder()
      .setSrc(src).build();
  try {
    GetFileInfoResponseProto res = rpcProxy.getFileInfo(null, req);
    return res.hasFs() ? PBHelper.convert(res.getFs()) : null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
Example 3: getFileInfo
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto; // import of the dependent package/class

// Same client-side translation as example 2, but using PBHelperClient for the
// proto-to-HdfsFileStatus conversion.
@Override
public HdfsFileStatus getFileInfo(String src) throws IOException {
  GetFileInfoRequestProto req = GetFileInfoRequestProto.newBuilder()
      .setSrc(src).build();
  try {
    GetFileInfoResponseProto res = rpcProxy.getFileInfo(null, req);
    return res.hasFs() ? PBHelperClient.convert(res.getFs()) : null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
Example 4: getFileInfo
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto; // import of the dependent package/class

// Server-side translator, equivalent to example 1 but converting with PBHelperClient.
@Override
public GetFileInfoResponseProto getFileInfo(RpcController controller,
    GetFileInfoRequestProto req) throws ServiceException {
  try {
    HdfsFileStatus result = server.getFileInfo(req.getSrc());
    if (result != null) {
      return GetFileInfoResponseProto.newBuilder().setFs(
          PBHelperClient.convert(result)).build();
    }
    return VOID_GETFILEINFO_RESPONSE;
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 16, Source file: ClientNamenodeProtocolServerSideTranslatorPB.java
Example 5: getFileInfo
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto; // import of the dependent package/class

// Server-side translator, the same pattern as example 1 with slightly different formatting
// of the response-builder chain.
@Override
public GetFileInfoResponseProto getFileInfo(RpcController controller,
    GetFileInfoRequestProto req) throws ServiceException {
  try {
    HdfsFileStatus result = server.getFileInfo(req.getSrc());
    if (result != null) {
      return GetFileInfoResponseProto.newBuilder()
          .setFs(PBHelper.convert(result)).build();
    }
    return VOID_GETFILEINFO_RESPONSE;
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
Example 6: getFileInfo
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto; // import of the dependent package/class

// Client-side translator, the same pattern as example 2 with slightly different line wrapping.
@Override
public HdfsFileStatus getFileInfo(String src)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  GetFileInfoRequestProto req =
      GetFileInfoRequestProto.newBuilder().setSrc(src).build();
  try {
    GetFileInfoResponseProto res = rpcProxy.getFileInfo(null, req);
    return res.hasFs() ? PBHelper.convert(res.getFs()) : null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
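To close, a hedged sketch of how a caller typically consumes the null-on-missing-path convention established by the client-side translators above. Here "namenode" is assumed to be a ClientProtocol proxy backed by one of those translators, and the path is hypothetical; this is an illustration, not code from the projects cited above.

// Assumption: "namenode" is a ClientProtocol proxy; the path is for illustration only.
HdfsFileStatus status = namenode.getFileInfo("/user/example/file.txt");
if (status == null) {
  // The GetFileInfoResponseProto came back without the optional fs field: the path does not exist.
  System.out.println("path not found");
} else {
  System.out.println("length=" + status.getLen() + ", directory=" + status.isDir());
}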