

Java DirectoryListing Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.protocol.DirectoryListing. If you are wondering what DirectoryListing does, how to use it, or what real-world code that uses it looks like, the curated examples below should help.


The DirectoryListing class belongs to the org.apache.hadoop.hdfs.protocol package. Fifteen code examples of the class are shown below, sorted by popularity.
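Before turning to the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pagination pattern most of these examples rely on: DFSClient.listPaths returns one DirectoryListing page at a time, getLastName() supplies the cookie for the next call, and hasMore() indicates whether further pages exist. The NameNode address and directory path are placeholder assumptions.

import java.io.IOException;
import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class DirectoryListingPagination {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Assumed NameNode address; replace with your cluster's address.
    DFSClient client = new DFSClient(new InetSocketAddress("localhost", 8020), conf);
    try {
      byte[] startAfter = HdfsFileStatus.EMPTY_NAME;
      DirectoryListing listing;
      do {
        // Each call returns one page of entries, starting after 'startAfter'.
        listing = client.listPaths("/tmp", startAfter);
        if (listing == null) {
          break; // the directory does not exist
        }
        for (HdfsFileStatus status : listing.getPartialListing()) {
          System.out.println(status.getLocalName());
        }
        // The last name of this page is the cookie for the next page.
        startAfter = listing.getLastName();
      } while (listing.hasMore());
    } finally {
      client.close();
    }
  }
}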

Example 1: listPaths

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Used by readdir and readdirplus to get dirents. It retries the listing if
 * the startAfter can't be found anymore.
 */
private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
    byte[] startAfter) throws IOException {
  DirectoryListing dlisting;
  try {
    dlisting = dfsClient.listPaths(dirFileIdPath, startAfter);
  } catch (RemoteException e) {
    IOException io = e.unwrapRemoteException();
    if (!(io instanceof DirectoryListingStartAfterNotFoundException)) {
      throw io;
    }
    // This happens when startAfter was just deleted
    LOG.info("Cookie couldn't be found: "
        + new String(startAfter, Charset.forName("UTF-8"))
        + ", do listing from beginning");
    dlisting = dfsClient
        .listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
  }
  return dlisting;
}
 
Developer: naver, Project: hadoop, Lines: 24, Source: RpcProgramNfs3.java

Example 2: getListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Get a partial listing of the indicated directory
 *
 * @param src the directory name
 * @param startAfter the name to start after
 * @param needLocation if blockLocations need to be returned
 * @return a partial listing starting after startAfter
 * 
 * @throws AccessControlException if access is denied
 * @throws UnresolvedLinkException if symbolic link is encountered
 * @throws IOException if other I/O error occurred
 */
DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) 
    throws IOException {
  checkOperation(OperationCategory.READ);
  DirectoryListing dl = null;
  readLock();
  try {
    checkOperation(NameNode.OperationCategory.READ);
    dl = FSDirStatAndListingOp.getListingInt(dir, src, startAfter,
        needLocation);
  } catch (AccessControlException e) {
    logAuditEvent(false, "listStatus", src);
    throw e;
  } finally {
    readUnlock();
  }
  logAuditEvent(true, "listStatus", src);
  return dl;
}
 
Developer: naver, Project: hadoop, Lines: 32, Source: FSNamesystem.java

Example 3: getListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
@Override
public GetListingResponseProto getListing(RpcController controller,
    GetListingRequestProto req) throws ServiceException {
  try {
    DirectoryListing result = server.getListing(
        req.getSrc(), req.getStartAfter().toByteArray(),
        req.getNeedLocation());
if (result != null) {
      return GetListingResponseProto.newBuilder().setDirList(
        PBHelper.convert(result)).build();
    } else {
      return VOID_GETLISTING_RESPONSE;
    }
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
Developer: naver, Project: hadoop, Lines: 18, Source: ClientNamenodeProtocolServerSideTranslatorPB.java

Example 4: getListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
@Override
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  GetListingRequestProto req = GetListingRequestProto.newBuilder()
      .setSrc(src)
      .setStartAfter(ByteString.copyFrom(startAfter))
      .setNeedLocation(needLocation).build();
  try {
    GetListingResponseProto result = rpcProxy.getListing(null, req);
    
    if (result.hasDirList()) {
      return PBHelper.convert(result.getDirList());
    }
    return null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Developer: naver, Project: hadoop, Lines: 20, Source: ClientNamenodeProtocolTranslatorPB.java

Example 5: verifyFile

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path parent = file.getParent();
  DirectoryListing children = dfs.getClient().listPaths(
      parent.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus child : children.getPartialListing()) {
    if (child.getLocalName().equals(file.getName())) {
      verifyFile(parent, child, expectedPolicyId);
      return;
    }
  }
  Assert.fail("File " + file + " not found.");
}
 
Developer: naver, Project: hadoop, Lines: 14, Source: TestStorageMover.java

Example 6: recoverAllLeases

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
static void recoverAllLeases(DFSClient dfs, 
    Path path) throws IOException {
  String pathStr = path.toString();
  HdfsFileStatus status = dfs.getFileInfo(pathStr);
  if (!status.isDir()) {
    dfs.recoverLease(pathStr);
    return;
  }
  byte prev[] = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing dirList;
  do {
    dirList = dfs.listPaths(pathStr, prev);
    HdfsFileStatus files[] = dirList.getPartialListing();
    for (HdfsFileStatus f : files) {
      recoverAllLeases(dfs, f.getFullPath(path));
    }
    prev = dirList.getLastName();
  } while (dirList.hasMore());
}
 
Developer: naver, Project: hadoop, Lines: 20, Source: TestDFSUpgradeFromImage.java

Example 7: getListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
@Override
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws IOException {
  GetListingRequestProto req = GetListingRequestProto.newBuilder()
      .setSrc(src)
      .setStartAfter(ByteString.copyFrom(startAfter))
      .setNeedLocation(needLocation).build();
  try {
    GetListingResponseProto result = rpcProxy.getListing(null, req);

    if (result.hasDirList()) {
      return PBHelperClient.convert(result.getDirList());
    }
    return null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 19, Source: ClientNamenodeProtocolTranslatorPB.java

Example 8: getListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Get a partial listing of the indicated directory
 *
 * @param src the directory name
 * @param startAfter the name to start after
 * @param needLocation if blockLocations need to be returned
 * @return a partial listing starting after startAfter
 * 
 * @throws AccessControlException if access is denied
 * @throws UnresolvedLinkException if symbolic link is encountered
 * @throws IOException if other I/O error occurred
 */
DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) 
    throws IOException {
  checkOperation(OperationCategory.READ);
  DirectoryListing dl = null;
  readLock();
  try {
    checkOperation(NameNode.OperationCategory.READ);
    dl = getListingInt(dir, src, startAfter, needLocation);
  } catch (AccessControlException e) {
    logAuditEvent(false, "listStatus", src);
    throw e;
  } finally {
    readUnlock();
  }
  logAuditEvent(true, "listStatus", src);
  return dl;
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 31, Source: FSNamesystem.java

Example 9: getListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
@Override
public GetListingResponseProto getListing(RpcController controller,
    GetListingRequestProto req) throws ServiceException {
  try {
    DirectoryListing result = server.getListing(
        req.getSrc(), req.getStartAfter().toByteArray(),
        req.getNeedLocation());
    if (result != null) {
      return GetListingResponseProto.newBuilder().setDirList(
        PBHelperClient.convert(result)).build();
    } else {
      return VOID_GETLISTING_RESPONSE;
    }
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 18, Source: ClientNamenodeProtocolServerSideTranslatorPB.java

Example 10: listPaths

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Used by readdir and readdirplus to get dirents. It retries the listing if
 * the startAfter can't be found anymore.
 */
private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
    byte[] startAfter) throws IOException {
  DirectoryListing dlisting;
  try {
    dlisting = dfsClient.listPaths(dirFileIdPath, startAfter);
  } catch (RemoteException e) {
    IOException io = e.unwrapRemoteException();
    if (!(io instanceof DirectoryListingStartAfterNotFoundException)) {
      throw io;
    }
    // This happens when startAfter was just deleted
    LOG.info("Cookie couldn't be found: " + new String(startAfter)
        + ", do listing from beginning");
    dlisting = dfsClient
        .listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
  }
  return dlisting;
}
 
Developer: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 23, Source: RpcProgramNfs3.java

Example 11: getPartialListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Get a partial listing of the indicated directory
 *
 * @param src          the directory name
 * @param startAfter   the name to start after
 * @param needLocation if blockLocations need to be returned
 * @return a partial listing starting after startAfter
 */
public DirectoryListing getPartialListing(String src, byte[] startAfter,
                                          boolean needLocation)
  throws IOException {
  DirectoryListing stats;
  src = dir.normalizePath(src);
  byte[][] names = INode.getPathComponents(src);
  INode[] inodes = new INode[names.length];
  readLock();
  try {
    dir.rootDir.getExistingPathINodes(names, inodes);
    getListingCheck(src, inodes);
    stats = dir.getPartialListing(src, inodes[inodes.length-1],
        startAfter, needLocation);
  } finally {
    readUnlock();
  }
  if (auditLog.isInfoEnabled()) {
    logAuditEvent(getCurrentUGI(),
      Server.getRemoteIp(),
      "listStatus", src, null, null);
  }
  return stats;
}
 
Developer: rhli, Project: hadoop-EAR, Lines: 32, Source: FSNamesystem.java

Example 12: getSnapshotsListing

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Get a listing of all the snapshots of a snapshottable directory
 */
private DirectoryListing getSnapshotsListing(String src, byte[] startAfter)
    throws UnresolvedLinkException, IOException {
  Preconditions.checkState(hasReadLock());
  Preconditions.checkArgument(
      src.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR), 
      "%s does not end with %s", src, HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR);
  
  final String dirPath = normalizePath(src.substring(0,
      src.length() - HdfsConstants.DOT_SNAPSHOT_DIR.length()));
  
  final INode node = this.getINode(dirPath);
  final INodeDirectorySnapshottable dirNode = INodeDirectorySnapshottable
      .valueOf(node, dirPath);
  final ReadOnlyList<Snapshot> snapshots = dirNode.getSnapshotList();
  int skipSize = ReadOnlyList.Util.binarySearch(snapshots, startAfter);
  skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
  int numOfListing = Math.min(snapshots.size() - skipSize, this.lsLimit);
  final HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
  for (int i = 0; i < numOfListing; i++) {
    Root sRoot = snapshots.get(i + skipSize).getRoot();
    listing[i] = createFileStatus(sRoot.getLocalNameBytes(), sRoot, null);
  }
  return new DirectoryListing(
      listing, snapshots.size() - skipSize - numOfListing);
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 29, Source: FSDirectory.java

Example 13: getListingInt

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
private DirectoryListing getListingInt(String src, byte[] startAfter,
    boolean needLocation) 
  throws AccessControlException, UnresolvedLinkException, IOException {
  DirectoryListing dl;
  FSPermissionChecker pc = getPermissionChecker();
  checkOperation(OperationCategory.READ);
  byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
  readLock();
  try {
    checkOperation(OperationCategory.READ);
    src = FSDirectory.resolvePath(src, pathComponents, dir);

    if (isPermissionEnabled) {
      if (dir.isDir(src)) {
        checkPathAccess(pc, src, FsAction.READ_EXECUTE);
      } else {
        checkTraverse(pc, src);
      }
    }
    logAuditEvent(true, "listStatus", src);
    dl = dir.getListing(src, startAfter, needLocation);
  } finally {
    readUnlock();
  }
  return dl;
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 27, Source: FSNamesystem.java

Example 14: getHdfsFileList

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Retrieve list of files under @hdfsDir for @hdfsClient.
 */
public static List<HdfsFileStatus> getHdfsFileList(DFSClient hdfsClient,
                                                   String hdfsDir)
    throws IOException {
  List<HdfsFileStatus> fileList = Lists.newArrayList();
  // Build a list of files.
  DirectoryListing listing = null;
  String continuation = "";
  while (true) {
    listing = hdfsClient.listPaths(hdfsDir, continuation.getBytes());
    for (HdfsFileStatus fileStatus : listing.getPartialListing()) {
      fileList.add(fileStatus);
    }
    // Go through the listing and paginate.
    if (!listing.hasMore()) {
      break;
    } else {
      continuation = new String(listing.getLastName());
    }
  }
  return fileList;
}
 
Developer: pinterest-attic, Project: terrapin, Lines: 25, Source: TerrapinUtil.java
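A hypothetical caller of the helper above might look like the following sketch; the NameNode address and directory path are assumptions for illustration and do not come from the Terrapin project.

// Hypothetical usage; assumes the TerrapinUtil class shown above is on the classpath.
static void printHdfsFiles() throws IOException {
  Configuration conf = new Configuration();
  // Assumed NameNode address and directory; adjust for your cluster.
  DFSClient hdfsClient = new DFSClient(new InetSocketAddress("namenode.example.com", 8020), conf);
  try {
    for (HdfsFileStatus status : TerrapinUtil.getHdfsFileList(hdfsClient, "/terrapin/data")) {
      System.out.println(status.getLocalName() + " (" + status.getLen() + " bytes)");
    }
  } finally {
    hdfsClient.close();
  }
}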

Example 15: listPaths

import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the required package/class
/**
 * Used by readdir and readdirplus to get dirents. It retries the listing if
 * the startAfter can't be found anymore.
 */
private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
    byte[] startAfter) throws IOException {
  DirectoryListing dlisting = null;
  try {
    dlisting = dfsClient.listPaths(dirFileIdPath, startAfter);
  } catch (RemoteException e) {
    IOException io = e.unwrapRemoteException();
    if (!(io instanceof DirectoryListingStartAfterNotFoundException)) {
      throw io;
    }
    // This happens when startAfter was just deleted
    LOG.info("Cookie cound't be found: " + new String(startAfter) +
        ", do listing from beginning");
    dlisting = dfsClient.listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
  }
  return dlisting;
}
 
Developer: hopshadoop, Project: hops, Lines: 22, Source: RpcProgramNfs3.java


Note: The org.apache.hadoop.hdfs.protocol.DirectoryListing examples in this article were compiled by 纯净天空 from open-source code hosted on GitHub, MSDocs, and similar platforms. The snippets come from open-source projects and remain the copyright of their original authors; consult each project's license before distributing or reusing the code, and do not republish without permission.