当前位置: 首页>>代码示例>>Java>>正文


Java DirectoryListing.getPartialListing方法代码示例

本文整理汇总了Java中org.apache.hadoop.hdfs.protocol.DirectoryListing.getPartialListing方法的典型用法代码示例。如果您正苦于以下问题:Java DirectoryListing.getPartialListing方法的具体用法?Java DirectoryListing.getPartialListing怎么用?Java DirectoryListing.getPartialListing使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hdfs.protocol.DirectoryListing的用法示例。


在下文中一共展示了DirectoryListing.getPartialListing方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: verifyFile

import org.apache.hadoop.hdfs.protocol.DirectoryListing; //导入方法依赖的package包/类
/**
 * Looks up {@code file} in its parent directory listing and, if present,
 * verifies it against {@code expectedPolicyId}; fails the test otherwise.
 */
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path dir = file.getParent();
  final DirectoryListing listing = dfs.getClient().listPaths(
      dir.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus entry : listing.getPartialListing()) {
    if (!entry.getLocalName().equals(file.getName())) {
      continue;
    }
    verifyFile(dir, entry, expectedPolicyId);
    return;
  }
  Assert.fail("File " + file + " not found.");
}
 
开发者ID:naver,项目名称:hadoop,代码行数:14,代码来源:TestStorageMover.java

示例2: recoverAllLeases

import org.apache.hadoop.hdfs.protocol.DirectoryListing; //导入方法依赖的package包/类
/**
 * Recursively triggers lease recovery on every file under {@code path},
 * paging through directory listings batch by batch.
 */
static void recoverAllLeases(DFSClient dfs,
    Path path) throws IOException {
  final String dir = path.toString();
  final HdfsFileStatus info = dfs.getFileInfo(dir);
  if (!info.isDir()) {
    // Leaf: a regular file — recover its lease and stop descending.
    dfs.recoverLease(dir);
    return;
  }
  // Pagination cursor; EMPTY_NAME requests the first batch.
  byte[] cookie = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing batch;
  do {
    batch = dfs.listPaths(dir, cookie);
    for (HdfsFileStatus entry : batch.getPartialListing()) {
      recoverAllLeases(dfs, entry.getFullPath(path));
    }
    cookie = batch.getLastName();
  } while (batch.hasMore());
}
 
开发者ID:naver,项目名称:hadoop,代码行数:20,代码来源:TestDFSUpgradeFromImage.java

示例3: getHdfsFileList

import org.apache.hadoop.hdfs.protocol.DirectoryListing; //导入方法依赖的package包/类
/**
 * Retrieve list of files under @hdfsDir for @hdfsClient.
 */
/**
 * Retrieve list of files under @hdfsDir for @hdfsClient.
 *
 * <p>Pages through the directory listing until {@code hasMore()} is false,
 * accumulating every {@link HdfsFileStatus} returned.
 */
public static List<HdfsFileStatus> getHdfsFileList(DFSClient hdfsClient,
                                                   String hdfsDir)
    throws IOException {
  List<HdfsFileStatus> fileList = Lists.newArrayList();
  // Pagination cursor: the last name returned by the previous batch.
  // Keep it as raw bytes — the original round-tripped it through String
  // with the platform default charset (getBytes()/new String(byte[])),
  // which can corrupt non-ASCII file names and break pagination.
  byte[] continuation = HdfsFileStatus.EMPTY_NAME;
  while (true) {
    DirectoryListing listing = hdfsClient.listPaths(hdfsDir, continuation);
    for (HdfsFileStatus fileStatus : listing.getPartialListing()) {
      fileList.add(fileStatus);
    }
    // Go through the listing and paginate.
    if (!listing.hasMore()) {
      break;
    }
    continuation = listing.getLastName();
  }
  return fileList;
}
 
开发者ID:pinterest-attic,项目名称:terrapin,代码行数:25,代码来源:TerrapinUtil.java

示例4: verifyRecursively

import org.apache.hadoop.hdfs.protocol.DirectoryListing; //导入方法依赖的package包/类
/**
 * Depth-first walk of the tree rooted at {@code status}: recurses into
 * directories and verifies each regular (non-symlink) file.
 */
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (status.isDir()) {
    // A null parent means this is the root directory.
    final Path dir =
        (parent == null) ? new Path("/") : status.getFullPath(parent);
    final DirectoryListing listing = dfs.getClient().listPaths(
        dir.toString(), HdfsFileStatus.EMPTY_NAME, true);
    for (HdfsFileStatus entry : listing.getPartialListing()) {
      verifyRecursively(dir, entry);
    }
  } else if (!status.isSymlink()) {
    // Regular file — verify it directly.
    verifyFile(parent, status, null);
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:15,代码来源:TestStorageMover.java

示例5: getListingStream

import org.apache.hadoop.hdfs.protocol.DirectoryListing; //导入方法依赖的package包/类
/**
 * Builds a StreamingOutput that writes the full directory listing of
 * {@code p} as a JSON array of FileStatus objects, paging through the
 * namenode listing as it streams.
 */
private static StreamingOutput getListingStream(final NamenodeProtocols np,
    final String p) throws IOException {
  // Fetch the first batch eagerly so listing errors surface before
  // the HTTP response body starts streaming.
  final DirectoryListing first =
      getDirectoryListing(np, p, HdfsFileStatus.EMPTY_NAME);

  return new StreamingOutput() {
    @Override
    public void write(final OutputStream outstream) throws IOException {
      final PrintWriter out =
          new PrintWriter(new OutputStreamWriter(outstream, Charsets.UTF_8));
      out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\"" +
          FileStatus.class.getSimpleName() + "\":[");

      // First batch: first element has no leading comma.
      final HdfsFileStatus[] batch = first.getPartialListing();
      if (batch.length > 0) {
        out.print(JsonUtil.toJsonString(batch[0], false));
      }
      for (int i = 1; i < batch.length; i++) {
        out.println(',');
        out.print(JsonUtil.toJsonString(batch[i], false));
      }

      // Remaining batches: every entry is preceded by a comma.
      DirectoryListing curr = first;
      while (curr.hasMore()) {
        curr = getDirectoryListing(np, p, curr.getLastName());
        for (HdfsFileStatus s : curr.getPartialListing()) {
          out.println(',');
          out.print(JsonUtil.toJsonString(s, false));
        }
      }

      out.println();
      out.println("]}}");
      // PrintWriter buffers; flush so the closing brackets reach the client.
      out.flush();
    }
  };
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:37,代码来源:NamenodeWebHdfsMethods.java

示例6: getListingStream

import org.apache.hadoop.hdfs.protocol.DirectoryListing; //导入方法依赖的package包/类
/**
 * Builds a StreamingOutput that writes the full directory listing of
 * {@code p} as a JSON array of FileStatus objects, paging through the
 * namenode listing as it streams.
 */
private static StreamingOutput getListingStream(final NameNode np, 
    final String p) throws IOException {
  final DirectoryListing first = getDirectoryListing(np, p,
      HdfsFileStatus.EMPTY_NAME);

  return new StreamingOutput() {
    @Override
    public void write(final OutputStream outstream) throws IOException {
      // NOTE(review): PrintStream silently swallows IOExceptions and uses
      // the platform default charset — confirm callers expect both.
      final PrintStream out = new PrintStream(outstream);
      out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
          + FileStatus.class.getSimpleName() + "\":[");

      // First batch: first element has no leading comma.
      final HdfsFileStatus[] partial = first.getPartialListing();
      if (partial.length > 0) {
        out.print(JsonUtil.toJsonString(partial[0], false));
      }
      for(int i = 1; i < partial.length; i++) {
        out.println(',');
        out.print(JsonUtil.toJsonString(partial[i], false));
      }

      // Remaining batches: every entry is preceded by a comma.
      for(DirectoryListing curr = first; curr.hasMore(); ) { 
        curr = getDirectoryListing(np, p, curr.getLastName());
        for(HdfsFileStatus s : curr.getPartialListing()) {
          out.println(',');
          out.print(JsonUtil.toJsonString(s, false));
        }
      }
      
      out.println();
      out.println("]}}");
      // Bug fix: the PrintStream is not auto-flushing, so without an
      // explicit flush the buffered tail of the JSON body could be lost.
      out.flush();
    }
  };
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre,代码行数:35,代码来源:NamenodeWebHdfsMethods.java


注:本文中的org.apache.hadoop.hdfs.protocol.DirectoryListing.getPartialListing方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。