This article collects typical usage examples of the Java method org.apache.hadoop.hdfs.protocol.DirectoryListing.getPartialListing. If you have been wondering what DirectoryListing.getPartialListing does, how to call it, or where to find it used in context, the hand-picked code samples below may help. You can also browse further usage examples for the enclosing class, org.apache.hadoop.hdfs.protocol.DirectoryListing.
Six code examples of DirectoryListing.getPartialListing follow, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java samples.
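All six examples share the same paging idiom, so here is a minimal, self-contained sketch of it first: fetch a page with listPaths, iterate over getPartialListing(), then feed getLastName() back as the cursor while hasMore() is true. The printDirectory name and the client/dir parameters are illustrative, not taken from the examples; the HDFS calls themselves all appear below.

import java.io.IOException;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

// Minimal sketch; `client` and `dir` are assumed to be supplied by the caller.
static void printDirectory(DFSClient client, String dir) throws IOException {
  byte[] startAfter = HdfsFileStatus.EMPTY_NAME; // empty name = start at the beginning
  DirectoryListing page;
  do {
    page = client.listPaths(dir, startAfter);      // fetch one page of entries
    for (HdfsFileStatus entry : page.getPartialListing()) {
      System.out.println(entry.getLocalName());    // process each entry in the page
    }
    startAfter = page.getLastName();               // resume token for the next page
  } while (page.hasMore());                        // stop when the namenode says done
}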
Example 1: verifyFile
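This test helper lists the file's parent directory (the true flag asks listPaths to include block locations) and scans the returned page for the file's local name, delegating to a verifyFile overload that checks the expected storage policy; the test fails if the file is not found.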
import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the package/class this method depends on
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path parent = file.getParent();
  DirectoryListing children = dfs.getClient().listPaths(
      parent.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus child : children.getPartialListing()) {
    if (child.getLocalName().equals(file.getName())) {
      verifyFile(parent, child, expectedPolicyId);
      return;
    }
  }
  Assert.fail("File " + file + " not found.");
}
Example 2: recoverAllLeases
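This helper recovers leases for an entire subtree: plain files get recoverLease directly, while directories are paged through with listPaths, feeding getLastName() back as the cursor until hasMore() returns false.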
import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the package/class this method depends on
static void recoverAllLeases(DFSClient dfs,
    Path path) throws IOException {
  String pathStr = path.toString();
  HdfsFileStatus status = dfs.getFileInfo(pathStr);
  if (!status.isDir()) {
    dfs.recoverLease(pathStr);
    return;
  }
  byte[] prev = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing dirList;
  do {
    dirList = dfs.listPaths(pathStr, prev);
    HdfsFileStatus[] files = dirList.getPartialListing();
    for (HdfsFileStatus f : files) {
      recoverAllLeases(dfs, f.getFullPath(path));
    }
    prev = dirList.getLastName();
  } while (dirList.hasMore());
}
Example 3: getHdfsFileList
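This utility collects every HdfsFileStatus under a directory into a list, carrying the pagination cursor as a String instead of the raw byte[] returned by getLastName().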
import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the package/class this method depends on
/**
 * Retrieve the list of files under {@code hdfsDir} using {@code hdfsClient}.
 */
public static List<HdfsFileStatus> getHdfsFileList(DFSClient hdfsClient,
    String hdfsDir)
    throws IOException {
  List<HdfsFileStatus> fileList = Lists.newArrayList();
  // Build the list of files one page at a time.
  DirectoryListing listing = null;
  String continuation = "";
  while (true) {
    // Note: round-tripping the cursor through a String uses the platform
    // charset, which can corrupt non-ASCII file names; the raw byte[] from
    // getLastName() is the safer cursor (compare Example 2).
    listing = hdfsClient.listPaths(hdfsDir, continuation.getBytes());
    for (HdfsFileStatus fileStatus : listing.getPartialListing()) {
      fileList.add(fileStatus);
    }
    // Go through the listing and paginate.
    if (!listing.hasMore()) {
      break;
    } else {
      continuation = new String(listing.getLastName());
    }
  }
  return fileList;
}
Example 4: verifyRecursively
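This walker recursively verifies a tree: for a directory it fetches a single listing page (starting from EMPTY_NAME) and visits each child, while regular files are handed to verifyFile. Note that it never follows getLastName(), so it implicitly assumes each directory fits in one page.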
import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the package/class this method depends on
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (status.isDir()) {
    Path fullPath = parent == null ?
        new Path("/") : status.getFullPath(parent);
    DirectoryListing children = dfs.getClient().listPaths(
        fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true);
    for (HdfsFileStatus child : children.getPartialListing()) {
      verifyRecursively(fullPath, child);
    }
  } else if (!status.isSymlink()) { // is file
    verifyFile(parent, status, null);
  }
}
Example 5: getListingStream
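This WebHDFS helper streams a directory listing as JSON. The first page is fetched eagerly, before the StreamingOutput is returned, so listing errors surface before the HTTP response is committed; the remaining pages are pulled with getLastName() inside the write callback.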
import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the package/class this method depends on
private static StreamingOutput getListingStream(final NamenodeProtocols np,
    final String p) throws IOException {
  final DirectoryListing first =
      getDirectoryListing(np, p, HdfsFileStatus.EMPTY_NAME);
  return new StreamingOutput() {
    @Override
    public void write(final OutputStream outstream) throws IOException {
      final PrintWriter out =
          new PrintWriter(new OutputStreamWriter(outstream, Charsets.UTF_8));
      out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\"" +
          FileStatus.class.getSimpleName() + "\":[");
      final HdfsFileStatus[] partial = first.getPartialListing();
      if (partial.length > 0) {
        out.print(JsonUtil.toJsonString(partial[0], false));
      }
      for (int i = 1; i < partial.length; i++) {
        out.println(',');
        out.print(JsonUtil.toJsonString(partial[i], false));
      }
      for (DirectoryListing curr = first; curr.hasMore(); ) {
        curr = getDirectoryListing(np, p, curr.getLastName());
        for (HdfsFileStatus s : curr.getPartialListing()) {
          out.println(',');
          out.print(JsonUtil.toJsonString(s, false));
        }
      }
      out.println();
      out.println("]}}");
      out.flush();
    }
  };
}
Example 6: getListingStream
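A near-identical variant of Example 5 that takes the NameNode directly and writes through a PrintStream (platform default charset, no explicit flush) rather than a UTF-8 PrintWriter.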
import org.apache.hadoop.hdfs.protocol.DirectoryListing; // import the package/class this method depends on
private static StreamingOutput getListingStream(final NameNode np,
    final String p) throws IOException {
  final DirectoryListing first = getDirectoryListing(np, p,
      HdfsFileStatus.EMPTY_NAME);
  return new StreamingOutput() {
    @Override
    public void write(final OutputStream outstream) throws IOException {
      final PrintStream out = new PrintStream(outstream);
      out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
          + FileStatus.class.getSimpleName() + "\":[");
      final HdfsFileStatus[] partial = first.getPartialListing();
      if (partial.length > 0) {
        out.print(JsonUtil.toJsonString(partial[0], false));
      }
      for (int i = 1; i < partial.length; i++) {
        out.println(',');
        out.print(JsonUtil.toJsonString(partial[i], false));
      }
      for (DirectoryListing curr = first; curr.hasMore(); ) {
        curr = getDirectoryListing(np, p, curr.getLastName());
        for (HdfsFileStatus s : curr.getPartialListing()) {
          out.println(',');
          out.print(JsonUtil.toJsonString(s, false));
        }
      }
      out.println();
      out.println("]}}");
    }
  };
}