本文整理汇总了Java中org.apache.hadoop.fs.FSInputStream.read方法的典型用法代码示例。如果您正苦于以下问题:Java FSInputStream.read方法的具体用法?Java FSInputStream.read怎么用?Java FSInputStream.read使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.fs.FSInputStream
的用法示例。
在下文中一共展示了FSInputStream.read方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: verifyDir
import org.apache.hadoop.fs.FSInputStream; //导入方法依赖的package包/类
/**
 * Recursively verifies a directory tree: every path name is folded into
 * {@code overallChecksum}, and for each regular file the CRC32 of its
 * contents is computed and checked via {@code verifyChecksum}.
 *
 * @param dfs             file system to read from
 * @param dir             directory to walk
 * @param overallChecksum accumulator updated with each path name encountered
 * @throws IOException if listing a directory or reading a file fails
 */
private void verifyDir(DistributedFileSystem dfs, Path dir,
CRC32 overallChecksum) throws IOException {
  FileStatus[] fileArr = dfs.listStatus(dir);
  // TreeMap gives a deterministic, sorted traversal order so the
  // overall checksum is reproducible across runs.
  TreeMap<Path, Boolean> fileMap = new TreeMap<Path, Boolean>();
  for (FileStatus file : fileArr) {
    fileMap.put(file.getPath(), Boolean.valueOf(file.isDirectory()));
  }
  for (Iterator<Path> it = fileMap.keySet().iterator(); it.hasNext();) {
    Path path = it.next();
    boolean isDir = fileMap.get(path);
    String pathName = path.toUri().getPath();
    overallChecksum.update(pathName.getBytes());
    if (isDir) {
      verifyDir(dfs, path, overallChecksum);
    } else {
      // Regular file: checksum its full contents.
      CRC32 fileCRC = new CRC32();
      FSInputStream in = dfsOpenFileWithRetries(dfs, pathName);
      try {
        byte[] buf = new byte[4096];
        int nRead;
        while ((nRead = in.read(buf, 0, buf.length)) > 0) {
          fileCRC.update(buf, 0, nRead);
        }
      } finally {
        // FIX: the original leaked the stream; always close it.
        in.close();
      }
      verifyChecksum(pathName, fileCRC.getValue());
    }
  }
}
示例2: writeTo
import org.apache.hadoop.fs.FSInputStream; //导入方法依赖的package包/类
/**
 * Copies up to {@code count} bytes from {@code in}, starting at offset
 * {@code start}, into {@code os}. Stops early if the input reaches EOF
 * before {@code count} bytes have been transferred.
 *
 * @param in    positioned input stream to read from
 * @param os    destination stream
 * @param start byte offset at which copying begins
 * @param count maximum number of bytes to copy
 * @throws IOException if seeking, reading, or writing fails
 */
static void writeTo(FSInputStream in,
OutputStream os,
long start,
long count)
throws IOException {
  final byte[] buffer = new byte[4096];
  long remaining = count;
  in.seek(start);
  while (true) {
    // Cap each read at the buffer size and at what is still owed.
    int chunk = (int) Math.min(remaining, (long) buffer.length);
    int n = in.read(buffer, 0, chunk);
    if (n == -1) {
      break; // EOF before the requested count was reached
    }
    os.write(buffer, 0, n);
    remaining -= n;
    if (remaining <= 0) {
      break; // requested byte count fully transferred
    }
  }
}
示例3: verifyDir
import org.apache.hadoop.fs.FSInputStream; //导入方法依赖的package包/类
/**
 * Recursively verifies a directory tree: every path name is folded into
 * the enclosing class's {@code overallChecksum} accumulator, and for each
 * regular file the CRC32 of its contents is computed and checked via
 * {@code verifyChecksum}.
 *
 * <p>NOTE(review): {@code dfs.dfs.open(...)} reaches into the DFSClient
 * field directly — presumably to bypass FileSystem-level wrapping; confirm
 * against the enclosing test's intent.
 *
 * @param dfs file system to read from
 * @param dir directory to walk
 * @throws IOException if listing a directory or reading a file fails
 */
private void verifyDir(DistributedFileSystem dfs, Path dir)
throws IOException {
  FileStatus[] fileArr = dfs.listStatus(dir);
  // TreeMap gives a deterministic, sorted traversal order so the
  // overall checksum is reproducible across runs.
  TreeMap<Path, Boolean> fileMap = new TreeMap<Path, Boolean>();
  for (FileStatus file : fileArr) {
    fileMap.put(file.getPath(), Boolean.valueOf(file.isDirectory()));
  }
  for (Iterator<Path> it = fileMap.keySet().iterator(); it.hasNext();) {
    Path path = it.next();
    boolean isDir = fileMap.get(path);
    String pathName = path.toUri().getPath();
    overallChecksum.update(pathName.getBytes());
    if (isDir) {
      verifyDir(dfs, path);
    } else {
      // Regular file: checksum its full contents.
      CRC32 fileCRC = new CRC32();
      FSInputStream in = dfs.dfs.open(pathName);
      try {
        byte[] buf = new byte[4096];
        int nRead;
        while ((nRead = in.read(buf, 0, buf.length)) > 0) {
          fileCRC.update(buf, 0, nRead);
        }
      } finally {
        // FIX: the original leaked the stream; always close it.
        in.close();
      }
      verifyChecksum(pathName, fileCRC.getValue());
    }
  }
}