This article collects typical usage examples of the Java method org.apache.hadoop.fs.HarFileSystem.getHarHash. If you are wondering what HarFileSystem.getHarHash does, how it is used, or where to find example code for it, the hand-picked method examples below may help. You can also look further into usage examples of the enclosing class, org.apache.hadoop.fs.HarFileSystem.
Below are 6 code examples of HarFileSystem.getHarHash, ordered by popularity by default.
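Before the examples, here is a minimal sketch of the method in isolation. It only shows that getHarHash maps a (relative) archive path to an int, which the mappers below emit as the key so that index entries are grouped into hash buckets. The class name and path are hypothetical, and the sketch assumes a Hadoop release in which HarFileSystem.getHarHash(Path) is publicly accessible, as it is in the examples on this page.

import org.apache.hadoop.fs.HarFileSystem;
import org.apache.hadoop.fs.Path;

public class HarHashDemo {
  public static void main(String[] args) {
    // Hypothetical relative path of an entry inside the archive.
    Path relPath = new Path("user/alice/part-00000");
    // The hash is later wrapped in an IntWritable and used as the map output key.
    int hash = HarFileSystem.getHarHash(relPath);
    System.out.println("har hash for " + relPath + " = " + hash);
  }
}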
Example 1: map
import org.apache.hadoop.fs.HarFileSystem; // import the class this method depends on
public void map(LongWritable key, Text value,
                OutputCollector<IntWritable, Text> out,
                Reporter reporter) throws IOException {
  String line = value.toString();
  MapStat mstat = new MapStat(line);
  Path relPath = new Path(mstat.pathname);
  int hash = HarFileSystem.getHarHash(relPath);
  String towrite = null;
  Path srcPath = realPath(relPath, rootPath);
  long startPos = partStream.getPos();
  if (mstat.isDir) {
    towrite = relPath.toString() + " " + "dir none " + 0 + " " + 0 + " ";
    StringBuffer sbuff = new StringBuffer();
    sbuff.append(towrite);
    for (String child: mstat.children) {
      sbuff.append(child + " ");
    }
    towrite = sbuff.toString();
    // reading directories is also progress
    reporter.progress();
  } else {
    FileSystem srcFs = srcPath.getFileSystem(conf);
    FileStatus srcStatus = srcFs.getFileStatus(srcPath);
    FSDataInputStream input = srcFs.open(srcStatus.getPath());
    reporter.setStatus("Copying file " + srcStatus.getPath() +
        " to archive.");
    copyData(srcStatus.getPath(), input, partStream, reporter);
    towrite = relPath.toString() + " file " + partname + " " + startPos
        + " " + srcStatus.getLen() + " ";
  }
  out.collect(new IntWritable(hash), new Text(towrite));
}
Example 2: map
import org.apache.hadoop.fs.HarFileSystem; // import the class this method depends on
public void map(LongWritable key, HarEntry value,
                OutputCollector<IntWritable, Text> out,
                Reporter reporter) throws IOException {
  Path relPath = new Path(value.path);
  int hash = HarFileSystem.getHarHash(relPath);
  String towrite = null;
  Path srcPath = realPath(relPath, rootPath);
  long startPos = partStream.getPos();
  FileSystem srcFs = srcPath.getFileSystem(conf);
  FileStatus srcStatus = srcFs.getFileStatus(srcPath);
  String propStr = encodeProperties(srcStatus);
  if (value.isDir()) {
    towrite = encodeName(relPath.toString())
        + " dir " + propStr + " 0 0 ";
    StringBuffer sbuff = new StringBuffer();
    sbuff.append(towrite);
    for (String child: value.children) {
      sbuff.append(encodeName(child) + " ");
    }
    towrite = sbuff.toString();
    // reading directories is also progress
    reporter.progress();
  } else {
    FSDataInputStream input = srcFs.open(srcStatus.getPath());
    reporter.setStatus("Copying file " + srcStatus.getPath() +
        " to archive.");
    copyData(srcStatus.getPath(), input, partStream, reporter);
    towrite = encodeName(relPath.toString())
        + " file " + partname + " " + startPos
        + " " + srcStatus.getLen() + " " + propStr + " ";
  }
  out.collect(new IntWritable(hash), new Text(towrite));
}
Example 3: map
import org.apache.hadoop.fs.HarFileSystem; // import the class this method depends on
public void map(LongWritable key, Text value,
                OutputCollector<IntWritable, Text> out,
                Reporter reporter) throws IOException {
  reporter.setStatus("Passing file " + value + " to archive.");
  reporter.progress();
  HarStatus harStatus = new HarStatus(value.toString());
  int hash = HarFileSystem.getHarHash(harStatus.getName());
  out.collect(new IntWritable(hash), value);
}
Example 4: map
import org.apache.hadoop.fs.HarFileSystem; // import the class this method depends on
public void map(LongWritable key, HarEntry value,
                OutputCollector<IntWritable, Text> out,
                Reporter reporter) throws IOException {
  Path relPath = new Path(value.path);
  int hash = HarFileSystem.getHarHash(relPath);
  String towrite = null;
  Path srcPath = realPath(relPath, rootPath);
  long startPos = partStream.getPos();
  FileSystem srcFs = srcPath.getFileSystem(conf);
  FileStatus srcStatus = srcFs.getFileStatus(srcPath);
  String propStr = URLEncoder.encode(
      srcStatus.getModificationTime() + " "
          + srcStatus.getAccessTime() + " "
          + srcStatus.getPermission().toShort() + " "
          + URLEncoder.encode(srcStatus.getOwner(), "UTF-8") + " "
          + URLEncoder.encode(srcStatus.getGroup(), "UTF-8"),
      "UTF-8");
  if (value.isDir()) {
    towrite = URLEncoder.encode(relPath.toString(), "UTF-8")
        + " dir " + propStr + " 0 0 ";
    StringBuffer sbuff = new StringBuffer();
    sbuff.append(towrite);
    for (String child: value.children) {
      sbuff.append(URLEncoder.encode(child, "UTF-8") + " ");
    }
    towrite = sbuff.toString();
    // reading directories is also progress
    reporter.progress();
  } else {
    FSDataInputStream input = srcFs.open(srcStatus.getPath());
    reporter.setStatus("Copying file " + srcStatus.getPath() +
        " to archive.");
    copyData(srcStatus.getPath(), input, partStream, reporter);
    towrite = URLEncoder.encode(relPath.toString(), "UTF-8")
        + " file " + partname + " " + startPos
        + " " + srcStatus.getLen() + " " + propStr + " ";
  }
  out.collect(new IntWritable(hash), new Text(towrite));
}
Example 5: map
import org.apache.hadoop.fs.HarFileSystem; // import the class this method depends on
public void map(LongWritable key, Text value,
                OutputCollector<IntWritable, Text> out,
                Reporter reporter) throws IOException {
  String line = value.toString();
  MapStat mstat = new MapStat(line);
  Path srcPath = new Path(mstat.pathname);
  String towrite = null;
  Path relPath = makeRelative(srcPath);
  int hash = HarFileSystem.getHarHash(relPath);
  long startPos = partStream.getPos();
  if (mstat.isDir) {
    towrite = relPath.toString() + " " + "dir none " + 0 + " " + 0 + " ";
    StringBuffer sbuff = new StringBuffer();
    sbuff.append(towrite);
    for (String child: mstat.children) {
      sbuff.append(child + " ");
    }
    towrite = sbuff.toString();
    // reading directories is also progress
    reporter.progress();
  } else {
    FileSystem srcFs = srcPath.getFileSystem(conf);
    FileStatus srcStatus = srcFs.getFileStatus(srcPath);
    FSDataInputStream input = srcFs.open(srcStatus.getPath());
    reporter.setStatus("Copying file " + srcStatus.getPath() +
        " to archive.");
    copyData(srcStatus.getPath(), input, partStream, reporter);
    towrite = relPath.toString() + " file " + partname + " " + startPos
        + " " + srcStatus.getLen() + " ";
  }
  out.collect(new IntWritable(hash), new Text(towrite));
}
Example 6: calculateHarHash
import org.apache.hadoop.fs.HarFileSystem; // import the class this method depends on
protected Integer calculateHarHash() {
  return HarFileSystem.getHarHash(new Path(path.replace("%2F", "/")));
}