This article collects typical usage examples of the Java method org.apache.hadoop.io.BytesWritable.toString. If you are wondering what BytesWritable.toString does, how to call it, or what it looks like in real code, the selected examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.BytesWritable.
Three code examples of the BytesWritable.toString method are shown below, sorted by popularity by default.
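For orientation before the examples: BytesWritable.toString() renders the wrapped bytes as space-separated, zero-padded lowercase hex pairs. The following is a minimal sketch of that behaviour; the class name and the sample string are illustrative only and are not part of any example on this page.

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.BytesWritable;

public class BytesWritableToStringDemo {   // hypothetical demo class
    public static void main(String[] args) {
        BytesWritable bw = new BytesWritable("hi".getBytes(StandardCharsets.UTF_8));
        // Each byte is printed as a zero-padded hex pair separated by spaces, e.g. "68 69"
        System.out.println(bw.toString());
    }
}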
Example 1: reduce
import org.apache.hadoop.io.BytesWritable; // import the package/class the method depends on
public void reduce(BytesWritable key, Iterator<IntWritable> values,
                   OutputCollector<BytesWritable, IntWritable> output,
                   Reporter reporter) throws IOException {
  int ones = 0;
  int twos = 0;
  while (values.hasNext()) {
    IntWritable count = values.next();
    if (count.equals(sortInput)) {
      ++ones;
    } else if (count.equals(sortOutput)) {
      ++twos;
    } else {
      throw new IOException("Invalid 'value' of " + count.get() +
                            " for (key,value): " + key.toString());
    }
  }
  // Check to ensure there are equal no. of ones and twos
  if (ones != twos) {
    throw new IOException("Illegal ('one', 'two'): (" + ones + ", " + twos +
                          ") for (key, value): " + key.toString());
  }
}
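The reducer above compares each value against two IntWritable markers, sortInput and sortOutput, which are not part of the excerpt. A plausible sketch of the enclosing class follows; the class name and the field values 1 and 2 are assumptions inferred from the 'ones'/'twos' counters, not taken from the original source.

import org.apache.hadoop.io.IntWritable;

// Hypothetical enclosing class; only the two marker fields matter here.
public class SortRecordChecker {
    // Assumed values: 1 tags records seen in the sort input, 2 tags records seen in the sort output.
    private static final IntWritable sortInput = new IntWritable(1);
    private static final IntWritable sortOutput = new IntWritable(2);

    // The reduce() method from Example 1 would live here.
}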
Example 2: map
import org.apache.hadoop.io.BytesWritable; // import the package/class the method depends on
/**
 * {@inheritDoc}
 */
protected void map(final Object key, final BytesWritable value, final Context context) throws IOException, InterruptedException {
  if (value != null && value.toString() != null && value.toString().isEmpty()) {
    return;
  }
  // Mapper sends data with parent directory path as keys to retain directory structure
  final FileSplit fileSplit = (FileSplit) context.getInputSplit();
  final Path filePath = fileSplit.getPath();
  final String parentFilePath = String.format("%s/", filePath.getParent().toString());
  log.debug("Parent file path {}", parentFilePath);
  if (!fileSizesMap.containsKey(filePath.toString())) {
    if (fileSystem == null) {
      final URI uri = URI.create(filePath.toString());
      fileSystem = FileSystem.get(uri, configuration);
    }
    final FileStatus[] listStatuses = fileSystem.listStatus(filePath);
    for (FileStatus fileStatus : listStatuses) {
      if (!fileStatus.isDirectory()) {
        fileSizesMap.put(fileStatus.getPath().toString(), fileStatus.getLen());
        log.info("Entry added to fileSizes Map {} {}", fileStatus.getPath().toString(), fileStatus.getLen());
      }
    }
  }
  final Text parentFilePathKey = new Text(parentFilePath);
  final Text filePathKey = new Text(filePath.toString());
  final Long fileSize = fileSizesMap.get(filePath.toString());
  if (fileSize < threshold) {
    context.write(parentFilePathKey, value);
  } else {
    context.write(filePathKey, value);
  }
}
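Example 2 references several members that are not shown in the excerpt: fileSizesMap, fileSystem, configuration, threshold and log. Below is a minimal sketch of how the enclosing mapper might declare them; the class name, field types, configuration key and default threshold are assumptions, not the original source.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical skeleton of the enclosing mapper; only map() comes from Example 2.
public class SizeAwareMapper extends Mapper<Object, BytesWritable, Text, BytesWritable> {

    private static final Logger log = LoggerFactory.getLogger(SizeAwareMapper.class);

    // Cache of file path -> length, filled lazily inside map()
    private final Map<String, Long> fileSizesMap = new HashMap<>();
    private FileSystem fileSystem;
    private Configuration configuration;
    private long threshold;

    @Override
    protected void setup(final Context context) {
        configuration = context.getConfiguration();
        // Assumed configuration key and default; the excerpt only shows that a numeric threshold exists.
        threshold = configuration.getLong("mapper.size.threshold", 128L * 1024 * 1024);
    }

    // The map() method from Example 2 would go here.
}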
Example 3: textifyBytes
import org.apache.hadoop.io.BytesWritable; // import the package/class the method depends on
private static String textifyBytes(Text t) {
  BytesWritable b = new BytesWritable();
  b.set(t.getBytes(), 0, t.getLength());
  return b.toString();
}
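This helper copies the Text's raw bytes into a BytesWritable solely to reuse its toString(): the result is the space-separated hex form shown in the sketch after the introduction, which keeps non-printable characters readable when the value is embedded in log or error messages.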