This article collects typical usage examples of the Java class org.apache.hadoop.typedbytes.TypedBytesOutput. If you are unsure what TypedBytesOutput is for or how to use it, the curated examples below should help.
TypedBytesOutput belongs to the org.apache.hadoop.typedbytes package. Four code examples are shown below, ordered by popularity by default.
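Before the examples, here is a minimal standalone sketch of the basic write pattern (the class name TypedBytesDemo and the in-memory buffer are ours; the constructor and write(Object) calls mirror how the examples below use the class):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.typedbytes.TypedBytesOutput;

public class TypedBytesDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    TypedBytesOutput out = new TypedBytesOutput(new DataOutputStream(buffer));
    out.write("hello"); // serialized as a typed-bytes string
    out.write(42);      // serialized as a typed-bytes integer
    System.out.println(buffer.size() + " bytes of typed bytes written");
  }
}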
Example 1: go
import org.apache.hadoop.typedbytes.TypedBytesOutput; // import the required package/class
public void go() throws IOException {
  TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(System.in));
  TypedBytesOutput tboutput = new TypedBytesOutput(new DataOutputStream(System.out));
  Object key = tbinput.readRaw();
  while (key != null) {
    Object value = tbinput.read();
    // emit (token, 1) for every token produced by splitting the value
    // on the regex held in the enclosing class's "find" field
    for (String part : value.toString().split(find)) {
      tboutput.write(part); // write key
      tboutput.write(1);    // write value
    }
    // report progress to the streaming framework via a counter update
    System.err.println("reporter:counter:UserCounters,InputLines,1");
    key = tbinput.readRaw();
  }
  System.out.flush();
}
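The method above references a find field that is not declared in the snippet; it is the split regex held by the enclosing mapper class. A minimal sketch of such a wrapper (the class name, constructor, and main method here are assumptions, not part of the excerpt):

public class TypedBytesMapApp {
  private final String find; // regex used by go() to tokenize values

  public TypedBytesMapApp(String find) {
    this.find = find;
  }

  public static void main(String[] args) throws IOException {
    new TypedBytesMapApp(args[0]).go(); // delimiter passed on the command line
  }
}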
Example 2: go
import org.apache.hadoop.typedbytes.TypedBytesOutput; // import the required package/class
public void go() throws IOException {
  TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(System.in));
  TypedBytesOutput tboutput = new TypedBytesOutput(new DataOutputStream(System.out));
  Object prevKey = null;
  int sum = 0;
  Object key = tbinput.read();
  while (key != null) {
    // keys arrive sorted, so a change of key means the previous
    // group is complete and its sum can be emitted
    if (prevKey != null && !key.equals(prevKey)) {
      tboutput.write(prevKey); // write key
      tboutput.write(sum);     // write value
      sum = 0;
    }
    sum += (Integer) tbinput.read();
    prevKey = key;
    key = tbinput.read();
  }
  // flush the final group (assumes at least one input record)
  tboutput.write(prevKey);
  tboutput.write(sum);
  System.out.flush();
}
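This reducer relies on the framework delivering keys in sorted order, so a group ends exactly when the key changes: for input pairs (a,1), (a,1), (b,1) it emits (a,2) and (b,1). A matching wrapper class might look like this (the class name and main method are assumptions):

public class TypedBytesReduceApp {
  public static void main(String[] args) throws IOException {
    new TypedBytesReduceApp().go();
  }
}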
Example 3: dumpTypedBytes
import org.apache.hadoop.typedbytes.TypedBytesOutput; // import the required package/class
/**
 * Dump given list of files to standard output as typed bytes.
 */
@SuppressWarnings("unchecked")
private int dumpTypedBytes(List<FileStatus> files) throws IOException {
  JobConf job = new JobConf(getConf());
  DataOutputStream dout = new DataOutputStream(System.out);
  AutoInputFormat autoInputFormat = new AutoInputFormat();
  for (FileStatus fileStatus : files) {
    // a single split sized generously enough to cover the whole file
    FileSplit split = new FileSplit(fileStatus.getPath(), 0,
        fileStatus.getLen() * fileStatus.getBlockSize(),
        (String[]) null);
    RecordReader recReader = null;
    try {
      recReader = autoInputFormat.getRecordReader(split, job, Reporter.NULL);
      Object key = recReader.createKey();
      Object value = recReader.createValue();
      while (recReader.next(key, value)) {
        // Writables have a dedicated serializer; everything else goes
        // through the generic TypedBytesOutput
        if (key instanceof Writable) {
          TypedBytesWritableOutput.get(dout).write((Writable) key);
        } else {
          TypedBytesOutput.get(dout).write(key);
        }
        if (value instanceof Writable) {
          TypedBytesWritableOutput.get(dout).write((Writable) value);
        } else {
          TypedBytesOutput.get(dout).write(value);
        }
      }
    } finally {
      if (recReader != null) {
        recReader.close();
      }
    }
  }
  dout.flush();
  return 0;
}
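The dumped stream can be read back with the companion TypedBytesInput class, using the same read-until-null pattern as Examples 1 and 2. A sketch, assuming the dump was redirected to a local file named dump.tb (the file name and the ReadDump class are ours):

import java.io.DataInputStream;
import java.io.FileInputStream;
import org.apache.hadoop.typedbytes.TypedBytesInput;

public class ReadDump {
  public static void main(String[] args) throws Exception {
    TypedBytesInput in =
        new TypedBytesInput(new DataInputStream(new FileInputStream("dump.tb")));
    Object key;
    while ((key = in.read()) != null) { // read() returns null at end of stream
      Object value = in.read();
      System.out.println(key + "\t" + value);
    }
  }
}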
Example 4: initialize
import org.apache.hadoop.typedbytes.TypedBytesOutput; // import the required package/class
@Override
public void initialize(PipeMapRed pipeMapRed) throws IOException {
  super.initialize(pipeMapRed);
  // both writers wrap the same stream feeding the external (pipe) process
  DataOutput clientOut = pipeMapRed.getClientOutput();
  tbOut = new TypedBytesOutput(clientOut);
  tbwOut = new TypedBytesWritableOutput(clientOut);
}
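Once initialized, such a writer typically sends each key or value to tbwOut when it is a Writable and to tbOut otherwise, mirroring the branch in Example 3. A sketch of that dispatch (the writeKey/writeValue names follow the streaming writer contract, but the body is our reconstruction, not the verbatim Hadoop source):

@Override
public void writeKey(Object key) throws IOException {
  writeTypedBytes(key);
}

@Override
public void writeValue(Object value) throws IOException {
  writeTypedBytes(value);
}

private void writeTypedBytes(Object obj) throws IOException {
  if (obj instanceof Writable) {
    tbwOut.write((Writable) obj); // Writables use the dedicated serializer
  } else {
    tbOut.write(obj);             // everything else uses TypedBytesOutput
  }
}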