This article collects typical usage examples of the Java method org.apache.hadoop.io.Text.write. If you are wondering what Text.write does, how to call it, or where it is used, the curated examples below should help. You can also explore the other methods of the org.apache.hadoop.io.Text class.
Four code examples of the Text.write method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
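Before the examples, some context: Text.write(DataOutput) serializes the string as a variable-length int (the byte length) followed by the UTF-8 bytes, and Text.readFields(DataInput) reads the same layout back. Here is a minimal, self-contained round-trip sketch; the class and variable names are illustrative, not taken from the examples below:

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

import java.io.IOException;

// Illustrative round trip: Text.write emits a vint length followed by
// the UTF-8 bytes; Text.readFields consumes the same layout.
public class TextWriteRoundTrip {
  public static void main(String[] args) throws IOException {
    Text out = new Text("hello hadoop");
    DataOutputBuffer dob = new DataOutputBuffer();
    out.write(dob);                         // serialize into the buffer

    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), dob.getLength());
    Text in = new Text();
    in.readFields(dib);                     // deserialize from the buffer

    System.out.println(in);                 // prints "hello hadoop"
  }
}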
Example 1: WritableSortable
import org.apache.hadoop.io.Text; // import the package/class the method depends on
public WritableSortable(int j) throws IOException {
  seed = r.nextLong();
  r.setSeed(seed);
  Text t = new Text();
  StringBuilder sb = new StringBuilder();
  indices = new int[j];
  offsets = new int[j];
  check = new String[j];
  DataOutputBuffer dob = new DataOutputBuffer();
  for (int i = 0; i < j; ++i) {
    indices[i] = i;
    offsets[i] = dob.getLength();          // start of record i in the buffer
    genRandom(t, r.nextInt(15) + 1, sb);   // fill t with a random string
    t.write(dob);                          // append the serialized Text
    check[i] = t.toString();               // keep a copy for verification
  }
  eob = dob.getLength();                   // end of the packed records
  bytes = dob.getData();
  comparator = WritableComparator.get(Text.class);
}
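Because Text.write is length-prefixed, the records packed into the buffer can be recovered individually from the recorded offsets. A hedged sketch, assuming bytes, offsets, and eob are the fields the constructor above fills:

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.Text;
import java.io.IOException;

// Illustrative helper: recover the i-th record from the packed buffer.
static Text readRecord(byte[] bytes, int[] offsets, int eob, int i)
    throws IOException {
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(bytes, offsets[i], eob - offsets[i]); // start at record i
  Text t = new Text();
  t.readFields(dib); // consumes exactly one length-prefixed record
  return t;
}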
Example 2: write
import org.apache.hadoop.io.Text; // import the package/class the method depends on
public void write(DataOutput dataOutput) throws IOException {
  // Text cannot wrap null, so substitute an empty string
  Text text = new Text(wifiProb == null ? "" : wifiProb);
  text.write(dataOutput);
  // one reusable LongWritable serializes the numeric fields in a fixed order
  LongWritable longWritable = new LongWritable();
  longWritable.set(hour);
  longWritable.write(dataOutput);
  longWritable.set(newCustomer);
  longWritable.write(dataOutput);
  longWritable.set(oldCustomer);
  longWritable.write(dataOutput);
}
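For a custom Writable like this one, readFields must consume the fields in exactly the order write emitted them. A hedged counterpart sketch, assuming the enclosing class has the same wifiProb, hour, newCustomer, and oldCustomer fields (the source does not show its readFields):

public void readFields(DataInput dataInput) throws IOException {
  // mirror the write order: Text first, then the three longs
  Text text = new Text();
  text.readFields(dataInput);
  wifiProb = text.toString();
  LongWritable longWritable = new LongWritable();
  longWritable.readFields(dataInput);
  hour = longWritable.get();
  longWritable.readFields(dataInput);
  newCustomer = longWritable.get();
  longWritable.readFields(dataInput);
  oldCustomer = longWritable.get();
}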
Example 3: write
import org.apache.hadoop.io.Text; // import the package/class the method depends on
public void write(DataOutput dataOutput) throws IOException {
  // Text cannot wrap null, so substitute an empty string
  Text text = new Text(wifiProb == null ? "" : wifiProb);
  text.write(dataOutput);
  // one reusable IntWritable serializes the eight counters in a fixed order
  IntWritable intWritable = new IntWritable();
  intWritable.set(inNoOutWifi);
  intWritable.write(dataOutput);
  intWritable.set(inNoOutStore);
  intWritable.write(dataOutput);
  intWritable.set(outNoInWifi);
  intWritable.write(dataOutput);
  intWritable.set(outNoInStore);
  intWritable.write(dataOutput);
  intWritable.set(inAndOutWifi);
  intWritable.write(dataOutput);
  intWritable.set(inAndOutStore);
  intWritable.write(dataOutput);
  intWritable.set(stayInWifi);
  intWritable.write(dataOutput);
  intWritable.set(stayInStore);
  intWritable.write(dataOutput);
  // one reusable DoubleWritable serializes the three rates
  DoubleWritable doubleWritable = new DoubleWritable();
  doubleWritable.set(jumpRate);
  doubleWritable.write(dataOutput);
  doubleWritable.set(deepVisit);
  doubleWritable.write(dataOutput);
  doubleWritable.set(inStoreRate);
  doubleWritable.write(dataOutput);
}
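Example 3 scales the same pattern up: the sequence of write calls is the wire format, so the matching readFields must mirror it call for call, or every later field deserializes garbage. Reusing a single IntWritable and DoubleWritable across fields, rather than allocating a wrapper per field, keeps garbage collection pressure down on hot MapReduce serialization paths.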
Example 4: writePartitionFile
import org.apache.hadoop.io.Text; // import the package/class the method depends on
/**
 * Use the input splits to take samples of the input and generate sample
 * keys. By default reads 100,000 keys from 10 locations in the input, sorts
 * them and picks N-1 keys to generate N equally sized partitions.
 * @param job the job to sample
 * @param partFile where to write the output file to
 * @throws Throwable if something goes wrong
 */
public static void writePartitionFile(final JobContext job,
    Path partFile) throws Throwable {
  long t1 = System.currentTimeMillis();
  Configuration conf = job.getConfiguration();
  final TeraInputFormat inFormat = new TeraInputFormat();
  final TextSampler sampler = new TextSampler();
  int partitions = job.getNumReduceTasks();
  long sampleSize = conf.getLong(SAMPLE_SIZE, 100000);
  final List<InputSplit> splits = inFormat.getSplits(job);
  long t2 = System.currentTimeMillis();
  System.out.println("Computing input splits took " + (t2 - t1) + "ms");
  int samples = Math.min(conf.getInt(NUM_PARTITIONS, 10), splits.size());
  System.out.println("Sampling " + samples + " splits of " + splits.size());
  final long recordsPerSample = sampleSize / samples;
  final int sampleStep = splits.size() / samples;
  Thread[] samplerReader = new Thread[samples];
  SamplerThreadGroup threadGroup =
      new SamplerThreadGroup("Sampler Reader Thread Group");
  // take N samples from different parts of the input
  for (int i = 0; i < samples; ++i) {
    final int idx = i;
    samplerReader[i] =
      new Thread(threadGroup, "Sampler Reader " + idx) {
        {
          setDaemon(true);
        }
        public void run() {
          long records = 0;
          try {
            TaskAttemptContext context = new TaskAttemptContextImpl(
                job.getConfiguration(), new TaskAttemptID());
            RecordReader<Text, Text> reader =
                inFormat.createRecordReader(splits.get(sampleStep * idx),
                    context);
            reader.initialize(splits.get(sampleStep * idx), context);
            while (reader.nextKeyValue()) {
              sampler.addKey(new Text(reader.getCurrentKey()));
              records += 1;
              if (recordsPerSample <= records) {
                break;
              }
            }
          } catch (IOException ie) {
            System.err.println("Got an exception while reading splits " +
                StringUtils.stringifyException(ie));
            throw new RuntimeException(ie);
          } catch (InterruptedException e) {
          }
        }
      };
    samplerReader[i].start();
  }
  FileSystem outFs = partFile.getFileSystem(conf);
  DataOutputStream writer = outFs.create(partFile, true, 64 * 1024, (short) 10,
      outFs.getDefaultBlockSize(partFile));
  for (int i = 0; i < samples; i++) {
    try {
      samplerReader[i].join();
      if (threadGroup.getThrowable() != null) {
        throw threadGroup.getThrowable();
      }
    } catch (InterruptedException e) {
    }
  }
  // each sampled cut point is a Text key; Text.write serializes it
  // (vint length + UTF-8 bytes) into the partition file
  for (Text split : sampler.createPartitions(partitions)) {
    split.write(writer);
  }
  writer.close();
  long t3 = System.currentTimeMillis();
  System.out.println("Computing partitions took " + (t3 - t2) + "ms");
}
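On the reading side, the partition file holds N-1 cut keys written back to back by Text.write, so they can be recovered with Text.readFields in the same order. A hedged sketch; readPartitionFile is a hypothetical helper for illustration, not TeraSort's actual reader:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import java.io.IOException;

// Hypothetical reader for the file written above: partitions - 1 cut keys,
// each serialized by Text.write, read back in the same order.
static Text[] readPartitionFile(Configuration conf, Path partFile,
    int partitions) throws IOException {
  FileSystem fs = partFile.getFileSystem(conf);
  Text[] cuts = new Text[partitions - 1];
  try (FSDataInputStream in = fs.open(partFile)) {
    for (int i = 0; i < cuts.length; i++) {
      cuts[i] = new Text();
      cuts[i].readFields(in); // mirrors split.write(writer)
    }
  }
  return cuts;
}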