本文整理匯總了Java中org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter.getWorkPath方法的典型用法代碼示例。如果您正苦於以下問題:Java FileOutputCommitter.getWorkPath方法的具體用法?Java FileOutputCommitter.getWorkPath怎麽用?Java FileOutputCommitter.getWorkPath使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
的用法示例。
在下文中一共展示了FileOutputCommitter.getWorkPath方法的5個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。
示例1: createRecordWriter
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; //導入方法依賴的package包/類
/**
* Creates a new {@link RecordWriter} to output temporary data.
* @param <V> value type
* @param context current context
* @param name output name
* @param dataType value type
* @return the created writer
* @throws IOException if failed to create a new {@link RecordWriter}
* @throws InterruptedException if interrupted
*/
/**
 * Creates a new {@link RecordWriter} to output temporary data.
 * @param <V> value type
 * @param context current context
 * @param name output name
 * @param dataType value type
 * @return the created writer
 * @throws IOException if failed to create a new {@link RecordWriter}
 * @throws InterruptedException if interrupted
 */
public <V> RecordWriter<NullWritable, V> createRecordWriter(
        TaskAttemptContext context,
        String name,
        Class<V> dataType) throws IOException, InterruptedException {
    FileOutputCommitter committer = (FileOutputCommitter) getOutputCommitter(context);
    // Place the output under the committer's work (task-attempt) directory,
    // using a name that is unique to this task attempt.
    Path target = new Path(
            committer.getWorkPath(),
            FileOutputFormat.getUniqueFile(context, name, "")); //$NON-NLS-1$
    // NOTE(review): TemporaryStorage.openOutput is project-specific; assumed to
    // open a writable ModelOutput for the given file — confirm against its docs.
    ModelOutput<V> output = TemporaryStorage.openOutput(
            context.getConfiguration(), dataType, target);
    return new RecordWriter<NullWritable, V>() {
        @Override
        public void write(NullWritable key, V value) throws IOException {
            // Keys are NullWritable; only the value is persisted.
            output.write(value);
        }
        @Override
        public void close(TaskAttemptContext ignored) throws IOException {
            output.close();
        }
        @Override
        public String toString() {
            return String.format("TemporaryOutput(%s)", target); //$NON-NLS-1$
        }
    };
}
示例2: init
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; //導入方法依賴的package包/類
@Override
public void init() throws IOException {
super.init();
Configuration taskConf = new Configuration();
Path stagingResultDir = new Path(stagingDir, TajoConstants.RESULT_DIR_NAME);
taskConf.set(FileOutputFormat.OUTDIR, stagingResultDir.toString());
ExecutionBlockId ebId = taskAttemptId.getTaskId().getExecutionBlockId();
writerContext = new TaskAttemptContextImpl(taskConf,
new TaskAttemptID(ebId.getQueryId().toString(), ebId.getId(), TaskType.MAP,
taskAttemptId.getTaskId().getId(), taskAttemptId.getId()));
HFileOutputFormat2 hFileOutputFormat2 = new HFileOutputFormat2();
try {
writer = hFileOutputFormat2.getRecordWriter(writerContext);
committer = new FileOutputCommitter(FileOutputFormat.getOutputPath(writerContext), writerContext);
workingFilePath = committer.getWorkPath();
} catch (InterruptedException e) {
throw new IOException(e.getMessage(), e);
}
LOG.info("Created hbase file writer: " + workingFilePath);
}
示例3: getDefaultWorkFile
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; //導入方法依賴的package包/類
/**
 * Returns this task attempt's work file: the committer's work directory
 * joined with a task-unique "part" file name carrying the given extension.
 *
 * @param context the task attempt context
 * @param extension the file name extension to append
 * @return the path of the default work file
 * @throws IOException if the output committer cannot be obtained
 */
@Override
public Path getDefaultWorkFile(TaskAttemptContext context,
    String extension) throws IOException {
  // The cast mirrors the superclass contract that the committer is file-based.
  FileOutputCommitter committer = (FileOutputCommitter) super.getOutputCommitter(context);
  String fileName = getUniqueFile(context, "part", extension);
  return new Path(committer.getWorkPath(), fileName);
}
示例4: getDefaultWorkFile
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; //導入方法依賴的package包/類
/**
 * Resolves the default work file of the given output format for a task attempt:
 * the committer's work directory joined with the context's output file name.
 *
 * @param <K> key type of the output format
 * @param <V> value type of the output format
 * @param format the file output format whose committer supplies the work directory
 * @param context the task attempt context
 * @return the path of the default work file
 * @throws IOException if the output committer cannot be obtained
 */
public static <K, V> Path getDefaultWorkFile(FileOutputFormat<K, V> format,
    TaskAttemptContext context) throws IOException {
  // Cast keeps the original assumption that the committer is file-based.
  Path workDir = ((FileOutputCommitter) format.getOutputCommitter(context)).getWorkPath();
  return new Path(workDir, getOutputFile(context));
}
示例5: getDefaultWorkFile
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; //導入方法依賴的package包/類
/**
* Get the default path and filename for the output format.
* @param context the task context
* @param extension an extension to add to the filename
* @return a full path $output/_temporary/$task-id/part-[mr]-$id
* @throws java.io.IOException
*/
@Override
public Path getDefaultWorkFile(TaskAttemptContext context, String extension) throws IOException {
FileOutputCommitter committer = (FileOutputCommitter) getOutputCommitter(context);
return new Path(committer.getWorkPath(), getCustomFileName(context, getOutputName(context), extension));
}