本文整理汇总了Java中org.apache.hadoop.mapred.TaskAttemptContext类的典型用法代码示例。如果您正苦于以下问题:Java TaskAttemptContext类的具体用法?Java TaskAttemptContext怎么用?Java TaskAttemptContext使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
TaskAttemptContext类属于org.apache.hadoop.mapred包,在下文中一共展示了TaskAttemptContext类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testCloseWithTaskCommit
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test
public void testCloseWithTaskCommit() throws Exception {
    // Committer reports pending task output, so close() must trigger a commit.
    DummyOutputCommitter committer = mock(DummyOutputCommitter.class);
    when(committer.needsTaskCommit(any(TaskAttemptContext.class))).thenReturn(true);

    OutputFormat<String, Long> wrappedFormat = mock(DummyOutputFormat.class);
    DummyRecordWriter writer = mock(DummyRecordWriter.class);
    JobConf conf = mock(JobConf.class);

    HadoopOutputFormat<String, Long> format = new HadoopOutputFormat<>(wrappedFormat, conf);
    format.recordWriter = writer;
    format.outputCommitter = committer;

    format.close();

    // close() flushes the record writer exactly once and commits the task.
    verify(writer, times(1)).close(any(Reporter.class));
    verify(committer, times(1)).commitTask(any(TaskAttemptContext.class));
}
示例2: testCloseWithoutTaskCommit
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test
public void testCloseWithoutTaskCommit() throws Exception {
    // Committer reports no pending task output, so close() must NOT commit.
    DummyOutputCommitter committer = mock(DummyOutputCommitter.class);
    when(committer.needsTaskCommit(any(TaskAttemptContext.class))).thenReturn(false);

    OutputFormat<String, Long> wrappedFormat = mock(DummyOutputFormat.class);
    DummyRecordWriter writer = mock(DummyRecordWriter.class);
    JobConf conf = mock(JobConf.class);

    HadoopOutputFormat<String, Long> format = new HadoopOutputFormat<>(wrappedFormat, conf);
    format.recordWriter = writer;
    format.outputCommitter = committer;

    format.close();

    // The record writer is still closed, but no commit happens.
    verify(writer, times(1)).close(any(Reporter.class));
    verify(committer, times(0)).commitTask(any(TaskAttemptContext.class));
}
示例3: commitTask
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Commits this task attempt by moving all files produced in the temporary
 * task output directory to their final per-output destinations, then
 * deleting the temporary directory.
 *
 * @param context the task attempt being committed
 * @throws IOException if the temporary task output path does not exist
 */
@Override
public void commitTask(TaskAttemptContext context)
    throws IOException
{
    JobConf conf = context.getJobConf();
    TaskAttemptID attemptId = context.getTaskAttemptID();

    // Resolve the mapping from output index to final output filename.
    outputs = MRJobConfiguration.getOutputs(conf);

    // Temporary task output path (compatible with hadoop1 and hadoop2).
    Path taskOutPath = FileOutputFormat.getWorkOutputPath(conf);
    FileSystem fs = taskOutPath.getFileSystem(conf);
    if( !fs.exists(taskOutPath) )
        // FIX: the original message lacked a space before "does not exist",
        // producing e.g. "...tmpdoes not exist.".
        throw new IOException("Task output path " + taskOutPath.toString() + " does not exist.");

    // Move the task outputs to their final places (progress keeps the
    // task alive during potentially long file moves).
    context.getProgressible().progress();
    moveFinalTaskOutputs(context, fs, taskOutPath);

    // Delete the temporary task-specific output directory; a failed delete
    // is only logged because the outputs were already moved successfully.
    if( !fs.delete(taskOutPath, true) )
        LOG.debug("Failed to delete the temporary output directory of task: " + attemptId + " - " + taskOutPath);
}
示例4: moveFileToDestination
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Moves a single task output file to its final destination, derived from
 * the numeric prefix of the file name (e.g., "0-r-00000" belongs to
 * outputs[0]). On a failed rename, a stale destination file is removed and
 * the rename is retried once.
 *
 * @param context the committing task attempt (used for error reporting)
 * @param fs      file system holding both source and destination
 * @param file    task output file, named "&lt;index&gt;-..."
 * @throws IOException if the destination cannot be replaced or the rename
 *                     fails after the retry
 */
private void moveFileToDestination(TaskAttemptContext context, FileSystem fs, Path file)
    throws IOException
{
    TaskAttemptID attemptId = context.getTaskAttemptID();

    // Derive output index and final destination from the file name,
    // e.g., "0-r-00000" -> index 0, dest outX/0-r-00000.
    String name = file.getName();
    int index = Integer.parseInt(name.substring(0, name.indexOf("-")));
    Path dest = new Path(outputs[index], name);

    // Move the file into place; if the rename fails, clear a stale
    // destination (if any) and retry once.
    if( !fs.rename(file, dest) ) {
        // FIX: only attempt the delete when dest actually exists — delete()
        // returns false for a missing path, which previously raised a
        // misleading "Failed to delete earlier output" error.
        if (fs.exists(dest) && !fs.delete(dest, true))
            throw new IOException("Failed to delete earlier output " + dest + " for rename of " + file + " in task " + attemptId);
        if (!fs.rename(file, dest))
            throw new IOException("Failed to save output " + dest + " for rename of " + file + " in task: " + attemptId);
    }
}
示例5: initMapreduceOutputCommitter
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Lazily creates the wrapped mapreduce output committer; subsequent calls
 * reuse the committer created by the first one.
 *
 * @param taskContext context passed to the output format when creating
 *                    the committer
 * @throws IOException if committer creation is interrupted or fails
 */
private void initMapreduceOutputCommitter(TaskAttemptContext taskContext)
    throws IOException {
    // Fast path: committer was already created on an earlier call.
    if (mapreduceOutputCommitter != null) {
        LOG.debug("Using existing mapreduceOutputCommitter");
        return;
    }

    // It would be nice to use the BigQueryOutputFormat that already exists
    // (there is one wrapped inside our BigQueryMapredOutputFormat), but
    // there does not seem to be an easy way to do that. So make another one.
    LOG.debug("Creating BigQueryOutputFormat");
    BigQueryOutputFormat<Object, JsonObject> freshOutputFormat =
        new BigQueryOutputFormat<Object, JsonObject>();

    // Fortunately, mapred.TaskAttemptContext is a subclass of
    // mapreduce.TaskAttemptContext, so we can use it directly.
    try {
        LOG.debug("Creating mapreduce OutputCommit");
        mapreduceOutputCommitter = freshOutputFormat.getOutputCommitter(taskContext);
    } catch (InterruptedException ex) {
        // Surface interruption as an IOException to fit the mapred contract.
        throw new IOException(ex);
    }
}
示例6: testClose
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test public void testClose() throws IOException, InterruptedException {
    RecordWriter<LongWritable, JsonObject> writerUnderTest =
        new BigQueryMapredRecordWriter<LongWritable, JsonObject>(
            mockRecordWriter, mockTaskAttemptContext);
    Reporter unusedReporter = null; // unused by code under test

    // First close must delegate to the wrapped mapreduce record writer.
    writerUnderTest.close(unusedReporter);
    verify(mockRecordWriter).close(any(TaskAttemptContext.class));

    // A failure in the delegate must surface as an IOException from close().
    doThrow(new IOException("test")).
        when(mockRecordWriter).close(any(TaskAttemptContext.class));
    expectedException.expect(IOException.class);
    try {
        writerUnderTest.close(unusedReporter);
    } finally {
        verify(mockRecordWriter, times(2)).close(any(TaskAttemptContext.class));
    }
}
示例7: instantiateJobContext
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Reflectively instantiates a mapred JobContext in a way that works on both
 * Hadoop 1.x (concrete JobContext class) and Hadoop 2.x (JobContext is an
 * interface implemented by JobContextImpl).
 *
 * @param jobConf configuration to pass to the context constructor
 * @param jobId   job identifier to pass to the context constructor
 * @return the instantiated JobContext
 * @throws Exception if the class cannot be loaded or instantiated
 */
public static JobContext instantiateJobContext(JobConf jobConf, JobID jobId) throws Exception {
    try {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        // Hadoop 2.x turned TaskAttemptContext into an interface; use that as
        // the version probe to pick the matching JobContext implementation.
        String contextClassName = TaskAttemptContext.class.isInterface()
            ? "org.apache.hadoop.mapred.JobContextImpl"   // Hadoop 2.xx
            : "org.apache.hadoop.mapred.JobContext";      // Hadoop 1.xx
        Class<?> clazz = Class.forName(contextClassName, true, loader);

        Constructor<?> constructor = clazz.getDeclaredConstructor(JobConf.class, org.apache.hadoop.mapreduce.JobID.class);
        constructor.setAccessible(true); // constructor is not public in Hadoop 1.xx
        return (JobContext) constructor.newInstance(jobConf, jobId);
    } catch(Exception e) {
        throw new Exception("Could not create instance of JobContext.", e);
    }
}
示例8: instantiateTaskAttemptContext
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Reflectively instantiates a mapred TaskAttemptContext in a way that works
 * on both Hadoop 1.x (concrete class) and Hadoop 2.x (interface implemented
 * by TaskAttemptContextImpl).
 *
 * @param jobConf       configuration to pass to the context constructor
 * @param taskAttemptID task attempt identifier to pass to the constructor
 * @return the instantiated TaskAttemptContext
 * @throws Exception if the class cannot be loaded or instantiated
 */
public static TaskAttemptContext instantiateTaskAttemptContext(JobConf jobConf, TaskAttemptID taskAttemptID) throws Exception {
    try {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        // TaskAttemptContext being an interface indicates Hadoop 2.x.
        String contextClassName = TaskAttemptContext.class.isInterface()
            ? "org.apache.hadoop.mapred.TaskAttemptContextImpl"  // Hadoop 2.xx
            : "org.apache.hadoop.mapred.TaskAttemptContext";     // Hadoop 1.xx
        Class<?> clazz = Class.forName(contextClassName, true, loader);

        Constructor<?> constructor = clazz.getDeclaredConstructor(JobConf.class, TaskAttemptID.class);
        constructor.setAccessible(true); // constructor is not public in Hadoop 1.xx
        return (TaskAttemptContext) constructor.newInstance(jobConf, taskAttemptID);
    } catch(Exception e) {
        throw new Exception("Could not create instance of TaskAttemptContext.", e);
    }
}
示例9: moveFinalTaskOutputs
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Moves every regular file directly under the task output directory to its
 * final destination; sub-directories are skipped.
 *
 * @param context    committing task attempt (progress is reported on it)
 * @param fs         file system holding the task output
 * @param taskOutput temporary task output path
 * @throws IOException if listing or moving a file fails
 */
private void moveFinalTaskOutputs(TaskAttemptContext context, FileSystem fs, Path taskOutput)
    throws IOException
{
    // Report liveness before the potentially slow file moves.
    context.getProgressible().progress();

    if( !fs.getFileStatus(taskOutput).isDirectory() )
        return;

    FileStatus[] entries = fs.listStatus(taskOutput);
    if (entries == null)
        return;
    for (FileStatus entry : entries) {
        if (!entry.isDirectory()) // skip directories, move only files
            moveFileToDestination(context, fs, entry.getPath());
    }
}
示例10: commit
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Commits this task attempt's output, if any was written and the configured
 * output committer requires an explicit task commit.
 *
 * @throws IOException if the commit fails.
 */
public void commit() throws IOException {
    if (writer == null)
        return; // nothing was written, so there is nothing to commit

    OutputCommitter committer = jobConf.getOutputCommitter();
    TaskAttemptContext taskCtx = new TaskAttemptContextImpl(jobConf, attempt);
    if (committer.needsTaskCommit(taskCtx))
        committer.commitTask(taskCtx);
}
示例11: needsTaskCommit
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
/**
 * Delegates the needs-commit decision to the wrapped mapreduce committer,
 * creating it first if necessary.
 */
@Override
public boolean needsTaskCommit(TaskAttemptContext taskContext)
    throws IOException {
  LOG.debug("needsTaskCommit");
  // Ensure the wrapped committer exists, then forward the query.
  initMapreduceOutputCommitter(taskContext);
  boolean commitNeeded = mapreduceOutputCommitter.needsTaskCommit(taskContext);
  return commitNeeded;
}
示例12: testAbortTask
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test public void testAbortTask() throws IOException {
    // abortTask must be forwarded to the wrapped mapreduce committer.
    BigQueryMapredOutputCommitter committerUnderTest =
        new BigQueryMapredOutputCommitter();
    committerUnderTest.setMapreduceOutputCommitter(mockOutputCommitter);

    committerUnderTest.abortTask(mockTaskAttemptContext);

    verify(mockOutputCommitter).abortTask(any(TaskAttemptContext.class));
}
示例13: testCommitTask
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test public void testCommitTask() throws IOException {
    // commitTask must be forwarded to the wrapped mapreduce committer.
    BigQueryMapredOutputCommitter committerUnderTest =
        new BigQueryMapredOutputCommitter();
    committerUnderTest.setMapreduceOutputCommitter(mockOutputCommitter);

    committerUnderTest.commitTask(mockTaskAttemptContext);

    verify(mockOutputCommitter).commitTask(any(TaskAttemptContext.class));
}
示例14: testNeedsTaskCommit
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test public void testNeedsTaskCommit() throws IOException {
    // needsTaskCommit must be forwarded to the wrapped mapreduce committer.
    BigQueryMapredOutputCommitter committerUnderTest =
        new BigQueryMapredOutputCommitter();
    committerUnderTest.setMapreduceOutputCommitter(mockOutputCommitter);

    committerUnderTest.needsTaskCommit(mockTaskAttemptContext);

    verify(mockOutputCommitter).needsTaskCommit(any(TaskAttemptContext.class));
}
示例15: testSetupTask
import org.apache.hadoop.mapred.TaskAttemptContext; //导入依赖的package包/类
@Test public void testSetupTask() throws IOException {
    // setupTask must be forwarded to the wrapped mapreduce committer.
    BigQueryMapredOutputCommitter committerUnderTest =
        new BigQueryMapredOutputCommitter();
    committerUnderTest.setMapreduceOutputCommitter(mockOutputCommitter);

    committerUnderTest.setupTask(mockTaskAttemptContext);

    verify(mockOutputCommitter).setupTask(any(TaskAttemptContext.class));
}