

Java TaskAttemptContext Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapred.TaskAttemptContext. If you are wondering what the TaskAttemptContext class does, how to use it, or what it looks like in real code, the curated examples below should help.


The TaskAttemptContext class belongs to the org.apache.hadoop.mapred package. The sections below present 15 code examples of the class, sorted by popularity by default.
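Before the examples, here is a minimal, hypothetical sketch (not taken from the projects below) of the typical role of TaskAttemptContext in the old mapred API: an OutputCommitter override reads the JobConf and the TaskAttemptID from the context before delegating to the default commit logic. The class name LoggingOutputCommitter is an assumption for illustration only; the Hadoop types and methods are the real API.

import java.io.IOException;

import org.apache.hadoop.mapred.FileOutputCommitter;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.mapred.TaskAttemptID;

// Hypothetical committer: logs the attempt being committed, then defers to FileOutputCommitter.
public class LoggingOutputCommitter extends FileOutputCommitter {
	@Override
	public void commitTask(TaskAttemptContext context) throws IOException {
		JobConf conf = context.getJobConf();                   // job configuration for this attempt
		TaskAttemptID attemptId = context.getTaskAttemptID();  // unique id of this task attempt
		System.out.println("Committing " + attemptId + " of job " + conf.getJobName());
		super.commitTask(context);                             // default file-moving commit logic
	}
}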

Example 1: testCloseWithTaskCommit

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test
public void testCloseWithTaskCommit() throws Exception {
	OutputFormat<String, Long> dummyOutputFormat = mock(DummyOutputFormat.class);
	DummyOutputCommitter outputCommitter = mock(DummyOutputCommitter.class);
	when(outputCommitter.needsTaskCommit(any(TaskAttemptContext.class))).thenReturn(true);
	DummyRecordWriter recordWriter = mock(DummyRecordWriter.class);
	JobConf jobConf = mock(JobConf.class);

	HadoopOutputFormat<String, Long> outputFormat = new HadoopOutputFormat<>(dummyOutputFormat, jobConf);
	outputFormat.recordWriter = recordWriter;
	outputFormat.outputCommitter = outputCommitter;

	outputFormat.close();

	verify(recordWriter, times(1)).close(any(Reporter.class));
	verify(outputCommitter, times(1)).commitTask(any(TaskAttemptContext.class));
}
 
Developer: axbaretto, Project: flink, Lines of code: 18, Source file: HadoopOutputFormatTest.java

Example 2: testCloseWithoutTaskCommit

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test
public void testCloseWithoutTaskCommit() throws Exception {
	OutputFormat<String, Long> dummyOutputFormat = mock(DummyOutputFormat.class);
	DummyOutputCommitter outputCommitter = mock(DummyOutputCommitter.class);
	when(outputCommitter.needsTaskCommit(any(TaskAttemptContext.class))).thenReturn(false);
	DummyRecordWriter recordWriter = mock(DummyRecordWriter.class);
	JobConf jobConf = mock(JobConf.class);

	HadoopOutputFormat<String, Long> outputFormat = new HadoopOutputFormat<>(dummyOutputFormat, jobConf);
	outputFormat.recordWriter = recordWriter;
	outputFormat.outputCommitter = outputCommitter;

	outputFormat.close();

	verify(recordWriter, times(1)).close(any(Reporter.class));
	verify(outputCommitter, times(0)).commitTask(any(TaskAttemptContext.class));
}
 
Developer: axbaretto, Project: flink, Lines of code: 18, Source file: HadoopOutputFormatTest.java

Example 3: commitTask

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Override
public void commitTask(TaskAttemptContext context) 
	throws IOException 
{
	JobConf conf = context.getJobConf();
	TaskAttemptID attemptId = context.getTaskAttemptID();
	
	// get the mapping between index to output filename
	outputs = MRJobConfiguration.getOutputs(conf);
	
	// get temp task output path (compatible with hadoop1 and hadoop2)
	Path taskOutPath = FileOutputFormat.getWorkOutputPath(conf);
	FileSystem fs = taskOutPath.getFileSystem(conf);
	if( !fs.exists(taskOutPath) )
		throw new IOException("Task output path "+ taskOutPath.toString() + "does not exist.");
	
	// move the task outputs to their final places
	context.getProgressible().progress();
	moveFinalTaskOutputs(context, fs, taskOutPath);
	
	// delete the temporary task-specific output directory
	if( !fs.delete(taskOutPath, true) ) 
		LOG.debug("Failed to delete the temporary output directory of task: " + attemptId + " - " + taskOutPath);
}
 
Developer: apache, Project: systemml, Lines of code: 25, Source file: MultipleOutputCommitter.java

Example 4: moveFileToDestination

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
private void moveFileToDestination(TaskAttemptContext context, FileSystem fs, Path file) 
	throws IOException 
{
	TaskAttemptID attemptId = context.getTaskAttemptID();
	
	// get output index and final destination 
	String name =  file.getName(); //e.g., 0-r-00000 
	int index = Integer.parseInt(name.substring(0, name.indexOf("-")));
	Path dest = new Path(outputs[index], name); //e.g., outX/0-r-00000
	
	// move file from 'file' to 'finalPath'
	if( !fs.rename(file, dest) ) {
		if (!fs.delete(dest, true))
			throw new IOException("Failed to delete earlier output " + dest + " for rename of " + file + " in task " + attemptId);
		if (!fs.rename(file, dest)) 
			throw new IOException("Failed to save output " + dest + " for rename of " + file + " in task: " + attemptId);
	}
}
 
Developer: apache, Project: systemml, Lines of code: 19, Source file: MultipleOutputCommitter.java

Example 5: initMapreduceOutputCommitter

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
private void initMapreduceOutputCommitter(TaskAttemptContext taskContext)
    throws IOException {
  if (mapreduceOutputCommitter != null) {
    LOG.debug("Using existing mapreduceOutputCommitter");
    return;
  }

  // It would be nice to use the BigQueryOutputFormat that already exists
  // (there is one wrapped inside our BigQueryMapredOutputFormat), but
  // there does not seem to be an easy way to do that. So make another one.
  LOG.debug("Creating BigQueryOutputFormat");
  BigQueryOutputFormat<Object, JsonObject> mapreduceOutputFormat =
      new BigQueryOutputFormat<Object, JsonObject>();

  // Fortunately, mapred.TaskAttemptContext is a subclass of
  // mapreduce.TaskAttemptContext, so we can use it directly.
  try {
    LOG.debug("Creating mapreduce OutputCommit");
    mapreduceOutputCommitter = mapreduceOutputFormat.getOutputCommitter(
        taskContext);
  } catch (InterruptedException ex) {
    throw new IOException(ex);
  }
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 25, Source file: BigQueryMapredOutputCommitter.java

Example 6: testClose

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test public void testClose() throws IOException, InterruptedException {
  RecordWriter<LongWritable, JsonObject> recordWriter =
      new BigQueryMapredRecordWriter<LongWritable, JsonObject>(
      mockRecordWriter, mockTaskAttemptContext);
  Reporter reporter = null;   // unused by code under test

  recordWriter.close(reporter);
  verify(mockRecordWriter).close(any(TaskAttemptContext.class));

  doThrow(new IOException("test")).
    when(mockRecordWriter).close(any(TaskAttemptContext.class));
  expectedException.expect(IOException.class);
  try {
    recordWriter.close(reporter);
  } finally {
    verify(mockRecordWriter, times(2)).close(any(TaskAttemptContext.class));
  }
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 19, Source file: BigQueryMapredRecordWriterTest.java

Example 7: instantiateJobContext

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
public static JobContext instantiateJobContext(JobConf jobConf, JobID jobId) throws Exception {
	try {
		// for Hadoop 1.xx
		Class<?> clazz = null;
		if(!TaskAttemptContext.class.isInterface()) { 
			clazz = Class.forName("org.apache.hadoop.mapred.JobContext", true, Thread.currentThread().getContextClassLoader());
		}
		// for Hadoop 2.xx
		else {
			clazz = Class.forName("org.apache.hadoop.mapred.JobContextImpl", true, Thread.currentThread().getContextClassLoader());
		}
		Constructor<?> constructor = clazz.getDeclaredConstructor(JobConf.class, org.apache.hadoop.mapreduce.JobID.class);
		// for Hadoop 1.xx
		constructor.setAccessible(true);
		JobContext context = (JobContext) constructor.newInstance(jobConf, jobId);
		
		return context;
	} catch(Exception e) {
		throw new Exception("Could not create instance of JobContext.", e);
	}
}
 
Developer: citlab, Project: vs.msc.ws14, Lines of code: 22, Source file: HadoopUtils.java

Example 8: instantiateTaskAttemptContext

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
public static TaskAttemptContext instantiateTaskAttemptContext(JobConf jobConf,  TaskAttemptID taskAttemptID) throws Exception {
	try {
		// for Hadoop 1.xx
		Class<?> clazz = null;
		if(!TaskAttemptContext.class.isInterface()) { 
			clazz = Class.forName("org.apache.hadoop.mapred.TaskAttemptContext", true, Thread.currentThread().getContextClassLoader());
		}
		// for Hadoop 2.xx
		else {
			clazz = Class.forName("org.apache.hadoop.mapred.TaskAttemptContextImpl", true, Thread.currentThread().getContextClassLoader());
		}
		Constructor<?> constructor = clazz.getDeclaredConstructor(JobConf.class, TaskAttemptID.class);
		// for Hadoop 1.xx
		constructor.setAccessible(true);
		TaskAttemptContext context = (TaskAttemptContext) constructor.newInstance(jobConf, taskAttemptID);
		return context;
	} catch(Exception e) {
		throw new Exception("Could not create instance of TaskAttemptContext.", e);
	}
}
 
Developer: citlab, Project: vs.msc.ws14, Lines of code: 21, Source file: HadoopUtils.java
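A brief usage note for the two reflective helpers above: the same call works whether Hadoop 1.x (class-based JobContext/TaskAttemptContext) or Hadoop 2.x (interface plus the *Impl classes) is on the classpath. Below is a hedged sketch of calling the TaskAttemptContext helper; the demo class name and the attempt-id string are illustrative assumptions, while HadoopUtils refers to the class shown above.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.mapred.TaskAttemptID;

public class HadoopUtilsDemo {
	public static void main(String[] args) throws Exception {
		JobConf jobConf = new JobConf();
		// Parse a task attempt id from its canonical string form (illustrative value).
		TaskAttemptID attemptId = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
		// The reflective helper picks the right TaskAttemptContext(Impl) constructor
		// for the Hadoop version found on the classpath.
		TaskAttemptContext context = HadoopUtils.instantiateTaskAttemptContext(jobConf, attemptId);
		System.out.println("Created context for " + context.getTaskAttemptID());
	}
}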

Example 9: moveFinalTaskOutputs

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
private void moveFinalTaskOutputs(TaskAttemptContext context, FileSystem fs, Path taskOutput)
	throws IOException 
{
	context.getProgressible().progress();
	
	if( fs.getFileStatus(taskOutput).isDirectory() ) {
		FileStatus[] files = fs.listStatus(taskOutput);
		if (files != null)
			for (FileStatus file : files) //for all files
				if( !file.isDirectory() ) //skip directories
					moveFileToDestination(context, fs, file.getPath());
	}
}
 
Developer: apache, Project: systemml, Lines of code: 14, Source file: MultipleOutputCommitter.java

Example 10: commit

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
/**
 * Commit task.
 *
 * @throws IOException If the task commit fails.
 */
public void commit() throws IOException {
    if (writer != null) {
        OutputCommitter outputCommitter = jobConf.getOutputCommitter();

        TaskAttemptContext taskCtx = new TaskAttemptContextImpl(jobConf, attempt);

        if (outputCommitter.needsTaskCommit(taskCtx))
            outputCommitter.commitTask(taskCtx);
    }
}
 
Developer: apache, Project: ignite, Lines of code: 16, Source file: HadoopV1OutputCollector.java

Example 11: needsTaskCommit

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Override
public boolean needsTaskCommit(TaskAttemptContext taskContext)
    throws IOException {
  LOG.debug("needsTaskCommit");
  initMapreduceOutputCommitter(taskContext);
  return mapreduceOutputCommitter.needsTaskCommit(taskContext);
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 8, Source file: BigQueryMapredOutputCommitter.java

Example 12: testAbortTask

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test public void testAbortTask() throws IOException {
  BigQueryMapredOutputCommitter outputCommitter =
      new BigQueryMapredOutputCommitter();
  outputCommitter.setMapreduceOutputCommitter(mockOutputCommitter);

  outputCommitter.abortTask(mockTaskAttemptContext);

  verify(mockOutputCommitter).abortTask(any(TaskAttemptContext.class));
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 10, Source file: BigQueryMapredOutputCommitterTest.java

Example 13: testCommitTask

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test public void testCommitTask() throws IOException {
  BigQueryMapredOutputCommitter outputCommitter =
      new BigQueryMapredOutputCommitter();
  outputCommitter.setMapreduceOutputCommitter(mockOutputCommitter);

  outputCommitter.commitTask(mockTaskAttemptContext);

  verify(mockOutputCommitter).commitTask(any(TaskAttemptContext.class));
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 10, Source file: BigQueryMapredOutputCommitterTest.java

Example 14: testNeedsTaskCommit

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test public void testNeedsTaskCommit() throws IOException {
  BigQueryMapredOutputCommitter outputCommitter =
      new BigQueryMapredOutputCommitter();
  outputCommitter.setMapreduceOutputCommitter(mockOutputCommitter);

  outputCommitter.needsTaskCommit(mockTaskAttemptContext);

  verify(mockOutputCommitter).needsTaskCommit(any(TaskAttemptContext.class));
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 10, Source file: BigQueryMapredOutputCommitterTest.java

Example 15: testSetupTask

import org.apache.hadoop.mapred.TaskAttemptContext; // import the required package/class
@Test public void testSetupTask() throws IOException {
  BigQueryMapredOutputCommitter outputCommitter =
      new BigQueryMapredOutputCommitter();
  outputCommitter.setMapreduceOutputCommitter(mockOutputCommitter);

  outputCommitter.setupTask(mockTaskAttemptContext);

  verify(mockOutputCommitter).setupTask(any(TaskAttemptContext.class));
}
 
Developer: GoogleCloudPlatform, Project: bigdata-interop, Lines of code: 10, Source file: BigQueryMapredOutputCommitterTest.java


Note: the org.apache.hadoop.mapred.TaskAttemptContext examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce this article without permission.