当前位置: 首页>>代码示例>>Java>>正文


Java TaskAttemptInfo类代码示例

本文整理汇总了Java中org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo的典型用法代码示例。如果您正苦于以下问题:Java TaskAttemptInfo类的具体用法?Java TaskAttemptInfo怎么用?Java TaskAttemptInfo使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


TaskAttemptInfo类属于org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser包,在下文中一共展示了TaskAttemptInfo类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: testCompletedTaskAttempt

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Verifies that CompletedTaskAttempt exposes the values recorded in the
 * underlying history TaskAttemptInfo (rack name, shuffle/sort finish
 * times, shuffle port) and reports itself as a finished CLEANUP attempt.
 */
@Test (timeout=5000)
public void testCompletedTaskAttempt(){

  TaskAttemptInfo attemptInfo = mock(TaskAttemptInfo.class);
  when(attemptInfo.getRackname()).thenReturn("Rackname");
  when(attemptInfo.getShuffleFinishTime()).thenReturn(11L);
  when(attemptInfo.getSortFinishTime()).thenReturn(12L);
  when(attemptInfo.getShufflePort()).thenReturn(10);

  JobID jobId = new JobID("12345", 0);
  TaskID taskId = new TaskID(jobId, TaskType.REDUCE, 0);
  TaskAttemptID taskAttemptId = new TaskAttemptID(taskId, 0);
  when(attemptInfo.getAttemptId()).thenReturn(taskAttemptId);

  CompletedTaskAttempt taskAttempt = new CompletedTaskAttempt(null, attemptInfo);

  assertEquals("Rackname", taskAttempt.getNodeRackName());
  assertEquals(Phase.CLEANUP, taskAttempt.getPhase());
  assertTrue(taskAttempt.isFinished());
  assertEquals(11L, taskAttempt.getShuffleFinishTime());
  assertEquals(12L, taskAttempt.getSortFinishTime());
  assertEquals(10, taskAttempt.getShufflePort());
}
 
开发者ID:naver,项目名称:hadoop,代码行数:27,代码来源:TestCompletedTask.java

示例2: createTaskAttemptEntity

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Builds a TASK_ATTEMPT timeline entity from a parsed job-history record,
 * copying the attempt's timing, status, error and container details into
 * the entity's "other info" map.
 *
 * @param taskAttemptInfo parsed history information for one attempt
 * @return the populated timeline entity
 */
private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityType(TASK_ATTEMPT);
  entity.setEntityId(taskAttemptInfo.getAttemptId().toString());
  entity.setStartTime(taskAttemptInfo.getStartTime());

  entity.addOtherInfo("START_TIME", taskAttemptInfo.getStartTime());
  entity.addOtherInfo("FINISH_TIME", taskAttemptInfo.getFinishTime());
  entity.addOtherInfo("MAP_FINISH_TIME", taskAttemptInfo.getMapFinishTime());
  entity.addOtherInfo("SHUFFLE_FINISH_TIME",
      taskAttemptInfo.getShuffleFinishTime());
  entity.addOtherInfo("SORT_FINISH_TIME", taskAttemptInfo.getSortFinishTime());
  entity.addOtherInfo("TASK_STATUS", taskAttemptInfo.getTaskStatus());
  entity.addOtherInfo("STATE", taskAttemptInfo.getState());
  entity.addOtherInfo("ERROR", taskAttemptInfo.getError());
  entity.addOtherInfo("CONTAINER_ID",
      taskAttemptInfo.getContainerId().toString());

  LOG.info("converted task attempt " + taskAttemptInfo.getAttemptId() +
      " to a timeline entity");
  return entity;
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:25,代码来源:TimelineEntityConverterV1.java

示例3: prepareSetupDetails

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * This method is responsible for populating the setup phase details.
 * The phase spans from time 0 to (launch time - submit time), converted
 * to seconds; its data-flow rate is always 0.
 * @return TaskOutputDetails contains the details of the set up phase.
 */
private PhaseDetails prepareSetupDetails(JobInfo jobInfo,Map<TaskAttemptID, TaskAttemptInfo> tasks){
	PhaseDetails setupPhase = new PhaseDetails();
	List<TaskOutputDetails> outputDetails = new ArrayList<TaskOutputDetails>();
	TaskOutputDetails setupTask = new TaskOutputDetails();
	setupTask.setTaskType("SETUP");
	setupTask.setTaskID("Setup");
	// Location ends up as the hostname of the last attempt iterated over
	// (same last-wins behavior as the original loop).
	for (Map.Entry<TaskAttemptID, TaskAttemptInfo> entry : tasks.entrySet()) {
		setupTask.setLocation(entry.getValue().getHostname());
	}
	long submitTime = jobInfo.getSubmitTime();
	setupTask.setStartPoint(0);
	setupTask.setEndPoint((jobInfo.getLaunchTime() - submitTime)
			/ CONVERSION_FACTOR_MILLISECS_TO_SECS);
	setupTask.setDataFlowRate(0);
	outputDetails.add(setupTask);
	setupPhase.setTaskOutputDetails(outputDetails);
	setupPhase.setAvgDataFlowRate(0);
	return setupPhase;
}
 
开发者ID:Impetus,项目名称:jumbune,代码行数:27,代码来源:YarnJobStatsUtility.java

示例4: prepareCleanupDetails

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * This method is responsible for populating the clean up phase details.
 * The phase spans from the latest reduce finish time to
 * (finish time - submit time) in seconds; its data-flow rate is always 0.
 * @return TaskOutputDetails contains the details of the clean up phase.
 */
private PhaseDetails prepareCleanupDetails(JobInfo jobInfo, Map<TaskAttemptID, TaskAttemptInfo> tasks){
	PhaseDetails cleanupPhase = new PhaseDetails();
	List<TaskOutputDetails> detailsList = new ArrayList<TaskOutputDetails>();
	TaskOutputDetails cleanupTask = new TaskOutputDetails();
	cleanupTask.setTaskType("CLEANUP");
	cleanupTask.setTaskID("Cleanup");
	// Location ends up as the hostname of the last attempt iterated over
	// (same last-wins behavior as the original loop).
	for (Map.Entry<TaskAttemptID, TaskAttemptInfo> entry : tasks.entrySet()) {
		cleanupTask.setLocation(entry.getValue().getHostname());
	}
	// Cleanup starts once the last reduce has finished.
	cleanupTask.setStartPoint(getMaxReduceTime(tasks, jobInfo.getSubmitTime()));
	cleanupTask.setEndPoint((jobInfo.getFinishTime() - jobInfo.getSubmitTime())
			/ CONVERSION_FACTOR_MILLISECS_TO_SECS);
	cleanupTask.setDataFlowRate(0);
	detailsList.add(cleanupTask);
	cleanupPhase.setTaskOutputDetails(detailsList);
	cleanupPhase.setAvgDataFlowRate(0);
	return cleanupPhase;
}
 
开发者ID:Impetus,项目名称:jumbune,代码行数:26,代码来源:YarnJobStatsUtility.java

示例5: TaskAttemptRecoverEvent

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Creates a TA_RECOVER event carrying the job-history record needed to
 * restore a task attempt.
 *
 * @param id the attempt being recovered
 * @param taInfo parsed job-history information for the attempt
 * @param committer output committer used while recovering
 * @param recoverOutput whether the attempt's output should be recovered
 */
public TaskAttemptRecoverEvent(TaskAttemptId id, TaskAttemptInfo taInfo,
    OutputCommitter committer, boolean recoverOutput) {
  super(id, TaskAttemptEventType.TA_RECOVER);
  this.recoverAttemptOutput = recoverOutput;
  this.committer = committer;
  this.taInfo = taInfo;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:8,代码来源:TaskAttemptRecoverEvent.java

示例6: getMockTaskAttemptInfo

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Builds a fully-stubbed TaskAttemptInfo mock for a MAP attempt in the
 * given state, using fixed host/port/rack values and timestamps derived
 * from the current time.
 *
 * @param tai the attempt id the mock should report
 * @param tas the attempt state whose string form becomes the task status
 * @return the stubbed mock
 */
private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID tai,
    TaskAttemptState tas) {

  ContainerId containerId = mock(ContainerId.class);
  Counters mockCounters = mock(Counters.class);
  long finishTime = System.currentTimeMillis();

  TaskAttemptInfo info = mock(TaskAttemptInfo.class);

  when(info.getAttemptId()).thenReturn(tai);
  when(info.getContainerId()).thenReturn(containerId);
  when(info.getCounters()).thenReturn(mockCounters);
  when(info.getError()).thenReturn("");
  when(info.getFinishTime()).thenReturn(finishTime);
  when(info.getHostname()).thenReturn("localhost");
  when(info.getHttpPort()).thenReturn(23);
  when(info.getMapFinishTime()).thenReturn(finishTime - 1000L);
  when(info.getPort()).thenReturn(24);
  when(info.getRackname()).thenReturn("defaultRack");
  when(info.getShuffleFinishTime()).thenReturn(finishTime - 2000L);
  when(info.getShufflePort()).thenReturn(25);
  when(info.getSortFinishTime()).thenReturn(finishTime - 3000L);
  when(info.getStartTime()).thenReturn(finishTime - 10000);
  when(info.getState()).thenReturn("task in progress");
  when(info.getTaskStatus()).thenReturn(tas.toString());
  when(info.getTaskType()).thenReturn(TaskType.MAP);
  when(info.getTrackerName()).thenReturn("TrackerName");
  return info;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:32,代码来源:TestRecovery.java

示例7: loadAllTaskAttempts

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Lazily populates the attempts map from the parsed job-history record.
 * Uses a check / lock / re-check pattern with an explicit lock so that
 * concurrent callers load the attempts exactly once.
 */
private void loadAllTaskAttempts() {
  // Fast path: already loaded, no locking required.
  if (taskAttemptsLoaded.get()) {
    return;
  }
  taskAttemptsLock.lock();
  try {
    // Re-check under the lock: another thread may have finished loading
    // while we were waiting to acquire it.
    if (taskAttemptsLoaded.get()) {
      return;
    }

    for (TaskAttemptInfo attemptHistory : taskInfo.getAllTaskAttempts()
        .values()) {
      CompletedTaskAttempt attempt =
          new CompletedTaskAttempt(taskId, attemptHistory);
      // Aggregate every attempt's diagnostics into the task-level report.
      reportDiagnostics.addAll(attempt.getDiagnostics());
      attempts.put(attempt.getID(), attempt);
      // Remember the first attempt whose history status reads SUCCEEDED.
      if (successfulAttempt == null
          && attemptHistory.getTaskStatus() != null
          && attemptHistory.getTaskStatus().equals(
              TaskState.SUCCEEDED.toString())) {
        successfulAttempt =
            TypeConverter.toYarn(attemptHistory.getAttemptId());
      }
    }
    // Publish only after the maps are fully populated.
    taskAttemptsLoaded.set(true);
  } finally {
    taskAttemptsLock.unlock();
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:30,代码来源:CompletedTask.java

示例8: CompletedTaskAttempt

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Constructs a completed-task-attempt view backed by a parsed job-history
 * record. The attempt state is taken from the history task status; when
 * the status is absent the attempt is treated as KILLED and a diagnostic
 * message is recorded. Any error text from history is also appended to
 * the diagnostics.
 *
 * @param taskId the owning task (not read here; kept for the call site)
 * @param attemptInfo parsed history information for this attempt
 */
CompletedTaskAttempt(TaskId taskId, TaskAttemptInfo attemptInfo) {
  this.attemptInfo = attemptInfo;
  this.attemptId = TypeConverter.toYarn(attemptInfo.getAttemptId());
  if (attemptInfo.getTaskStatus() != null) {
    this.state = TaskAttemptState.valueOf(attemptInfo.getTaskStatus());
  } else {
    this.state = TaskAttemptState.KILLED;
    // Fixed typo in the diagnostic message ("Attmpt" -> "Attempt").
    localDiagMessage = "Attempt state missing from History : marked as KILLED";
    diagnostics.add(localDiagMessage);
  }
  if (attemptInfo.getError() != null) {
    diagnostics.add(attemptInfo.getError());
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:15,代码来源:CompletedTaskAttempt.java

示例9: createTaskAttemptEntities

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Converts every attempt of the given task into a timeline entity.
 *
 * @param taskInfo parsed history information for the task
 * @return one entity per task attempt
 */
private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
  Map<TaskAttemptID, TaskAttemptInfo> attemptMap =
      taskInfo.getAllTaskAttempts();
  LOG.info("task " + taskInfo.getTaskId() + " has " +
      attemptMap.size() + " task attempts");
  Set<TimelineEntity> entities = new HashSet<TimelineEntity>();
  for (TaskAttemptInfo attemptInfo : attemptMap.values()) {
    entities.add(createTaskAttemptEntity(attemptInfo));
  }
  return entities;
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:13,代码来源:TimelineEntityConverterV1.java

示例10: testTaskStartTimes

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; //导入依赖的package包/类
/**
 * Verifies that a CompletedTask reports the earliest attempt launch time
 * as the start time in its TaskReport.
 */
@Test (timeout=5000)
public void testTaskStartTimes() {

  TaskId taskId = mock(TaskId.class);
  TaskInfo taskInfo = mock(TaskInfo.class);
  Map<TaskAttemptID, TaskAttemptInfo> taskAttempts =
      new TreeMap<TaskAttemptID, TaskAttemptInfo>();

  TaskAttemptID firstId = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
  TaskAttemptInfo firstAttempt = mock(TaskAttemptInfo.class);
  when(firstAttempt.getAttemptId()).thenReturn(firstId);
  when(firstAttempt.getStartTime()).thenReturn(10l);
  taskAttempts.put(firstId, firstAttempt);

  TaskAttemptID secondId = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
  TaskAttemptInfo secondAttempt = mock(TaskAttemptInfo.class);
  when(secondAttempt.getAttemptId()).thenReturn(secondId);
  when(secondAttempt.getStartTime()).thenReturn(20l);
  taskAttempts.put(secondId, secondAttempt);

  when(taskInfo.getAllTaskAttempts()).thenReturn(taskAttempts);
  CompletedTask task = new CompletedTask(taskId, taskInfo);

  // The reported start time must be the lesser of the attempt launch times.
  assertTrue(task.getReport().getStartTime() == 10);
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:30,代码来源:TestCompletedTask.java


注:本文中的org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。