本文整理汇总了Java中org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo.getLaunchTime方法的典型用法代码示例。如果您正苦于以下问题:Java JobInfo.getLaunchTime方法的具体用法?Java JobInfo.getLaunchTime怎么用?Java JobInfo.getLaunchTime使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo
的用法示例。
在下文中一共展示了JobInfo.getLaunchTime方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: prepareSetupDetails
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; //导入方法依赖的package包/类
/**
 * This method is responsible for populating the setup phase details.
 *
 * @param jobInfo parsed job-history information; supplies the submit and
 *                launch timestamps used to bound the setup phase
 * @param tasks   setup task attempts keyed by attempt id
 * @return TaskOutputDetails contains the details of the set up phase.
 */
private PhaseDetails prepareSetupDetails(JobInfo jobInfo, Map<TaskAttemptID, TaskAttemptInfo> tasks) {
    PhaseDetails phaseDetails = new PhaseDetails();
    List<TaskOutputDetails> taskOutputDetails = new ArrayList<TaskOutputDetails>();
    TaskOutputDetails tod = new TaskOutputDetails();
    tod.setTaskType("SETUP");
    tod.setTaskID("Setup");
    // Iterate values directly; the map's keys were unused and the cast to
    // TaskAttemptInfo was redundant.
    // NOTE(review): each iteration overwrites the location, so only the last
    // attempt's hostname survives — confirm this is intentional.
    for (TaskAttemptInfo taskAttemptInfo : tasks.values()) {
        tod.setLocation(taskAttemptInfo.getHostname());
    }
    long startPoint = jobInfo.getSubmitTime();
    tod.setStartPoint(0);
    // Setup duration = launch time minus submit time, converted to seconds.
    long endPoint = (jobInfo.getLaunchTime() - startPoint) / CONVERSION_FACTOR_MILLISECS_TO_SECS;
    tod.setEndPoint(endPoint);
    tod.setDataFlowRate(0);
    taskOutputDetails.add(tod);
    phaseDetails.setTaskOutputDetails(taskOutputDetails);
    phaseDetails.setAvgDataFlowRate(0);
    return phaseDetails;
}
示例2: validateJobLevelKeyValuesFormat
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; //导入方法依赖的package包/类
/**
 * Verifies that the job-level key/value fields parsed from a history file
 * are well-formed: timestamps are positive and correctly ordered, the job
 * status matches the expected one, and the priority is a recognized value.
 */
private static void validateJobLevelKeyValuesFormat(JobInfo jobInfo,
    String status) {
  long submitTime = jobInfo.getSubmitTime();
  long launchTime = jobInfo.getLaunchTime();
  long finishTime = jobInfo.getFinishTime();
  // A job is submitted before it launches, and launches before it finishes.
  assertTrue("Invalid submit time", submitTime > 0);
  assertTrue("SubmitTime > LaunchTime", submitTime <= launchTime);
  assertTrue("LaunchTime > FinishTime", launchTime <= finishTime);
  String actualStatus = jobInfo.getJobStatus();
  assertTrue("Unexpected JOB_STATUS \"" + actualStatus + "\" is seen in" +
      " history file", (status.equals(actualStatus)));
  String jobPriority = jobInfo.getPriority();
  assertNotNull(jobPriority);
  // Priority must be one of the five values JobPriority can serialize.
  boolean knownPriority =
      jobPriority.equals("VERY_LOW") || jobPriority.equals("LOW")
          || jobPriority.equals("NORMAL") || jobPriority.equals("HIGH")
          || jobPriority.equals("VERY_HIGH");
  assertTrue("Unknown priority for the job in history file", knownPriority);
}
示例3: parsePreviousJobHistory
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; //导入方法依赖的package包/类
/**
 * Re-reads the job-history file written by a previous application attempt
 * and seeds this attempt's recovery state from it: successfully completed
 * tasks (restricted to their completed attempts), the recovered job start
 * time, and the list of previous AM infos.
 *
 * @throws IOException if the previous job-history stream cannot be opened
 */
private void parsePreviousJobHistory() throws IOException {
FSDataInputStream in = getPreviousJobHistoryStream(getConfig(),
appAttemptID);
JobHistoryParser parser = new JobHistoryParser(in);
JobInfo jobInfo = parser.parse();
// A parse error is tolerated: recovery proceeds with whatever events were
// successfully read before the failure.
Exception parseException = parser.getParseException();
if (parseException != null) {
LOG.info("Got an error parsing job-history file" +
", ignoring incomplete events.", parseException);
}
Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
.getAllTasks();
// Only tasks that SUCCEEDED in the previous run are carried over.
for (TaskInfo taskInfo : taskInfos.values()) {
if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
Iterator<Entry<TaskAttemptID, TaskAttemptInfo>> taskAttemptIterator =
taskInfo.getAllTaskAttempts().entrySet().iterator();
// Prune attempts that are not in the job's completed-attempt set, so the
// recovered task only references attempts that actually finished.
while (taskAttemptIterator.hasNext()) {
Map.Entry<TaskAttemptID, TaskAttemptInfo> currentEntry = taskAttemptIterator.next();
if (!jobInfo.getAllCompletedTaskAttempts().containsKey(currentEntry.getKey())) {
taskAttemptIterator.remove();
}
}
completedTasksFromPreviousRun
.put(TypeConverter.toYarn(taskInfo.getTaskId()), taskInfo);
LOG.info("Read from history task "
+ TypeConverter.toYarn(taskInfo.getTaskId()));
}
}
LOG.info("Read completed tasks from history "
+ completedTasksFromPreviousRun.size());
// The previous attempt's launch time becomes this attempt's recovered
// job start time.
recoveredJobStartTime = jobInfo.getLaunchTime();
// recover AMInfos
List<JobHistoryParser.AMInfo> jhAmInfoList = jobInfo.getAMInfos();
if (jhAmInfoList != null) {
for (JobHistoryParser.AMInfo jhAmInfo : jhAmInfoList) {
AMInfo amInfo = MRBuilderUtils.newAMInfo(jhAmInfo.getAppAttemptId(),
jhAmInfo.getStartTime(), jhAmInfo.getContainerId(),
jhAmInfo.getNodeManagerHost(), jhAmInfo.getNodeManagerPort(),
jhAmInfo.getNodeManagerHttpPort());
amInfos.add(amInfo);
}
}
// NOTE(review): `in` is never explicitly closed — presumably the parser or
// the caller owns the stream; confirm there is no resource leak here.
}