本文整理汇总了Java中org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values.MAP属性的典型用法代码示例。如果您正苦于以下问题:Java Values.MAP属性的具体用法?Java Values.MAP怎么用?Java Values.MAP使用的例子?那么, 这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在类org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values
的用法示例。
在下文中一共展示了Values.MAP属性的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: scaleInfo
/**
 * Builds a {@link TaskAttemptInfo} for a logged map attempt, rescaling its
 * runtime from the locality it actually ran with to the locality requested.
 *
 * @param loggedTask the task the attempt belongs to; must be a MAP task
 * @param loggedAttempt the recorded attempt whose runtime is rescaled
 * @param locality desired locality level (0 = node-local, 1 = rack-local,
 *        2 = off-rack)
 * @param loggedLocality locality level the attempt actually ran with
 * @param rackLocalOverNodeLocal runtime cost of rack-local relative to
 *        node-local
 * @param rackRemoteOverNodeLocal runtime cost of off-rack relative to
 *        node-local
 * @return attempt info carrying the scaled runtime and the split vectors
 * @throws IllegalArgumentException if the task is not a MAP task
 */
@SuppressWarnings("hiding")
private TaskAttemptInfo scaleInfo(LoggedTask loggedTask,
    LoggedTaskAttempt loggedAttempt, int locality, int loggedLocality,
    double rackLocalOverNodeLocal, double rackRemoteOverNodeLocal) {
  TaskInfo info = getTaskInfo(loggedTask);
  // Relative runtime cost per locality level, indexed by the ints above.
  double[] localityCost = new double[] { 1.0, rackLocalOverNodeLocal,
      rackRemoteOverNodeLocal };
  double ratio = localityCost[locality] / localityCost[loggedLocality];
  State attemptState = convertState(loggedAttempt.getResult());
  if (loggedTask.getTaskType() != Values.MAP) {
    throw new IllegalArgumentException("taskType can only be MAP: "
        + loggedTask.getTaskType());
  }
  // A zero start time means the runtime was never recorded; synthesize one.
  long runtime = (loggedAttempt.getStartTime() == 0)
      ? makeUpMapRuntime(attemptState, locality)
      : loggedAttempt.getFinishTime() - loggedAttempt.getStartTime();
  runtime = sanitizeTaskRuntime(runtime, loggedAttempt.getAttemptID());
  // Explicit cast reproduces the truncating narrowing of the original
  // compound assignment (taskTime *= scaleFactor).
  runtime = (long) (runtime * ratio);
  return new MapTaskAttemptInfo(attemptState, info, runtime,
      loggedAttempt.allSplitVectors());
}
示例2: scaleInfo
/**
 * Builds a {@link TaskAttemptInfo} for a logged map attempt, stretching or
 * shrinking its runtime by the ratio of the requested locality's cost to the
 * logged locality's cost.
 *
 * @param loggedTask the owning task; must be a MAP task
 * @param loggedAttempt the recorded attempt supplying the base runtime
 * @param locality target locality index (0 node-local, 1 rack-local,
 *        2 off-rack)
 * @param loggedLocality locality index the attempt originally had
 * @param rackLocalOverNodeLocal cost multiplier for rack-local runs
 * @param rackRemoteOverNodeLocal cost multiplier for off-rack runs
 * @return the scaled map attempt info
 * @throws IllegalArgumentException if the task is not a MAP task
 */
@SuppressWarnings("hiding")
private TaskAttemptInfo scaleInfo(LoggedTask loggedTask,
    LoggedTaskAttempt loggedAttempt, int locality, int loggedLocality,
    double rackLocalOverNodeLocal, double rackRemoteOverNodeLocal) {
  TaskInfo taskDetails = getTaskInfo(loggedTask);
  // weights[i] is the runtime cost of locality level i relative to node-local.
  double[] weights = new double[] { 1.0, rackLocalOverNodeLocal,
      rackRemoteOverNodeLocal };
  double adjustment = weights[locality] / weights[loggedLocality];
  State result = convertState(loggedAttempt.getResult());
  if (loggedTask.getTaskType() == Values.MAP) {
    long elapsed;
    if (loggedAttempt.getStartTime() == 0) {
      // No start timestamp recorded — fabricate a plausible map runtime.
      elapsed = makeUpMapRuntime(result, locality);
    } else {
      elapsed = loggedAttempt.getFinishTime() - loggedAttempt.getStartTime();
    }
    elapsed = sanitizeTaskRuntime(elapsed, loggedAttempt.getAttemptID());
    // Compound assignment narrows the double product back to long.
    elapsed *= adjustment;
    return new MapTaskAttemptInfo(result, taskDetails, elapsed);
  }
  throw new IllegalArgumentException("taskType can only be MAP: "
      + loggedTask.getTaskType());
}
示例3: getTaskInfo
/**
 * Summarizes a logged task into a {@link TaskInfo}, taking the I/O counters,
 * heap size, and resource-usage metrics from the first successful attempt.
 * Counters stay at -1 if no successful attempt exists.
 *
 * @param loggedTask the task to summarize; null yields an all-zero TaskInfo
 * @return the aggregated task info
 * @throws IllegalArgumentException if the task is neither MAP nor REDUCE
 */
private TaskInfo getTaskInfo(LoggedTask loggedTask) {
  // No task at all: report an empty TaskInfo rather than failing.
  if (loggedTask == null) {
    return new TaskInfo(0, 0, 0, 0, 0);
  }
  List<LoggedTaskAttempt> attempts = loggedTask.getAttempts();
  // -1 signals "no successful attempt found" for each counter.
  long bytesIn = -1;
  long recordsIn = -1;
  long bytesOut = -1;
  long recordsOut = -1;
  long heapMb = -1;
  ResourceUsageMetrics usage = new ResourceUsageMetrics();
  Values kind = loggedTask.getTaskType();
  if ((kind != Values.MAP) && (kind != Values.REDUCE)) {
    throw new IllegalArgumentException(
        "getTaskInfo only supports MAP or REDUCE tasks: " + kind.toString()
            + " for task = " + loggedTask.getTaskID());
  }
  for (LoggedTaskAttempt candidate : attempts) {
    candidate = sanitizeLoggedTaskAttempt(candidate);
    // Skip malformed attempts and anything that did not succeed.
    if ((candidate == null) || (candidate.getResult() != Values.SUCCESS)) {
      continue;
    }
    if (kind == Values.MAP) {
      bytesIn = candidate.getHdfsBytesRead();
      recordsIn = candidate.getMapInputRecords();
      // With no reducers, map output is written straight to HDFS.
      bytesOut = (job.getTotalReduces() > 0)
          ? candidate.getMapOutputBytes()
          : candidate.getHdfsBytesWritten();
      recordsOut = candidate.getMapOutputRecords();
      // Fall back to the job-wide heap size if no map-specific one is set.
      heapMb = (job.getJobMapMB() > 0)
          ? job.getJobMapMB() : job.getHeapMegabytes();
    } else {
      bytesIn = candidate.getReduceShuffleBytes();
      recordsIn = candidate.getReduceInputRecords();
      bytesOut = candidate.getHdfsBytesWritten();
      recordsOut = candidate.getReduceOutputRecords();
      heapMb = (job.getJobReduceMB() > 0)
          ? job.getJobReduceMB() : job.getHeapMegabytes();
    }
    // Only the first successful attempt contributes its metrics.
    usage = candidate.getResourceUsageMetrics();
    break;
  }
  return new TaskInfo(bytesIn, (int) recordsIn, bytesOut,
      (int) recordsOut, (int) heapMb, usage);
}
示例4: getTaskInfo
/**
 * Summarizes a logged task into a {@link TaskInfo}, copying I/O counters and
 * heap size from the first successful attempt. Counters remain -1 when no
 * successful attempt exists.
 *
 * @param loggedTask the task to summarize; null yields an all-zero TaskInfo
 * @return the aggregated task info
 * @throws IllegalArgumentException if the task is neither MAP nor REDUCE
 */
private TaskInfo getTaskInfo(LoggedTask loggedTask) {
  // Fix: guard against a null task (previously threw NullPointerException
  // on getAttempts()); matches the sibling getTaskInfo implementation.
  if (loggedTask == null) {
    return new TaskInfo(0, 0, 0, 0, 0);
  }
  List<LoggedTaskAttempt> attempts = loggedTask.getAttempts();
  // -1 signals "no successful attempt found" for each counter.
  long inputBytes = -1;
  long inputRecords = -1;
  long outputBytes = -1;
  long outputRecords = -1;
  long heapMegabytes = -1;
  Values type = loggedTask.getTaskType();
  if ((type != Values.MAP) && (type != Values.REDUCE)) {
    throw new IllegalArgumentException(
        "getTaskInfo only supports MAP or REDUCE tasks: " + type.toString()
            + " for task = " + loggedTask.getTaskID());
  }
  for (LoggedTaskAttempt attempt : attempts) {
    attempt = sanitizeLoggedTaskAttempt(attempt);
    // ignore bad attempts or unsuccessful attempts.
    if ((attempt == null) || (attempt.getResult() != Values.SUCCESS)) {
      continue;
    }
    if (type == Values.MAP) {
      inputBytes = attempt.getHdfsBytesRead();
      inputRecords = attempt.getMapInputRecords();
      // With no reducers, map output is written straight to HDFS.
      outputBytes =
          (job.getTotalReduces() > 0) ? attempt.getMapOutputBytes() : attempt
              .getHdfsBytesWritten();
      outputRecords = attempt.getMapOutputRecords();
      // Fall back to the job-wide heap size if no map-specific one is set.
      heapMegabytes =
          (job.getJobMapMB() > 0) ? job.getJobMapMB() : job
              .getHeapMegabytes();
    } else {
      inputBytes = attempt.getReduceShuffleBytes();
      inputRecords = attempt.getReduceInputRecords();
      outputBytes = attempt.getHdfsBytesWritten();
      outputRecords = attempt.getReduceOutputRecords();
      heapMegabytes =
          (job.getJobReduceMB() > 0) ? job.getJobReduceMB() : job
              .getHeapMegabytes();
    }
    // Use only the first successful attempt.
    break;
  }
  TaskInfo taskInfo =
      new TaskInfo(inputBytes, (int) inputRecords, outputBytes,
          (int) outputRecords, (int) heapMegabytes);
  return taskInfo;
}
示例5: getTaskInfo
/**
 * Summarizes a logged task into a {@link TaskInfo}, copying I/O counters,
 * heap size, and resource-usage metrics from the first successful attempt.
 * Counters remain -1 when no successful attempt exists.
 *
 * @param loggedTask the task to summarize; null yields an all-zero TaskInfo
 * @return the aggregated task info
 * @throws IllegalArgumentException if the task is neither MAP nor REDUCE
 */
private TaskInfo getTaskInfo(LoggedTask loggedTask) {
  // Fix: guard against a null task (previously threw NullPointerException
  // on getAttempts()); matches the sibling getTaskInfo implementation.
  if (loggedTask == null) {
    return new TaskInfo(0, 0, 0, 0, 0);
  }
  List<LoggedTaskAttempt> attempts = loggedTask.getAttempts();
  // -1 signals "no successful attempt found" for each counter.
  long inputBytes = -1;
  long inputRecords = -1;
  long outputBytes = -1;
  long outputRecords = -1;
  long heapMegabytes = -1;
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  Values type = loggedTask.getTaskType();
  if ((type != Values.MAP) && (type != Values.REDUCE)) {
    throw new IllegalArgumentException(
        "getTaskInfo only supports MAP or REDUCE tasks: " + type.toString()
            + " for task = " + loggedTask.getTaskID());
  }
  for (LoggedTaskAttempt attempt : attempts) {
    attempt = sanitizeLoggedTaskAttempt(attempt);
    // ignore bad attempts or unsuccessful attempts.
    if ((attempt == null) || (attempt.getResult() != Values.SUCCESS)) {
      continue;
    }
    if (type == Values.MAP) {
      inputBytes = attempt.getHdfsBytesRead();
      inputRecords = attempt.getMapInputRecords();
      // With no reducers, map output is written straight to HDFS.
      outputBytes =
          (job.getTotalReduces() > 0) ? attempt.getMapOutputBytes() : attempt
              .getHdfsBytesWritten();
      outputRecords = attempt.getMapOutputRecords();
      // Fall back to the job-wide heap size if no map-specific one is set.
      heapMegabytes =
          (job.getJobMapMB() > 0) ? job.getJobMapMB() : job
              .getHeapMegabytes();
    } else {
      inputBytes = attempt.getReduceShuffleBytes();
      inputRecords = attempt.getReduceInputRecords();
      outputBytes = attempt.getHdfsBytesWritten();
      outputRecords = attempt.getReduceOutputRecords();
      heapMegabytes =
          (job.getJobReduceMB() > 0) ? job.getJobReduceMB() : job
              .getHeapMegabytes();
    }
    // set the resource usage metrics from the first successful attempt only
    metrics = attempt.getResourceUsageMetrics();
    break;
  }
  TaskInfo taskInfo =
      new TaskInfo(inputBytes, (int) inputRecords, outputBytes,
          (int) outputRecords, (int) heapMegabytes,
          metrics);
  return taskInfo;
}