本文整理汇总了Java中org.apache.hadoop.tools.rumen.TaskInfo类的典型用法代码示例。如果您正苦于以下问题:Java TaskInfo类的具体用法?Java TaskInfo怎么用?Java TaskInfo使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
TaskInfo类属于org.apache.hadoop.tools.rumen包,在下文中一共展示了TaskInfo类的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getTaskAttemptInfo
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
@SuppressWarnings({ "deprecation", "incomplete-switch" })
@Override
public TaskAttemptInfo getTaskAttemptInfo(
TaskType taskType, int taskNumber, int taskAttemptNumber) {
// Synthesize a SUCCEEDED attempt from the recorded per-task byte/record
// counts; the trailing -1 marks the memory figure as unknown.
switch (taskType) {
case MAP: {
TaskInfo mapInfo = new TaskInfo(
m_bytesIn[taskNumber], m_recsIn[taskNumber],
m_bytesOut[taskNumber], m_recsOut[taskNumber], -1);
return new MapTaskAttemptInfo(State.SUCCEEDED, mapInfo, 100);
}
case REDUCE: {
TaskInfo reduceInfo = new TaskInfo(
r_bytesIn[taskNumber], r_recsIn[taskNumber],
r_bytesOut[taskNumber], r_recsOut[taskNumber], -1);
return new ReduceTaskAttemptInfo(State.SUCCEEDED, reduceInfo, 100, 100, 100);
}
}
// Any task type other than MAP/REDUCE is not simulated here.
throw new UnsupportedOperationException();
}
示例2: getTaskAttemptInfo
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
@Override
public TaskAttemptInfo getTaskAttemptInfo(
TaskType taskType, int taskNumber, int taskAttemptNumber) {
// Fabricate a successful attempt backed by the stored map/reduce counters.
// -1 indicates the resource-usage figure is not tracked.
if (taskType == TaskType.MAP) {
return new MapTaskAttemptInfo(
State.SUCCEEDED,
new TaskInfo(m_bytesIn[taskNumber], m_recsIn[taskNumber],
m_bytesOut[taskNumber], m_recsOut[taskNumber], -1),
100);
}
if (taskType == TaskType.REDUCE) {
return new ReduceTaskAttemptInfo(
State.SUCCEEDED,
new TaskInfo(r_bytesIn[taskNumber], r_recsIn[taskNumber],
r_bytesOut[taskNumber], r_recsOut[taskNumber], -1),
100, 100, 100);
}
// All other task types are unsupported by this synthetic story.
throw new UnsupportedOperationException();
}
示例3: getTaskAttemptInfo
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
@Override
public TaskAttemptInfo getTaskAttemptInfo(
TaskType taskType, int taskNumber, int taskAttemptNumber) {
// Build the attempt's TaskInfo first, then wrap it in the matching
// attempt-info type; durations are fixed at 100 and memory at -1 (unknown).
switch (taskType) {
case MAP: {
TaskInfo info = new TaskInfo(
m_bytesIn[taskNumber], m_recsIn[taskNumber],
m_bytesOut[taskNumber], m_recsOut[taskNumber], -1);
return new MapTaskAttemptInfo(State.SUCCEEDED, info, 100);
}
case REDUCE: {
TaskInfo info = new TaskInfo(
r_bytesIn[taskNumber], r_recsIn[taskNumber],
r_bytesOut[taskNumber], r_recsOut[taskNumber], -1);
return new ReduceTaskAttemptInfo(State.SUCCEEDED, info, 100, 100, 100);
}
}
throw new UnsupportedOperationException();
}
示例4: getJobMapCounters
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
/**
 * Sums the per-map-task input/output byte and record counts of a trace job.
 * @param zombieJob original job story read from the trace.
 * @return totals keyed by MAP_INPUT_BYTES, MAP_OUTPUT_BYTES,
 *         MAP_INPUT_RECS and MAP_OUTPUT_RECS.
 */
public Map<String, Long> getJobMapCounters(ZombieJob zombieJob) {
long inputBytes = 0;
long outputBytes = 0;
long inputRecs = 0;
long outputRecs = 0;
// Accumulate over every map task recorded in the trace.
for (int task = 0; task < zombieJob.getNumberMaps(); task++) {
TaskInfo info = zombieJob.getTaskInfo(TaskType.MAP, task);
inputBytes += info.getInputBytes();
outputBytes += info.getOutputBytes();
inputRecs += info.getInputRecords();
outputRecs += info.getOutputRecords();
}
Map<String, Long> counters = new HashMap<String, Long>();
counters.put("MAP_INPUT_BYTES", inputBytes);
counters.put("MAP_OUTPUT_BYTES", outputBytes);
counters.put("MAP_INPUT_RECS", inputRecs);
counters.put("MAP_OUTPUT_RECS", outputRecs);
return counters;
}
示例5: getJobReduceCounters
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
/**
 * Sums the per-reduce-task input/output byte and record counts of a trace job.
 * @param zombieJob original job story read from the trace.
 * @return totals keyed by REDUCE_INPUT_BYTES, REDUCE_OUTPUT_BYTES,
 *         REDUCE_INPUT_RECS and REDUCE_OUTPUT_RECS.
 */
public Map<String,Long> getJobReduceCounters(ZombieJob zombieJob) {
long inputBytes = 0;
long outputBytes = 0;
long inputRecs = 0;
long outputRecs = 0;
// Accumulate over every reduce task recorded in the trace.
for (int task = 0; task < zombieJob.getNumberReduces(); task++) {
TaskInfo info = zombieJob.getTaskInfo(TaskType.REDUCE, task);
inputBytes += info.getInputBytes();
outputBytes += info.getOutputBytes();
inputRecs += info.getInputRecords();
outputRecs += info.getOutputRecords();
}
Map<String,Long> counters = new HashMap<String,Long>();
counters.put("REDUCE_INPUT_BYTES", inputBytes);
counters.put("REDUCE_OUTPUT_BYTES", outputBytes);
counters.put("REDUCE_INPUT_RECS", inputRecs);
counters.put("REDUCE_OUTPUT_RECS", outputRecs);
return counters;
}
示例6: getTaskInfo
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
@Override
public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
// Map and reduce counters come from separate per-task arrays;
// -1 marks the memory figure as unknown.
if (taskType == TaskType.MAP) {
return new TaskInfo(m_bytesIn[taskNumber], m_recsIn[taskNumber],
m_bytesOut[taskNumber], m_recsOut[taskNumber], -1);
}
if (taskType == TaskType.REDUCE) {
return new TaskInfo(r_bytesIn[taskNumber], r_recsIn[taskNumber],
r_bytesOut[taskNumber], r_recsOut[taskNumber], -1);
}
throw new IllegalArgumentException("Not interested");
}
示例7: getNextJobFiltered
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
protected JobStory getNextJobFiltered() throws IOException {
// Pull jobs from the producer until one is a SUCCESS with a valid
// (non-negative) submission time, or the producer is exhausted.
JobStory candidate = jobProducer.getNextJob();
while (candidate != null
&& (candidate.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS
|| candidate.getSubmissionTime() < 0)) {
candidate = jobProducer.getNextJob();
}
if (candidate == null) {
return null;
}
// Wrap the accepted job so each task reports at least the minimum TaskInfo.
return new FilterJobStory(candidate) {
@Override
public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
return new MinTaskInfo(this.job.getTaskInfo(taskType, taskNumber));
}
};
}
示例8: getNextJobFiltered
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
private JobStory getNextJobFiltered() throws IOException {
// Skip unsuccessful jobs and jobs with a negative submission time.
JobStory candidate = jobProducer.getNextJob();
while (candidate != null
&& (candidate.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS
|| candidate.getSubmissionTime() < 0)) {
candidate = jobProducer.getNextJob();
}
if (candidate == null) {
return null;
}
// Decorate the job so task info is clamped to the configured minimums.
return new FilterJobStory(candidate) {
@Override
public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
return new MinTaskInfo(this.job.getTaskInfo(taskType, taskNumber));
}
};
}
示例9: getNextJobFiltered
import org.apache.hadoop.tools.rumen.TaskInfo; //导入依赖的package包/类
protected JobStory getNextJobFiltered() throws IOException {
// Fetch jobs until we find one that both succeeded and carries a
// sane submission time; stop if the producer runs dry.
JobStory next;
for (;;) {
next = jobProducer.getNextJob();
if (next == null) {
return null;
}
boolean succeeded =
next.getOutcome() == Pre21JobHistoryConstants.Values.SUCCESS;
if (succeeded && next.getSubmissionTime() >= 0) {
break;
}
}
// Wrap so each task's info is floored by MinTaskInfo.
return new FilterJobStory(next) {
@Override
public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
return new MinTaskInfo(this.job.getTaskInfo(taskType, taskNumber));
}
};
}