This article collects typical usage examples of the Java method org.apache.hadoop.tools.rumen.JobStory.getNumberMaps. If you are wondering what JobStory.getNumberMaps does, how to call it, or where it is used, the curated code examples below may help. You can also read more about its enclosing class, org.apache.hadoop.tools.rumen.JobStory.
Below are 5 code examples of JobStory.getNumberMaps, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps recommend better Java code examples.
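Before the examples, here is a minimal sketch of how the method is typically reached: a Rumen job trace is replayed through a JobStoryProducer such as ZombieJobProducer, and getNumberMaps() reports the number of map tasks recorded for each replayed job. This is only an illustration, assuming the ZombieJobProducer(Path, ZombieCluster, Configuration) constructor and JobStoryProducer.getNextJob()/close() methods from the Hadoop Rumen tools; the trace path is a hypothetical placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.ZombieJobProducer;

public class NumberMapsDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical trace location; point this at a real Rumen JSON job trace.
    Path trace = new Path("file:///tmp/job-trace.json");
    // A null cluster topology is assumed to be acceptable here because we only
    // read per-job counts and never ask for task locality information.
    ZombieJobProducer producer = new ZombieJobProducer(trace, null, conf);
    try {
      JobStory job;
      while ((job = producer.getNextJob()) != null) {
        System.out.println(job.getName() + ": "
            + job.getNumberMaps() + " maps, "
            + job.getNumberReduces() + " reduces");
      }
    } finally {
      producer.close();
    }
  }
}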
Example 1: generateJobStats
import org.apache.hadoop.tools.rumen.JobStory; // import the package/class the method depends on
/**
 * Generates job stats for the given job.
 */
public static JobStats generateJobStats(Job job, JobStory jobdesc) {
  int seq = GridmixJob.getJobSeqId(job);
  // bail out if job description is missing for a job to be simulated
  if (seq >= 0 && jobdesc == null) {
    throw new IllegalArgumentException("JobStory not available for job "
                                       + job.getJobID());
  }

  int maps = -1;
  int reds = -1;
  if (jobdesc != null) {
    // Note that the ZombieJob will return a >= 0 value
    maps = jobdesc.getNumberMaps();
    reds = jobdesc.getNumberReduces();
  }
  return new JobStats(maps, reds, job);
}
Example 2: addJobStats
import org.apache.hadoop.tools.rumen.JobStory; // import the package/class the method depends on
public void addJobStats(Job job, JobStory jobdesc) {
  int seq = GridmixJob.getJobSeqId(job);
  if (seq < 0) {
    LOG.info("Not tracking job " + job.getJobName()
             + " as seq id is less than zero: " + seq);
    return;
  }

  int maps = 0;
  if (jobdesc == null) {
    throw new IllegalArgumentException(
      " JobStory not available for job " + job.getJobName());
  } else {
    maps = jobdesc.getNumberMaps();
  }

  JobStats stats = new JobStats(maps, job);
  jobMaps.put(seq, stats);
}
Example 3: verifyWordCountJobStory
import org.apache.hadoop.tools.rumen.JobStory; // import the package/class the method depends on
/**
 * Verifies that the given {@code JobStory} corresponds to the checked-in
 * WordCount {@code JobStory}. The verification is effected via JUnit
 * assertions.
 *
 * @param js the candidate JobStory.
 */
private void verifyWordCountJobStory(JobStory js) {
  assertNotNull("Null JobStory", js);
  String expectedJobStory = "WordCount:johndoe:default:1285322645148:3:1";
  String actualJobStory = js.getName() + ":" + js.getUser() + ":"
      + js.getQueueName() + ":" + js.getSubmissionTime() + ":"
      + js.getNumberMaps() + ":" + js.getNumberReduces();
  assertEquals("Unexpected JobStory", expectedJobStory, actualJobStory);
}
Example 4: getNextJobFiltered
import org.apache.hadoop.tools.rumen.JobStory; // import the package/class the method depends on
protected JobStory getNextJobFiltered() throws IOException {
  JobStory job = getNextJobFromTrace();
  // filter out the following jobs
  //   - unsuccessful jobs
  //   - jobs with missing submit-time
  //   - reduce only jobs
  // These jobs are not yet supported in Gridmix
  while (job != null &&
         (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS ||
          job.getSubmissionTime() < 0 || job.getNumberMaps() == 0)) {
    if (LOG.isDebugEnabled()) {
      List<String> reason = new ArrayList<String>();
      if (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS) {
        reason.add("STATE (" + job.getOutcome().name() + ")");
      }
      if (job.getSubmissionTime() < 0) {
        reason.add("SUBMISSION-TIME (" + job.getSubmissionTime() + ")");
      }
      if (job.getNumberMaps() == 0) {
        reason.add("ZERO-MAPS-JOB");
      }

      // TODO This should never happen. Probably we missed something!
      if (reason.size() == 0) {
        reason.add("N/A");
      }

      LOG.debug("Ignoring job " + job.getJobID() + " from the input trace."
                + " Reason: " + StringUtils.join(reason, ","));
    }
    job = getNextJobFromTrace();
  }
  return null == job ? null : new FilterJobStory(job) {
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      TaskInfo info = this.job.getTaskInfo(taskType, taskNumber);
      if (info != null) {
        info = new MinTaskInfo(info);
      } else {
        info = new MinTaskInfo(new TaskInfo(0, 0, 0, 0, 0));
      }
      return info;
    }
  };
}
Example 5: SimulatorJobInProgress
import org.apache.hadoop.tools.rumen.JobStory; // import the package/class the method depends on
@SuppressWarnings("deprecation")
public SimulatorJobInProgress(JobID jobid, JobTracker jobtracker,
JobConf default_conf, JobStory jobStory) {
super(jobid, jobStory.getJobConf(), jobtracker);
// jobSetupCleanupNeeded set to false in parent cstr, though
// default is true
restartCount = 0;
jobSetupCleanupNeeded = false;
this.memoryPerMap = conf.getMemoryForMapTask();
this.memoryPerReduce = conf.getMemoryForReduceTask();
this.maxTaskFailuresPerTracker = conf.getMaxTaskFailuresPerTracker();
this.jobId = jobid;
String url = "http://" + jobtracker.getJobTrackerMachine() + ":"
+ jobtracker.getInfoPort() + "/jobdetails.jsp?jobid=" + jobid;
this.jobtracker = jobtracker;
this.conf = jobStory.getJobConf();
this.priority = conf.getJobPriority();
Path jobDir = jobtracker.getSystemDirectoryForJob(jobid);
this.jobFile = new Path(jobDir, "job.xml");
this.status = new JobStatus(jobid, 0.0f, 0.0f, 0.0f, 0.0f, JobStatus.PREP,
priority, conf.getUser());
this.profile = new JobProfile(jobStory.getUser(), jobid, this.jobFile
.toString(), url, jobStory.getName(), conf.getQueueName());
this.startTime = JobTracker.getClock().getTime();
status.setStartTime(startTime);
this.resourceEstimator = new ResourceEstimator(this);
this.numMapTasks = jobStory.getNumberMaps();
this.numReduceTasks = jobStory.getNumberReduces();
this.taskCompletionEvents = new ArrayList<TaskCompletionEvent>(numMapTasks
+ numReduceTasks + 10);
this.mapFailuresPercent = conf.getMaxMapTaskFailuresPercent();
this.reduceFailuresPercent = conf.getMaxReduceTaskFailuresPercent();
MetricsContext metricsContext = MetricsUtil.getContext("mapred");
this.jobMetrics = MetricsUtil.createRecord(metricsContext, "job");
this.jobMetrics.setTag("user", conf.getUser());
this.jobMetrics.setTag("sessionId", conf.getSessionId());
this.jobMetrics.setTag("jobName", conf.getJobName());
this.jobMetrics.setTag("jobId", jobid.toString());
this.maxLevel = jobtracker.getNumTaskCacheLevels();
this.anyCacheLevel = this.maxLevel + 1;
this.nonLocalMaps = new LinkedList<TaskInProgress>();
this.nonLocalRunningMaps = new LinkedHashSet<TaskInProgress>();
this.runningMapCache = new IdentityHashMap<Node, Set<TaskInProgress>>();
this.nonRunningReduces = new LinkedList<TaskInProgress>();
this.runningReduces = new LinkedHashSet<TaskInProgress>();
this.slowTaskThreshold = Math.max(0.0f, conf.getFloat(
"mapred.speculative.execution.slowTaskThreshold", 1.0f));
this.speculativeCap = conf.getFloat(
"mapred.speculative.execution.speculativeCap", 0.1f);
this.slowNodeThreshold = conf.getFloat(
"mapred.speculative.execution.slowNodeThreshold", 1.0f);
this.jobStory = jobStory;
// this.jobHistory = this.jobtracker.getJobHistory();
}