This article collects typical usage examples of the Java property org.apache.hadoop.mapred.JobStatus.SUCCEEDED. If you have been wondering what JobStatus.SUCCEEDED is, how it is used, or what real code that uses it looks like, the curated examples below should help. You can also read more about the enclosing class, org.apache.hadoop.mapred.JobStatus.
The following presents 8 code examples of the JobStatus.SUCCEEDED property, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
Example 1: convertMapreduceState
/**
* Convert map-reduce specific job status constants to Sqoop job status
* constants.
*
* @param status Map-reduce job constant
* @return Equivalent submission status
*/
private SubmissionStatus convertMapreduceState(int status) {
  if (status == JobStatus.PREP) {
    return SubmissionStatus.BOOTING;
  } else if (status == JobStatus.RUNNING) {
    return SubmissionStatus.RUNNING;
  } else if (status == JobStatus.FAILED) {
    return SubmissionStatus.FAILED;
  } else if (status == JobStatus.KILLED) {
    return SubmissionStatus.FAILED;
  } else if (status == JobStatus.SUCCEEDED) {
    return SubmissionStatus.SUCCEEDED;
  }
  throw new SqoopException(MapreduceSubmissionError.MAPREDUCE_0004,
      "Unknown status " + status);
}
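The converter above is private to its submission engine, so a caller inside the same class would pass it the raw int obtained from a RunningJob handle. A minimal usage sketch under that assumption (the updateSubmissionStatus method, the runningJob handle, and the submission object are illustrative names, not part of the source):

// Hypothetical caller inside the same submission engine; only
// convertMapreduceState itself comes from the example above.
private void updateSubmissionStatus(RunningJob runningJob, MSubmission submission) throws IOException {
  int state = runningJob.getJobState();                 // raw org.apache.hadoop.mapred status constant
  submission.setStatus(convertMapreduceState(state));   // translate to Sqoop's SubmissionStatus
}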
Example 2: ofInt
static JobState ofInt(int state) {
  if (state == JobStatus.PREP) {
    return PREPARE;
  } else if (state == JobStatus.RUNNING) {
    return RUNNING;
  } else if (state == JobStatus.FAILED) {
    return FAILED;
  } else if (state == JobStatus.SUCCEEDED) {
    return SUCCEEDED;
  } else {
    return null;
  }
}
Example 3: ofInt
static JobState ofInt(int state) {
  switch (state) {
    case JobStatus.PREP:
      return PREPARE;
    case JobStatus.RUNNING:
      return RUNNING;
    case JobStatus.FAILED:
      return FAILED;
    case JobStatus.SUCCEEDED:
      return SUCCEEDED;
    default:
      return null;
  }
}
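Both ofInt variants presumably sit on a JobState enum whose constants mirror the legacy mapred status codes; the enum itself is not shown in the source. A minimal sketch of what that enclosing type could look like, wrapping the switch-based factory from Example 3:

import org.apache.hadoop.mapred.JobStatus;

// Assumed shape of the enclosing enum; only the ofInt body is taken from the source.
enum JobState {
  PREPARE, RUNNING, FAILED, SUCCEEDED;

  static JobState ofInt(int state) {
    switch (state) {
      case JobStatus.PREP:      return PREPARE;
      case JobStatus.RUNNING:   return RUNNING;
      case JobStatus.FAILED:    return FAILED;
      case JobStatus.SUCCEEDED: return SUCCEEDED;
      default:                  return null;   // other states (e.g. KILLED) fall through to null
    }
  }
}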
Example 4: run
@Override
public int run(String[] arg0) throws IOException {
  final List<String> domains = Lists.newArrayList(".net", ".com", ".edu");
  final List<String> words = new ArrayList<>();
  List<String> urls = new ArrayList<>();
  int wordCount = 0;
  try (Scanner wordScanner = new Scanner(new FileReader(new File(INTPUT_VOCAB_FILENAME)))) {
    wordScanner.useDelimiter("\\W");
    while (wordScanner.hasNext()) {
      String nextWord = wordScanner.next().trim().toLowerCase();
      if (nextWord.length() > 3) {
        ++wordCount;
        words.add(nextWord);
        LOG.debug(wordCount + ": " + nextWord);
      }
    }
  }
  LOG.info("Vocab words read: " + wordCount);
  urls = generateUrls(words, domains, MAX_URLS);
  int urlWrittenCount = 0;
  Configuration conf = getConf();
  FileSystem fs = FileSystem.get(conf);
  Path dataPath = new Path(OUTPUT_SEQ_FILENAME);
  if (fs.exists(dataPath)) {
    fs.delete(dataPath, true);
  }
  try (SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, dataPath, Text.class,
      Text.class)) {
    urlWrittenCount = writeUrlsToSequenceFile(writer, urls);
  }
  LOG.info("Successfully wrote " + urlWrittenCount + " urls");
  return JobStatus.SUCCEEDED;
}
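generateUrls and writeUrlsToSequenceFile are helpers defined elsewhere in the class and are not shown in the source. Purely as an illustration, a writer helper with the same signature could look like the sketch below; the record layout (one Text URL per key with an empty value) is an assumption, not the project's actual implementation.

// Hypothetical helper; the real implementation is not part of the example.
// Requires org.apache.hadoop.io.SequenceFile, org.apache.hadoop.io.Text,
// java.util.List and java.io.IOException, as in the surrounding class.
private static int writeUrlsToSequenceFile(SequenceFile.Writer writer, List<String> urls)
    throws IOException {
  int count = 0;
  Text key = new Text();
  Text value = new Text();       // empty payload; the value layout is assumed
  for (String url : urls) {
    key.set(url);
    writer.append(key, value);   // one URL per SequenceFile record
    count++;
  }
  return count;
}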
Example 5: verifyJobDetails
/**
 * Verification API to check if the job details are semantically correct.<br/>
 *
 * @param jobId
 *          jobID of the job
 * @throws Exception
 */
public void verifyJobDetails(JobID jobId) throws Exception {
  // wait till the setup is launched and finished.
  JobInfo jobInfo = getJobInfo(jobId);
  if (jobInfo == null) {
    return;
  }
  LOG.info("waiting for the setup to be finished");
  while (!jobInfo.isSetupFinished()) {
    Thread.sleep(2000);
    jobInfo = getJobInfo(jobId);
    if (jobInfo == null) {
      break;
    }
  }
  // verify job id.
  assertTrue(jobId.toString().startsWith("job_"));
  LOG.info("verified job id and is : " + jobId.toString());
  // verify the number of map/reduce tasks.
  verifyNumTasks(jobId);
  // should verify job progress.
  verifyJobProgress(jobId);
  jobInfo = getJobInfo(jobId);
  if (jobInfo == null) {
    return;
  }
  if (jobInfo.getStatus().getRunState() == JobStatus.SUCCEEDED) {
    // verify if map/reduce progress reached 1.
    jobInfo = getJobInfo(jobId);
    if (jobInfo == null) {
      return;
    }
    assertEquals(1.0, jobInfo.getStatus().mapProgress(), 0.001);
    assertEquals(1.0, jobInfo.getStatus().reduceProgress(), 0.001);
    // verify successful finish of tasks.
    verifyAllTasksSuccess(jobId);
  }
  if (jobInfo.getStatus().isJobComplete()) {
    // verify if the cleanup is launched.
    jobInfo = getJobInfo(jobId);
    if (jobInfo == null) {
      return;
    }
    assertTrue(jobInfo.isCleanupLaunched());
    LOG.info("Verified launching of cleanup");
  }
}
Example 6: getJobState
@Override
public int getJobState() throws IOException {
  return isComplete() ? JobStatus.SUCCEEDED : JobStatus.FAILED;
}
Example 7: run
@Override
public int run(String[] arg0) throws IOException, ClassNotFoundException, InterruptedException {
  LOG.info("Demo for Pagerank Job -- run()");
  Configuration conf = getConf();
  FileSystem fs = FileSystem.get(conf);
  conf.setFloat("damping.factor", DEFAULT_DAMPING_FACTOR);
  conf.setInt("input.size", INPUT_SIZE);
  conf.setInt("rounding.precision", DEFAULT_ROUNDING_PRECISION);
  Path parsedInputPath = new Path(DATA_PATH);
  Path parsedOutputPath = new Path("data/pagerank/parsed_output");
  if (fs.exists(parsedOutputPath)) {
    fs.delete(parsedOutputPath, true);
  }
  Job parseJob = new Job(conf, "Pagerank: Parse URL data job");
  parseJob.setJarByClass(getClass());
  runParseUrlJob(parseJob, parsedInputPath, parsedOutputPath);
  int iterationCount = 1;
  Path inputPath = parsedOutputPath;
  while (iterationCount < MAX_ITERATIONS) {
    LOG.info("ITERATION #" + iterationCount);
    Path outputPath = new Path("data/pagerank/depth_" + iterationCount);
    if (fs.exists(outputPath)) {
      fs.delete(outputPath, true);
    }
    Job pagerankJob = new Job(conf, "Pagerank computation job iteration " + iterationCount);
    pagerankJob.setJarByClass(getClass());
    runPagerankIterationJob(pagerankJob, inputPath, outputPath);
    iterationCount++;
    fs.delete(inputPath, true);
    inputPath = outputPath;
  }
  Path topkOutputPath = new Path("data/pagerank/topk");
  if (fs.exists(topkOutputPath)) {
    fs.delete(topkOutputPath, true);
  }
  Job topkJob = new Job(conf, "Pagerank topK job");
  topkJob.setJarByClass(getClass());
  runTopKJob(topkJob, inputPath, topkOutputPath);
  fs.delete(inputPath, true);
  Util.showSequenceFile(conf, topkOutputPath);
  return JobStatus.SUCCEEDED;
}
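Examples 4 and 7 both implement Tool.run() and use JobStatus.SUCCEEDED as the driver's return code. A minimal launcher sketch showing how such a driver is normally started, assuming a PagerankDemo class name for the Tool in Example 7 (the real class name is not shown in the source):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

// Hypothetical entry point; PagerankDemo stands in for the class that owns run() above.
public class PagerankDemoLauncher {
  public static void main(String[] args) throws Exception {
    // ToolRunner injects the Configuration and forwards run()'s return value,
    // JobStatus.SUCCEEDED here, as the process exit code.
    int exitCode = ToolRunner.run(new Configuration(), new PagerankDemo(), args);
    System.exit(exitCode);
  }
}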
Example 8: verifyJobDetails
/**
 * Verification API to check if the job details are semantically correct.<br/>
 *
 * @param jobId
 *          jobID of the job
 * @throws Exception
 */
public void verifyJobDetails(JobID jobId) throws Exception {
  // wait till the setup is launched and finished.
  JobInfo jobInfo = getJobInfo(jobId);
  if (jobInfo == null) {
    return;
  }
  LOG.info("waiting for the setup to be finished");
  while (!jobInfo.isSetupFinished()) {
    Thread.sleep(2000);
    jobInfo = getJobInfo(jobId);
    if (jobInfo == null) {
      break;
    }
  }
  // verify job id.
  assertTrue(jobId.toString().startsWith("job_"));
  LOG.info("verified job id and is : " + jobId.toString());
  // verify the number of map/reduce tasks.
  verifyNumTasks(jobId);
  // should verify job progress.
  verifyJobProgress(jobId);
  jobInfo = getJobInfo(jobId);
  if (jobInfo == null) {
    return;
  }
  if (jobInfo.getStatus().getRunState() == JobStatus.SUCCEEDED) {
    // verify if map/reduce progress reached 1.
    jobInfo = getJobInfo(jobId);
    if (jobInfo == null) {
      return;
    }
    assertEquals(1.0, jobInfo.getStatus().mapProgress(), 0.001);
    assertEquals(1.0, jobInfo.getStatus().reduceProgress(), 0.001);
    // verify successful finish of tasks.
    verifyAllTasksSuccess(jobId);
  }
  if (jobInfo.getStatus().isJobComplete()) {
    // verify if the cleanup is launched.
    jobInfo = getJobInfo(jobId);
    if (jobInfo == null) {
      return;
    }
    assertTrue(jobInfo.isCleanupLaunched());
    LOG.info("Verified launching of cleanup");
  }
}
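In a system test, a verification method like this is typically called right after the job under test is submitted. A short usage sketch under the assumption of a system-test harness (jobClient, jobConf, and jtClient are illustrative names; only verifyJobDetails comes from the example):

// Hypothetical test usage; everything except verifyJobDetails is assumed.
RunningJob rJob = jobClient.submitJob(jobConf);   // submit the job under test
JobID jobId = rJob.getID();                       // org.apache.hadoop.mapred.JobID
jtClient.verifyJobDetails(jobId);                 // waits for setup, then asserts id, task counts, progress, cleanup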