本文整理汇总了Java中org.apache.hadoop.mapreduce.TaskCompletionEvent类的典型用法代码示例。如果您正苦于以下问题:Java TaskCompletionEvent类的具体用法?Java TaskCompletionEvent怎么用?Java TaskCompletionEvent使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
TaskCompletionEvent类属于org.apache.hadoop.mapreduce包,在下文中一共展示了TaskCompletionEvent类的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getAllTaskCompletionEvent
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/**
 * Collects every {@link TaskCompletionEvent} of a finished job, best-effort.
 * Pages through the job's event list until an empty page ends the scan.
 *
 * @param completedJob the finished MR job to query
 * @return all completion events fetched so far (possibly partial on I/O failure)
 */
private static List<TaskCompletionEvent> getAllTaskCompletionEvent(Job completedJob) {
  List<TaskCompletionEvent> events = new LinkedList<>();
  for (;;) {
    TaskCompletionEvent[] page;
    try {
      // The next page starts right after the events gathered so far.
      page = completedJob.getTaskCompletionEvents(events.size());
    } catch (IOException ignored) {
      // NOTE(review): an I/O failure silently ends the scan and returns a partial
      // list — presumably intentional best-effort behavior; confirm with callers.
      break;
    }
    if (page == null || page.length == 0) {
      break;
    }
    events.addAll(Arrays.asList(page));
  }
  return events;
}
示例2: removeFailedPaths
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/**
 * Removes all bad paths caused by speculative execution.
 * The problem happens when a speculative task attempt is initialized but then killed in the
 * middle of processing. A partial file may be left at
 * {tmp_output}/_temporary/1/_temporary/attempt_xxx_xxx/part-m-xxxx.avro,
 * without being committed to its final destination at {tmp_output}/part-m-xxxx.avro.
 *
 * @param job completed MR job
 * @param tmpPath temporary output directory scanned for avro files
 * @param fs file system holding {@code tmpPath}
 * @return all successful paths
 * @throws IOException if listing or deleting files on {@code fs} fails
 */
public static List<Path> removeFailedPaths(Job job, Path tmpPath, FileSystem fs) throws IOException {
List<TaskCompletionEvent> failedEvents = CompactionAvroJobConfigurator.getUnsuccessfulTaskCompletionEvent(job);
List<Path> allFilePaths = DatasetHelper.getApplicableFilePaths(fs, tmpPath, Lists.newArrayList("avro"));
List<Path> goodPaths = new ArrayList<>();
for (Path filePath: allFilePaths) {
if (CompactionAvroJobConfigurator.isFailedPath(filePath, failedEvents)) {
// Leftover of a failed/killed attempt: delete it so it never reaches the final output.
fs.delete(filePath, false);
log.error("{} is a bad path so it was deleted", filePath);
} else {
goodPaths.add(filePath);
}
}
return goodPaths;
}
示例3: listEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/**
 * Prints the task completion events of the given job to stdout.
 *
 * @param job the job whose events are listed
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to fetch
 * @throws IOException if the events cannot be retrieved
 * @throws InterruptedException if the query is interrupted
 */
private void listEvents(Job job, int fromEventId, int numEvents)
throws IOException, InterruptedException {
TaskCompletionEvent[] events = job.
getTaskCompletionEvents(fromEventId, numEvents);
System.out.println("Task completion events for " + job.getJobID());
System.out.println("Number of events (from " + fromEventId + ") are: "
+ events.length);
for(TaskCompletionEvent event: events) {
// One line per event: status, attempt id, and a URL to the attempt's task log.
System.out.println(event.getStatus() + " " +
event.getTaskAttemptId() + " " +
getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
}
}
示例4: getTaskCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/**
 * Answers with no events for jobs tracked in {@code submittedJobs};
 * any other job id is delegated to {@code backupRunner}.
 */
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobID, int i, int i2) throws IOException, InterruptedException {
  // Lookup uses the old-API job id, since that is what the map is keyed by.
  org.apache.hadoop.mapred.JobID oldApiId = org.apache.hadoop.mapred.JobID.downgrade(jobID);
  if (!submittedJobs.containsKey(oldApiId)) {
    return backupRunner.getTaskCompletionEvents(jobID, i, i2);
  }
  return new TaskCompletionEvent[0];
}
示例5: listEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/**
 * Prints the task completion events of the given job to stdout.
 *
 * @param job the job to list
 * @param fromEventId event id for the job's events to list from
 * @param numEvents number of events we want to list
 * @throws IOException if the events cannot be retrieved
 * @throws InterruptedException if the query is interrupted
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " + events.length);
  for (int idx = 0; idx < events.length; idx++) {
    TaskCompletionEvent event = events[idx];
    // One line per event: status, attempt id, and the attempt's task-log URL.
    System.out.println(event.getStatus() + " " + event.getTaskAttemptId() + " "
        + getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
示例6: getTaskCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/**
 * Always reports zero task completion events.
 * FIXME: the client seems to support querying task-failure information,
 * but this MR-style API does not map cleanly onto a DAG.
 */
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobId,
    int fromEventId, int maxEvents)
    throws IOException, InterruptedException {
  return new TaskCompletionEvent[0];
}
示例7: getTaskCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/** Resolves the per-job client from the cache and forwards the event query to it. */
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobId, int fromEventId, int maxEvents)
    throws IOException, InterruptedException {
  return clientCache.getClient(jobId).getTaskCompletionEvents(jobId, fromEventId, maxEvents);
}
示例8: getTaskCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/** Pure delegation: the client cached for this job id answers the event query. */
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobID, int fromEvent,
    int maxEvents) throws IOException, InterruptedException {
  return clientCache.getClient(jobID).getTaskCompletionEvents(jobID, fromEvent, maxEvents);
}
示例9: getMapCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/** Stub: reports no map completion events for any job. */
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId,
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  org.apache.hadoop.mapred.TaskCompletionEvent[] none =
      org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY;
  // NOTE(review): second ctor arg presumably means "should reset fetch state" — confirm.
  return new MapTaskCompletionEventsUpdate(none, false);
}
示例10: getTaskCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/** Always answers with the shared empty event array — no events are ever tracked here. */
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid, int fromEventId, int maxEvents)
    throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
示例11: getMapCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/** Stub override: no map completion events exist for any job. */
@Override
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId,
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  // Hand back the canonical empty old-API array rather than allocating a new one.
  return new MapTaskCompletionEventsUpdate(
      org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
示例12: getTaskCompletionEvents
import org.apache.hadoop.mapreduce.TaskCompletionEvent; //导入依赖的package包/类
/** {@inheritDoc} */
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobid, int fromEventId, int maxEvents)
    throws IOException, InterruptedException {
  // No events are tracked: every query yields a fresh empty array.
  return new TaskCompletionEvent[0];
}