本文整理汇总了Java中org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent类的典型用法代码示例。如果您正苦于以下问题:Java TaskAttemptCompletionEvent类的具体用法?Java TaskAttemptCompletionEvent怎么用?Java TaskAttemptCompletionEvent使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
TaskAttemptCompletionEvent类属于org.apache.hadoop.mapreduce.v2.api.records包,在下文中一共展示了TaskAttemptCompletionEvent类的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getTaskAttemptCompletionEvents
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Returns a window of at most {@code maxEvents} completion events starting at
 * {@code fromEventId}, or an empty array when {@code fromEventId} is past the
 * end of the list. The snapshot is taken under the read lock.
 */
@Override
public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
    int fromEventId, int maxEvents) {
  readLock.lock();
  try {
    TaskAttemptCompletionEvent[] result = EMPTY_TASK_ATTEMPT_COMPLETION_EVENTS;
    int available = taskAttemptCompletionEvents.size();
    if (available > fromEventId) {
      // Clamp the window so it never reads past the end of the list.
      int count = Math.min(maxEvents, available - fromEventId);
      result = taskAttemptCompletionEvents
          .subList(fromEventId, fromEventId + count).toArray(result);
    }
    return result;
  } finally {
    readLock.unlock();
  }
}
示例2: handleTaskAttemptCompletion
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Builds a {@link TaskAttemptCompletionEvent} for the given attempt and
 * forwards it to the job via the event handler. No event is raised unless the
 * attempt already has a node HTTP address (i.e. a container was assigned).
 */
private void handleTaskAttemptCompletion(TaskAttemptId attemptId,
    TaskAttemptCompletionEventStatus status) {
  TaskAttempt attempt = attempts.get(attemptId);
  // Raise the completion event only if the container is assigned
  // to nextAttemptNumber.
  if (attempt.getNodeHttpAddress() == null) {
    return;
  }
  TaskAttemptCompletionEvent event =
      recordFactory.newRecordInstance(TaskAttemptCompletionEvent.class);
  event.setEventId(-1);
  // Shuffle fetch URL: scheme depends on whether encrypted shuffle is on.
  String scheme = encryptedShuffle ? "https://" : "http://";
  String host = attempt.getNodeHttpAddress().split(":")[0];
  event.setMapOutputServerAddress(StringInterner.weakIntern(
      scheme + host + ":" + attempt.getShufflePort()));
  event.setStatus(status);
  event.setAttemptId(attempt.getID());
  // Run time is only meaningful once both launch and finish times are set.
  int runTime = 0;
  if (attempt.getFinishTime() != 0 && attempt.getLaunchTime() != 0) {
    runTime = (int) (attempt.getFinishTime() - attempt.getLaunchTime());
  }
  event.setAttemptRunTime(runTime);
  // Hand the event to the job so it adds it to its own data structures.
  eventHandler.handle(new JobTaskAttemptCompletedEvent(event));
}
示例3: createTce
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Builds a completion event for a synthetic attempt 0 of task 0 in a fixed
 * test job (12345_1_1). Only the event id, task type (map vs. reduce) and
 * status vary between calls.
 */
private static TaskAttemptCompletionEvent createTce(int eventId,
    boolean isMap, TaskAttemptCompletionEventStatus status) {
  JobId jobId = MRBuilderUtils.newJobId(12345, 1, 1);
  org.apache.hadoop.mapreduce.v2.api.records.TaskType taskType = isMap
      ? org.apache.hadoop.mapreduce.v2.api.records.TaskType.MAP
      : org.apache.hadoop.mapreduce.v2.api.records.TaskType.REDUCE;
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0, taskType);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  TaskAttemptCompletionEvent event = RecordFactoryProvider
      .getRecordFactory(null)
      .newRecordInstance(TaskAttemptCompletionEvent.class);
  event.setEventId(eventId);
  event.setAttemptId(attemptId);
  event.setStatus(status);
  return event;
}
示例4: createTce
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Creates a completion event for attempt 0 of a map or reduce task in a
 * fixed test job, stamped with the given event id and status.
 */
private static TaskAttemptCompletionEvent createTce(int eventId,
    boolean isMap, TaskAttemptCompletionEventStatus status) {
  // All events share the same synthetic job/task coordinates.
  JobId jobId = MRBuilderUtils.newJobId(12345, 1, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0,
      isMap ? org.apache.hadoop.mapreduce.v2.api.records.TaskType.MAP
            : org.apache.hadoop.mapreduce.v2.api.records.TaskType.REDUCE);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
  TaskAttemptCompletionEvent event =
      factory.newRecordInstance(TaskAttemptCompletionEvent.class);
  event.setEventId(eventId);
  event.setAttemptId(attemptId);
  event.setStatus(status);
  return event;
}
示例5: getTaskAttemptCompletionEvents
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Stub RPC implementation: always answers with a response carrying an empty
 * completion-event list, regardless of the request.
 */
@Override
public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
    GetTaskAttemptCompletionEventsRequest request)
    throws IOException {
  GetTaskAttemptCompletionEventsResponse response = recordFactory
      .newRecordInstance(GetTaskAttemptCompletionEventsResponse.class);
  response.addAllCompletionEvents(new ArrayList<TaskAttemptCompletionEvent>());
  return response;
}
示例6: fromYarn
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Converts an array of YARN completion events to the old-API
 * {@code TaskCompletionEvent} representation, preserving order.
 */
public static TaskCompletionEvent[] fromYarn(
    TaskAttemptCompletionEvent[] newEvents) {
  TaskCompletionEvent[] converted = new TaskCompletionEvent[newEvents.length];
  for (int idx = 0; idx < newEvents.length; idx++) {
    // Element-wise conversion via the single-event overload.
    converted[idx] = fromYarn(newEvents[idx]);
  }
  return converted;
}
示例7: initCompletionEvents
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Lazily deserializes the completion-event list from the underlying proto.
 * No-op if the list has already been materialized.
 */
private void initCompletionEvents() {
  if (this.completionEvents != null) {
    return; // already materialized
  }
  // Read from the committed proto when viaProto, otherwise the live builder.
  GetTaskAttemptCompletionEventsResponseProtoOrBuilder source =
      viaProto ? proto : builder;
  this.completionEvents = new ArrayList<TaskAttemptCompletionEvent>();
  for (TaskAttemptCompletionEventProto eventProto
      : source.getCompletionEventsList()) {
    this.completionEvents.add(convertFromProtoFormat(eventProto));
  }
}
示例8: addAllCompletionEvents
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Appends all given events to the cached list, materializing it from the
 * proto first if needed. A {@code null} argument is treated as "nothing to
 * add" and ignored silently.
 */
@Override
public void addAllCompletionEvents(final List<TaskAttemptCompletionEvent> completionEvents) {
  if (completionEvents != null) {
    initCompletionEvents();
    this.completionEvents.addAll(completionEvents);
  }
}
示例9: addCompletionEventsToProto
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Replaces the builder's completion events with the cached list. Events are
 * exposed through a lazy {@link Iterable} so each one is converted to its
 * proto form on demand while the builder iterates, avoiding an intermediate
 * converted list.
 */
private void addCompletionEventsToProto() {
  maybeInitBuilder();
  builder.clearCompletionEvents();
  if (completionEvents == null) {
    return;
  }
  Iterable<TaskAttemptCompletionEventProto> protoView =
      new Iterable<TaskAttemptCompletionEventProto>() {
        @Override
        public Iterator<TaskAttemptCompletionEventProto> iterator() {
          // Each call to iterator() walks the current state of the list.
          final Iterator<TaskAttemptCompletionEvent> source =
              completionEvents.iterator();
          return new Iterator<TaskAttemptCompletionEventProto>() {
            @Override
            public boolean hasNext() {
              return source.hasNext();
            }
            @Override
            public TaskAttemptCompletionEventProto next() {
              // Convert lazily, one event at a time.
              return convertToProtoFormat(source.next());
            }
            @Override
            public void remove() {
              throw new UnsupportedOperationException();
            }
          };
        }
      };
  builder.addAllCompletionEvents(protoView);
}
示例10: getTaskAttemptCompletionEvents
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Returns a slice of completion events, building the cached event list on
 * first access. Synchronized so the lazy construction happens at most once.
 */
@Override
public synchronized TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
    int fromEventId, int maxEvents) {
  if (completionEvents == null) {
    constructTaskAttemptCompletionEvents();
  }
  return getAttemptCompletionEvents(completionEvents, fromEventId, maxEvents);
}
示例11: getAttemptCompletionEvents
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent; //导入依赖的package包/类
/**
 * Returns up to {@code maxEvents} events from {@code eventList} starting at
 * {@code startIndex}; an empty array when {@code startIndex} is at or past
 * the end of the list.
 */
private static TaskAttemptCompletionEvent[] getAttemptCompletionEvents(
    List<TaskAttemptCompletionEvent> eventList,
    int startIndex, int maxEvents) {
  if (eventList.size() <= startIndex) {
    return new TaskAttemptCompletionEvent[0];
  }
  // Clamp the window so subList never exceeds the list bounds.
  int count = Math.min(maxEvents, eventList.size() - startIndex);
  return eventList.subList(startIndex, startIndex + count)
      .toArray(new TaskAttemptCompletionEvent[0]);
}