This article collects typical usage examples of the Java method org.apache.hadoop.yarn.api.records.timeline.TimelineEntity.addOtherInfo. If you have been wondering what TimelineEntity.addOtherInfo does, how to call it, or where to find examples of it, the curated code samples below may help. You can also explore further usages of its enclosing class, org.apache.hadoop.yarn.api.records.timeline.TimelineEntity.
Below are 9 code examples of TimelineEntity.addOtherInfo, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java examples.
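Before the examples, a minimal self-contained sketch of what the method does (the identifiers and values here are illustrative, not taken from any example below): addOtherInfo attaches free-form key/value metadata to a TimelineEntity. Unlike primary filters, other info is not indexed for filtering queries; it is simply stored with the entity and can be read back with getOtherInfo().

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

import java.util.HashMap;
import java.util.Map;

public class AddOtherInfoSketch {
  public static void main(String[] args) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("demo_entity_id");    // illustrative id
    entity.setEntityType("DEMO_TYPE");       // illustrative type
    entity.setStartTime(System.currentTimeMillis());
    // Single key/value overload
    entity.addOtherInfo("memorySeconds", 1024L);
    // Map overload: merges all entries into the entity's other info
    Map<String, Object> more = new HashMap<String, Object>();
    more.put("queue", "default");
    entity.addOtherInfo(more);
    // Read the accumulated metadata back
    Map<String, Object> info = entity.getOtherInfo();
    System.out.println(info);  // e.g. {queue=default, memorySeconds=1024}
  }
}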
Example 1: generateEntity
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private static TimelineEntity generateEntity() {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("entity id");
  entity.setEntityType("entity type");
  entity.setStartTime(System.currentTimeMillis());
  for (int i = 0; i < 2; ++i) {
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType("test event type " + i);
    event.addEventInfo("key1", "val1");
    event.addEventInfo("key2", "val2");
    entity.addEvent(event);
  }
  entity.addRelatedEntity("test ref type 1", "test ref id 1");
  entity.addRelatedEntity("test ref type 2", "test ref id 2");
  entity.addPrimaryFilter("pkey1", "pval1");
  entity.addPrimaryFilter("pkey2", "pval2");
  entity.addOtherInfo("okey1", "oval1");
  entity.addOtherInfo("okey2", "oval2");
  entity.setDomainId("domain id 1");
  return entity;
}
Example 2: createJobEntity
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) {
  TimelineEntity job = new TimelineEntity();
  job.setEntityType(JOB);
  job.setEntityId(jobInfo.getJobId().toString());
  job.setStartTime(jobInfo.getSubmitTime());
  job.addPrimaryFilter("JOBNAME", jobInfo.getJobname());
  job.addPrimaryFilter("USERNAME", jobInfo.getUsername());
  job.addOtherInfo("JOB_QUEUE_NAME", jobInfo.getJobQueueName());
  job.addOtherInfo("SUBMIT_TIME", jobInfo.getSubmitTime());
  job.addOtherInfo("LAUNCH_TIME", jobInfo.getLaunchTime());
  job.addOtherInfo("FINISH_TIME", jobInfo.getFinishTime());
  job.addOtherInfo("JOB_STATUS", jobInfo.getJobStatus());
  job.addOtherInfo("PRIORITY", jobInfo.getPriority());
  job.addOtherInfo("TOTAL_MAPS", jobInfo.getTotalMaps());
  job.addOtherInfo("TOTAL_REDUCES", jobInfo.getTotalReduces());
  job.addOtherInfo("UBERIZED", jobInfo.getUberized());
  job.addOtherInfo("ERROR_INFO", jobInfo.getErrorInfo());
  LOG.info("converted job " + jobInfo.getJobId() + " to a timeline entity");
  return job;
}
Example 3: createTaskEntity
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private TimelineEntity createTaskEntity(TaskInfo taskInfo) {
  TimelineEntity task = new TimelineEntity();
  task.setEntityType(TASK);
  task.setEntityId(taskInfo.getTaskId().toString());
  task.setStartTime(taskInfo.getStartTime());
  task.addOtherInfo("START_TIME", taskInfo.getStartTime());
  task.addOtherInfo("FINISH_TIME", taskInfo.getFinishTime());
  task.addOtherInfo("TASK_TYPE", taskInfo.getTaskType());
  task.addOtherInfo("TASK_STATUS", taskInfo.getTaskStatus());
  task.addOtherInfo("ERROR_INFO", taskInfo.getError());
  LOG.info("converted task " + taskInfo.getTaskId() +
      " to a timeline entity");
  return task;
}
Example 4: createTaskAttemptEntity
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
  TimelineEntity taskAttempt = new TimelineEntity();
  taskAttempt.setEntityType(TASK_ATTEMPT);
  taskAttempt.setEntityId(taskAttemptInfo.getAttemptId().toString());
  taskAttempt.setStartTime(taskAttemptInfo.getStartTime());
  taskAttempt.addOtherInfo("START_TIME", taskAttemptInfo.getStartTime());
  taskAttempt.addOtherInfo("FINISH_TIME", taskAttemptInfo.getFinishTime());
  taskAttempt.addOtherInfo("MAP_FINISH_TIME",
      taskAttemptInfo.getMapFinishTime());
  taskAttempt.addOtherInfo("SHUFFLE_FINISH_TIME",
      taskAttemptInfo.getShuffleFinishTime());
  taskAttempt.addOtherInfo("SORT_FINISH_TIME",
      taskAttemptInfo.getSortFinishTime());
  taskAttempt.addOtherInfo("TASK_STATUS", taskAttemptInfo.getTaskStatus());
  taskAttempt.addOtherInfo("STATE", taskAttemptInfo.getState());
  taskAttempt.addOtherInfo("ERROR", taskAttemptInfo.getError());
  taskAttempt.addOtherInfo("CONTAINER_ID",
      taskAttemptInfo.getContainerId().toString());
  LOG.info("converted task attempt " + taskAttemptInfo.getAttemptId() +
      " to a timeline entity");
  return taskAttempt;
}
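The converters in Examples 2 through 4 only build TimelineEntity objects; writing them to the timeline server is a separate step. Here is a minimal sketch of that step, assuming the converted entities are already at hand (the client setup is illustrative and not part of the examples above):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.exceptions.YarnException;

void publish(TimelineEntity... entities) throws IOException, YarnException {
  TimelineClient client = TimelineClient.createTimelineClient();
  client.init(new Configuration());  // service lifecycle: configure ...
  client.start();                    // ... then start before writing
  try {
    client.putEntities(entities);    // varargs; returns a TimelinePutResponse
  } finally {
    client.stop();
  }
}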
Example 5: publishApplicationFinishedEvent
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private void publishApplicationFinishedEvent(ApplicationFinishedEvent event) {
  TimelineEntity entity = createApplicationEntity(event.getApplicationId());
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  tEvent.setTimestamp(event.getTimestamp());
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO,
      event.getDiagnosticsInfo());
  eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO,
      event.getFinalApplicationStatus().toString());
  eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
      event.getYarnApplicationState().toString());
  if (event.getLatestApplicationAttemptId() != null) {
    eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO,
        event.getLatestApplicationAttemptId().toString());
  }
  RMAppMetrics appMetrics = event.getAppMetrics();
  entity.addOtherInfo(ApplicationMetricsConstants.APP_CPU_METRICS,
      appMetrics.getVcoreSeconds());
  entity.addOtherInfo(ApplicationMetricsConstants.APP_MEM_METRICS,
      appMetrics.getMemorySeconds());
  entity.addOtherInfo(ApplicationMetricsConstants.APP_GPU_METRICS,
      appMetrics.getGcoreSeconds());
  tEvent.setEventInfo(eventInfo);
  entity.addEvent(tEvent);
  putEntity(entity);
}
Example 6: maskFields
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private static TimelineEntity maskFields(
    TimelineEntity entity, EnumSet<Field> fields) {
  // Conceal the fields that are not going to be exposed
  TimelineEntity entityToReturn = new TimelineEntity();
  entityToReturn.setEntityId(entity.getEntityId());
  entityToReturn.setEntityType(entity.getEntityType());
  entityToReturn.setStartTime(entity.getStartTime());
  entityToReturn.setDomainId(entity.getDomainId());
  // Deep copy
  if (fields.contains(Field.EVENTS)) {
    entityToReturn.addEvents(entity.getEvents());
  } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
    entityToReturn.addEvent(entity.getEvents().get(0));
  } else {
    entityToReturn.setEvents(null);
  }
  if (fields.contains(Field.RELATED_ENTITIES)) {
    entityToReturn.addRelatedEntities(entity.getRelatedEntities());
  } else {
    entityToReturn.setRelatedEntities(null);
  }
  if (fields.contains(Field.PRIMARY_FILTERS)) {
    entityToReturn.addPrimaryFilters(entity.getPrimaryFilters());
  } else {
    entityToReturn.setPrimaryFilters(null);
  }
  if (fields.contains(Field.OTHER_INFO)) {
    entityToReturn.addOtherInfo(entity.getOtherInfo());
  } else {
    entityToReturn.setOtherInfo(null);
  }
  return entityToReturn;
}
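A hypothetical call site for the helper above: requesting only OTHER_INFO yields a copy whose events, related entities, and primary filters are nulled out, while the other-info map is preserved. Here storedEntity is an assumed, fully populated entity, and Field is the same enum the surrounding class already uses:

TimelineEntity masked = maskFields(storedEntity, EnumSet.of(Field.OTHER_INFO));
assert masked.getEvents() == null;          // concealed
assert masked.getPrimaryFilters() == null;  // concealed
assert masked.getOtherInfo().equals(storedEntity.getOtherInfo());  // kept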
Example 7: publishApplicationFinishedEvent (a variant of Example 5 that omits the GPU metric)
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
private void publishApplicationFinishedEvent(ApplicationFinishedEvent event) {
  TimelineEntity entity = createApplicationEntity(event.getApplicationId());
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  tEvent.setTimestamp(event.getTimestamp());
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO,
      event.getDiagnosticsInfo());
  eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO,
      event.getFinalApplicationStatus().toString());
  eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
      event.getYarnApplicationState().toString());
  if (event.getLatestApplicationAttemptId() != null) {
    eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO,
        event.getLatestApplicationAttemptId().toString());
  }
  RMAppMetrics appMetrics = event.getAppMetrics();
  entity.addOtherInfo(ApplicationMetricsConstants.APP_CPU_METRICS,
      appMetrics.getVcoreSeconds());
  entity.addOtherInfo(ApplicationMetricsConstants.APP_MEM_METRICS,
      appMetrics.getMemorySeconds());
  tEvent.setEventInfo(eventInfo);
  entity.addEvent(tEvent);
  putEntity(entity);
}
Example 8: map
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
public void map(IntWritable key, IntWritable val, Context context)
    throws IOException {
  TimelineClient tlc = new TimelineClientImpl();
  Configuration conf = context.getConfiguration();
  final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
  long totalTime = 0;
  final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
  final Random rand = new Random();
  final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
  final char[] payLoad = new char[kbs * 1024];
  for (int i = 0; i < testtimes; i++) {
    // Generate a fixed length random payload
    for (int xx = 0; xx < kbs * 1024; xx++) {
      int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
      payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
    }
    String entId = taskAttemptId + "_" + Integer.toString(i);
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(entId);
    entity.setEntityType("FOO_ATTEMPT");
    entity.addOtherInfo("PERF_TEST", payLoad);
    // add an event
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType("foo_event");
    entity.addEvent(event);
    // use the current user for this purpose
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    long startWrite = System.nanoTime();
    try {
      tlc.putEntities(entity);
    } catch (Exception e) {
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES)
          .increment(1);
      LOG.error("writing to the timeline service failed", e);
    }
    long endWrite = System.nanoTime();
    totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
  }
  LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes +
      " kB) in " + totalTime + " ms");
  context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME)
      .increment(totalTime);
  context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER)
      .increment(testtimes);
  context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS)
      .increment(kbs * testtimes);
}
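One caveat when adapting this mapper outside its original test harness: as shown, the TimelineClientImpl is constructed but never initialized. TimelineClient is a YARN service, so in a standalone setting it has to go through its service lifecycle before putEntities will succeed. A minimal sketch of that setup, assuming the same conf as in the snippet:

TimelineClient tlc = TimelineClient.createTimelineClient();
tlc.init(conf);   // configure the client service ...
tlc.start();      // ... and start it before the first putEntities call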
Example 9: testStorePerformance
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; // import the package/class the method depends on
public void testStorePerformance() throws IOException {
  TimelineEntity entityToStorePrep = new TimelineEntity();
  entityToStorePrep.setEntityType("TEST_ENTITY_TYPE_PREP");
  entityToStorePrep.setEntityId("TEST_ENTITY_ID_PREP");
  entityToStorePrep.setDomainId("TEST_DOMAIN");
  entityToStorePrep.addRelatedEntity("TEST_ENTITY_TYPE_2",
      "TEST_ENTITY_ID_2");
  entityToStorePrep.setStartTime(0L);
  TimelineEntities entitiesPrep = new TimelineEntities();
  entitiesPrep.addEntity(entityToStorePrep);
  store.put(entitiesPrep);
  long start = System.currentTimeMillis();
  int num = 1000000;
  Log.info("Start test for " + num);
  final String tezTaskAttemptId = "TEZ_TA";
  final String tezEntityId = "attempt_1429158534256_0001_1_00_000000_";
  final String tezTaskId = "TEZ_T";
  final String tezDomainId = "Tez_ATS_application_1429158534256_0001";
  TimelineEntity entityToStore = new TimelineEntity();
  TimelineEvent startEvt = new TimelineEvent();
  entityToStore.setEntityType(tezTaskAttemptId);
  startEvt.setEventType("TASK_ATTEMPT_STARTED");
  startEvt.setTimestamp(0);
  entityToStore.addEvent(startEvt);
  entityToStore.setDomainId(tezDomainId);
  entityToStore.addPrimaryFilter("status", "SUCCEEDED");
  entityToStore.addPrimaryFilter("applicationId",
      "application_1429158534256_0001");
  entityToStore.addPrimaryFilter("TEZ_VERTEX_ID",
      "vertex_1429158534256_0001_1_00");
  entityToStore.addPrimaryFilter("TEZ_DAG_ID", "dag_1429158534256_0001_1");
  entityToStore.addPrimaryFilter("TEZ_TASK_ID",
      "task_1429158534256_0001_1_00_000000");
  entityToStore.setStartTime(0L);
  entityToStore.addOtherInfo("startTime", 0);
  entityToStore.addOtherInfo("inProgressLogsURL",
      "localhost:8042/inProgressLogsURL");
  entityToStore.addOtherInfo("completedLogsURL", "");
  entityToStore.addOtherInfo("nodeId", "localhost:54450");
  entityToStore.addOtherInfo("nodeHttpAddress", "localhost:8042");
  entityToStore.addOtherInfo("containerId",
      "container_1429158534256_0001_01_000002");
  entityToStore.addOtherInfo("status", "RUNNING");
  entityToStore.addRelatedEntity(tezTaskId, "TEZ_TASK_ID_1");
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entityToStore);
  for (int i = 0; i < num; ++i) {
    entityToStore.setEntityId(tezEntityId + i);
    store.put(entities);
  }
  long duration = System.currentTimeMillis() - start;
  Log.info("Duration for " + num + ": " + duration);
}
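The store field exercised throughout this test is a TimelineStore implementation supplied by the enclosing test class; the snippet does not show how it is created. A hedged sketch of standing up a leveldb-backed store (the path and configuration values are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore;
import org.apache.hadoop.yarn.server.timeline.TimelineStore;

Configuration conf = new YarnConfiguration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, "/tmp/timeline-store");  // illustrative path
TimelineStore store = new LeveldbTimelineStore();
store.init(conf);   // creates the backing leveldb files
store.start();      // required before put()/get() calls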