当前位置: 首页>>代码示例>>Java>>正文


Java TimelineEntities.getEntities方法代码示例

本文整理汇总了Java中org.apache.hadoop.yarn.api.records.timeline.TimelineEntities.getEntities方法的典型用法代码示例。如果您正苦于以下问题:Java TimelineEntities.getEntities方法的具体用法?Java TimelineEntities.getEntities怎么用?Java TimelineEntities.getEntities使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.yarn.api.records.timeline.TimelineEntities的用法示例。


在下文中一共展示了TimelineEntities.getEntities方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: getAllApplications

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ApplicationId, ApplicationReport> getAllApplications()
    throws YarnException, IOException {
  // Fetch every application entity from the timeline store: no primary
  // filter, no time window, and no count limit (Long.MAX_VALUE).
  TimelineEntities timelineEntities = timelineDataManager.getEntities(
      ApplicationMetricsConstants.ENTITY_TYPE, null, null, null, null,
      null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class),
      UserGroupInformation.getLoginUser());
  // LinkedHashMap preserves the store's ordering of the entities.
  Map<ApplicationId, ApplicationReport> reports =
      new LinkedHashMap<ApplicationId, ApplicationReport>();
  if (timelineEntities == null || timelineEntities.getEntities() == null) {
    return reports;
  }
  for (TimelineEntity timelineEntity : timelineEntities.getEntities()) {
    try {
      ApplicationReportExt appExt =
          generateApplicationReport(timelineEntity, ApplicationReportField.ALL);
      reports.put(appExt.appReport.getApplicationId(), appExt.appReport);
    } catch (Exception e) {
      // A single malformed entity must not abort listing the rest.
      LOG.error("Error on generating application report for " +
          timelineEntity.getEntityId(), e);
    }
  }
  return reports;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:24,代码来源:ApplicationHistoryManagerOnTimelineStore.java

示例2: getApplicationAttempts

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Returns all attempts of the given application as reports, keyed and
 * ordered as returned by the timeline store.
 *
 * @param appId application whose attempts are requested
 * @return attempt id to attempt report map (empty if none found)
 * @throws YarnException if the caller fails the application ACL check
 * @throws IOException on timeline store read failure
 */
@Override
public Map<ApplicationAttemptId, ApplicationAttemptReport>
    getApplicationAttempts(ApplicationId appId)
        throws YarnException, IOException {
  // Resolve the owning application first so ACLs are enforced before
  // any attempt data is exposed.
  ApplicationReportExt app = getApplication(
      appId, ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  TimelineEntities entities = timelineDataManager.getEntities(
      AppAttemptMetricsConstants.ENTITY_TYPE,
      new NameValuePair(
          AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appId
              .toString()), null, null, null, null, null,
      Long.MAX_VALUE, EnumSet.allOf(Field.class),
      UserGroupInformation.getLoginUser());
  Map<ApplicationAttemptId, ApplicationAttemptReport> appAttempts =
      new LinkedHashMap<ApplicationAttemptId, ApplicationAttemptReport>();
  // Guard against a null result / null entity list, matching the
  // sibling getAllApplications()/getContainers() methods; previously a
  // null result here caused a NullPointerException.
  if (entities != null && entities.getEntities() != null) {
    for (TimelineEntity entity : entities.getEntities()) {
      ApplicationAttemptReport appAttempt =
          convertToApplicationAttemptReport(entity);
      appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt);
    }
  }
  return appAttempts;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:24,代码来源:ApplicationHistoryManagerOnTimelineStore.java

示例3: getContainers

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ContainerId, ContainerReport> getContainers(
    ApplicationAttemptId appAttemptId) throws YarnException, IOException {
  // Resolve the owning application first so ACLs are enforced before
  // any container data is exposed.
  ApplicationReportExt app = getApplication(
      appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  // NOTE(review): PARENT_PRIMARIY_FILTER matches the constant's actual
  // (misspelled) name as declared upstream — do not "fix" it here.
  TimelineEntities entities = timelineDataManager.getEntities(
      ContainerMetricsConstants.ENTITY_TYPE,
      new NameValuePair(
          ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
          appAttemptId.toString()), null, null, null,
      null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class),
      UserGroupInformation.getLoginUser());
  Map<ContainerId, ContainerReport> containerReports =
      new LinkedHashMap<ContainerId, ContainerReport>();
  if (entities == null || entities.getEntities() == null) {
    return containerReports;
  }
  for (TimelineEntity entity : entities.getEntities()) {
    ContainerReport report = convertToContainerReport(
        entity, serverHttpAddress, app.appReport.getUser());
    containerReports.put(report.getContainerId(), report);
  }
  return containerReports;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:25,代码来源:ApplicationHistoryManagerOnTimelineStore.java

示例4: getApplications

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ApplicationId, ApplicationReport> getApplications(long appsNum,
    long appStartedTimeBegin, long appStartedTimeEnd) throws YarnException,
    IOException {
  // Cap an "unlimited" request at the configured maximum so a single
  // call cannot load the entire store.
  long limit =
      appsNum == Long.MAX_VALUE ? this.maxLoadedApplications : appsNum;
  TimelineEntities fetched =
      timelineDataManager.getEntities(
        ApplicationMetricsConstants.ENTITY_TYPE, null, null,
        appStartedTimeBegin, appStartedTimeEnd, null, null,
        limit, EnumSet.allOf(Field.class),
        UserGroupInformation.getLoginUser());
  Map<ApplicationId, ApplicationReport> reports =
      new LinkedHashMap<ApplicationId, ApplicationReport>();
  if (fetched == null || fetched.getEntities() == null) {
    return reports;
  }
  for (TimelineEntity timelineEntity : fetched.getEntities()) {
    try {
      ApplicationReportExt appExt =
          generateApplicationReport(timelineEntity, ApplicationReportField.ALL);
      reports.put(appExt.appReport.getApplicationId(), appExt.appReport);
    } catch (Exception e) {
      // A single malformed entity must not abort listing the rest.
      LOG.error("Error on generating application report for " +
          timelineEntity.getEntityId(), e);
    }
  }
  return reports;
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:27,代码来源:ApplicationHistoryManagerOnTimelineStore.java

示例5: put

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Stores the given entities (with domain-id handling enabled), returning
 * the per-entity errors accumulated while writing.
 *
 * @param entities batch of entities to store
 * @return response carrying any per-entity put errors
 */
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  // Acquire the lock BEFORE the try block (standard Lock idiom): if
  // lock() were to fail inside the try, the finally would unlock a lock
  // this thread never held, throwing IllegalMonitorStateException and
  // masking the original failure. The read lock lets puts proceed
  // concurrently while blocking deletions.
  deleteLock.readLock().lock();
  try {
    TimelinePutResponse response = new TimelinePutResponse();
    for (TimelineEntity entity : entities.getEntities()) {
      put(entity, response, false);
    }
    return response;
  } finally {
    deleteLock.readLock().unlock();
  }
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:14,代码来源:LeveldbTimelineStore.java

示例6: putWithNoDomainId

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Test-only variant of {@link #put} that stores entities without domain
 * id handling (the {@code true} flag passed to the per-entity put).
 *
 * @param entities batch of entities to store
 * @return response carrying any per-entity put errors
 */
@Private
@VisibleForTesting
public TimelinePutResponse putWithNoDomainId(TimelineEntities entities) {
  // Acquire the lock BEFORE the try block (standard Lock idiom): if
  // lock() were to fail inside the try, the finally would unlock a lock
  // this thread never held, masking the original failure.
  deleteLock.readLock().lock();
  try {
    TimelinePutResponse response = new TimelinePutResponse();
    for (TimelineEntity entity : entities.getEntities()) {
      put(entity, response, true);
    }
    return response;
  } finally {
    deleteLock.readLock().unlock();
  }
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:15,代码来源:LeveldbTimelineStore.java

示例7: testSummaryRead

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Parses the summary logs of the main test app through a
 * TimelineDataManager and verifies both the entities read back and the
 * summary-read metric delta.
 */
@Test
public void testSummaryRead() throws Exception {
  // Load data
  EntityGroupFSTimelineStore.AppLogs appLogs =
      store.new AppLogs(mainTestAppId, mainTestAppDirPath,
      AppState.COMPLETED);
  MutableCounterLong summaryLogEntityRead
      = store.metrics.getGetEntityToSummaryOps();
  long numEntityReadBefore = summaryLogEntityRead.value();
  TimelineDataManager tdm
      = PluginStoreTestUtils.getTdmWithStore(config, store);
  appLogs.scanForLogs();
  appLogs.parseSummaryLogs(tdm);

  // Verify single entity read
  PluginStoreTestUtils.verifyTestEntities(tdm);
  // Verify multiple entities read
  TimelineEntities entities = tdm.getEntities("type_1", null, null, null,
      null, null, null, null, EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  // assertEquals takes (expected, actual); the original had the
  // arguments reversed, which produces a misleading failure message.
  assertEquals(1, entities.getEntities().size());
  for (TimelineEntity entity : entities.getEntities()) {
    assertEquals((Long) 123L, entity.getStartTime());
  }
  // Verify metrics
  assertEquals(numEntityReadBefore + 5L, summaryLogEntityRead.value());

}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:29,代码来源:TestEntityGroupFSTimelineStore.java

示例8: writeEntities

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Writes the given timeline entities to a new log file on the file
 * system, one JSON document per line.
 *
 * @param entities entities to serialize
 * @param logPath path of the log file to create
 * @param fs file system to write to
 * @throws IOException if the file cannot be created or written
 */
static void writeEntities(TimelineEntities entities, Path logPath,
    FileSystem fs) throws IOException {
  FSDataOutputStream outStream = createLogFile(logPath, fs);
  // try/finally so the stream is closed even when serialization throws;
  // the original leaked the stream on any write failure.
  try {
    JsonGenerator jsonGenerator
        = (new JsonFactory()).createJsonGenerator(outStream);
    // Newline separator yields one JSON entity per line.
    jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    ObjectMapper objMapper = createObjectMapper();
    for (TimelineEntity entity : entities.getEntities()) {
      objMapper.writeValue(jsonGenerator, entity);
    }
  } finally {
    outStream.close();
  }
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:20,代码来源:PluginStoreTestUtils.java

示例9: writeEntitiesLeaveOpen

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
// Appends the given entities to the shared log stream as newline-separated
// JSON, lazily opening the stream/generator on first use and leaving them
// open (only hflush-ed) so later calls can keep appending.
private void writeEntitiesLeaveOpen(TimelineEntities entities, Path logPath)
    throws IOException {
  boolean needOpen = (outStream == null);
  if (needOpen) {
    // First call: create the log file and a generator that emits one
    // JSON document per line.
    outStream = PluginStoreTestUtils.createLogFile(logPath, fs);
    jsonGenerator = (new JsonFactory()).createJsonGenerator(outStream);
    jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
  }
  for (TimelineEntity timelineEntity : entities.getEntities()) {
    objMapper.writeValue(jsonGenerator, timelineEntity);
  }
  // Flush to the file system without closing, so the file stays appendable.
  outStream.hflush();
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:13,代码来源:TestLogInfo.java

示例10: postEntities

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Store the given entities into the timeline store, and return the errors
 * that happen during storing.
 */
@POST
@Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
public TimelinePutResponse postEntities(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    TimelineEntities entities) {
  init(res);
  // An empty request body is not an error: reply with an empty response.
  if (entities == null) {
    return new TimelinePutResponse();
  }
  try {
    boolean traceEnabled = LOG.isDebugEnabled();
    List<EntityIdentifier> entityIDs = new ArrayList<EntityIdentifier>();
    for (TimelineEntity entity : entities.getEntities()) {
      EntityIdentifier identifier =
          new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
      entityIDs.add(identifier);
      if (traceEnabled) {
        LOG.debug("Storing the entity " + identifier + ", JSON-style content: "
            + TimelineUtils.dumpTimelineRecordtoJSON(entity));
      }
    }
    if (traceEnabled) {
      LOG.debug("Storing entities: " + CSV_JOINER.join(entityIDs));
    }
    return store.put(entities);
  } catch (IOException e) {
    // Map store failures to a 500 for the REST client.
    LOG.error("Error putting entities", e);
    throw new WebApplicationException(e,
        Response.Status.INTERNAL_SERVER_ERROR);
  }
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre2,代码行数:36,代码来源:TimelineWebServices.java

示例11: put

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Stores the given entities, returning the per-entity errors accumulated
 * while writing.
 *
 * @param entities batch of entities to store
 * @return response carrying any per-entity put errors
 */
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  // Acquire the lock BEFORE the try block (standard Lock idiom): if
  // lock() were to fail inside the try, the finally would unlock a lock
  // this thread never held, throwing IllegalMonitorStateException and
  // masking the original failure. The read lock lets puts proceed
  // concurrently while blocking deletions.
  deleteLock.readLock().lock();
  try {
    TimelinePutResponse response = new TimelinePutResponse();
    for (TimelineEntity entity : entities.getEntities()) {
      put(entity, response);
    }
    return response;
  } finally {
    deleteLock.readLock().unlock();
  }
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre2,代码行数:14,代码来源:LeveldbTimelineStore.java

示例12: put

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Starting put");
  }
  TimelinePutResponse response = new TimelinePutResponse();
  // Batched writes, keyed by rolling-period start time: entity data and
  // index data are accumulated separately and committed per period.
  TreeMap<Long, RollingWriteBatch> entityUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  TreeMap<Long, RollingWriteBatch> indexUpdates =
      new TreeMap<Long, RollingWriteBatch>();

  long entityCount = 0;
  // NOTE(review): indexCount is never incremented anywhere in this
  // method, so the debug log below always reports 0 index entries —
  // confirm against putEntities() whether an index tally was intended.
  long indexCount = 0;

  try {

    for (TimelineEntity entity : entities.getEntities()) {
      entityCount += putEntities(entityUpdates, indexUpdates, entity,
          response);
    }

    // Commit all accumulated batches only after every entity has been
    // staged; entity batches are written before index batches.
    for (RollingWriteBatch entityUpdate : entityUpdates.values()) {
      entityUpdate.write();
    }

    for (RollingWriteBatch indexUpdate : indexUpdates.values()) {
      indexUpdate.write();
    }

  } finally {

    // Always release batch resources, whether or not the writes landed.
    for (RollingWriteBatch entityRollingWriteBatch : entityUpdates.values()) {
      entityRollingWriteBatch.close();
    }
    for (RollingWriteBatch indexRollingWriteBatch : indexUpdates.values()) {
      indexRollingWriteBatch.close();
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Put " + entityCount + " new leveldb entity entries and "
        + indexCount + " new leveldb index entries from "
        + entities.getEntities().size() + " timeline entities");
  }
  return response;
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:46,代码来源:RollingLevelDBTimelineStore.java

示例13: doPostEntities

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Validates and stores the given entities on behalf of the caller,
 * collecting per-entity errors rather than failing the whole batch.
 *
 * @param entities entities to store; null yields an empty response
 * @param callerUGI user on whose behalf the entities are stored
 * @return the store's put response, augmented with validation errors
 * @throws YarnException if the underlying store rejects the batch
 * @throws IOException on store I/O failure
 */
private TimelinePutResponse doPostEntities(
    TimelineEntities entities,
    UserGroupInformation callerUGI) throws YarnException, IOException {
  if (entities == null) {
    return new TimelinePutResponse();
  }
  metrics.incrPostEntitiesTotal(entities.getEntities().size());
  // Only entities that pass all checks below are forwarded to the store.
  TimelineEntities entitiesToPut = new TimelineEntities();
  List<TimelinePutResponse.TimelinePutError> errors =
      new ArrayList<TimelinePutResponse.TimelinePutError>();
  for (TimelineEntity entity : entities.getEntities()) {

    // if the domain id is not specified, the entity will be put into
    // the default domain
    if (entity.getDomainId() == null ||
        entity.getDomainId().length() == 0) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    // A missing id/type is a client error and aborts the whole request,
    // unlike the per-entity failures handled further down.
    if (entity.getEntityId() == null || entity.getEntityType() == null) {
      throw new BadRequestException("Incomplete entity without entity"
          + " id/type");
    }
    // check if there is existing entity
    TimelineEntity existingEntity = null;
    try {
      existingEntity =
          store.getEntity(entity.getEntityId(), entity.getEntityType(),
              EnumSet.of(Field.PRIMARY_FILTERS));
      if (existingEntity != null) {
        addDefaultDomainIdIfAbsent(existingEntity);
        // An entity's domain is immutable once written.
        if (!existingEntity.getDomainId().equals(entity.getDomainId())) {
          throw new YarnException("The domain of the timeline entity "
            + "{ id: " + entity.getEntityId() + ", type: "
            + entity.getEntityType() + " } is not allowed to be changed from "
            + existingEntity.getDomainId() + " to " + entity.getDomainId());
        }
      }
      if (!timelineACLsManager.checkAccess(
          callerUGI, ApplicationAccessType.MODIFY_APP, entity)) {
        throw new YarnException(callerUGI
            + " is not allowed to put the timeline entity "
            + "{ id: " + entity.getEntityId() + ", type: "
            + entity.getEntityType() + " } into the domain "
            + entity.getDomainId() + ".");
      }
    } catch (Exception e) {
      // Skip the entity which already exists and was put by others
      // NOTE(review): every failure in this try — domain mismatch, ACL
      // denial, and store read errors alike — is reported to the client
      // with the single error code ACCESS_DENIED.
      LOG.warn("Skip the timeline entity: { id: " + entity.getEntityId()
          + ", type: "+ entity.getEntityType() + " }", e);
      TimelinePutResponse.TimelinePutError error =
          new TimelinePutResponse.TimelinePutError();
      error.setEntityId(entity.getEntityId());
      error.setEntityType(entity.getEntityType());
      error.setErrorCode(
          TimelinePutResponse.TimelinePutError.ACCESS_DENIED);
      errors.add(error);
      continue;
    }

    entitiesToPut.addEntity(entity);
  }

  TimelinePutResponse response = store.put(entitiesToPut);
  // add the errors of timeline system filter key conflict
  response.addErrors(errors);
  return response;
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:68,代码来源:TimelineDataManager.java

示例14: testPluginRead

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Reads entities through the configured entity-group plugin via a cached
 * detail-log item, verifying both the data returned and the detail-read /
 * cache-refresh metric deltas.
 */
@Test
public void testPluginRead() throws Exception {
  // Verify precondition
  assertEquals(EntityGroupPlugInForTest.class.getName(),
      store.getConfig().get(
          YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES));
  // Load data and cache item, prepare timeline store by making a cache item
  EntityGroupFSTimelineStore.AppLogs appLogs =
      store.new AppLogs(mainTestAppId, mainTestAppDirPath,
      AppState.COMPLETED);
  EntityCacheItem cacheItem = new EntityCacheItem(
      EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId),
      config);
  cacheItem.setAppLogs(appLogs);
  store.setCachedLogs(
      EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId),
      cacheItem);
  // Snapshot metric values so the assertions below check deltas, not
  // absolute counts.
  MutableCounterLong detailLogEntityRead =
      store.metrics.getGetEntityToDetailOps();
  MutableStat cacheRefresh = store.metrics.getCacheRefresh();
  long numEntityReadBefore = detailLogEntityRead.value();
  long cacheRefreshBefore = cacheRefresh.lastStat().numSamples();

  // Generate TDM
  TimelineDataManager tdm
      = PluginStoreTestUtils.getTdmWithStore(config, store);

  // Verify single entity read
  TimelineEntity entity3 = tdm.getEntity("type_3", mainTestAppId.toString(),
      EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertNotNull(entity3);
  assertEquals(entityNew.getStartTime(), entity3.getStartTime());
  // Verify multiple entities read
  NameValuePair primaryFilter = new NameValuePair(
      EntityGroupPlugInForTest.APP_ID_FILTER_NAME, mainTestAppId.toString());
  TimelineEntities entities = tdm.getEntities("type_3", primaryFilter, null,
      null, null, null, null, null, EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertEquals(1, entities.getEntities().size());
  for (TimelineEntity entity : entities.getEntities()) {
    assertEquals(entityNew.getStartTime(), entity.getStartTime());
  }
  // Verify metrics
  // Two detail reads expected: one getEntity plus one getEntities call.
  assertEquals(numEntityReadBefore + 2L, detailLogEntityRead.value());
  // The cache item set up above should be refreshed exactly once.
  assertEquals(cacheRefreshBefore + 1L, cacheRefresh.lastStat().numSamples());
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:48,代码来源:TestEntityGroupFSTimelineStore.java


注:本文中的org.apache.hadoop.yarn.api.records.timeline.TimelineEntities.getEntities方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。