本文整理汇总了Java中org.apache.hadoop.yarn.api.records.timeline.TimelineEntities.getEntities方法的典型用法代码示例。如果您正苦于以下问题:Java TimelineEntities.getEntities方法的具体用法?Java TimelineEntities.getEntities怎么用?Java TimelineEntities.getEntities使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
的用法示例。
在下文中一共展示了TimelineEntities.getEntities方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getAllApplications
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ApplicationId, ApplicationReport> getAllApplications()
    throws YarnException, IOException {
  // Pull every application entity known to the timeline store, with all
  // fields populated, on behalf of the server's login user.
  TimelineEntities entities = timelineDataManager.getEntities(
      ApplicationMetricsConstants.ENTITY_TYPE, null, null, null, null,
      null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class),
      UserGroupInformation.getLoginUser());
  Map<ApplicationId, ApplicationReport> reports =
      new LinkedHashMap<ApplicationId, ApplicationReport>();
  if (entities == null || entities.getEntities() == null) {
    return reports;
  }
  for (TimelineEntity entity : entities.getEntities()) {
    try {
      ApplicationReportExt appExt =
          generateApplicationReport(entity, ApplicationReportField.ALL);
      reports.put(appExt.appReport.getApplicationId(), appExt.appReport);
    } catch (Exception e) {
      // A single malformed entity must not abort the whole listing.
      LOG.error("Error on generating application report for "
          + entity.getEntityId(), e);
    }
  }
  return reports;
}
示例2: getApplicationAttempts
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ApplicationAttemptId, ApplicationAttemptReport>
getApplicationAttempts(ApplicationId appId)
throws YarnException, IOException {
  // Resolve the owning application first so ACLs can be enforced before
  // any attempt data is read from the timeline store.
  ApplicationReportExt app = getApplication(
      appId, ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  TimelineEntities entities = timelineDataManager.getEntities(
      AppAttemptMetricsConstants.ENTITY_TYPE,
      new NameValuePair(
          AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appId
              .toString()), null, null, null, null, null,
      Long.MAX_VALUE, EnumSet.allOf(Field.class),
      UserGroupInformation.getLoginUser());
  Map<ApplicationAttemptId, ApplicationAttemptReport> appAttempts =
      new LinkedHashMap<ApplicationAttemptId, ApplicationAttemptReport>();
  // Guard against a null result or null entity list, consistent with
  // getAllApplications() and getContainers(); without this guard the
  // loop below could throw a NullPointerException.
  if (entities != null && entities.getEntities() != null) {
    for (TimelineEntity entity : entities.getEntities()) {
      ApplicationAttemptReport appAttempt =
          convertToApplicationAttemptReport(entity);
      appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt);
    }
  }
  return appAttempts;
}
示例3: getContainers
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ContainerId, ContainerReport> getContainers(
    ApplicationAttemptId appAttemptId) throws YarnException, IOException {
  // Enforce ACLs via the owning application before reading container data.
  ApplicationReportExt app = getApplication(
      appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  // NOTE: PARENT_PRIMARIY_FILTER is the constant's actual (misspelled)
  // name in ContainerMetricsConstants; do not "correct" it here.
  TimelineEntities entities = timelineDataManager.getEntities(
      ContainerMetricsConstants.ENTITY_TYPE,
      new NameValuePair(
          ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
          appAttemptId.toString()), null, null, null,
      null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class),
      UserGroupInformation.getLoginUser());
  Map<ContainerId, ContainerReport> containers =
      new LinkedHashMap<ContainerId, ContainerReport>();
  if (entities == null || entities.getEntities() == null) {
    return containers;
  }
  for (TimelineEntity entity : entities.getEntities()) {
    ContainerReport report = convertToContainerReport(
        entity, serverHttpAddress, app.appReport.getUser());
    containers.put(report.getContainerId(), report);
  }
  return containers;
}
示例4: getApplications
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public Map<ApplicationId, ApplicationReport> getApplications(long appsNum,
    long appStartedTimeBegin, long appStartedTimeEnd) throws YarnException,
    IOException {
  // Long.MAX_VALUE means "no explicit limit"; cap it at the configured
  // maximum number of applications to load.
  long limit =
      appsNum == Long.MAX_VALUE ? this.maxLoadedApplications : appsNum;
  TimelineEntities entities =
      timelineDataManager.getEntities(
          ApplicationMetricsConstants.ENTITY_TYPE, null, null,
          appStartedTimeBegin, appStartedTimeEnd, null, null,
          limit,
          EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  Map<ApplicationId, ApplicationReport> reports =
      new LinkedHashMap<ApplicationId, ApplicationReport>();
  if (entities == null || entities.getEntities() == null) {
    return reports;
  }
  for (TimelineEntity entity : entities.getEntities()) {
    try {
      ApplicationReportExt appExt =
          generateApplicationReport(entity, ApplicationReportField.ALL);
      reports.put(appExt.appReport.getApplicationId(), appExt.appReport);
    } catch (Exception e) {
      // Skip the broken entity; keep building the rest of the map.
      LOG.error("Error on generating application report for "
          + entity.getEntityId(), e);
    }
  }
  return reports;
}
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:27,代码来源:ApplicationHistoryManagerOnTimelineStore.java
示例5: put
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  try {
    // Take the shared read lock so a concurrent delete cannot run while
    // these entities are being stored.
    deleteLock.readLock().lock();
    TimelinePutResponse resp = new TimelinePutResponse();
    // Store each entity individually, collecting per-entity errors into
    // the shared response; domain-id checking is enabled (false flag).
    for (TimelineEntity e : entities.getEntities()) {
      put(e, resp, false);
    }
    return resp;
  } finally {
    deleteLock.readLock().unlock();
  }
}
示例6: putWithNoDomainId
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Private
@VisibleForTesting
public TimelinePutResponse putWithNoDomainId(TimelineEntities entities) {
  try {
    // Hold the read lock for the whole batch so deletes cannot interleave.
    deleteLock.readLock().lock();
    TimelinePutResponse resp = new TimelinePutResponse();
    // Same as put(), except the per-entity store call skips domain-id
    // handling (true flag) — intended for tests only.
    for (TimelineEntity e : entities.getEntities()) {
      put(e, resp, true);
    }
    return resp;
  } finally {
    deleteLock.readLock().unlock();
  }
}
示例7: testSummaryRead
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Test
public void testSummaryRead() throws Exception {
  // Load data
  EntityGroupFSTimelineStore.AppLogs appLogs =
      store.new AppLogs(mainTestAppId, mainTestAppDirPath,
      AppState.COMPLETED);
  MutableCounterLong summaryLogEntityRead
      = store.metrics.getGetEntityToSummaryOps();
  long numEntityReadBefore = summaryLogEntityRead.value();
  TimelineDataManager tdm
      = PluginStoreTestUtils.getTdmWithStore(config, store);
  appLogs.scanForLogs();
  appLogs.parseSummaryLogs(tdm);
  // Verify single entity read
  PluginStoreTestUtils.verifyTestEntities(tdm);
  // Verify multiple entities read
  TimelineEntities entities = tdm.getEntities("type_1", null, null, null,
      null, null, null, null, EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  // JUnit convention is assertEquals(expected, actual); the original had
  // the arguments swapped, which produces misleading failure messages.
  assertEquals(1, entities.getEntities().size());
  for (TimelineEntity entity : entities.getEntities()) {
    assertEquals((Long) 123L, entity.getStartTime());
  }
  // Verify metrics: parsing the summary logs reads 5 entities.
  assertEquals(numEntityReadBefore + 5L, summaryLogEntityRead.value());
}
示例8: writeEntities
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Write timeline entities to a file system as newline-separated JSON
 * records.
 *
 * @param entities the timeline entities to serialize
 * @param logPath destination path of the log file
 * @param fs the file system to write to
 * @throws IOException if the file cannot be created or written
 */
static void writeEntities(TimelineEntities entities, Path logPath,
    FileSystem fs) throws IOException {
  // try-with-resources: the original leaked the stream if creating the
  // generator or serializing an entity threw before the final close().
  try (FSDataOutputStream outStream = createLogFile(logPath, fs)) {
    JsonGenerator jsonGenerator
        = (new JsonFactory()).createJsonGenerator(outStream);
    // One entity per line, separated by a newline.
    jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    ObjectMapper objMapper = createObjectMapper();
    for (TimelineEntity entity : entities.getEntities()) {
      objMapper.writeValue(jsonGenerator, entity);
    }
    // Flush any buffered generator output before the stream closes.
    jsonGenerator.flush();
  }
}
示例9: writeEntitiesLeaveOpen
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
// Appends the given entities to the log file at logPath as one JSON record
// per line, lazily creating the output stream and JSON generator on first
// use (stored in instance fields) and deliberately leaving them open so
// later calls keep appending to the same file. Callers are responsible for
// eventually closing the stream.
private void writeEntitiesLeaveOpen(TimelineEntities entities, Path logPath)
throws IOException {
  if (outStream == null) {
    outStream = PluginStoreTestUtils.createLogFile(logPath, fs);
    jsonGenerator = (new JsonFactory()).createJsonGenerator(outStream);
    jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
  }
  for (TimelineEntity entity : entities.getEntities()) {
    objMapper.writeValue(jsonGenerator, entity);
  }
  // hflush makes the bytes visible to readers without closing the stream.
  outStream.hflush();
}
示例10: postEntities
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
/**
 * Store the given entities into the timeline store, and return the errors
 * that happen during storing.
 */
@POST
@Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
public TimelinePutResponse postEntities(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    TimelineEntities entities) {
  init(res);
  if (entities == null) {
    // Nothing to store; an empty response signals success with no errors.
    return new TimelinePutResponse();
  }
  try {
    List<EntityIdentifier> ids = new ArrayList<EntityIdentifier>();
    for (TimelineEntity entity : entities.getEntities()) {
      EntityIdentifier id =
          new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
      ids.add(id);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Storing the entity " + id + ", JSON-style content: "
            + TimelineUtils.dumpTimelineRecordtoJSON(entity));
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Storing entities: " + CSV_JOINER.join(ids));
    }
    return store.put(entities);
  } catch (IOException ioe) {
    LOG.error("Error putting entities", ioe);
    // Surface storage failures to the client as a 500.
    throw new WebApplicationException(ioe,
        Response.Status.INTERNAL_SERVER_ERROR);
  }
}
示例11: put
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  try {
    // A read lock on deleteLock blocks concurrent deletes for the
    // duration of this batch.
    deleteLock.readLock().lock();
    TimelinePutResponse resp = new TimelinePutResponse();
    // Delegate each entity to the single-entity put, which records any
    // per-entity failures into the response.
    for (TimelineEntity e : entities.getEntities()) {
      put(e, resp);
    }
    return resp;
  } finally {
    deleteLock.readLock().unlock();
  }
}
示例12: put
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
// Batches the given entities into per-roll leveldb write batches (one per
// rolling period key), writes all entity batches then all index batches,
// and always closes every batch even on failure.
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Starting put");
  }
  TimelinePutResponse response = new TimelinePutResponse();
  // Keyed by rolling-period timestamp; TreeMap keeps batches in time order.
  TreeMap<Long, RollingWriteBatch> entityUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  TreeMap<Long, RollingWriteBatch> indexUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  long entityCount = 0;
  // NOTE(review): indexCount is never incremented anywhere in this method,
  // so the debug line below always reports 0 index entries — presumably
  // putEntities() was meant to report it; confirm against putEntities().
  long indexCount = 0;
  try {
    // Accumulate all updates first so each leveldb batch is written once.
    for (TimelineEntity entity : entities.getEntities()) {
      entityCount += putEntities(entityUpdates, indexUpdates, entity,
          response);
    }
    for (RollingWriteBatch entityUpdate : entityUpdates.values()) {
      entityUpdate.write();
    }
    for (RollingWriteBatch indexUpdate : indexUpdates.values()) {
      indexUpdate.write();
    }
  } finally {
    // Close every batch regardless of write success to release resources.
    for (RollingWriteBatch entityRollingWriteBatch : entityUpdates.values()) {
      entityRollingWriteBatch.close();
    }
    for (RollingWriteBatch indexRollingWriteBatch : indexUpdates.values()) {
      indexRollingWriteBatch.close();
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Put " + entityCount + " new leveldb entity entries and "
        + indexCount + " new leveldb index entries from "
        + entities.getEntities().size() + " timeline entities");
  }
  return response;
}
示例13: doPostEntities
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
// Validates, domain-stamps, and ACL-checks each posted entity, then stores
// the surviving entities in one batch. Entities that fail any check are
// skipped and reported as ACCESS_DENIED errors in the response rather than
// failing the whole request (except for incomplete entities, which abort
// with BadRequestException).
private TimelinePutResponse doPostEntities(
    TimelineEntities entities,
    UserGroupInformation callerUGI) throws YarnException, IOException {
  if (entities == null) {
    // Nothing to store.
    return new TimelinePutResponse();
  }
  metrics.incrPostEntitiesTotal(entities.getEntities().size());
  TimelineEntities entitiesToPut = new TimelineEntities();
  List<TimelinePutResponse.TimelinePutError> errors =
      new ArrayList<TimelinePutResponse.TimelinePutError>();
  for (TimelineEntity entity : entities.getEntities()) {
    // if the domain id is not specified, the entity will be put into
    // the default domain
    if (entity.getDomainId() == null ||
        entity.getDomainId().length() == 0) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    if (entity.getEntityId() == null || entity.getEntityType() == null) {
      // Malformed input aborts the whole request, unlike the per-entity
      // failures handled below.
      throw new BadRequestException("Incomplete entity without entity"
          + " id/type");
    }
    // check if there is existing entity
    TimelineEntity existingEntity = null;
    try {
      existingEntity =
          store.getEntity(entity.getEntityId(), entity.getEntityType(),
              EnumSet.of(Field.PRIMARY_FILTERS));
      if (existingEntity != null) {
        addDefaultDomainIdIfAbsent(existingEntity);
        // An entity's domain is immutable once stored.
        if (!existingEntity.getDomainId().equals(entity.getDomainId())) {
          throw new YarnException("The domain of the timeline entity "
              + "{ id: " + entity.getEntityId() + ", type: "
              + entity.getEntityType() + " } is not allowed to be changed from "
              + existingEntity.getDomainId() + " to " + entity.getDomainId());
        }
      }
      if (!timelineACLsManager.checkAccess(
          callerUGI, ApplicationAccessType.MODIFY_APP, entity)) {
        throw new YarnException(callerUGI
            + " is not allowed to put the timeline entity "
            + "{ id: " + entity.getEntityId() + ", type: "
            + entity.getEntityType() + " } into the domain "
            + entity.getDomainId() + ".");
      }
    } catch (Exception e) {
      // Skip the entity which already exists and was put by others
      // NOTE(review): every failure here — store error, domain mismatch,
      // or ACL denial — is reported with the single ACCESS_DENIED code.
      LOG.warn("Skip the timeline entity: { id: " + entity.getEntityId()
          + ", type: "+ entity.getEntityType() + " }", e);
      TimelinePutResponse.TimelinePutError error =
          new TimelinePutResponse.TimelinePutError();
      error.setEntityId(entity.getEntityId());
      error.setEntityType(entity.getEntityType());
      error.setErrorCode(
          TimelinePutResponse.TimelinePutError.ACCESS_DENIED);
      errors.add(error);
      continue;
    }
    entitiesToPut.addEntity(entity);
  }
  TimelinePutResponse response = store.put(entitiesToPut);
  // add the errors of timeline system filter key conflict
  response.addErrors(errors);
  return response;
}
示例14: testPluginRead
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; //导入方法依赖的package包/类
// Verifies that entity reads served through the group plugin path hit the
// detail log (not the summary log), return the expected entities, and bump
// the detail-read and cache-refresh metrics accordingly.
@Test
public void testPluginRead() throws Exception {
  // Verify precondition
  assertEquals(EntityGroupPlugInForTest.class.getName(),
      store.getConfig().get(
          YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES));
  // Load data and cache item, prepare timeline store by making a cache item
  EntityGroupFSTimelineStore.AppLogs appLogs =
      store.new AppLogs(mainTestAppId, mainTestAppDirPath,
      AppState.COMPLETED);
  EntityCacheItem cacheItem = new EntityCacheItem(
      EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId),
      config);
  cacheItem.setAppLogs(appLogs);
  store.setCachedLogs(
      EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId),
      cacheItem);
  // Snapshot metric values so the assertions below can check the deltas
  // produced by this test alone.
  MutableCounterLong detailLogEntityRead =
      store.metrics.getGetEntityToDetailOps();
  MutableStat cacheRefresh = store.metrics.getCacheRefresh();
  long numEntityReadBefore = detailLogEntityRead.value();
  long cacheRefreshBefore = cacheRefresh.lastStat().numSamples();
  // Generate TDM
  TimelineDataManager tdm
      = PluginStoreTestUtils.getTdmWithStore(config, store);
  // Verify single entity read
  TimelineEntity entity3 = tdm.getEntity("type_3", mainTestAppId.toString(),
      EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertNotNull(entity3);
  assertEquals(entityNew.getStartTime(), entity3.getStartTime());
  // Verify multiple entities read
  NameValuePair primaryFilter = new NameValuePair(
      EntityGroupPlugInForTest.APP_ID_FILTER_NAME, mainTestAppId.toString());
  TimelineEntities entities = tdm.getEntities("type_3", primaryFilter, null,
      null, null, null, null, null, EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertEquals(1, entities.getEntities().size());
  for (TimelineEntity entity : entities.getEntities()) {
    assertEquals(entityNew.getStartTime(), entity.getStartTime());
  }
  // Verify metrics: two detail-log reads and one cache refresh occurred.
  assertEquals(numEntityReadBefore + 2L, detailLogEntityRead.value());
  assertEquals(cacheRefreshBefore + 1L, cacheRefresh.lastStat().numSamples());
}