

Java TimelineEntities.setEntities Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.yarn.api.records.timeline.TimelineEntities.setEntities. If you are wondering what TimelineEntities.setEntities does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.yarn.api.records.timeline.TimelineEntities.


Four code examples of the TimelineEntities.setEntities method are shown below, sorted by popularity by default.
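Before the collected examples, here is a minimal sketch of the typical call pattern (not taken from any of the projects below): build one or more TimelineEntity objects, wrap them in a TimelineEntities instance via setEntities, and hand the wrapper to a timeline store or client. The class name and the entity id/type in this sketch are hypothetical placeholders.

import java.util.Collections;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

public class SetEntitiesSketch {

  public static TimelineEntities buildSingleEntity() {
    // Build one timeline entity; the id and type are made-up placeholders.
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("example_entity_id");
    entity.setEntityType("example_entity_type");
    entity.setStartTime(System.currentTimeMillis());

    // setEntities replaces the wrapper's current entity list with the given list
    // (addEntity/addEntities would append instead).
    TimelineEntities entities = new TimelineEntities();
    entities.setEntities(Collections.singletonList(entity));
    return entities;
  }
}

In the examples below, the resulting TimelineEntities object is passed to TimelineStore.put, which returns a TimelinePutResponse whose error list the tests assert to be empty.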

Example 1: testDeleteEntitiesPrimaryFilters

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; // import of the class this method belongs to
@Test
public void testDeleteEntitiesPrimaryFilters()
    throws IOException, InterruptedException {
  Map<String, Set<Object>> primaryFilter =
      Collections.singletonMap("user", Collections.singleton(
          (Object) "otheruser"));
  TimelineEntities atsEntities = new TimelineEntities();
  atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter,
      null)));
  TimelinePutResponse response = store.put(atsEntities);
  assertEquals(0, response.getErrors().size());

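  // Look up entities by the "otheruser" primary filter; only the entity just inserted should match.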
  NameValuePair pfPair = new NameValuePair("user", "otheruser");
  List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
      pfPair);
  assertEquals(1, entities.size());
  verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
      EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0));

  entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
  assertEquals(2, entities.size());
  verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(0));
  verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(1));

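  // Discard with a cutoff of -123: the assertions below expect every entity to survive.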
  ((LeveldbTimelineStore)store).discardOldEntities(-123l);
  assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());

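  // Discard with a cutoff of 123: afterwards no entities (and no entity types) should remain.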
  ((LeveldbTimelineStore)store).discardOldEntities(123l);
  assertEquals(0, getEntities("type_1").size());
  assertEquals(0, getEntities("type_2").size());
  assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());

  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
 
Developer: Seagate, Project: hadoop-on-lustre2, Lines of code: 40, Source file: TestLeveldbTimelineStore.java

Example 2: testDeleteEntitiesPrimaryFilters

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; // import of the class this method belongs to
@Test
public void testDeleteEntitiesPrimaryFilters()
    throws IOException, InterruptedException {
  Map<String, Set<Object>> primaryFilter =
      Collections.singletonMap("user", Collections.singleton(
          (Object) "otheruser"));
  TimelineEntities atsEntities = new TimelineEntities();
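  // Unlike Example 1, createEntity is also passed domainId1 here, so the entity is bound to that domain.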
  atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter,
      null, domainId1)));
  TimelinePutResponse response = store.put(atsEntities);
  assertEquals(0, response.getErrors().size());

  NameValuePair pfPair = new NameValuePair("user", "otheruser");
  List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
      pfPair);
  assertEquals(1, entities.size());
  verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
      EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0),
      domainId1);

  entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
  assertEquals(3, entities.size());
  verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(0), domainId1);
  verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(1), domainId1);
  verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(2), domainId2);

  ((LeveldbTimelineStore)store).discardOldEntities(-123l);
  assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size());

  ((LeveldbTimelineStore)store).discardOldEntities(123l);
  assertEquals(0, getEntities("type_1").size());
  assertEquals(0, getEntities("type_2").size());
  assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());

  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
 
Developer: naver, Project: hadoop, Lines of code: 43, Source file: TestLeveldbTimelineStore.java

Example 3: loadTestDomainData

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; // import of the class this method belongs to
protected void loadTestDomainData() throws IOException {
  domain1 = new TimelineDomain();
  domain1.setId("domain_id_1");
  domain1.setDescription("description_1");
  domain1.setOwner("owner_1");
  domain1.setReaders("reader_user_1 reader_group_1");
  domain1.setWriters("writer_user_1 writer_group_1");
  store.put(domain1);

  domain2 = new TimelineDomain();
  domain2.setId("domain_id_2");
  domain2.setDescription("description_2");
  domain2.setOwner("owner_2");
  domain2.setReaders("reader_user_2 reader_group_2");
  domain2.setWriters("writer_user_2 writer_group_2");
  store.put(domain2);

  // Wait a second before updating the domain information
  elapsedTime = 1000;
  try {
    Thread.sleep(elapsedTime);
  } catch (InterruptedException e) {
    throw new IOException(e);
  }

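  // Update domain2 in place: the same domain id is put again with new description, owner, readers and writers.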
  domain2.setDescription("description_3");
  domain2.setOwner("owner_3");
  domain2.setReaders("reader_user_3 reader_group_3");
  domain2.setWriters("writer_user_3 writer_group_3");
  store.put(domain2);

  domain3 = new TimelineDomain();
  domain3.setId("domain_id_4");
  domain3.setDescription("description_4");
  domain3.setOwner("owner_1");
  domain3.setReaders("reader_user_4 reader_group_4");
  domain3.setWriters("writer_user_4 writer_group_4");
  store.put(domain3);

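  // When the backing store is a LeveldbTimelineStore, also insert two ACL test entities bound to domain_id_4 and domain_id_2.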
  TimelineEntities entities = new TimelineEntities();
  if (store instanceof LeveldbTimelineStore) {
    LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
    entities.setEntities(Collections.singletonList(createEntity(
            "ACL_ENTITY_ID_11", "ACL_ENTITY_TYPE_1", 63l, null, null, null, null,
            "domain_id_4")));
    leveldb.put(entities);
    entities.setEntities(Collections.singletonList(createEntity(
            "ACL_ENTITY_ID_22", "ACL_ENTITY_TYPE_1", 64l, null, null, null, null,
            "domain_id_2")));
    leveldb.put(entities);
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 53, Source file: TimelineStoreTestUtils.java

Example 4: testDeleteEntitiesPrimaryFilters

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; // import of the class this method belongs to
@Test
public void testDeleteEntitiesPrimaryFilters()
    throws IOException, InterruptedException {
  Map<String, Set<Object>> primaryFilter =
      Collections.singletonMap("user", Collections.singleton(
          (Object) "otheruser"));
  TimelineEntities atsEntities = new TimelineEntities();
  atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789L, Collections.singletonList(ev2), null, primaryFilter,
      null, domainId1)));
  TimelinePutResponse response = store.put(atsEntities);
  assertEquals(0, response.getErrors().size());

  NameValuePair pfPair = new NameValuePair("user", "otheruser");
  List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
      pfPair);
  assertEquals(1, entities.size());
  verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
      EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0),
      domainId1);

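  // The broader userFilter matches three entities, verified below: entityId1, entityId1b and entityId6.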
  entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
  assertEquals(3, entities.size());
  verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(0), domainId1);
  verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(1), domainId1);
  verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(2), domainId2);

  ((LeveldbTimelineStore)store).discardOldEntities(-123L);
  assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size());

  ((LeveldbTimelineStore)store).discardOldEntities(123L);
  assertEquals(0, getEntities("type_1").size());
  assertEquals(0, getEntities("type_2").size());
  assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());

  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines of code: 43, Source file: TestLeveldbTimelineStore.java


Note: The org.apache.hadoop.yarn.api.records.timeline.TimelineEntities.setEntities method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and redistribution or use should follow the corresponding project's license. Please do not reproduce this compilation without permission.