

Java GenericTestUtils Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.test.GenericTestUtils. If you are unsure what the GenericTestUtils class does, how to use it, or what its usage looks like in practice, the curated examples below may help.


The GenericTestUtils class belongs to the org.apache.hadoop.test package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
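Two GenericTestUtils helpers recur throughout the examples below: waitFor, which polls a Supplier<Boolean> until it returns true or a timeout expires, and assertExceptionContains, which asserts that a thrown exception's message contains an expected substring. The following minimal sketch is not taken from any of the projects listed below; it is an illustrative JUnit 4 test assuming the Hadoop 2.x-era API used in these examples (Guava-based Supplier), and the class name, flag, and timings are hypothetical.

import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;

import com.google.common.base.Supplier;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;

public class GenericTestUtilsSketch {

  @Test
  public void testWaitForAndAssertExceptionContains()
      throws TimeoutException, InterruptedException {
    // Hypothetical condition that some background work flips to true.
    final AtomicBoolean done = new AtomicBoolean(false);
    new Thread(new Runnable() {
      @Override
      public void run() {
        done.set(true);
      }
    }).start();

    // Poll the condition every 100 ms; time out after 10 seconds.
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        return done.get();
      }
    }, 100, 10000);

    // Verify that an exception message contains an expected fragment.
    try {
      throw new IllegalStateException("injected failure for demo");
    } catch (IllegalStateException e) {
      GenericTestUtils.assertExceptionContains("injected failure", e);
    }
  }
}

The same check-interval/timeout pattern (poll every N milliseconds, give up after M milliseconds) is what every waitFor call in the examples below relies on.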

Example 1: waitForDNDeletions

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
/**
 * Wait for the datanodes in the cluster to process any block
 * deletions that have already been asynchronously queued.
 */
public static void waitForDNDeletions(final MiniDFSCluster cluster)
    throws TimeoutException, InterruptedException {
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    @Override
    public Boolean get() {
      for (DataNode dn : cluster.getDataNodes()) {
        if (DataNodeTestUtils.getPendingAsyncDeletions(dn) > 0) {
          return false;
        }
      }
      return true;
    }
  }, 1000, 10000);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 20, Source: HATestUtil.java

Example 2: testGracefulFailoverFailBecomingActive

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test
public void testGracefulFailoverFailBecomingActive() throws Exception {
  cluster.start();

  cluster.waitForActiveLockHolder(0);
  cluster.setFailToBecomeActive(1, true);

  // Ask for failover, it should fail and report back to user.
  try {
    cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
    fail("Did not fail to graceful failover when target failed " +
        "to become active!");
  } catch (ServiceFailedException sfe) {
    GenericTestUtils.assertExceptionContains(
        "Couldn't make " + cluster.getService(1) + " active", sfe);
    GenericTestUtils.assertExceptionContains(
        "injected failure", sfe);
  }

  // No fencing
  assertEquals(0, cluster.getService(0).fenceCount);
  assertEquals(0, cluster.getService(1).fenceCount);

  // Service 0 should go back to being active after the failed failover
  cluster.waitForActiveLockHolder(0);
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 27, Source: TestZKFailoverController.java

Example 3: testContextSpecificServletFilterWhenInitThrowsException

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
/**
 * Similar to the above test case, except that it uses a different API to add
 * the filter. Regression test for HADOOP-8786.
 */
@Test
public void testContextSpecificServletFilterWhenInitThrowsException()
    throws Exception {
  Configuration conf = new Configuration();
  HttpServer2 http = createTestServer(conf);
  HttpServer2.defineFilter(http.webAppContext,
      "ErrorFilter", ErrorFilter.class.getName(),
      null, null);
  try {
    http.start();
    fail("expecting exception");
  } catch (IOException e) {
    GenericTestUtils.assertExceptionContains(
        "Unable to initialize WebAppContext", e);
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 21, Source: TestServletFilter.java

Example 4: waitForBlockReceived

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
private ReceivedDeletedBlockInfo[] waitForBlockReceived(
    final ExtendedBlock fakeBlock,
    final DatanodeProtocolClientSideTranslatorPB mockNN) throws Exception {
  final String fakeBlockPoolId = fakeBlock.getBlockPoolId();
  final ArgumentCaptor<StorageReceivedDeletedBlocks[]> captor =
    ArgumentCaptor.forClass(StorageReceivedDeletedBlocks[].class);
  GenericTestUtils.waitFor(new Supplier<Boolean>() {

    @Override
    public Boolean get() {
      try {
        Mockito.verify(mockNN).blockReceivedAndDeleted(
          Mockito.<DatanodeRegistration>anyObject(),
          Mockito.eq(fakeBlockPoolId),
          captor.capture());
        return true;
      } catch (Throwable t) {
        return false;
      }
    }
  }, 100, 10000);
  return captor.getValue()[0].getBlocks();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 24, Source: TestBPOfferService.java

Example 5: testDFInvalidPath

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test(timeout=5000)
public void testDFInvalidPath() throws Exception {
  // Generate a path that doesn't exist
  Random random = new Random(0xDEADBEEFl);
  File file = null;
  byte[] bytes = new byte[64];
  while (file == null) {
    random.nextBytes(bytes);
    final String invalid = new String("/" + bytes);
    final File invalidFile = new File(invalid);
    if (!invalidFile.exists()) {
      file = invalidFile;
    }
  }
  DF df = new DF(file, 0l);
  try {
    df.getMount();
  } catch (FileNotFoundException e) {
    // expected, since path does not exist
    GenericTestUtils.assertExceptionContains(file.getName(), e);
  }
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 23, Source: TestDFVariations.java

Example 6: testContextSpecificServletFilterWhenInitThrowsException

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
/**
 * Similar to the above test case, except that it uses a different API to add the
 * filter. Regression test for HADOOP-8786.
 */
@Test
public void testContextSpecificServletFilterWhenInitThrowsException()
    throws Exception {
  Configuration conf = new Configuration();
  HttpServer2 http = createTestServer(conf);
  HttpServer2.defineFilter(http.webAppContext,
      "ErrorFilter", ErrorFilter.class.getName(),
      null, null);
  try {
    http.start();
    fail("expecting exception");
  } catch (IOException e) {
    GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
  }
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 20, Source: TestServletFilter.java

Example 7: testJceAesCtrCryptoCodec

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test(timeout=120000)
public void testJceAesCtrCryptoCodec() throws Exception {
  GenericTestUtils.assumeInNativeProfile();
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
  // Overflow test, IV: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff 
  for(int i = 0; i < 8; i++) {
    iv[8 + i] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass, iv);
  cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass, iv);
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 19, Source: TestCryptoCodec.java

Example 8: testOpensslAesCtrCryptoCodec

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test(timeout=120000)
public void testOpensslAesCtrCryptoCodec() throws Exception {
  GenericTestUtils.assumeInNativeProfile();
  if (!NativeCodeLoader.buildSupportsOpenssl()) {
    LOG.warn("Skipping test since openSSL library not loaded");
    Assume.assumeTrue(false);
  }
  Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
  cryptoCodecTest(conf, seed, 0, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
  // Overflow test, IV: xx xx xx xx xx xx xx xx ff ff ff ff ff ff ff ff 
  for(int i = 0; i < 8; i++) {
    iv[8 + i] = (byte) 0xff;
  }
  cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass, iv);
  cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass, iv);
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 19, Source: TestCryptoCodec.java

Example 9: testCheckDirsIOException

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test (timeout = 30000)
public void testCheckDirsIOException() throws Throwable {
  Path path = new Path("target", TestDiskChecker.class.getSimpleName());
  File localDir = new File(path.toUri().getRawPath());
  localDir.mkdir();
  File localFile = new File(localDir, "test");
  localFile.createNewFile();
  File spyLocalDir = spy(localDir);
  doReturn(localFile.toPath()).when(spyLocalDir).toPath();
  try {
    DiskChecker.checkDirs(spyLocalDir);
    fail("Expected exception for I/O error");
  } catch (DiskErrorException e) {
    GenericTestUtils.assertExceptionContains("I/O error", e);
    assertTrue(e.getCause() instanceof IOException);
  } finally {
    localFile.delete();
    localDir.delete();
  }
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 21, Source: TestDiskChecker.java

Example 10: testConfIsUpdatedOnSuccessAsync

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
/**
 * Ensure that {@link ReconfigurableBase#startReconfigurationTask} updates
 * its parent's cached configuration on success.
 * @throws IOException
 */
@Test (timeout=300000)
public void testConfIsUpdatedOnSuccessAsync() throws ReconfigurationException,
    TimeoutException, InterruptedException, IOException {
  final String property = "FOO";
  final String value1 = "value1";
  final String value2 = "value2";

  final Configuration conf = new Configuration();
  conf.set(property, value1);
  final Configuration newConf = new Configuration();
  newConf.set(property, value2);

  final ReconfigurableBase reconfigurable = makeReconfigurable(
      conf, newConf, Arrays.asList(property));

  // Kick off a reconfiguration task and wait until it completes.
  reconfigurable.startReconfigurationTask();
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    @Override
    public Boolean get() {
      return reconfigurable.getReconfigurationTaskStatus().stopped();
    }
  }, 100, 60000);
  assertThat(reconfigurable.getConf().get(property), is(value2));
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 31, Source: TestReconfiguration.java

Example 11: testConfIsUnsetAsync

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
/**
 * Ensure that {@link ReconfigurableBase#startReconfigurationTask} unsets the
 * property in its parent's configuration when the new value is null.
 * @throws IOException
 */
@Test (timeout=300000)
public void testConfIsUnsetAsync() throws ReconfigurationException,
    IOException, TimeoutException, InterruptedException {
  final String property = "FOO";
  final String value1 = "value1";

  final Configuration conf = new Configuration();
  conf.set(property, value1);
  final Configuration newConf = new Configuration();

  final ReconfigurableBase reconfigurable = makeReconfigurable(
      conf, newConf, Arrays.asList(property));

  // Kick off a reconfiguration task and wait until it completes.
  reconfigurable.startReconfigurationTask();
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    @Override
    public Boolean get() {
      return reconfigurable.getReconfigurationTaskStatus().stopped();
    }
  }, 100, 60000);
  assertNull(reconfigurable.getConf().get(property));
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 29, Source: TestReconfiguration.java

Example 12: testGracefulFailoverToUnhealthy

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test(timeout=15000)
public void testGracefulFailoverToUnhealthy() throws Exception {
  try {
    cluster.start();

    cluster.waitForActiveLockHolder(0);

    // Mark it unhealthy, wait for it to exit election
    cluster.setHealthy(1, false);
    cluster.waitForElectorState(1, ActiveStandbyElector.State.INIT);
    
    // Ask for failover, it should fail, because it's unhealthy
    try {
      cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
      fail("Did not fail to graceful failover to unhealthy service!");
    } catch (ServiceFailedException sfe) {
      GenericTestUtils.assertExceptionContains(
          cluster.getService(1).toString() + 
          " is not currently healthy.", sfe);
    }
  } finally {
    cluster.stop();
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 25, Source: TestZKFailoverController.java

Example 13: testGracefulFailoverFailBecomingStandbyAndFailFence

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
@Test(timeout=15000)
public void testGracefulFailoverFailBecomingStandbyAndFailFence()
    throws Exception {
  try {
    cluster.start();

    cluster.waitForActiveLockHolder(0);
    
    // Ask for failover when old node fails to transition to standby.
    // This should trigger fencing, since the cedeActive() command
    // still works, but leaves the breadcrumb in place.
    cluster.setFailToBecomeStandby(0, true);
    cluster.setFailToFence(0, true);

    try {
      cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
      fail("Failover should have failed when old node wont fence");
    } catch (ServiceFailedException sfe) {
      GenericTestUtils.assertExceptionContains(
          "Unable to fence " + cluster.getService(0), sfe);
    }
  } finally {
    cluster.stop();
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 26, Source: TestZKFailoverController.java

Example 14: doRenameSrcNotWritableFS

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
private void doRenameSrcNotWritableFS() throws Exception {
  // Rename the link when the target and parent are not writable
  try {
    user.doAs(new PrivilegedExceptionAction<Object>() {
      @Override
      public Object run() throws IOException {
        FileSystem myfs = FileSystem.get(conf);
        Path newlink = new Path(targetParent, "newlink");
        myfs.rename(link, newlink);
        return null;
      }
    });
    fail("Renamed link even though link's parent is not writable!");
  } catch (IOException e) {
    GenericTestUtils.assertExceptionContains("Permission denied", e);
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 18, Source: TestPermissionSymlinks.java

Example 15: testLinkTargetNonSymlink

import org.apache.hadoop.test.GenericTestUtils; // import the required package/class
/**
 * Verifies that attempting to resolve a non-symlink results in client
 * exception
 */
@Test
public void testLinkTargetNonSymlink() throws UnsupportedFileSystemException,
    IOException {
  FileContext fc = null;
  Path notSymlink = new Path("/notasymlink");
  try {
    fc = FileContext.getFileContext(cluster.getFileSystem().getUri());
    fc.create(notSymlink, EnumSet.of(CreateFlag.CREATE));
    DFSClient client = new DFSClient(cluster.getFileSystem().getUri(),
        cluster.getConfiguration(0));
    try {
      client.getLinkTarget(notSymlink.toString());
      fail("Expected exception for resolving non-symlink");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("is not a symbolic link", e);
    }
  } finally {
    if (fc != null) {
      fc.delete(notSymlink, false);
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 27, Source: TestResolveHdfsSymlink.java


Note: The org.apache.hadoop.test.GenericTestUtils class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and distribution and use are subject to the corresponding project's License. Do not reproduce without permission.