

Java DFSTestUtil.urlGet Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hdfs.DFSTestUtil.urlGet. If you are wondering what DFSTestUtil.urlGet does or how to call it, the selected examples below should help. You can also browse further usage examples for the containing class, org.apache.hadoop.hdfs.DFSTestUtil.


Three code examples of the DFSTestUtil.urlGet method are shown below, ordered by popularity by default.
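Before the examples, here is a minimal sketch of how DFSTestUtil.urlGet is typically called. It assumes the signature String urlGet(URL) that the examples below rely on; the URL and port are hypothetical, and DFSTestUtil ships with the hadoop-hdfs test artifact, so it is intended for test code rather than production use.

import java.net.URL;

import org.apache.hadoop.hdfs.DFSTestUtil;

public class UrlGetSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical endpoint; in the examples below the root URL comes from
    // the NFS, NameNode, or JournalNode info server started by the test.
    URL url = new URL("http://localhost:50070/jmx");

    // urlGet performs an HTTP GET and returns the response body as a String.
    String body = DFSTestUtil.urlGet(url);
    System.out.println(body);
  }
}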

Example 1: testHttpServer

import org.apache.hadoop.hdfs.DFSTestUtil; // import the package/class the method depends on
@Test
public void testHttpServer() throws Exception {
  Nfs3 nfs = new Nfs3(conf);
  nfs.startServiceInternal(false);
  RpcProgramNfs3 nfsd = (RpcProgramNfs3) nfs.getRpcProgram();
  Nfs3HttpServer infoServer = nfsd.getInfoServer();

  String urlRoot = infoServer.getServerURI().toString();

  // Check default servlets.
  String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
  assertTrue("Bad contents: " + pageContents,
      pageContents.contains("java.lang:type="));
  System.out.println("pc:" + pageContents);

  int port = infoServer.getSecurePort();
  assertTrue("Can't get https port", port > 0);
}
 
Developer: naver, Project: hadoop, Lines: 19, Source: TestNfs3HttpServer.java

Example 2: testReadsAllowedDuringCheckpoint

import org.apache.hadoop.hdfs.DFSTestUtil; // import the package/class the method depends on
@Test(timeout=300000)
public void testReadsAllowedDuringCheckpoint() throws Exception {
  
  // Set it up so that we know when the SBN checkpoint starts and ends.
  FSImage spyImage1 = NameNodeAdapter.spyOnFsImage(nn1);
  DelayAnswer answerer = new DelayAnswer(LOG);
  Mockito.doAnswer(answerer).when(spyImage1)
      .saveNamespace(Mockito.any(FSNamesystem.class),
          Mockito.any(NameNodeFile.class),
          Mockito.any(Canceler.class));
  
  // Perform some edits and wait for a checkpoint to start on the SBN.
  doEdits(0, 1000);
  nn0.getRpcServer().rollEditLog();
  answerer.waitForCall();
  assertTrue("SBN is not performing checkpoint but it should be.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 0);
  
  // Make sure that the lock has actually been taken by the checkpointing
  // thread.
  ThreadUtil.sleepAtLeastIgnoreInterrupts(1000);
  
  // Perform an RPC that needs to take the write lock.
  Thread t = new Thread() {
    @Override
    public void run() {
      try {
        nn1.getRpcServer().restoreFailedStorage("false");
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  };
  t.start();
  
  // Make sure that our thread is waiting for the lock.
  ThreadUtil.sleepAtLeastIgnoreInterrupts(1000);
  
  assertFalse(nn1.getNamesystem().getFsLockForTests().hasQueuedThreads());
  assertFalse(nn1.getNamesystem().getFsLockForTests().isWriteLocked());
  assertTrue(nn1.getNamesystem().getCpLockForTests().hasQueuedThreads());
  
  // Get /jmx of the standby NN web UI, which will cause the FSNS read lock to
  // be taken.
  String pageContents = DFSTestUtil.urlGet(new URL("http://" +
      nn1.getHttpAddress().getHostName() + ":" +
      nn1.getHttpAddress().getPort() + "/jmx"));
  assertTrue(pageContents.contains("NumLiveDataNodes"));
  
  // Make sure that the checkpoint is still going on, implying that the client
  // RPC to the SBN happened during the checkpoint.
  assertTrue("SBN should have still been checkpointing.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 0);
  answerer.proceed();
  answerer.waitForResult();
  assertTrue("SBN should have finished checkpointing.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 1);
  
  t.join();
}
 
Developer: naver, Project: hadoop, Lines: 61, Source: TestStandbyCheckpoints.java

Example 3: testHttpServer

import org.apache.hadoop.hdfs.DFSTestUtil; // import the package/class the method depends on
@Test(timeout=100000)
public void testHttpServer() throws Exception {
  String urlRoot = jn.getHttpServerURI();
  
  // Check default servlets.
  String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
  assertTrue("Bad contents: " + pageContents,
      pageContents.contains(
          "Hadoop:service=JournalNode,name=JvmMetrics"));

  // Create some edits on server side
  byte[] EDITS_DATA = QJMTestUtil.createTxnData(1, 3);
  IPCLoggerChannel ch = new IPCLoggerChannel(
      conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
  ch.newEpoch(1).get();
  ch.setEpoch(1);
  ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
  ch.sendEdits(1L, 1, 3, EDITS_DATA).get();
  ch.finalizeLogSegment(1, 3).get();

  // Attempt to retrieve via HTTP, ensure we get the data back
  // including the header we expected
  byte[] retrievedViaHttp = DFSTestUtil.urlGetBytes(new URL(urlRoot +
      "/getJournal?segmentTxId=1&jid=" + journalId));
  byte[] expected = Bytes.concat(
          Ints.toByteArray(HdfsConstants.NAMENODE_LAYOUT_VERSION),
          (new byte[] { 0, 0, 0, 0 }), // layout flags section
          EDITS_DATA);

  assertArrayEquals(expected, retrievedViaHttp);
  
  // Attempt to fetch a non-existent file, check that we get an
  // error status code
  URL badUrl = new URL(urlRoot + "/getJournal?segmentTxId=12345&jid=" + journalId);
  HttpURLConnection connection = (HttpURLConnection)badUrl.openConnection();
  try {
    assertEquals(404, connection.getResponseCode());
  } finally {
    connection.disconnect();
  }
}
 
Developer: naver, Project: hadoop, Lines: 42, Source: TestJournalNode.java


Note: The org.apache.hadoop.hdfs.DFSTestUtil.urlGet examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets are taken from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use are governed by each project's license. Please do not reproduce without permission.