当前位置: 首页>>代码示例>>Java>>正文


Java JspHelper.getUrlParam方法代码示例

本文整理汇总了Java中org.apache.hadoop.hdfs.server.common.JspHelper.getUrlParam方法的典型用法代码示例。如果您正苦于以下问题:Java JspHelper.getUrlParam方法的具体用法?Java JspHelper.getUrlParam怎么用?Java JspHelper.getUrlParam使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hdfs.server.common.JspHelper的用法示例。


在下文中一共展示了JspHelper.getUrlParam方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Writes the leading table cells (linked node name and transfer address)
 * for one datanode row of the NameNode web UI.
 *
 * @param out        JSP writer receiving the generated HTML
 * @param d          datanode being rendered
 * @param suffix     common hostname suffix to strip from the display name
 * @param alive      whether the node is live (dead nodes get a newline)
 * @param nnInfoPort NameNode info (HTTP) port embedded in the browse URL
 * @param nnaddr     NameNode address forwarded as a URL parameter
 * @param scheme     URL scheme used to build the datanode authority
 * @throws IOException if writing to the JSP output fails
 */
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = "///" + JspHelper.Url.authority(scheme, d)
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnInfoPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  // Strip domain components only for non-IP names. Fix: the IPv4 pattern
  // previously left one dot unescaped ("\\d+\\.\\d+.\\d+\\.\\d+.*"),
  // allowing any character at that position; all separators are now escaped.
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*"))
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"> <a title=\"" + url
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n") + "<td class=\"address\">" + d.getXferAddr());
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:21,代码来源:NamenodeJspHelper.java

示例2: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/** Create a redirection URL */
/**
 * Create a redirection URL pointing a /fileChecksum request at the given
 * datanode, carrying the caller's identity, an optional delegation token,
 * and the namenode address as query parameters.
 *
 * @param ugi     caller identity placed in the "ugi" query parameter
 * @param host    datanode that will serve the checksum
 * @param request incoming request, used for its scheme and raw path
 * @param nn      namenode whose address is forwarded to the datanode
 * @throws IOException if the redirect URL cannot be constructed
 */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
    HttpServletRequest request, NameNode nn) 
    throws IOException {
  final String hostname = host instanceof DatanodeInfo 
      ? ((DatanodeInfo)host).getHostName() : host.getIpAddr();
  final String scheme = request.getScheme();
  // Fix: the original "(Integer) getAttribute(...)" unboxed the context
  // attribute unconditionally and threw NullPointerException when no HTTPS
  // port had been published; fall back to the info port in that case.
  int port = host.getInfoPort();
  if ("https".equals(scheme)) {
    final Integer httpsPort = (Integer) getServletContext().getAttribute(
        DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
    if (httpsPort != null) {
      port = httpsPort;
    }
  }
  final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");

  String dtParam = "";
  if (UserGroupInformation.isSecurityEnabled()) {
    String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
    dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
  }
  String addr = nn.getNameNodeAddressHostPortString();
  String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);

  return new URL(scheme, hostname, port, 
      "/getFileChecksum" + encodedPath + '?' +
      "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) + 
      dtParam + addrParam);
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:26,代码来源:FileChecksumServlets.java

示例3: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Writes the name cell for one datanode row: a link labelled with the
 * (optionally suffix-trimmed) hostname, pointing to the node's
 * browseDirectory.jsp page.
 *
 * @param out        JSP writer receiving the generated HTML
 * @param d          datanode being rendered
 * @param suffix     common hostname suffix to strip from the display name
 * @param alive      whether the node is live (dead nodes get a newline)
 * @param nnHttpPort NameNode HTTP port embedded in the browse URL
 * @param nnaddr     NameNode address forwarded as a URL parameter
 * @throws IOException if writing to the JSP output fails
 */
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = HttpConfig.getSchemePrefix() + d.getHostName() + ":"
      + d.getInfoPort()
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  // Strip domain components only for non-IP names. Fix: the IPv4 pattern
  // previously had an unescaped dot ("\\d+\\.\\d+.\\d+..."), which matched
  // any character at that position; all separators are now escaped.
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*"))
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getXferAddr()
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n"));
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:22,代码来源:NamenodeJspHelper.java

示例4: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Writes the name cell for one datanode row of the cluster web UI: a link
 * whose text is the (possibly suffix-trimmed) hostname and whose target is
 * the node's browseDirectory.jsp page.
 *
 * @param out        JSP writer receiving the generated HTML
 * @param d          datanode being rendered
 * @param suffix     common hostname suffix to strip from the display name
 * @param alive      whether the node is live (dead nodes get a newline)
 * @param nnHttpPort NameNode HTTP port embedded in the browse URL
 * @param nnaddr     NameNode address forwarded as a URL parameter
 * @throws IOException if writing to the JSP output fails
 */
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = HttpConfig2.getSchemePrefix() + d.getHostName() + ":" +
      d.getInfoPort() + "/browseDirectory.jsp?namenodeInfoPort=" +
      nnHttpPort + "&dir=" + URLEncoder.encode("/", "UTF-8") +
      JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  // Trim domain parts only for non-IP names. Fix: escape every dot in the
  // IPv4 test; the original left one dot unescaped ("\\d+\\.\\d+.\\d+..."),
  // letting it match any character at that position.
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*")) {
    name = name.replaceAll("\\.[^.:]*", "");
  }
  int idx =
      (suffix != null && name.endsWith(suffix)) ? name.indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getXferAddr() +
      "\" href=\"" + url + "\">" +
      ((idx > 0) ? name.substring(0, idx) : name) + "</a>" +
      ((alive) ? "" : "\n"));
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:22,代码来源:NamenodeJspHelper.java

示例5: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Writes the leading table cells (linked node name and transfer address)
 * for one datanode row, linking to the node's browseDirectory.jsp page.
 *
 * @param out        JSP writer receiving the generated HTML
 * @param d          datanode being rendered
 * @param suffix     common hostname suffix to strip from the display name
 * @param alive      whether the node is live (dead nodes get a newline)
 * @param nnHttpPort NameNode HTTP port embedded in the browse URL
 * @param nnaddr     NameNode address forwarded as a URL parameter
 * @throws IOException if writing to the JSP output fails
 */
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = "///" + d.getHostName() + ":"
      + d.getInfoPort()
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  // Strip domain components only for non-IP names. Fix: the IPv4 pattern
  // previously left one dot unescaped ("\\d+\\.\\d+.\\d+\\.\\d+.*"),
  // which matched any character; all separators are now escaped.
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*"))
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"> <a title=\"" + url
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n") + "<td class=\"address\">" + d.getXferAddr());
}
 
开发者ID:chendave,项目名称:hadoop-TCP,代码行数:22,代码来源:NamenodeJspHelper.java

示例6: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/** Create a redirection URL */
/**
 * Builds the URL that redirects a /fileChecksum request to the chosen
 * datanode, forwarding the caller's identity, an optional delegation
 * token, and the namenode address as query parameters.
 */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
    HttpServletRequest request, NameNode nn) 
    throws IOException {
  final String scheme = request.getScheme();

  // Prefer the resolved hostname when the datanode carries one.
  String target;
  if (host instanceof DatanodeInfo) {
    target = host.getHostName();
  } else {
    target = host.getIpAddr();
  }

  // Default to the HTTP info port; switch to the configured HTTPS port
  // only when one was actually published in the servlet context.
  int port = host.getInfoPort();
  if ("https".equals(scheme)) {
    final Integer httpsPort = (Integer) getServletContext().getAttribute(
        DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
    if (httpsPort != null) {
      port = httpsPort;
    }
  }

  final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");

  // A delegation token is attached only in secure mode.
  String dtParam = "";
  if (UserGroupInformation.isSecurityEnabled()) {
    final String token =
        ugi.getTokens().iterator().next().encodeToUrlString();
    dtParam = JspHelper.getDelegationTokenUrlParam(token);
  }
  final String addrParam = JspHelper.getUrlParam(
      JspHelper.NAMENODE_ADDRESS, nn.getNameNodeAddressHostPortString());

  final String query = "ugi="
      + ServletUtil.encodeQueryValue(ugi.getShortUserName())
      + dtParam + addrParam;
  return new URL(scheme, target, port,
      "/getFileChecksum" + encodedPath + '?' + query);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:31,代码来源:FileChecksumServlets.java

示例7: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/** Create a redirection URL */
/**
 * Builds the URL that redirects a /streamFile request to a datanode
 * holding the file's first block, forwarding the caller's identity,
 * an optional delegation token, and the namenode address.
 */
private URL createRedirectURL(String path, String encodedPath, HdfsFileStatus status, 
    UserGroupInformation ugi, ClientProtocol nnproxy, HttpServletRequest request, String dt)
    throws IOException {
  final String scheme = request.getScheme();
  final String absolutePath =
      status.getFullPath(new Path(path)).toUri().getPath();
  final LocatedBlocks located = nnproxy.getBlockLocations(absolutePath, 0, 1);
  final Configuration conf =
      NameNodeHttpServer.getConfFromContext(getServletContext());
  final DatanodeID chosen = pickSrcDatanode(located, status, conf);

  // Resolved hostname when available, otherwise the raw IP address.
  final String target = (chosen instanceof DatanodeInfo)
      ? ((DatanodeInfo) chosen).getHostName()
      : chosen.getIpAddr();

  // Secure info port for https requests, plain info port otherwise.
  final int port = "https".equals(scheme)
      ? chosen.getInfoSecurePort()
      : chosen.getInfoPort();

  final String dtParam =
      (dt == null) ? "" : JspHelper.getDelegationTokenUrlParam(dt);

  // Forward the namenode address so the datanode can reach back to it.
  final NameNode nn =
      NameNodeHttpServer.getNameNodeFromContext(getServletContext());
  final String addrParam = JspHelper.getUrlParam(
      JspHelper.NAMENODE_ADDRESS, nn.getNameNodeAddressHostPortString());

  final String file = "/streamFile" + encodedPath + '?'
      + "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName())
      + dtParam + addrParam;
  return new URL(scheme, target, port, file);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:37,代码来源:FileDataServlet.java

示例8: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/** Create a redirection URL */
/**
 * Create a redirection URL sending a /streamFile request to a datanode
 * holding the file's first block, carrying the caller's identity, an
 * optional delegation token, and the namenode address as query parameters.
 *
 * @param path        file path being streamed
 * @param encodedPath URL-encoded form of the path, appended to /streamFile
 * @param status      file status used to resolve the full path
 * @param ugi         caller identity placed in the "ugi" query parameter
 * @param nnproxy     namenode RPC proxy used to locate the first block
 * @param request     incoming request, used for its scheme
 * @param dt          delegation token string, or null when security is off
 * @throws IOException if block locations cannot be fetched or the URL is invalid
 */
private URL createRedirectURL(String path, String encodedPath, HdfsFileStatus status, 
    UserGroupInformation ugi, ClientProtocol nnproxy, HttpServletRequest request, String dt)
    throws IOException {
  String scheme = request.getScheme();
  final LocatedBlocks blks = nnproxy.getBlockLocations(
      status.getFullPath(new Path(path)).toUri().getPath(), 0, 1);
  final Configuration conf = NameNodeHttpServer.getConfFromContext(
      getServletContext());
  final DatanodeID host = pickSrcDatanode(blks, status, conf);
  final String hostname;
  if (host instanceof DatanodeInfo) {
    hostname = ((DatanodeInfo)host).getHostName();
  } else {
    hostname = host.getIpAddr();
  }
  // Fix: the original "(Integer) getAttribute(...)" auto-unboxed the
  // attribute and threw NullPointerException when no HTTPS port had been
  // published in the servlet context; fall back to the info port instead.
  int port = host.getInfoPort();
  if ("https".equals(scheme)) {
    final Integer httpsPort = (Integer) getServletContext().getAttribute(
        DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
    if (httpsPort != null) {
      port = httpsPort;
    }
  }

  String dtParam = "";
  if (dt != null) {
    dtParam = JspHelper.getDelegationTokenUrlParam(dt);
  }

  // Add namenode address to the url params
  NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
      getServletContext());
  String addr = nn.getNameNodeAddressHostPortString();
  String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
  
  return new URL(scheme, hostname, port,
      "/streamFile" + encodedPath + '?' +
      "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
      dtParam + addrParam);
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:37,代码来源:FileDataServlet.java

示例9: testViewingFile

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Loads browseDirectory.jsp for the given path on the first datanode and
 * verifies the rendered page: file-contents preview, download link, and
 * the tail / back-to-file-view navigation links.
 */
private static void testViewingFile(MiniDFSCluster cluster, String filePath)
    throws IOException {
  Path testPath = new Path(filePath);
  FileSystem fs = cluster.getFileSystem();
  if (!fs.exists(testPath)) {
    DFSTestUtil.writeFile(fs, testPath, FILE_DATA);
  }
  
  InetSocketAddress nnIpcAddress = cluster.getNameNode().getNameNodeAddress();
  InetSocketAddress nnHttpAddress = cluster.getNameNode().getHttpAddress();
  int dnInfoPort = cluster.getDataNodes().get(0).getInfoPort();
  
  // Compose the browse URL with dir, namenodeInfoPort and nnaddr params.
  String base = "http://localhost:" + dnInfoPort + "/" + "browseDirectory.jsp";
  String query = JspHelper.getUrlParam("dir", 
          URLEncoder.encode(testPath.toString(), "UTF-8"), true)
      + JspHelper.getUrlParam("namenodeInfoPort",
          Integer.toString(nnHttpAddress.getPort()))
      + JspHelper.getUrlParam("nnaddr", "localhost:" + nnIpcAddress.getPort());
  URL url = new URL(base + query);
  
  viewFilePage = StringEscapeUtils.unescapeHtml(DFSTestUtil.urlGet(url));
  
  assertTrue("page should show preview of file contents, got: " + viewFilePage,
      viewFilePage.contains(FILE_DATA));
  
  assertTrue("page should show link to download file", viewFilePage
      .contains("/streamFile" + ServletUtil.encodePath(filePath)
          + "?nnaddr=localhost:" + nnIpcAddress.getPort()));
  
  // check whether able to tail the file
  String regex = "<a.+href=\"(.+?)\">Tail\\s*this\\s*file\\<\\/a\\>";
  assertFileContents(regex, "Tail this File");
  
  // check whether able to 'Go Back to File View' after tailing the file
  regex = "<a.+href=\"(.+?)\">Go\\s*Back\\s*to\\s*File\\s*View\\<\\/a\\>";
  assertFileContents(regex, "Go Back to File View");
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:38,代码来源:TestDatanodeJsp.java

示例10: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Create a redirection URL
 */
/**
 * Create a redirection URL pointing a /fileChecksum request at the given
 * datanode, carrying the caller's identity, an optional delegation token,
 * and the namenode address as query parameters.
 *
 * @param ugi     caller identity placed in the "ugi" query parameter
 * @param host    datanode that will serve the checksum
 * @param request incoming request, used for its scheme and raw path
 * @param nn      namenode whose address is forwarded to the datanode
 * @throws IOException if the redirect URL cannot be constructed
 */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
    HttpServletRequest request, NameNode nn) throws IOException {
  final String hostname =
      host instanceof DatanodeInfo ? ((DatanodeInfo) host).getHostName() :
          host.getIpAddr();
  final String scheme = request.getScheme();
  // Fix: unconditional unboxing of the context attribute threw
  // NullPointerException when the HTTPS port was absent; use the info
  // port as a fallback instead.
  int port = host.getInfoPort();
  if ("https".equals(scheme)) {
    final Integer httpsPort = (Integer) getServletContext()
        .getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
    if (httpsPort != null) {
      port = httpsPort;
    }
  }
  final String encodedPath =
      ServletUtil.getRawPath(request, "/fileChecksum");

  String dtParam = "";
  if (UserGroupInformation.isSecurityEnabled()) {
    String tokenString =
        ugi.getTokens().iterator().next().encodeToUrlString();
    dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
  }
  String addr = nn.getNameNodeAddressHostPortString();
  String addrParam =
      JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);

  return new URL(scheme, hostname, port,
      "/getFileChecksum" + encodedPath + '?' +
          "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
          dtParam + addrParam);
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:31,代码来源:FileChecksumServlets.java

示例11: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Create a redirection URL
 */
/**
 * Create a redirection URL sending a /streamFile request to a datanode
 * holding the file's first block, carrying the caller's identity, an
 * optional delegation token, and the namenode address as query parameters.
 *
 * @param path        file path being streamed
 * @param encodedPath URL-encoded form of the path, appended to /streamFile
 * @param status      file status used to resolve the full path
 * @param ugi         caller identity placed in the "ugi" query parameter
 * @param nnproxy     namenode RPC proxy used to locate the first block
 * @param request     incoming request, used for its scheme
 * @param dt          delegation token string, or null when security is off
 * @throws IOException if block locations cannot be fetched or the URL is invalid
 */
private URL createRedirectURL(String path, String encodedPath,
    HdfsFileStatus status, UserGroupInformation ugi, ClientProtocol nnproxy,
    HttpServletRequest request, String dt) throws IOException {
  String scheme = request.getScheme();
  final LocatedBlocks blks = nnproxy
      .getBlockLocations(status.getFullPath(new Path(path)).toUri().getPath(),
          0, 1);
  final Configuration conf =
      NameNodeHttpServer.getConfFromContext(getServletContext());
  final DatanodeID host = pickSrcDatanode(blks, status, conf);
  final String hostname;
  if (host instanceof DatanodeInfo) {
    hostname = ((DatanodeInfo) host).getHostName();
  } else {
    hostname = host.getIpAddr();
  }
  // Fix: the original "(Integer) getAttribute(...)" auto-unboxed the
  // attribute and threw NullPointerException when no HTTPS port had been
  // published in the servlet context; fall back to the info port instead.
  int port = host.getInfoPort();
  if ("https".equals(scheme)) {
    final Integer httpsPort = (Integer) getServletContext()
        .getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
    if (httpsPort != null) {
      port = httpsPort;
    }
  }

  String dtParam = "";
  if (dt != null) {
    dtParam = JspHelper.getDelegationTokenUrlParam(dt);
  }

  // Add namenode address to the url params
  NameNode nn =
      NameNodeHttpServer.getNameNodeFromContext(getServletContext());
  String addr = nn.getNameNodeAddressHostPortString();
  String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
  
  return new URL(scheme, hostname, port, "/streamFile" + encodedPath + '?' +
      "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
      dtParam + addrParam);
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:39,代码来源:FileDataServlet.java

示例12: testViewingFile

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Opens browseDirectory.jsp for the given path on the first datanode and
 * asserts the rendered page: file-contents preview, download link, and
 * the tail / back-to-file-view navigation links.
 */
private static void testViewingFile(MiniDFSCluster cluster, String filePath)
    throws IOException {
  Path testPath = new Path(filePath);
  FileSystem fs = cluster.getFileSystem();
  if (!fs.exists(testPath)) {
    DFSTestUtil.writeFile(fs, testPath, FILE_DATA);
  }
  
  InetSocketAddress nnIpcAddress = cluster.getNameNode().getNameNodeAddress();
  InetSocketAddress nnHttpAddress = cluster.getNameNode().getHttpAddress();
  int dnInfoPort = cluster.getDataNodes().get(0).getInfoPort();
  
  // Assemble the browse URL with dir, namenodeInfoPort and nnaddr params.
  StringBuilder address = new StringBuilder();
  address.append("http://localhost:").append(dnInfoPort)
      .append("/").append("browseDirectory.jsp")
      .append(JspHelper.getUrlParam("dir",
          URLEncoder.encode(testPath.toString(), "UTF-8"), true))
      .append(JspHelper.getUrlParam("namenodeInfoPort",
          Integer.toString(nnHttpAddress.getPort())))
      .append(JspHelper.getUrlParam("nnaddr",
          "localhost:" + nnIpcAddress.getPort()));
  URL url = new URL(address.toString());
  
  viewFilePage = StringEscapeUtils.unescapeHtml(DFSTestUtil.urlGet(url));
  
  assertTrue(
      "page should show preview of file contents, got: " + viewFilePage,
      viewFilePage.contains(FILE_DATA));
  
  assertTrue("page should show link to download file", viewFilePage.contains(
      "/streamFile" + ServletUtil.encodePath(filePath) +
          "?nnaddr=localhost:" + nnIpcAddress.getPort()));
  
  // check whether able to tail the file
  String regex = "<a.+href=\"(.+?)\">Tail\\s*this\\s*file\\<\\/a\\>";
  assertFileContents(regex, "Tail this File");
  
  // check whether able to 'Go Back to File View' after tailing the file
  regex = "<a.+href=\"(.+?)\">Go\\s*Back\\s*to\\s*File\\s*View\\<\\/a\\>";
  assertFileContents(regex, "Go Back to File View");
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:40,代码来源:TestDatanodeJsp.java

示例13: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/** Create a redirection URL */
/**
 * Builds the redirect URL pointing a /fileChecksum request at the given
 * datanode, propagating the caller's identity, an optional delegation
 * token, and the namenode address.
 */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
    HttpServletRequest request, NameNode nn) 
    throws IOException {
  final String scheme = request.getScheme();
  final String target = (host instanceof DatanodeInfo)
      ? ((DatanodeInfo) host).getHostName()
      : host.getIpAddr();

  // Use the published HTTPS port when present, otherwise the info port.
  int port = host.getInfoPort();
  if ("https".equals(scheme)) {
    final Integer httpsPort = (Integer) getServletContext().getAttribute(
        DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
    if (httpsPort != null) {
      port = httpsPort;
    }
  }

  final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");

  // A delegation token is attached only in secure mode.
  String dtParam = "";
  if (UserGroupInformation.isSecurityEnabled()) {
    dtParam = JspHelper.getDelegationTokenUrlParam(
        ugi.getTokens().iterator().next().encodeToUrlString());
  }
  final String addrParam = JspHelper.getUrlParam(
      JspHelper.NAMENODE_ADDRESS, nn.getNameNodeAddressHostPortString());

  return new URL(scheme, target, port,
      "/getFileChecksum" + encodedPath + '?'
      + "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName())
      + dtParam + addrParam);
}
 
开发者ID:chendave,项目名称:hadoop-TCP,代码行数:31,代码来源:FileChecksumServlets.java

示例14: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/** Create a redirection URL */
/**
 * Builds the URL that redirects a /streamFile request to a datanode
 * holding the file's first block, forwarding the caller's identity,
 * an optional delegation token, and the namenode address.
 */
private URL createRedirectURL(String path, String encodedPath, HdfsFileStatus status, 
    UserGroupInformation ugi, ClientProtocol nnproxy, HttpServletRequest request, String dt)
    throws IOException {
  final String scheme = request.getScheme();

  // Locate the first block so a datanode holding it can be picked.
  final String absolutePath =
      status.getFullPath(new Path(path)).toUri().getPath();
  final LocatedBlocks located = nnproxy.getBlockLocations(absolutePath, 0, 1);
  final Configuration conf =
      NameNodeHttpServer.getConfFromContext(getServletContext());
  final DatanodeID chosen = pickSrcDatanode(located, status, conf);

  // Resolved hostname when available, otherwise the raw IP address.
  final String target;
  if (chosen instanceof DatanodeInfo) {
    target = ((DatanodeInfo) chosen).getHostName();
  } else {
    target = chosen.getIpAddr();
  }

  // Secure info port for https requests, plain info port otherwise.
  final int port = "https".equals(scheme)
      ? chosen.getInfoSecurePort()
      : chosen.getInfoPort();

  final String dtParam =
      (dt == null) ? "" : JspHelper.getDelegationTokenUrlParam(dt);

  // Forward the namenode address so the datanode can reach back to it.
  final NameNode nn =
      NameNodeHttpServer.getNameNodeFromContext(getServletContext());
  final String addrParam = JspHelper.getUrlParam(
      JspHelper.NAMENODE_ADDRESS, nn.getNameNodeAddressHostPortString());

  final String file = "/streamFile" + encodedPath + '?'
      + "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName())
      + dtParam + addrParam;
  return new URL(scheme, target, port, file);
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre2,代码行数:37,代码来源:FileDataServlet.java

示例15: testViewingFile

import org.apache.hadoop.hdfs.server.common.JspHelper; //导入方法依赖的package包/类
/**
 * Fetches tail.jsp or browseDirectory.jsp for the given path on the first
 * datanode and checks that the page shows the file contents (and, for the
 * directory view, a download link).
 */
private static void testViewingFile(MiniDFSCluster cluster, String filePath,
    boolean doTail) throws IOException {
  Path testPath = new Path(filePath);
  FileSystem fs = cluster.getFileSystem();
  if (!fs.exists(testPath)) {
    DFSTestUtil.writeFile(fs, testPath, FILE_DATA);
  }

  InetSocketAddress nnHttpAddress = cluster.getNameNode().getHttpAddress();
  int dnInfoPort = cluster.getDataNodes().get(0).getInfoPort();

  // Tail view takes a "filename" parameter; directory view takes "dir".
  String jspName = doTail ? "tail.jsp" : "browseDirectory.jsp";
  String fileParamName = doTail ? "filename" : "dir";

  String query =
      JspHelper.getUrlParam(fileParamName, URLEncoder.encode(testPath.toString(), "UTF-8"), true) +
      JspHelper.getUrlParam("namenodeInfoPort", Integer.toString(nnHttpAddress.getPort()));
  URL url = new URL("http://localhost:" + dnInfoPort + "/" + jspName + query);

  String viewFilePage = DFSTestUtil.urlGet(url);

  assertTrue("page should show preview of file contents", viewFilePage.contains(FILE_DATA));

  if (!doTail) {
    assertTrue("page should show link to download file", viewFilePage
        .contains("/streamFile" + URIUtil.encodePath(testPath.toString())));
  }
}
 
开发者ID:cumulusyebl,项目名称:cumulus,代码行数:29,代码来源:TestDatanodeJsp.java


注:本文中的org.apache.hadoop.hdfs.server.common.JspHelper.getUrlParam方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。