

Java JspHelper Class Code Examples

This article collects typical usages of the Java class org.apache.hadoop.hdfs.server.common.JspHelper. If you are wondering what the JspHelper class does and how to use it in practice, the curated code examples below should help.


JspHelper belongs to the org.apache.hadoop.hdfs.server.common package. The sections below show 15 code examples of the class, drawn from open-source projects and sorted by popularity by default.
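A pattern recurs throughout these examples: the HTTP server publishes its live Configuration in the servlet context under the attribute key JspHelper.CURRENT_CONF (Examples 2, 9, and 13), and servlets later read it back to resolve users and build URLs (Examples 10, 11, and 14). Below is a minimal sketch of the read side; ConfEchoServlet is a hypothetical class used only for illustration, while JspHelper.CURRENT_CONF is the real attribute key.

import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.common.JspHelper;

// Hypothetical servlet: reads back the Configuration that the server stored
// at startup via httpServer.setAttribute(JspHelper.CURRENT_CONF, conf).
public class ConfEchoServlet extends HttpServlet {
  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException {
    final Configuration conf = (Configuration)
        getServletContext().getAttribute(JspHelper.CURRENT_CONF);
    response.getWriter().println("fs.defaultFS = " + conf.get("fs.defaultFS"));
  }
}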

Example 1: init

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
private void init(final UserGroupInformation ugi,
    final DelegationParam delegation,
    final UserParam username, final DoAsParam doAsUser,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
        + ", ugi=" + ugi + ", " + username + ", " + doAsUser
        + Param.toSortedString(", ", parameters));
  }

  //clear content type
  response.setContentType(null);
  
  // set the remote address, if coming in via a trust proxy server then
  // the address with be that of the proxied client
  REMOTE_ADDRESS.set(JspHelper.getRemoteAddr(request));
}
 
Developer: naver, Project: hadoop, Lines: 19, Source: NamenodeWebHdfsMethods.java
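REMOTE_ADDRESS here is a ThreadLocal<String> field of NamenodeWebHdfsMethods; stashing the result of JspHelper.getRemoteAddr(request) in it makes the effective client address available to the rest of the request handling. As the comment notes, when the request arrives through a configured trust proxy, getRemoteAddr returns the proxied client's address rather than the proxy's.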

Example 2: start

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
void start() throws IOException {
  final InetSocketAddress httpAddr = getAddress(conf);

  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "journal",
      DFSConfigKeys.DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();
  httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  httpServer.addInternalServlet("getJournal", "/getJournal",
      GetJournalEditServlet.class, true);
  httpServer.start();
}
 
Developer: naver, Project: hadoop, Lines: 21, Source: JournalNodeHttpServer.java

Example 3: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = "///" + JspHelper.Url.authority(scheme, d)
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnInfoPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+.\\d+\\.\\d+.*"))
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"> <a title=\"" + url
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n") + "<td class=\"address\">" + d.getXferAddr());
}
 
Developer: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 21, Source: NamenodeJspHelper.java
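Unlike the variants in Examples 6-8, which hard-code a host:infoPort authority, this version builds it with JspHelper.Url.authority(scheme, d), which picks the datanode's HTTP or HTTPS info port to match the scheme of the incoming request.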

Example 4: testNamenodeJspHelperRedirectToRandomDataNode

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
@Test(timeout = 15000)
public void testNamenodeJspHelperRedirectToRandomDataNode() throws IOException, InterruptedException {
  final String urlPart = "browseDirectory.jsp?namenodeInfoPort=";                     
  
  ServletContext context = mock(ServletContext.class);
  HttpServletRequest request = mock(HttpServletRequest.class);
  HttpServletResponse resp = mock(HttpServletResponse.class);          
  
  when(request.getScheme()).thenReturn("http");
  when(request.getParameter(UserParam.NAME)).thenReturn("localuser");
  when(context.getAttribute(NAMENODE_ATTRIBUTE_KEY)).thenReturn(
      cluster.getNameNode());
  when(context.getAttribute(JspHelper.CURRENT_CONF)).thenReturn(conf);    
  ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
  doAnswer(new Answer<String>() {
    @Override
    public String answer(InvocationOnMock invocation) throws Throwable {
      return null;
    }
  }).when(resp).sendRedirect(captor.capture());

  NamenodeJspHelper.redirectToRandomDataNode(context, request, resp);    
  assertTrue(captor.getValue().contains(urlPart));    
}
 
Developer: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 25, Source: TestNameNodeJspHelper.java
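The test wires mock servlet objects so that redirectToRandomDataNode sees a running NameNode (NAMENODE_ATTRIBUTE_KEY) and a Configuration (JspHelper.CURRENT_CONF) in the servlet context, captures the argument passed to sendRedirect, and asserts that the redirect target points at a datanode's browseDirectory.jsp.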

Example 5: createRedirectURL

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
/** Create a redirection URL */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
    HttpServletRequest request, NameNode nn) 
    throws IOException {
  final String hostname = host instanceof DatanodeInfo 
      ? ((DatanodeInfo)host).getHostName() : host.getIpAddr();
  final String scheme = request.getScheme();
  final int port = "https".equals(scheme)
      ? (Integer)getServletContext().getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY)
      : host.getInfoPort();
  final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");

  String dtParam = "";
  if (UserGroupInformation.isSecurityEnabled()) {
    String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
    dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
  }
  String addr = nn.getNameNodeAddressHostPortString();
  String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);

  return new URL(scheme, hostname, port, 
      "/getFileChecksum" + encodedPath + '?' +
      "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) + 
      dtParam + addrParam);
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 26, Source: FileChecksumServlets.java
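Assuming security is enabled and the NameNode address is nn.example.com:8020, the URL produced above would look roughly like http://dn1.example.com:50075/getFileChecksum/user/alice/f?ugi=alice&delegation=<token>&nnaddr=nn.example.com:8020 (host names, ports, and path are illustrative; getDelegationTokenUrlParam and getUrlParam emit the &delegation= and &nnaddr= fragments).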

Example 6: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = HttpConfig.getSchemePrefix() + d.getHostName() + ":"
      + d.getInfoPort()
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+.\\d+\\.\\d+.*"))
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getXferAddr()
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n"));
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 22, Source: NamenodeJspHelper.java

Example 7: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = HttpConfig2.getSchemePrefix() + d.getHostName() + ":" +
      d.getInfoPort() + "/browseDirectory.jsp?namenodeInfoPort=" +
      nnHttpPort + "&dir=" + URLEncoder.encode("/", "UTF-8") +
      JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+.\\d+\\.\\d+.*")) {
    name = name.replaceAll("\\.[^.:]*", "");
  }
  int idx =
      (suffix != null && name.endsWith(suffix)) ? name.indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getXferAddr() +
      "\" href=\"" + url + "\">" +
      ((idx > 0) ? name.substring(0, idx) : name) + "</a>" +
      ((alive) ? "" : "\n"));
}
 
Developer: hopshadoop, Project: hops, Lines: 22, Source: NamenodeJspHelper.java

Example 8: generateNodeDataHeader

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = "///" + d.getHostName() + ":"
      + d.getInfoPort()
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);

  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+.\\d+\\.\\d+.*"))
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;

  out.print(rowTxt() + "<td class=\"name\"> <a title=\"" + url
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n") + "<td class=\"address\">" + d.getXferAddr());
}
 
Developer: chendave, Project: hadoop-TCP, Lines: 22, Source: NamenodeJspHelper.java

Example 9: start

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
void start() throws IOException {
  final InetSocketAddress httpAddr = getAddress(conf);

  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "journal",
      DFSConfigKeys.DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();
  httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  httpServer.addInternalServlet("getJournal", "/getJournal",
      GetJournalEditServlet.class, true);
  httpServer.start();
}
 
Developer: Seagate, Project: hadoop-on-lustre2, Lines: 21, Source: JournalNodeHttpServer.java
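This variant is identical to Example 2 except for the SPNEGO setting passed to the builder: DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY here versus DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY in the naver/hadoop version.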

Example 10: doGet

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
/**
 * Service a GET request as described below.
 * Request:
 *   GET http://<host>:<port>/monitor?class=...&key=...&... HTTP/1.1
 */
public void doGet(final HttpServletRequest request, final HttpServletResponse response)
    throws IOException {

  final ServletContext context = getServletContext();
  final Configuration conf =
      (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
  final UserGroupInformation ugi = getUGI(request, conf);

  System.out.println("[" + new Date().toString() + "]" +
      request.getRequestURL() +
      (request.getQueryString() == null ? "" : ("?" + request.getQueryString())));

  try {
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        doAll(request, response);
        return null;
      }
    });
  } catch (InterruptedException e) {
    throw new IOException(e);
  }
}
 
Developer: cumulusyebl, Project: cumulus, Lines: 29, Source: MonitorServlet.java

Example 11: getDFSClient

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
/** getting a client for connecting to dfs */
protected DFSClient getDFSClient(HttpServletRequest request)
    throws IOException, InterruptedException {
  final Configuration conf =
    (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
  
  UserGroupInformation ugi = getUGI(request, conf);
  DFSClient client = ugi.doAs(new PrivilegedExceptionAction<DFSClient>() {
    @Override
    public DFSClient run() throws IOException {
      return new DFSClient(nameNodeAddr, conf);
    }
  });
  
  return client;
}
 
Developer: cumulusyebl, Project: cumulus, Lines: 17, Source: StreamFile.java

Example 12: createUri

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
/** Create a redirection URI */
protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ugi,
    ClientProtocol nnproxy, HttpServletRequest request, String dt)
    throws IOException, URISyntaxException {
  String scheme = request.getScheme();
  final DatanodeID host = pickSrcDatanode(parent, i, nnproxy);
  final String hostname;
  if (host instanceof DatanodeInfo) {
    hostname = ((DatanodeInfo)host).getHostName();
  } else {
    hostname = host.getHost();
  }
      
  String dtParam = "";
  if (dt != null) {
    dtParam = JspHelper.getDelegationTokenUrlParam(dt);
  }

  return new URI(scheme, null, hostname,
      "https".equals(scheme)
        ? (Integer)getServletContext().getAttribute("datanode.https.port")
        : host.getInfoPort(),
          "/streamFile" + i.getFullName(parent), 
          "ugi=" + ugi.getShortUserName() + dtParam, null);
}
 
Developer: cumulusyebl, Project: cumulus, Lines: 26, Source: FileDataServlet.java

Example 13: initialize

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
private void initialize(Configuration conf) throws IOException {
  sslAddr = getSslAddr(conf);
  String nn = conf.get("hdfsproxy.dfs.namenode.address");
  if (nn == null)
    throw new IOException("HDFS NameNode address is not specified");
  InetSocketAddress nnAddr = NetUtils.createSocketAddr(nn);
  LOG.info("HDFS NameNode is at: " + nnAddr.getHostName() + ":" + nnAddr.getPort());

  Configuration sslConf = new HdfsConfiguration(false);
  sslConf.addResource(conf.get("hdfsproxy.https.server.keystore.resource",
      "ssl-server.xml"));
  // unit testing
  sslConf.set("proxy.http.test.listener.addr",
              conf.get("proxy.http.test.listener.addr"));

  this.server = new ProxyHttpServer(sslAddr, sslConf);
  this.server.setAttribute("proxy.https.port", server.getPort());
  this.server.setAttribute("name.node.address", nnAddr);
  this.server.setAttribute(JspHelper.CURRENT_CONF, new HdfsConfiguration());
  this.server.addGlobalFilter("ProxyFilter", ProxyFilter.class.getName(), null);
  this.server.addServlet("listPaths", "/listPaths/*", ProxyListPathsServlet.class);
  this.server.addServlet("data", "/data/*", ProxyFileDataServlet.class);
  this.server.addServlet("streamFile", "/streamFile/*", ProxyStreamFile.class);
}
 
Developer: cumulusyebl, Project: cumulus, Lines: 25, Source: HdfsProxy.java

Example 14: getValue

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
@Override
public UserGroupInformation getValue(final HttpContext context) {
  final Configuration conf = (Configuration) servletcontext
      .getAttribute(JspHelper.CURRENT_CONF);
  try {
    return JspHelper.getUGI(servletcontext, request, conf,
        AuthenticationMethod.KERBEROS, false);
  } catch (IOException e) {
    throw new SecurityException(
        SecurityUtil.FAILED_TO_GET_UGI_MSG_HEADER + " " + e, e);
  }
}
 
Developer: naver, Project: hadoop, Lines: 13, Source: UserProvider.java

Example 15: addDelegationTokenParam

import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class
protected String addDelegationTokenParam(String query) throws IOException {
  String tokenString = null;
  if (UserGroupInformation.isSecurityEnabled()) {
    synchronized (this) {
      tokenAspect.ensureTokenInitialized();
      if (delegationToken != null) {
        tokenString = delegationToken.encodeToUrlString();
        return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
      }
    }
  }
  return query;
}
 
Developer: naver, Project: hadoop, Lines: 14, Source: HftpFileSystem.java
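To show how the JspHelper URL-parameter helpers used above compose, here is a small self-contained sketch; the surrounding class and the placeholder token string are illustrative, while getDelegationTokenUrlParam and getUrlParam are the real JspHelper methods. Note that getDelegationTokenUrlParam returns an empty string unless Hadoop security is enabled.

import org.apache.hadoop.hdfs.server.common.JspHelper;

// Illustrative only: composes a query string the way the examples above do.
public class QueryParamExample {
  public static void main(String[] args) {
    String tokenString = "token-placeholder"; // normally token.encodeToUrlString()
    String query = "op=OPEN"
        // "&delegation=<token>" when security is enabled, "" otherwise
        + JspHelper.getDelegationTokenUrlParam(tokenString)
        // "&nnaddr=<host:port>"
        + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, "nn.example.com:8020");
    System.out.println(query);
    // e.g. op=OPEN&delegation=token-placeholder&nnaddr=nn.example.com:8020
  }
}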


Note: the org.apache.hadoop.hdfs.server.common.JspHelper class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright in the source code remains with the original authors, and use or redistribution is governed by each project's License. Do not reproduce without permission.