This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.server.common.JspHelper. If you are wondering what the JspHelper class is for, how to use it, or are looking for working examples, the curated samples below may help.
The JspHelper class belongs to the org.apache.hadoop.hdfs.server.common package. Fifteen code examples of the class are shown below, sorted by popularity by default.
Example 1: init
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

private void init(final UserGroupInformation ugi,
    final DelegationParam delegation,
    final UserParam username, final DoAsParam doAsUser,
    final UriFsPathParam path, final HttpOpParam<?> op,
    final Param<?, ?>... parameters) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
        + ", ugi=" + ugi + ", " + username + ", " + doAsUser
        + Param.toSortedString(", ", parameters));
  }

  // clear content type
  response.setContentType(null);

  // Set the remote address; if the request came in via a trusted proxy
  // server, the address will be that of the proxied client.
  REMOTE_ADDRESS.set(JspHelper.getRemoteAddr(request));
}
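
A quick note on the JspHelper.getRemoteAddr call above: it resolves the real client address even when the request arrives through a trusted proxy. Below is a minimal, hypothetical servlet sketch of the same pattern; the class name RemoteAddrServlet and its thread-local field are illustrative assumptions, not Hadoop code.

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hdfs.server.common.JspHelper;

// Hypothetical servlet illustrating the remote-address pattern from Example 1.
public class RemoteAddrServlet extends HttpServlet {
  // Mirrors the REMOTE_ADDRESS thread-local used in the example above.
  private static final ThreadLocal<String> REMOTE_ADDRESS = new ThreadLocal<String>();

  @Override
  protected void doGet(HttpServletRequest request, HttpServletResponse response) {
    // JspHelper.getRemoteAddr consults the trusted-proxy configuration, so the
    // stored value is the proxied client's address, not the proxy's.
    REMOTE_ADDRESS.set(JspHelper.getRemoteAddr(request));
    try {
      // ... handle the request; downstream code can call REMOTE_ADDRESS.get() ...
    } finally {
      REMOTE_ADDRESS.remove(); // avoid leaking state across pooled server threads
    }
  }
}
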
Example 2: start
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

void start() throws IOException {
  final InetSocketAddress httpAddr = getAddress(conf);
  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "journal",
      DFSConfigKeys.DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);
  httpServer = builder.build();
  httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  httpServer.addInternalServlet("getJournal", "/getJournal",
      GetJournalEditServlet.class, true);
  httpServer.start();
}
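
The setAttribute(JspHelper.CURRENT_CONF, conf) call above is the publishing half of a pattern that recurs in the later examples: servlets running inside the HttpServer2 instance read the same attribute back. A minimal hedged sketch of the consuming side follows; the ConfAwareServlet class is a hypothetical illustration (compare Examples 10 and 11 for real servlets doing this).

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.common.JspHelper;

// Hypothetical servlet showing how the published Configuration is retrieved.
public class ConfAwareServlet extends HttpServlet {
  protected Configuration currentConf() {
    ServletContext context = getServletContext();
    // JspHelper.CURRENT_CONF is the shared attribute key under which
    // start() above published the JournalNode's Configuration.
    return (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
  }
}
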
Example 3: generateNodeDataHeader
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = "///" + JspHelper.Url.authority(scheme, d)
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnInfoPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);
  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*")) // name starts with an IPv4 address
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;
  out.print(rowTxt() + "<td class=\"name\"> <a title=\"" + url
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n") + "<td class=\"address\">" + d.getXferAddr());
}
Example 4: testNamenodeJspHelperRedirectToRandomDataNode
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

@Test(timeout = 15000)
public void testNamenodeJspHelperRedirectToRandomDataNode()
    throws IOException, InterruptedException {
  final String urlPart = "browseDirectory.jsp?namenodeInfoPort=";

  ServletContext context = mock(ServletContext.class);
  HttpServletRequest request = mock(HttpServletRequest.class);
  HttpServletResponse resp = mock(HttpServletResponse.class);

  when(request.getScheme()).thenReturn("http");
  when(request.getParameter(UserParam.NAME)).thenReturn("localuser");
  when(context.getAttribute(NAMENODE_ATTRIBUTE_KEY)).thenReturn(
      cluster.getNameNode());
  when(context.getAttribute(JspHelper.CURRENT_CONF)).thenReturn(conf);

  ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
  doAnswer(new Answer<String>() {
    @Override
    public String answer(InvocationOnMock invocation) throws Throwable {
      return null;
    }
  }).when(resp).sendRedirect(captor.capture());

  NamenodeJspHelper.redirectToRandomDataNode(context, request, resp);
  assertTrue(captor.getValue().contains(urlPart));
}
Example 5: createRedirectURL
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

/** Create a redirection URL */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
    HttpServletRequest request, NameNode nn)
    throws IOException {
  final String hostname = host instanceof DatanodeInfo
      ? ((DatanodeInfo)host).getHostName() : host.getIpAddr();
  final String scheme = request.getScheme();
  final int port = "https".equals(scheme)
      ? (Integer)getServletContext().getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY)
      : host.getInfoPort();
  final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");

  String dtParam = "";
  if (UserGroupInformation.isSecurityEnabled()) {
    String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
    dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
  }
  String addr = nn.getNameNodeAddressHostPortString();
  String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);

  return new URL(scheme, hostname, port,
      "/getFileChecksum" + encodedPath + '?' +
      "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
      dtParam + addrParam);
}
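
To see how the two JspHelper query helpers compose, here is a small self-contained sketch that rebuilds a checksum redirect with hypothetical host, port, user, and token values; the exact "&name=value" output format of the helpers is inferred from how they are concatenated above.

import java.net.URL;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.util.ServletUtil;

// Hypothetical standalone rebuild of the redirect URL from Example 5.
public class ChecksumRedirectDemo {
  public static void main(String[] args) throws Exception {
    String tokenString = "encoded-token";   // stand-in for a real delegation token
    String addr = "nn.example.com:8020";    // stand-in for the NameNode host:port

    // Each helper returns a query fragment that can be appended directly.
    String dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
    String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);

    URL url = new URL("http", "dn1.example.com", 50075,
        "/getFileChecksum/user/alice/file?ugi="
            + ServletUtil.encodeQueryValue("alice") + dtParam + addrParam);
    System.out.println(url);
  }
}
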
Example 6: generateNodeDataHeader
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = HttpConfig.getSchemePrefix() + d.getHostName() + ":"
      + d.getInfoPort()
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);
  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*")) // name starts with an IPv4 address
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;
  out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getXferAddr()
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n"));
}
Example 7: generateNodeDataHeader
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = HttpConfig2.getSchemePrefix() + d.getHostName() + ":" +
      d.getInfoPort() + "/browseDirectory.jsp?namenodeInfoPort=" +
      nnHttpPort + "&dir=" + URLEncoder.encode("/", "UTF-8") +
      JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);
  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*")) { // name starts with an IPv4 address
    name = name.replaceAll("\\.[^.:]*", "");
  }
  int idx =
      (suffix != null && name.endsWith(suffix)) ? name.indexOf(suffix) : -1;
  out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getXferAddr() +
      "\" href=\"" + url + "\">" +
      ((idx > 0) ? name.substring(0, idx) : name) + "</a>" +
      ((alive) ? "" : "\n"));
}
Example 8: generateNodeDataHeader
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
    String suffix, boolean alive, int nnHttpPort, String nnaddr)
    throws IOException {
  // from nn_browsedfscontent.jsp:
  String url = "///" + d.getHostName() + ":"
      + d.getInfoPort()
      + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
      + URLEncoder.encode("/", "UTF-8")
      + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);
  String name = d.getXferAddrWithHostname();
  if (!name.matches("\\d+\\.\\d+\\.\\d+\\.\\d+.*")) // name starts with an IPv4 address
    name = name.replaceAll("\\.[^.:]*", "");
  int idx = (suffix != null && name.endsWith(suffix)) ? name
      .indexOf(suffix) : -1;
  out.print(rowTxt() + "<td class=\"name\"> <a title=\"" + url
      + "\" href=\"" + url + "\">"
      + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
      + ((alive) ? "" : "\n") + "<td class=\"address\">" + d.getXferAddr());
}
Example 9: start
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

void start() throws IOException {
  final InetSocketAddress httpAddr = getAddress(conf);
  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "journal",
      DFSConfigKeys.DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);
  httpServer = builder.build();
  httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  httpServer.addInternalServlet("getJournal", "/getJournal",
      GetJournalEditServlet.class, true);
  httpServer.start();
}
Example 10: doGet
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

/**
 * Service a GET request as described below.
 * Request:
 *   GET http://<host>:<port>/monitor?class=...&key=...&... HTTP/1.1
 */
public void doGet(final HttpServletRequest request, final HttpServletResponse response)
    throws IOException {
  final ServletContext context = getServletContext();
  final Configuration conf =
      (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
  final UserGroupInformation ugi = getUGI(request, conf);

  System.out.println("[" + new Date().toString() + "]" +
      request.getRequestURL() +
      (request.getQueryString() == null ? "" : ("?" + request.getQueryString())));

  try {
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        doAll(request, response);
        return null;
      }
    });
  } catch (InterruptedException e) {
    throw new IOException(e);
  }
}
Example 11: getDFSClient
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

/** Get a client for connecting to DFS. */
protected DFSClient getDFSClient(HttpServletRequest request)
    throws IOException, InterruptedException {
  final Configuration conf =
      (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
  UserGroupInformation ugi = getUGI(request, conf);
  DFSClient client = ugi.doAs(new PrivilegedExceptionAction<DFSClient>() {
    @Override
    public DFSClient run() throws IOException {
      return new DFSClient(nameNodeAddr, conf);
    }
  });
  return client;
}
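
A hedged usage sketch for the helper above: the abstract wrapper class below is hypothetical, but it shows the lifecycle a caller should follow, running as the authenticated user and always closing the DFSClient.

import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.io.IOUtils;

// Hypothetical caller showing the lifecycle of the client from getDFSClient.
public abstract class DfsClientCaller {
  protected abstract DFSClient getDFSClient(HttpServletRequest request)
      throws IOException, InterruptedException;

  public void handle(HttpServletRequest request) throws IOException {
    DFSClient client = null;
    try {
      client = getDFSClient(request);  // runs as the authenticated user via ugi.doAs
      // ... use the client, e.g. client.getFileInfo("/") ...
    } catch (InterruptedException e) {
      throw new IOException(e);        // same conversion as in Example 10
    } finally {
      IOUtils.closeStream(client);     // DFSClient holds RPC resources; always close it
    }
  }
}
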
Example 12: createUri
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

/** Create a redirection URI */
protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ugi,
    ClientProtocol nnproxy, HttpServletRequest request, String dt)
    throws IOException, URISyntaxException {
  String scheme = request.getScheme();
  final DatanodeID host = pickSrcDatanode(parent, i, nnproxy);
  final String hostname;
  if (host instanceof DatanodeInfo) {
    hostname = ((DatanodeInfo)host).getHostName();
  } else {
    hostname = host.getHost();
  }

  String dtParam = "";
  if (dt != null) {
    dtParam = JspHelper.getDelegationTokenUrlParam(dt);
  }

  return new URI(scheme, null, hostname,
      "https".equals(scheme)
          ? (Integer)getServletContext().getAttribute("datanode.https.port")
          : host.getInfoPort(),
      "/streamFile" + i.getFullName(parent),
      "ugi=" + ugi.getShortUserName() + dtParam, null);
}
Example 13: initialize
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

private void initialize(Configuration conf) throws IOException {
  sslAddr = getSslAddr(conf);
  String nn = conf.get("hdfsproxy.dfs.namenode.address");
  if (nn == null)
    throw new IOException("HDFS NameNode address is not specified");
  InetSocketAddress nnAddr = NetUtils.createSocketAddr(nn);
  LOG.info("HDFS NameNode is at: " + nnAddr.getHostName() + ":" + nnAddr.getPort());

  Configuration sslConf = new HdfsConfiguration(false);
  sslConf.addResource(conf.get("hdfsproxy.https.server.keystore.resource",
      "ssl-server.xml"));
  // unit testing
  sslConf.set("proxy.http.test.listener.addr",
      conf.get("proxy.http.test.listener.addr"));

  this.server = new ProxyHttpServer(sslAddr, sslConf);
  this.server.setAttribute("proxy.https.port", server.getPort());
  this.server.setAttribute("name.node.address", nnAddr);
  this.server.setAttribute(JspHelper.CURRENT_CONF, new HdfsConfiguration());
  this.server.addGlobalFilter("ProxyFilter", ProxyFilter.class.getName(), null);
  this.server.addServlet("listPaths", "/listPaths/*", ProxyListPathsServlet.class);
  this.server.addServlet("data", "/data/*", ProxyFileDataServlet.class);
  this.server.addServlet("streamFile", "/streamFile/*", ProxyStreamFile.class);
}
Example 14: getValue
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

@Override
public UserGroupInformation getValue(final HttpContext context) {
  final Configuration conf = (Configuration) servletcontext
      .getAttribute(JspHelper.CURRENT_CONF);
  try {
    return JspHelper.getUGI(servletcontext, request, conf,
        AuthenticationMethod.KERBEROS, false);
  } catch (IOException e) {
    throw new SecurityException(
        SecurityUtil.FAILED_TO_GET_UGI_MSG_HEADER + " " + e, e);
  }
}
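
For comparison, here is a minimal hypothetical servlet that resolves the caller's UGI with the same JspHelper.getUGI arguments used above; only the WhoAmIServlet class itself is invented.

import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

// Hypothetical servlet resolving the caller's identity the way Example 14 does.
public class WhoAmIServlet extends HttpServlet {
  @Override
  protected void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException {
    Configuration conf =
        (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
    // In secure mode Kerberos is required and the ugi query parameter is
    // ignored (the final argument, tryUgiParameter, is false).
    UserGroupInformation ugi = JspHelper.getUGI(getServletContext(), request, conf,
        AuthenticationMethod.KERBEROS, false);
    response.getWriter().println("user=" + ugi.getShortUserName());
  }
}
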
Example 15: addDelegationTokenParam
import org.apache.hadoop.hdfs.server.common.JspHelper; // import the required package/class

protected String addDelegationTokenParam(String query) throws IOException {
  String tokenString = null;
  if (UserGroupInformation.isSecurityEnabled()) {
    synchronized (this) {
      tokenAspect.ensureTokenInitialized();
      if (delegationToken != null) {
        tokenString = delegationToken.encodeToUrlString();
        return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
      }
    }
  }
  return query;
}
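
A short usage note for addDelegationTokenParam: on a secure cluster with a cached delegation token it appends the fragment produced by JspHelper.getDelegationTokenUrlParam to the query; otherwise it returns the query unchanged. A hypothetical call site (the query values and the exact parameter name in the output are illustrative assumptions):

// Hypothetical call site; the op/offset values are illustrative only.
String query = "op=OPEN&offset=0";
query = addDelegationTokenParam(query);
// secure cluster:   "op=OPEN&offset=0&delegation=<encoded-token>"  (format assumed)
// insecure cluster: "op=OPEN&offset=0" (unchanged)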