This article collects typical usage examples of the Java method org.apache.hadoop.http.HttpServer2.Builder. If you have been wondering what HttpServer2.Builder does, how to call it, or where to find it used in real code, the curated method examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.http.HttpServer2.
The following presents 15 code examples of the HttpServer2.Builder method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
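Before the examples, here is a minimal, self-contained sketch of the basic HttpServer2.Builder flow (configure, build, start, read back the bound address). The context name and port are illustrative placeholders only; in real Hadoop daemons the builder is usually obtained through helpers such as DFSUtil.httpServerTemplateForNNAndJN or populated via WebAppUtils, as the examples below show.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;

public class HttpServer2BuilderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Minimal builder usage: name the web context, bind an HTTP endpoint,
    // and allow falling back to another free port if the requested one is taken.
    HttpServer2 server = new HttpServer2.Builder()
        .setName("example")                               // context name (illustrative)
        .setConf(conf)
        .addEndpoint(URI.create("http://0.0.0.0:50070"))  // port is illustrative
        .setFindPort(true)
        .build();

    server.start();
    // getConnectorAddress(0) reports the address the first connector actually bound to.
    System.out.println("Listening on " + server.getConnectorAddress(0));
    server.stop();
  }
}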
Example 1: start
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
void start() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);

  final String httpsAddrString = conf.get(
      NfsConfigKeys.NFS_HTTPS_ADDRESS_KEY,
      NfsConfigKeys.NFS_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "nfs3",
      NfsConfigKeys.DFS_NFS_KERBEROS_PRINCIPAL_KEY,
      NfsConfigKeys.DFS_NFS_KEYTAB_FILE_KEY);

  this.httpServer = builder.build();
  this.httpServer.start();

  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    infoPort = httpServer.getConnectorAddress(connIdx++).getPort();
  }

  if (policy.isHttpsEnabled()) {
    infoSecurePort = httpServer.getConnectorAddress(connIdx).getPort();
  }
}
Example 2: start
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
void start() throws IOException {
  final InetSocketAddress httpAddr = getAddress(conf);

  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "journal",
      DFSConfigKeys.DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();
  httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  httpServer.addInternalServlet("getJournal", "/getJournal",
      GetJournalEditServlet.class, true);
  httpServer.start();
}
Example 3: loadSslConfiguration
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * Load the SSL keystore / truststore into the HttpServer builder.
 * @param builder the HttpServer2.Builder to populate with ssl config
 * @param sslConf the Configuration instance to use during loading of SSL conf
 */
public static HttpServer2.Builder loadSslConfiguration(
    HttpServer2.Builder builder, Configuration sslConf) {
  if (sslConf == null) {
    sslConf = new Configuration(false);
  }
  boolean needsClientAuth = YarnConfiguration.YARN_SSL_CLIENT_HTTPS_NEED_AUTH_DEFAULT;
  sslConf.addResource(YarnConfiguration.YARN_SSL_SERVER_RESOURCE_DEFAULT);
  return builder
      .needsClientAuth(needsClientAuth)
      .keyPassword(getPassword(sslConf, WEB_APP_KEY_PASSWORD_KEY))
      .keyStore(sslConf.get("ssl.server.keystore.location"),
          getPassword(sslConf, WEB_APP_KEYSTORE_PASSWORD_KEY),
          sslConf.get("ssl.server.keystore.type", "jks"))
      .trustStore(sslConf.get("ssl.server.truststore.location"),
          getPassword(sslConf, WEB_APP_TRUSTSTORE_PASSWORD_KEY),
          sslConf.get("ssl.server.truststore.type", "jks"));
}
Example 4: loadSslConfiguration
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * Load the SSL keystore / truststore into the HttpServer builder.
 * @param builder the HttpServer2.Builder to populate with ssl config
 * @param conf the Configuration instance to load custom SSL config from
 *
 * @return HttpServer2.Builder instance (passed in as the first parameter)
 *         after loading SSL stores
 */
public static HttpServer2.Builder loadSslConfiguration(
    HttpServer2.Builder builder, Configuration conf) {
  Configuration sslConf = new Configuration(false);
  sslConf.addResource(YarnConfiguration.YARN_SSL_SERVER_RESOURCE_DEFAULT);
  if (conf != null) {
    sslConf.addResource(conf);
  }
  boolean needsClientAuth = YarnConfiguration.YARN_SSL_CLIENT_HTTPS_NEED_AUTH_DEFAULT;
  return builder
      .needsClientAuth(needsClientAuth)
      .keyPassword(getPassword(sslConf, WEB_APP_KEY_PASSWORD_KEY))
      .keyStore(sslConf.get("ssl.server.keystore.location"),
          getPassword(sslConf, WEB_APP_KEYSTORE_PASSWORD_KEY),
          sslConf.get("ssl.server.keystore.type", "jks"))
      .trustStore(sslConf.get("ssl.server.truststore.location"),
          getPassword(sslConf, WEB_APP_TRUSTSTORE_PASSWORD_KEY),
          sslConf.get("ssl.server.truststore.type", "jks"))
      .excludeCiphers(
          sslConf.get("ssl.server.exclude.cipher.list"));
}
Example 5: loadSslConfiguration
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * Load the SSL keystore / truststore into the HttpServer builder.
 */
public static HttpServer2.Builder loadSslConfiguration(
    HttpServer2.Builder builder) {
  Configuration sslConf = new Configuration(false);
  boolean needsClientAuth = YarnConfiguration.YARN_SSL_CLIENT_HTTPS_NEED_AUTH_DEFAULT;
  sslConf.addResource(YarnConfiguration.YARN_SSL_SERVER_RESOURCE_DEFAULT);
  return builder
      .needsClientAuth(needsClientAuth)
      .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
      .keyStore(sslConf.get("ssl.server.keystore.location"),
          sslConf.get("ssl.server.keystore.password"),
          sslConf.get("ssl.server.keystore.type", "jks"))
      .trustStore(sslConf.get("ssl.server.truststore.location"),
          sslConf.get("ssl.server.truststore.password"),
          sslConf.get("ssl.server.truststore.type", "jks"));
}
Example 6: start
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
void start() throws IOException {
  final InetSocketAddress httpAddr = getAddress(conf);

  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "journal",
      DFSConfigKeys.DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();
  httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  httpServer.addInternalServlet("getJournal", "/getJournal",
      GetJournalEditServlet.class, true);
  httpServer.start();
}
Example 7: serviceStart
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
@Override
protected void serviceStart() throws Exception {
  try {
    Configuration conf = getConfig();
    HttpServer2.Builder b = new HttpServer2.Builder()
        .setName("proxy")
        .addEndpoint(URI.create(WebAppUtils.getHttpSchemePrefix(conf)
            + bindAddress + ":" + port))
        .setFindPort(port == 0)
        .setConf(getConfig())
        .setACL(acl);
    if (YarnConfiguration.useHttps(conf)) {
      WebAppUtils.loadSslConfiguration(b);
    }

    proxyServer = b.build();
    proxyServer.addServlet(ProxyUriUtils.PROXY_SERVLET_NAME,
        ProxyUriUtils.PROXY_PATH_SPEC, WebAppProxyServlet.class);
    proxyServer.setAttribute(FETCHER_ATTRIBUTE, fetcher);
    proxyServer.setAttribute(IS_SECURITY_ENABLED_ATTRIBUTE, isSecurityEnabled);
    proxyServer.setAttribute(PROXY_HOST_ATTRIBUTE, proxyHost);
    proxyServer.start();
  } catch (IOException e) {
    LOG.error("Could not start proxy web server", e);
    throw e;
  }
  super.serviceStart();
}
Example 8: loadSslConfToHttpServerBuilder
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
public static HttpServer2.Builder loadSslConfToHttpServerBuilder(
    HttpServer2.Builder builder, Configuration sslConf) {
  return builder
      .needsClientAuth(
          sslConf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
              DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
      .keyPassword(getPassword(sslConf, DFS_SERVER_HTTPS_KEYPASSWORD_KEY))
      .keyStore(sslConf.get("ssl.server.keystore.location"),
          getPassword(sslConf, DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
          sslConf.get("ssl.server.keystore.type", "jks"))
      .trustStore(sslConf.get("ssl.server.truststore.location"),
          getPassword(sslConf, DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
          sslConf.get("ssl.server.truststore.type", "jks"));
}
Example 9: startInfoServer
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * Start the web server.
 */
@VisibleForTesting
public void startInfoServer() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);

  final String httpsAddrString = conf.getTrimmed(
      DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "secondary",
      DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);

  infoServer = builder.build();
  infoServer.setAttribute("secondary.name.node", this);
  infoServer.setAttribute("name.system.image", checkpointImage);
  infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  infoServer.addInternalServlet("imagetransfer", ImageServlet.PATH_SPEC,
      ImageServlet.class, true);
  infoServer.start();
  LOG.info("Web server init done");

  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    InetSocketAddress httpAddress = infoServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }

  if (policy.isHttpsEnabled()) {
    InetSocketAddress httpsAddress = infoServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }
}
Example 10: serviceStart
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
@Override
protected void serviceStart() throws Exception {
  try {
    Configuration conf = getConfig();
    HttpServer2.Builder b = new HttpServer2.Builder()
        .setName("proxy")
        .addEndpoint(URI.create(WebAppUtils.getHttpSchemePrefix(conf)
            + bindAddress + ":" + port))
        .setFindPort(port == 0)
        .setConf(getConfig())
        .setACL(acl);
    if (YarnConfiguration.useHttps(conf)) {
      WebAppUtils.loadSslConfiguration(b);
    }

    proxyServer = b.build();
    proxyServer.addServlet(ProxyUriUtils.PROXY_SERVLET_NAME,
        ProxyUriUtils.PROXY_PATH_SPEC, WebAppProxyServlet.class);
    proxyServer.setAttribute(FETCHER_ATTRIBUTE, fetcher);
    proxyServer.setAttribute(IS_SECURITY_ENABLED_ATTRIBUTE, isSecurityEnabled);
    proxyServer.setAttribute(PROXY_HOST_ATTRIBUTE, proxyHost);
    proxyServer.start();
  } catch (IOException e) {
    LOG.fatal("Could not start proxy web server", e);
    throw new YarnRuntimeException("Could not start proxy web server", e);
  }
  super.serviceStart();
}
Example 11: loadSslConfToHttpServerBuilder
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
public static HttpServer2.Builder loadSslConfToHttpServerBuilder(
    HttpServer2.Builder builder, Configuration sslConf) {
  return builder
      .needsClientAuth(
          sslConf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
              DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
      .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
      .keyStore(sslConf.get("ssl.server.keystore.location"),
          sslConf.get("ssl.server.keystore.password"),
          sslConf.get("ssl.server.keystore.type", "jks"))
      .trustStore(sslConf.get("ssl.server.truststore.location"),
          sslConf.get("ssl.server.truststore.password"),
          sslConf.get("ssl.server.truststore.type", "jks"));
}
Example 12: loadSslConfiguration
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * Load the SSL keystore / truststore into the HttpServer builder.
 * @param builder the HttpServer2.Builder to populate with ssl config
 */
public static HttpServer2.Builder loadSslConfiguration(
    HttpServer2.Builder builder) {
  return loadSslConfiguration(builder, null);
}
Example 13: start
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration)
 * for information on the different configuration options and how the
 * HTTP policy is decided.
 */
void start() throws IOException {
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  final String infoHost = bindAddress.getHostName();

  final InetSocketAddress httpAddr = bindAddress;

  final String httpsAddrString = conf.getTrimmed(
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  if (httpsAddr != null) {
    // If DFS_NAMENODE_HTTPS_BIND_HOST_KEY exists then it overrides the
    // host name portion of DFS_NAMENODE_HTTPS_ADDRESS_KEY.
    final String bindHost =
        conf.getTrimmed(DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY);
    if (bindHost != null && !bindHost.isEmpty()) {
      httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
    }
  }

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "hdfs",
      DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();

  if (policy.isHttpsEnabled()) {
    // assume same ssl port for all datanodes
    InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.getTrimmed(
        DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":"
            + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
    httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
        datanodeSslPort.getPort());
  }

  initWebHdfs(conf);

  httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  setupServlets(httpServer, conf);
  httpServer.start();

  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    httpAddress = httpServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }

  if (policy.isHttpsEnabled()) {
    httpsAddress = httpServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }
}
Example 14: start
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration)
 * for information on the different configuration options and how the
 * HTTP policy is decided.
 */
void start() throws IOException {
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  final String infoHost = bindAddress.getHostName();

  final InetSocketAddress httpAddr = bindAddress;

  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  if (httpsAddr != null) {
    // If DFS_NAMENODE_HTTPS_BIND_HOST_KEY exists then it overrides the
    // host name portion of DFS_NAMENODE_HTTPS_ADDRESS_KEY.
    final String bindHost =
        conf.getTrimmed(DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY);
    if (bindHost != null && !bindHost.isEmpty()) {
      httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
    }
  }

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "hdfs",
      DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();

  if (policy.isHttpsEnabled()) {
    // assume same ssl port for all datanodes
    InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
        DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":"
            + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
    httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
        datanodeSslPort.getPort());
  }

  initWebHdfs(conf);

  httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  setupServlets(httpServer, conf);
  httpServer.start();

  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    httpAddress = httpServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }

  if (policy.isHttpsEnabled()) {
    httpsAddress = httpServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }
}
Example 15: start
import org.apache.hadoop.http.HttpServer2; // import the package/class this method depends on
/**
 * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration)
 * for information on the different configuration options and how the
 * HTTP policy is decided.
 */
void start() throws IOException {
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  final String infoHost = bindAddress.getHostName();

  final InetSocketAddress httpAddr = bindAddress;

  final String httpsAddrString = conf.get(
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);

  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "hdfs",
      DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
      DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);

  httpServer = builder.build();

  if (policy.isHttpsEnabled()) {
    // assume same ssl port for all datanodes
    InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
        DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":"
            + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
    httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
        datanodeSslPort.getPort());
  }

  initWebHdfs(conf);

  httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  setupServlets(httpServer, conf);
  httpServer.start();

  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    httpAddress = httpServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }

  if (policy.isHttpsEnabled()) {
    httpsAddress = httpServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }
}