This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods. If you are wondering what NamenodeWebHdfsMethods is for and how it is used, the curated class examples below may help.
The NamenodeWebHdfsMethods class belongs to the org.apache.hadoop.hdfs.server.namenode.web.resources package. Twelve code examples of the class are shown below, ordered by popularity.
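All of the examples below are NameNode-side code: NamenodeWebHdfsMethods is the JAX-RS resource that serves the WebHDFS REST API. For orientation, here is a minimal client-side sketch; the host, port, and path are hypothetical, and it only assumes a NameNode with WebHDFS enabled is reachable.

// A minimal client-side sketch (hypothetical host/port and path). Every call made
// through the webhdfs:// scheme is served on the NameNode by the
// NamenodeWebHdfsMethods resource registered in the examples below.
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // webhdfs:// URIs resolve to WebHdfsFileSystem; its HTTP requests are handled
    // on the NameNode by NamenodeWebHdfsMethods.
    FileSystem fs = FileSystem.get(new URI("webhdfs://namenode-host:50070"), conf);
    FileStatus status = fs.getFileStatus(new Path("/tmp/demo.txt"));
    System.out.println("length=" + status.getLen());
    fs.close();
  }
}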
Example 1: initWebHdfs
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
private void initWebHdfs(Configuration conf) throws IOException {
  if (WebHdfsFileSystem.isEnabled(conf, HttpServer2.LOG)) {
    // set user pattern based on configuration file
    UserParam.setUserPattern(conf.get(
        DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
        DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
    // add authentication filter for webhdfs
    final String className = conf.get(
        DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY,
        DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_DEFAULT);
    final String name = className;
    final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
    Map<String, String> params = getAuthFilterParams(conf);
    HttpServer2.defineFilter(httpServer.getWebAppContext(), name, className,
        params, new String[] { pathSpec });
    HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
        + ")");
    // add webhdfs packages
    httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
        .getPackage().getName() + ";" + Param.class.getPackage().getName(),
        pathSpec);
  }
}
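The string passed to addJerseyResourcePackage is a ';'-separated list of packages: the package of NamenodeWebHdfsMethods contributes the REST resource itself, while the Param package lets Jersey resolve the request parameter types. Both the authentication filter and the Jersey resources are mounted under the same WebHdfsFileSystem.PATH_PREFIX path spec, so they cover the same URL space.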
Example 2: initWebHdfs
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
private void initWebHdfs(Configuration conf) throws IOException {
  // set user pattern based on configuration file
  UserParam.setUserPattern(conf.get(
      HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
      HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
  // add authentication filter for webhdfs
  final String className = conf.get(
      DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY,
      DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_DEFAULT);
  final String name = className;
  final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
  Map<String, String> params = getAuthFilterParams(conf);
  HttpServer2.defineFilter(httpServer.getWebAppContext(), name, className,
      params, new String[] { pathSpec });
  HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
      + ")");
  // add webhdfs packages
  httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
      .getPackage().getName() + ";" + Param.class.getPackage().getName(),
      pathSpec);
}
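This variant differs from Example 1 in two ways: the user-pattern keys are read from HdfsClientConfigKeys, where the WebHDFS client settings live in later Hadoop releases, and there is no WebHdfsFileSystem.isEnabled guard, so the WebHDFS endpoint is wired up unconditionally. The filter and Jersey registration is otherwise identical.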
Example 3: getRemoteIp
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
private static InetAddress getRemoteIp() {
  InetAddress ip = Server.getRemoteIp();
  if (ip != null) {
    return ip;
  }
  return NamenodeWebHdfsMethods.getRemoteIp();
}
Example 4: getClientMachine
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
private static String getClientMachine() {
  String clientMachine = NamenodeWebHdfsMethods.getRemoteAddress();
  if (clientMachine == null) { //not a web client
    clientMachine = Server.getRemoteAddress();
  }
  if (clientMachine == null) { //not a RPC client
    clientMachine = "";
  }
  return clientMachine;
}
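Examples 3 and 4 show the complementary lookups used by NameNode-side code such as audit logging: the RPC layer's Server.getRemoteIp()/getRemoteAddress() only knows about RPC callers, so when a request arrived over WebHDFS the code falls back to NamenodeWebHdfsMethods, which tracks the HTTP client's address while it handles each REST call. An empty string is the final fallback when neither path applies.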
Example 5: testNamenodeRestart
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
/** Test client retry with namenode restarting. */
@Test(timeout=300000)
public void testNamenodeRestart() throws Exception {
  ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
  final Configuration conf = WebHdfsTestUtil.createConf();
  TestDFSClientRetries.namenodeRestartTest(conf, true);
}
Example 6: testDelegationTokenWebHdfsApi
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
@SuppressWarnings("deprecation")
@Test
public void testDelegationTokenWebHdfsApi() throws Exception {
  ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
  final String uri = WebHdfsFileSystem.SCHEME + "://"
      + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
  //get file system as JobTracker
  final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
      "JobTracker", new String[]{"user"});
  final WebHdfsFileSystem webhdfs = ugi.doAs(
      new PrivilegedExceptionAction<WebHdfsFileSystem>() {
        @Override
        public WebHdfsFileSystem run() throws Exception {
          return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
        }
      });
  { //test addDelegationTokens(..)
    Credentials creds = new Credentials();
    final Token<?> tokens[] = webhdfs.addDelegationTokens("JobTracker", creds);
    Assert.assertEquals(1, tokens.length);
    Assert.assertEquals(1, creds.numberOfTokens());
    Assert.assertSame(tokens[0], creds.getAllTokens().iterator().next());
    checkTokenIdentifier(ugi, tokens[0]);
    final Token<?> tokens2[] = webhdfs.addDelegationTokens("JobTracker", creds);
    Assert.assertEquals(0, tokens2.length);
  }
}
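The second addDelegationTokens(..) call returns an empty array because the Credentials object already holds a token for the WebHDFS service; FileSystem.addDelegationTokens only fetches a new token when none is cached for that service, which is exactly what the final assertion verifies.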
Example 7: testNamenodeRestart
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
/** Test client retry with namenode restarting. */
@Test(timeout=300000)
public void testNamenodeRestart() throws Exception {
  GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
  final Configuration conf = WebHdfsTestUtil.createConf();
  TestDFSClientRetries.namenodeRestartTest(conf, true);
}
Example 8: testDelegationTokenWebHdfsApi
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
@Test
public void testDelegationTokenWebHdfsApi() throws Exception {
  GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
  final String uri = WebHdfsConstants.WEBHDFS_SCHEME + "://"
      + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
  //get file system as JobTracker
  final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
      "JobTracker", new String[]{"user"});
  final WebHdfsFileSystem webhdfs = ugi.doAs(
      new PrivilegedExceptionAction<WebHdfsFileSystem>() {
        @Override
        public WebHdfsFileSystem run() throws Exception {
          return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
        }
      });
  { //test addDelegationTokens(..)
    Credentials creds = new Credentials();
    final Token<?> tokens[] = webhdfs.addDelegationTokens("JobTracker", creds);
    Assert.assertEquals(1, tokens.length);
    Assert.assertEquals(1, creds.numberOfTokens());
    Assert.assertSame(tokens[0], creds.getAllTokens().iterator().next());
    checkTokenIdentifier(ugi, tokens[0]);
    final Token<?> tokens2[] = webhdfs.addDelegationTokens("JobTracker", creds);
    Assert.assertEquals(0, tokens2.length);
  }
}
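Example 8 is the same delegation-token test as Example 6, updated for later Hadoop code: the URI scheme comes from WebHdfsConstants.WEBHDFS_SCHEME and the log level is set through GenericTestUtils.setLogLevel rather than by casting to Log4JLogger.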
Example 9: setLogLevel
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
static private void setLogLevel() {
  ((Log4JLogger)LOG).getLogger().setLevel(Level.ALL);
  ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
  ((Log4JLogger)NameNode.stateChangeLog).getLogger().setLevel(Level.OFF);
  ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.OFF);
  ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
}
Example 10: setLogLevel
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
static private void setLogLevel() {
  ((Log4JLogger) LOG).getLogger().setLevel(Level.ALL);
  ((Log4JLogger) NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
  ((Log4JLogger) NameNode.stateChangeLog).getLogger().setLevel(Level.OFF);
  ((Log4JLogger) LeaseManager.LOG).getLogger().setLevel(Level.OFF);
  ((Log4JLogger) LogFactory.getLog(FSNamesystem.class)).getLogger()
      .setLevel(Level.OFF);
}
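Examples 9 and 10 are the same helper formatted slightly differently; both reach the underlying log4j logger through the legacy commons-logging Log4JLogger cast. On Hadoop versions that ship GenericTestUtils.setLogLevel (as used in Example 7), a cast-free sketch of the same helper could look like this; it assumes the same LOG fields are in scope as in the examples above.

static private void setLogLevel() {
  // same levels as Examples 9/10, set through the test helper instead of casts
  GenericTestUtils.setLogLevel(LOG, Level.ALL);
  GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
  GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.OFF);
  GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.OFF);
  GenericTestUtils.setLogLevel(LogFactory.getLog(FSNamesystem.class), Level.OFF);
}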
Example 11: testNamenodeRestart
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
/**
 * Test client retry with namenode restarting.
 */
@Test(timeout = 900000)
public void testNamenodeRestart() throws Exception {
  ((Log4JLogger) NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
  final Configuration conf = WebHdfsTestUtil.createConf();
  TestDFSClientRetries.namenodeRestartTest(conf, true);
}
Example 12: testDelegationTokenWebHdfsApi
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; // import the required package/class
@SuppressWarnings("deprecation")
@Test
public void testDelegationTokenWebHdfsApi() throws Exception {
  ((Log4JLogger) NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
  final String uri = WebHdfsFileSystem.SCHEME + "://" +
      config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
  //get file system as JobTracker
  final UserGroupInformation ugi = UserGroupInformation
      .createUserForTesting("JobTracker", new String[]{"user"});
  final WebHdfsFileSystem webhdfs =
      ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
        @Override
        public WebHdfsFileSystem run() throws Exception {
          return (WebHdfsFileSystem) FileSystem.get(new URI(uri), config);
        }
      });
  { //test addDelegationTokens(..)
    Credentials creds = new Credentials();
    final Token<?> tokens[] =
        webhdfs.addDelegationTokens("JobTracker", creds);
    Assert.assertEquals(1, tokens.length);
    Assert.assertEquals(1, creds.numberOfTokens());
    Assert.assertSame(tokens[0], creds.getAllTokens().iterator().next());
    checkTokenIdentifier(ugi, tokens[0]);
    final Token<?> tokens2[] =
        webhdfs.addDelegationTokens("JobTracker", creds);
    Assert.assertEquals(0, tokens2.length);
  }
}