This article collects typical usage examples of the Java class org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod. If you are unsure what the AuthenticationMethod class is for or how to use it, the curated examples below should help.
AuthenticationMethod belongs to the org.apache.hadoop.security.UserGroupInformation package. Thirteen code examples are shown below, ordered by popularity by default.
Example 1: getUser
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
/**
* Get the username encoded in the token identifier
*
* @return the username or owner
*/
@Override
public UserGroupInformation getUser() {
if ( (owner == null) || (owner.toString().isEmpty())) {
return null;
}
final UserGroupInformation realUgi;
final UserGroupInformation ugi;
if ((realUser == null) || (realUser.toString().isEmpty())
|| realUser.equals(owner)) {
ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
} else {
realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
}
realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
return ugi;
}
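For reference, the two UGI shapes produced above can be reproduced directly with the public UserGroupInformation factory methods. This is a minimal, hypothetical sketch (the user names are made up) rather than part of the identifier class:

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

public class TokenUgiSketch {
  public static void main(String[] args) {
    // Owner equals the real user: a plain remote user authenticated by TOKEN.
    UserGroupInformation direct = UserGroupInformation.createRemoteUser("alice");
    direct.setAuthenticationMethod(AuthenticationMethod.TOKEN);

    // A service ("bob") holds the token on behalf of the owner: a proxy user.
    UserGroupInformation real = UserGroupInformation.createRemoteUser("bob");
    real.setAuthenticationMethod(AuthenticationMethod.TOKEN);
    UserGroupInformation proxy = UserGroupInformation.createProxyUser("alice", real);

    System.out.println(direct.getAuthenticationMethod());              // TOKEN
    System.out.println(proxy.getAuthenticationMethod());               // PROXY
    System.out.println(proxy.getRealUser().getAuthenticationMethod()); // TOKEN
  }
}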
Example 2: getAuthMethods
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
private List<AuthMethod> getAuthMethods(SecretManager<?> secretManager,
Configuration conf) {
AuthenticationMethod confAuthenticationMethod =
SecurityUtil.getAuthenticationMethod(conf);
List<AuthMethod> authMethods = new ArrayList<AuthMethod>();
if (confAuthenticationMethod == AuthenticationMethod.TOKEN) {
if (secretManager == null) {
throw new IllegalArgumentException(AuthenticationMethod.TOKEN +
" authentication requires a secret manager");
}
} else if (secretManager != null) {
LOG.debug(AuthenticationMethod.TOKEN +
" authentication enabled for secret manager");
// most preferred, go to the front of the line!
authMethods.add(AuthenticationMethod.TOKEN.getAuthMethod());
}
authMethods.add(confAuthenticationMethod.getAuthMethod());
LOG.debug("Server accepts auth methods:" + authMethods);
return authMethods;
}
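The confAuthenticationMethod above is derived from the hadoop.security.authentication key. A minimal standalone sketch of that mapping (not part of the server code; the "kerberos" value is just an example):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

public class AuthMethodFromConf {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // With the key unset, Hadoop defaults to SIMPLE authentication.
    System.out.println(SecurityUtil.getAuthenticationMethod(conf)); // SIMPLE

    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    AuthenticationMethod method = SecurityUtil.getAuthenticationMethod(conf);
    System.out.println(method); // KERBEROS
  }
}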
Example 3: testLogin
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
@Test
public void testLogin() throws IOException {
String userPrincipal = System.getProperty("user.principal");
String userKeyTab = System.getProperty("user.keytab");
Assert.assertNotNull("User principal was not specified", userPrincipal);
Assert.assertNotNull("User keytab was not specified", userKeyTab);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation ugi = UserGroupInformation
.loginUserFromKeytabAndReturnUGI(userPrincipal, userKeyTab);
Assert.assertEquals(AuthenticationMethod.KERBEROS,
ugi.getAuthenticationMethod());
try {
UserGroupInformation
.loginUserFromKeytabAndReturnUGI("[email protected]", userKeyTab);
Assert.fail("Login should have failed");
} catch (Exception ex) {
ex.printStackTrace();
}
}
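The test above reads the principal and keytab from JVM system properties. A small sketch of how those properties might be supplied programmatically (the values are placeholders; in practice they are usually passed as -D options to the test JVM):

// Placeholder values for illustration only.
System.setProperty("user.principal", "testuser@EXAMPLE.COM");
System.setProperty("user.keytab", "/path/to/testuser.keytab");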
Example 4: testGetUserWithOwnerAndReal
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
@Test
public void testGetUserWithOwnerAndReal() {
Text owner = new Text("owner");
Text realUser = new Text("realUser");
TestDelegationTokenIdentifier ident =
new TestDelegationTokenIdentifier(owner, null, realUser);
UserGroupInformation ugi = ident.getUser();
assertNotNull(ugi.getRealUser());
assertNull(ugi.getRealUser().getRealUser());
assertEquals("owner", ugi.getUserName());
assertEquals("realUser", ugi.getRealUser().getUserName());
assertEquals(AuthenticationMethod.PROXY,
ugi.getAuthenticationMethod());
assertEquals(AuthenticationMethod.TOKEN,
ugi.getRealUser().getAuthenticationMethod());
}
Example 5: tryLoginAuthenticationMethod
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
private void tryLoginAuthenticationMethod(AuthenticationMethod method,
boolean expectSuccess)
throws IOException {
SecurityUtil.setAuthenticationMethod(method, conf);
UserGroupInformation.setConfiguration(conf); // pick up changed auth
UserGroupInformation ugi = null;
Exception ex = null;
try {
ugi = UserGroupInformation.getLoginUser();
} catch (Exception e) {
ex = e;
}
if (expectSuccess) {
assertNotNull(ugi);
assertEquals(method, ugi.getAuthenticationMethod());
} else {
assertNotNull(ex);
assertEquals(UnsupportedOperationException.class, ex.getClass());
assertEquals(method + " login authentication is not supported",
ex.getMessage());
}
}
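A helper like this is typically driven from one small test per authentication method, inside the same test class. A hedged sketch of such callers (not copied verbatim from the original test class):

@Test
public void testSimpleLogin() throws IOException {
  // SIMPLE login needs no credentials and should succeed.
  tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true);
}

@Test
public void testTokenLogin() throws IOException {
  // TOKEN is not a login method, so getLoginUser() is expected to fail.
  tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false);
}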
Example 6: testConstructorWithKerberos
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
/** test constructor */
@Test (timeout = 30000)
public void testConstructorWithKerberos() throws Exception {
// security on, default is remove default realm
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
UserGroupInformation.setConfiguration(conf);
testConstructorSuccess("user1", "user1");
testConstructorSuccess("[email protected]", "user2");
testConstructorSuccess("user3/[email protected]", "user3");
// failure test
testConstructorFailures("[email protected]");
testConstructorFailures("user5/[email protected]");
testConstructorFailures(null);
testConstructorFailures("");
}
Example 7: testConstructorWithKerberosRules
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
/** test constructor */
@Test (timeout = 30000)
public void testConstructorWithKerberosRules() throws Exception {
// security on, explicit rules
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
conf.set(HADOOP_SECURITY_AUTH_TO_LOCAL,
"RULE:[2:[email protected]$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/" +
"RULE:[1:[email protected]$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/" +
"DEFAULT");
UserGroupInformation.setConfiguration(conf);
testConstructorSuccess("user1", "user1");
testConstructorSuccess("[email protected]", "user2");
testConstructorSuccess("user3/[email protected]", "user3");
testConstructorSuccess("[email protected]", "other-user4");
testConstructorSuccess("user5/[email protected]", "other-user5");
// failure test
testConstructorFailures(null);
testConstructorFailures("");
}
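The same auth_to_local rules can be evaluated directly through HadoopKerberosName, which is what the UserGroupInformation constructor relies on. A hedged sketch, assuming the same rules as in the example above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.HadoopKerberosName;

public class AuthToLocalSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("hadoop.security.auth_to_local",
        "RULE:[2:$1@$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/" +
        "RULE:[1:$1@$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/" +
        "DEFAULT");
    HadoopKerberosName.setConfiguration(conf);

    // The single-component rule rewrites principals from OTHER.REALM.
    String shortName = new HadoopKerberosName("user4@OTHER.REALM").getShortName();
    System.out.println(shortName); // other-user4
  }
}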
Example 8: createSecureConfig
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
/**
* Creates configuration for starting a secure cluster.
*
* @param dataTransferProtection supported QOPs
* @return configuration for starting a secure cluster
* @throws Exception if there is any failure
*/
protected HdfsConfiguration createSecureConfig(
String dataTransferProtection) throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
String keystoresDir = baseDir.getAbsolutePath();
String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
return conf;
}
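A hedged sketch of how such a configuration is typically consumed in a test, assuming the Kerberos fixtures (MiniKdc, keytab, principals) have already been prepared by the surrounding test class:

import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

// Inside a test method of the same class:
HdfsConfiguration conf = createSecureConfig("authentication");
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
try {
  cluster.waitActive();
  // exercise the secured FileSystem here, e.g. via cluster.getFileSystem()
} finally {
  cluster.shutdown();
}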
Example 9: testTokenAuth
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
private void testTokenAuth(Class<? extends RpcClient> rpcImplClass) throws IOException,
ServiceException {
TEST_UTIL.getConfiguration().set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY,
rpcImplClass.getName());
try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
Table table = conn.getTable(TableName.META_TABLE_NAME)) {
CoprocessorRpcChannel rpcChannel = table.coprocessorService(HConstants.EMPTY_START_ROW);
AuthenticationProtos.AuthenticationService.BlockingInterface service =
AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
WhoAmIResponse response = service.whoAmI(null, WhoAmIRequest.getDefaultInstance());
assertEquals(USERNAME, response.getUsername());
assertEquals(AuthenticationMethod.TOKEN.name(), response.getAuthMethod());
try {
service.getAuthenticationToken(null, GetAuthenticationTokenRequest.getDefaultInstance());
} catch (ServiceException e) {
AccessDeniedException exc = (AccessDeniedException) ProtobufUtil.getRemoteException(e);
assertTrue(exc.getMessage().contains(
"Token generation only allowed for Kerberos authenticated clients"));
}
}
}
Example 10: testRpcCallWithEnabledKerberosSaslAuth
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
private void testRpcCallWithEnabledKerberosSaslAuth(Class<? extends RpcClient> rpcImplClass)
throws Exception {
String krbKeytab = getKeytabFileForTesting();
String krbPrincipal = getPrincipalForTesting();
UserGroupInformation ugi = loginKerberosPrincipal(krbKeytab, krbPrincipal);
UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser();
// check that the login user is okay:
assertSame(ugi, ugi2);
assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
assertEquals(krbPrincipal, ugi.getUserName());
Configuration clientConf = getSecuredConfiguration();
callRpcService(rpcImplClass, User.create(ugi2), clientConf, false);
}
Example 11: testRpcFallbackToSimpleAuth
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
public void testRpcFallbackToSimpleAuth(Class<? extends RpcClient> rpcImplClass) throws Exception {
String krbKeytab = getKeytabFileForTesting();
String krbPrincipal = getPrincipalForTesting();
UserGroupInformation ugi = loginKerberosPrincipal(krbKeytab, krbPrincipal);
assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
assertEquals(krbPrincipal, ugi.getUserName());
String clientUsername = "testuser";
UserGroupInformation clientUgi = UserGroupInformation.createUserForTesting(clientUsername,
new String[]{clientUsername});
// check that the client user is insecure
assertNotSame(ugi, clientUgi);
assertEquals(AuthenticationMethod.SIMPLE, clientUgi.getAuthenticationMethod());
assertEquals(clientUsername, clientUgi.getUserName());
Configuration clientConf = new Configuration();
clientConf.set(User.HBASE_SECURITY_CONF_KEY, "simple");
callRpcService(rpcImplClass, User.create(clientUgi), clientConf, true);
}
Example 12: createSecureConfig
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
/**
* Creates configuration for starting a secure cluster.
*
* @param dataTransferProtection supported QOPs
* @return configuration for starting a secure cluster
* @throws Exception if there is any failure
*/
protected HdfsConfiguration createSecureConfig(
String dataTransferProtection) throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, hdfsKeytab);
conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, hdfsKeytab);
conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
String keystoresDir = baseDir.getAbsolutePath();
String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
conf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
KeyStoreTestUtil.getClientSSLConfigFileName());
conf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
KeyStoreTestUtil.getServerSSLConfigFileName());
return conf;
}
Example 13: createSecureConfiguration
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; // import the dependent package/class
/**
 * Sets up a secure Hadoop configuration and, for Kerberos, logs in from the
 * configured keytab.
 *
 * @param properties interpreter properties holding the principal and keytab location
 */
public static void createSecureConfiguration(Properties properties) {
AuthenticationMethod authType = getAuthtype(properties);
switch (authType) {
case KERBEROS:
Configuration conf = new org.apache.hadoop.conf.Configuration();
conf.set("hadoop.security.authentication", KERBEROS.toString());
UserGroupInformation.setConfiguration(conf);
try {
UserGroupInformation.loginUserFromKeytab(
properties.getProperty("zeppelin.jdbc.principal"),
properties.getProperty("zeppelin.jdbc.keytab.location")
);
} catch (IOException e) {
LOGGER.error("Failed to get either keytab location or principal name in the " +
"interpreter", e);
}
}
}
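A hypothetical driver for the method above. The principal and keytab property names appear in the code itself, but the property consulted by getAuthtype() is not shown, so the zeppelin.jdbc.auth.type key used here is only an assumption:

import java.util.Properties;

// Hypothetical values; adjust to the actual environment.
Properties props = new Properties();
props.setProperty("zeppelin.jdbc.auth.type", "KERBEROS"); // assumed key read by getAuthtype()
props.setProperty("zeppelin.jdbc.principal", "zeppelin/host.example.com@EXAMPLE.COM");
props.setProperty("zeppelin.jdbc.keytab.location", "/etc/security/keytabs/zeppelin.service.keytab");
createSecureConfiguration(props);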