This article collects typical usage examples of the Java method org.apache.hadoop.minikdc.MiniKdc.getRealm. If you are wondering what MiniKdc.getRealm does, how to use it, or want to see it in real code, the curated examples below should help. You can also explore further usage of its enclosing class, org.apache.hadoop.minikdc.MiniKdc.
The following presents 10 code examples of the MiniKdc.getRealm method, sorted by popularity by default.
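Before the individual examples, here is a minimal, self-contained sketch of the MiniKdc lifecycle that all of them follow; the work directory and principal name below are illustrative placeholders, not taken from any of the examples:

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcRealmSketch {
  public static void main(String[] args) throws Exception {
    File workDir = new File("target/minikdc-work"); // placeholder work directory
    workDir.mkdirs();
    Properties conf = MiniKdc.createConf(); // default test KDC configuration
    MiniKdc kdc = new MiniKdc(conf, workDir);
    kdc.start(); // boots the embedded KDC
    try {
      // Create a principal and its keytab, then qualify it with the KDC's realm.
      File keytab = new File(workDir, "client.keytab");
      kdc.createPrincipal(keytab, "client/localhost");
      String principal = "client/localhost@" + kdc.getRealm(); // e.g. "EXAMPLE.COM"
      System.out.println("Fully qualified principal: " + principal);
    } finally {
      kdc.stop(); // always shut the KDC down
    }
  }
}

As the examples show, getRealm() is almost always used exactly this way: to turn a short service principal into the fully qualified name@REALM form that Kerberos configuration expects.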
Example 1: startMiniKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@BeforeClass
public static void startMiniKdc() throws Exception {
  workDir = new File(System.getProperty("test.dir", "target"),
      TestFlumeAuthenticator.class.getSimpleName());
  flumeKeytab = new File(workDir, "flume.keytab");
  aliceKeytab = new File(workDir, "alice.keytab");
  conf = MiniKdc.createConf();
  kdc = new MiniKdc(conf, workDir);
  kdc.start();
  kdc.createPrincipal(flumeKeytab, flumePrincipal);
  flumePrincipal = flumePrincipal + "@" + kdc.getRealm();
  kdc.createPrincipal(aliceKeytab, alicePrincipal);
  alicePrincipal = alicePrincipal + "@" + kdc.getRealm();
}
Example 2: initKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
      SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, userName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = userName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
Example 3: initKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
      SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();
  String userName = RandomStringUtils.randomAlphabetic(8);
  File userKeytabFile = new File(baseDir, userName + ".keytab");
  userKeyTab = userKeytabFile.getAbsolutePath();
  kdc.createPrincipal(userKeytabFile, userName + "/localhost");
  userPrincipal = userName + "/localhost@" + kdc.getRealm();
  String superUserName = "hdfs";
  File hdfsKeytabFile = new File(baseDir, superUserName + ".keytab");
  hdfsKeytab = hdfsKeytabFile.getAbsolutePath();
  kdc.createPrincipal(hdfsKeytabFile, superUserName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = superUserName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
Example 4: initKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();
  File keytabFile = new File(baseDir, "hdfs" + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "hdfs" + "/localhost", "HTTP/localhost");
  hdfsPrincipal = "hdfs" + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
  keytabFile = new File(baseDir, "connect-hdfs" + ".keytab");
  connectorKeytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "connect-hdfs/localhost");
  connectorPrincipal = "connect-hdfs/localhost@" + kdc.getRealm();
}
Example 5: init
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
      TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();
  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
      baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
      UserGroupInformation.isSecurityEnabled());
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
      userName + "/" + krbInstance,
      "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
      TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
Example 6: init
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
      TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();
  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
      baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
      UserGroupInformation.isSecurityEnabled());
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
      userName + "/" + krbInstance,
      "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
      TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
  baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
}
Example 7: startMiniKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@Before
public void startMiniKdc() throws Exception {
  conf = MiniKdc.createConf();
  kdc = new MiniKdc(conf, kdcDir.getRoot());
  kdc.start();
  String localhostName = "localhost.localdomain";
  String principalServerNoRealm = "herddb/" + localhostName;
  String principalServer = "herddb/" + localhostName + "@" + kdc.getRealm();
  String principalClientNoRealm = "herddbclient/" + localhostName;
  String principalClient = principalClientNoRealm + "@" + kdc.getRealm();
  System.out.println("adding principal: " + principalServerNoRealm);
  System.out.println("adding principal: " + principalClientNoRealm);
  File keytabClient = new File(workDir.getRoot(), "herddbclient.keytab");
  kdc.createPrincipal(keytabClient, principalClientNoRealm);
  File keytabServer = new File(workDir.getRoot(), "herddbserver.keytab");
  kdc.createPrincipal(keytabServer, principalServerNoRealm);
  File jaas_file = new File(workDir.getRoot(), "jaas.conf");
  try (FileWriter writer = new FileWriter(jaas_file)) {
    writer.write("\n"
        + "HerdDBServer {\n"
        + " com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
        + " useKeyTab=true\n"
        + " keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
        + " storeKey=true\n"
        + " useTicketCache=false\n"
        + " principal=\"" + principalServer + "\";\n"
        + "};\n"
        + "\n"
        + "\n"
        + "\n"
        + "HerdDBClient {\n"
        + " com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
        + " useKeyTab=true\n"
        + " keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
        + " storeKey=true\n"
        + " useTicketCache=false\n"
        + " principal=\"" + principalClient + "\";\n"
        + "};\n"
    );
  }
  File krb5file = new File(workDir.getRoot(), "krb5.conf");
  try (FileWriter writer = new FileWriter(krb5file)) {
    writer.write("[libdefaults]\n"
        + " default_realm = " + kdc.getRealm() + "\n"
        + "\n"
        + "\n"
        + "[realms]\n"
        + " " + kdc.getRealm() + " = {\n"
        + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
        + " }"
    );
  }
  System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
  System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
}
Example 8: prepare
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
public static void prepare(TemporaryFolder tempFolder) {
  try {
    File baseDirForSecureRun = tempFolder.newFolder();
    LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);
    String hostName = "localhost";
    Properties kdcConf = MiniKdc.createConf();
    if (LOG.isDebugEnabled()) {
      kdcConf.setProperty(MiniKdc.DEBUG, "true");
    }
    kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
    kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
    kdc.start();
    LOG.info("Started Mini KDC");
    File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
    testKeytab = keytabFile.getAbsolutePath();
    testZkServerPrincipal = "zookeeper/127.0.0.1";
    testZkClientPrincipal = "zk-client/127.0.0.1";
    testKafkaServerPrincipal = "kafka/" + hostName;
    hadoopServicePrincipal = "hadoop/" + hostName;
    testPrincipal = "client/" + hostName;
    kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
        hadoopServicePrincipal,
        testZkClientPrincipal,
        testKafkaServerPrincipal);
    testPrincipal = testPrincipal + "@" + kdc.getRealm();
    testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
    testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
    testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
    hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();
    LOG.info("-------------------------------------------------------------------");
    LOG.info("Test Principal: {}", testPrincipal);
    LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
    LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
    LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
    LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
    LOG.info("Test Keytab: {}", testKeytab);
    LOG.info("-------------------------------------------------------------------");
    // The security context is established to allow non-Hadoop applications that require
    // JAAS-based SASL/Kerberos authentication to work. However, for Hadoop-specific
    // applications the context can be reinitialized with the Hadoop configuration by
    // calling ctx.setHadoopConfiguration() for the UGI implementation to work properly.
    // See the Yarn test case module for reference.
    Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
    flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
    flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
    SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
    TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());
    populateJavaPropertyVariables();
  } catch (Exception e) {
    throw new RuntimeException("Exception occurred while preparing secure environment.", e);
  }
}
Example 9: startMiniKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@Before
public void startMiniKdc() throws Exception {
  createMiniKdcConf();
  kdc = new MiniKdc(conf, kdcDir.getRoot());
  kdc.start();
  String localhostName = "localhost.localdomain";
  String principalServerNoRealm = "majordodo/" + localhostName;
  String principalServer = "majordodo/" + localhostName + "@" + kdc.getRealm();
  String principalClientNoRealm = "majordodoclient/" + localhostName;
  String principalClient = principalClientNoRealm + "@" + kdc.getRealm();
  System.out.println("adding principal: " + principalServerNoRealm);
  System.out.println("adding principal: " + principalClientNoRealm);
  File keytabClient = new File(kerberosWorkDir.getRoot(), "majordodoclient.keytab");
  kdc.createPrincipal(keytabClient, principalClientNoRealm);
  File keytabServer = new File(kerberosWorkDir.getRoot(), "majordodoserver.keytab");
  kdc.createPrincipal(keytabServer, principalServerNoRealm);
  File jaas_file = new File(kerberosWorkDir.getRoot(), "jaas.conf");
  try (FileWriter writer = new FileWriter(jaas_file)) {
    writer.write("\n"
        + "MajordodoServer {\n"
        + " com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
        + " useKeyTab=true\n"
        + " keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
        + " storeKey=true\n"
        + " useTicketCache=false\n"
        + " principal=\"" + principalServer + "\";\n"
        + "};\n"
        + "\n"
        + "\n"
        + "\n"
        + "MajordodoClient {\n"
        + " com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
        + " useKeyTab=true\n"
        + " keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
        + " storeKey=true\n"
        + " useTicketCache=false\n"
        + " principal=\"" + principalClient + "\";\n"
        + "};\n"
    );
  }
  File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
  try (FileWriter writer = new FileWriter(krb5file)) {
    writer.write("[libdefaults]\n"
        + " default_realm = " + kdc.getRealm() + "\n"
        + "\n"
        + "\n"
        + "[realms]\n"
        + " " + kdc.getRealm() + " = {\n"
        + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
        + " }"
    );
  }
  System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
  System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
  javax.security.auth.login.Configuration.getConfiguration().refresh();
}
Example 10: startMiniKdc
import org.apache.hadoop.minikdc.MiniKdc; // import the package/class this method depends on
@Before
public void startMiniKdc() throws Exception {
  createMiniKdcConf();
  kdc = new MiniKdc(conf, kdcDir.getRoot());
  kdc.start();
  String localhostName = "localhost.localdomain";
  String principalServerNoRealm = "blazingcache/" + localhostName;
  String principalServer = "blazingcache/" + localhostName + "@" + kdc.getRealm();
  String principalClientNoRealm = "blazingcacheclient/" + localhostName;
  String principalClient = principalClientNoRealm + "@" + kdc.getRealm();
  System.out.println("adding principal: " + principalServerNoRealm);
  System.out.println("adding principal: " + principalClientNoRealm);
  File keytabClient = new File(kerberosWorkDir.getRoot(), "blazingcacheclient.keytab");
  kdc.createPrincipal(keytabClient, principalClientNoRealm);
  File keytabServer = new File(kerberosWorkDir.getRoot(), "blazingcacheserver.keytab");
  kdc.createPrincipal(keytabServer, principalServerNoRealm);
  File jaas_file = new File(kerberosWorkDir.getRoot(), "jaas.conf");
  try (FileWriter writer = new FileWriter(jaas_file)) {
    writer.write("\n"
        + "BlazingCacheServer {\n"
        + " com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
        + " useKeyTab=true\n"
        + " keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
        + " storeKey=true\n"
        + " useTicketCache=false\n"
        + " principal=\"" + principalServer + "\";\n"
        + "};\n"
        + "\n"
        + "\n"
        + "\n"
        + "BlazingCacheClient {\n"
        + " com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
        + " useKeyTab=true\n"
        + " keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
        + " storeKey=true\n"
        + " useTicketCache=false\n"
        + " principal=\"" + principalClient + "\";\n"
        + "};\n"
    );
  }
  File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
  try (FileWriter writer = new FileWriter(krb5file)) {
    writer.write("[libdefaults]\n"
        + " default_realm = " + kdc.getRealm() + "\n"
        + "\n"
        + "\n"
        + "[realms]\n"
        + " " + kdc.getRealm() + " = {\n"
        + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
        + " }"
    );
  }
  System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
  System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
  System.setProperty("sun.security.krb5.debug", "true");
  javax.security.auth.login.Configuration.getConfiguration().refresh();
}