当前位置: 首页>>代码示例>>Java>>正文


Java MiniKdc.getRealm方法代码示例

本文整理汇总了Java中org.apache.hadoop.minikdc.MiniKdc.getRealm方法的典型用法代码示例。如果您正苦于以下问题:Java MiniKdc.getRealm方法的具体用法?Java MiniKdc.getRealm怎么用?Java MiniKdc.getRealm使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.minikdc.MiniKdc的用法示例。


在下文中一共展示了MiniKdc.getRealm方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void startMiniKdc() throws Exception {
  // Scratch directory that hosts the KDC database and the generated keytabs.
  workDir = new File(System.getProperty("test.dir", "target"),
          TestFlumeAuthenticator.class.getSimpleName());
  flumeKeytab = new File(workDir, "flume.keytab");
  aliceKeytab = new File(workDir, "alice.keytab");

  // Boot an in-process KDC with the default MiniKdc configuration.
  conf = MiniKdc.createConf();
  kdc = new MiniKdc(conf, workDir);
  kdc.start();

  // Register each principal in the KDC, exporting its key to a keytab,
  // then fully qualify the principal name with the KDC's realm.
  kdc.createPrincipal(flumeKeytab, flumePrincipal);
  flumePrincipal = flumePrincipal + "@" + kdc.getRealm();
  kdc.createPrincipal(aliceKeytab, alicePrincipal);
  alicePrincipal = alicePrincipal + "@" + kdc.getRealm();
}
 
开发者ID:moueimei,项目名称:flume-release-1.7.0,代码行数:18,代码来源:TestFlumeAuthenticator.java

示例2: initKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void initKdc() throws Exception {
  // Fresh working directory for the KDC database and keytab.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  // Start an in-process MiniKdc with default settings.
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // One keytab carries both the HDFS service principal and the SPNEGO
  // (HTTP) principal for the current login user.
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, userName + "/localhost", "HTTP/localhost");
  // FIX: the principal literals had been mangled by an email-obfuscation
  // scraper ("[email protected]"); they must match the "/localhost" instance
  // created above, qualified with the KDC realm.
  hdfsPrincipal = userName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
开发者ID:naver,项目名称:hadoop,代码行数:19,代码来源:SaslDataTransferTestCase.java

示例3: initKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void initKdc() throws Exception {
  // Fresh working directory for the KDC database and keytabs.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  // Start an in-process MiniKdc with default settings.
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // Ordinary (non-superuser) test principal with a random name.
  String userName = RandomStringUtils.randomAlphabetic(8);
  File userKeytabFile = new File(baseDir, userName + ".keytab");
  userKeyTab = userKeytabFile.getAbsolutePath();
  kdc.createPrincipal(userKeytabFile, userName + "/localhost");
  // FIX: the principal literals had been mangled by an email-obfuscation
  // scraper ("[email protected]"); they must match the "/localhost" instances
  // created here, qualified with the KDC realm.
  userPrincipal = userName + "/localhost@" + kdc.getRealm();

  // HDFS superuser principal; its keytab also holds the SPNEGO (HTTP) key.
  String superUserName = "hdfs";
  File hdfsKeytabFile = new File(baseDir, superUserName + ".keytab");
  hdfsKeytab = hdfsKeytabFile.getAbsolutePath();
  kdc.createPrincipal(hdfsKeytabFile, superUserName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = superUserName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:25,代码来源:SaslDataTransferTestCase.java

示例4: initKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void initKdc() throws Exception {
  // Fresh working directory for the KDC database and keytabs.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // HDFS service keytab carrying both the hdfs and SPNEGO (HTTP) keys.
  File keytabFile = new File(baseDir, "hdfs" + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "hdfs" + "/localhost", "HTTP/localhost");
  // FIX: the principal literals had been mangled by an email-obfuscation
  // scraper ("[email protected]"); they must match the "/localhost" instances
  // created here, qualified with the KDC realm.
  hdfsPrincipal = "hdfs" + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();

  // Separate principal/keytab for the HDFS connector under test.
  keytabFile = new File(baseDir, "connect-hdfs" + ".keytab");
  connectorKeytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "connect-hdfs/localhost");
  connectorPrincipal = "connect-hdfs/localhost@" + kdc.getRealm();
}
 
开发者ID:qubole,项目名称:streamx,代码行数:21,代码来源:TestWithSecureMiniDFSCluster.java

示例5: init

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void init() throws Exception {
  // Clean working directory for the KDC database, keytab and SSL keystores.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  // Start an in-process MiniKdc with default settings.
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // Switch the shared HDFS configuration to Kerberos authentication and
  // verify security is actually enabled before continuing.
  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  // One keytab holds both the HDFS service principal and the SPNEGO
  // (HTTP) principal for the current login user.
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  // Point NameNode, DataNode and JournalNode at the same principal/keytab,
  // require SASL-authenticated data transfer, and serve all web UIs over
  // HTTPS on ephemeral ports.
  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  // Generate self-signed SSL material for the HTTPS endpoints.
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:52,代码来源:TestSecureNNWithQJM.java

示例6: init

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void init() throws Exception {
  // Clean working directory for the KDC database, keytab and SSL keystores.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  // Start an in-process MiniKdc with default settings.
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // Switch the shared HDFS configuration to Kerberos authentication and
  // verify security is actually enabled before continuing.
  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  // One keytab holds both the HDFS service principal and the SPNEGO
  // (HTTP) principal for the current login user.
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  // Point NameNode, DataNode and JournalNode at the same principal/keytab,
  // require SASL-authenticated data transfer, and serve all web UIs over
  // HTTPS on ephemeral ports.
  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  // Generate self-signed SSL material for the HTTPS endpoints, then wire
  // the client and server SSL config resources into the configuration.
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
  baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:56,代码来源:TestSecureNNWithQJM.java

示例7: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@Before
public void startMiniKdc() throws Exception {

    // Boot an in-process KDC; its realm/host/port are used below to build
    // the JAAS and krb5 configuration files.
    conf = MiniKdc.createConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();

    String localhostName = "localhost.localdomain";
    String principalServerNoRealm = "herddb/" + localhostName;
    String principalServer = "herddb/" + localhostName + "@" + kdc.getRealm();
    String principalClientNoRealm = "herddbclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();

    System.out.println("adding principal: " + principalServerNoRealm);
    System.out.println("adding principal: " + principalClientNoRealm);

    File keytabClient = new File(workDir.getRoot(), "herddbclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);

    File keytabServer = new File(workDir.getRoot(), "herddbserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);

    // JAAS login configuration with one entry for the server and one for
    // the client, both using Krb5LoginModule with keytab-based login.
    // FIX: the keyTab values were missing their closing double quote
    // (keyTab="path<newline>), which makes the JAAS file unparseable.
    File jaas_file = new File(workDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaas_file)) {
        writer.write("\n"
            + "HerdDBServer {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalServer + "\";\n"
            + "};\n"
            + "\n"
            + "\n"
            + "\n"
            + "HerdDBClient {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalClient + "\";\n"
            + "};\n"
        );

    }

    // Minimal krb5.conf pointing at the MiniKdc's realm and endpoint.
    File krb5file = new File(workDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
            + " default_realm = " + kdc.getRealm() + "\n"
            + "\n"
            + "\n"
            + "[realms]\n"
            + " " + kdc.getRealm() + "  = {\n"
            + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
            + " }"
        );

    }

    // Make the JVM pick up the generated JAAS and Kerberos configuration.
    System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());

}
 
开发者ID:diennea,项目名称:herddb,代码行数:67,代码来源:JAASKerberosTest.java

示例8: prepare

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
public static void prepare(TemporaryFolder tempFolder) {

		try {
			// Base directory holding the KDC database and the shared keytab.
			File baseDirForSecureRun = tempFolder.newFolder();
			LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);

			// Start an in-process MiniKdc bound to localhost.
			String hostName = "localhost";
			Properties kdcConf = MiniKdc.createConf();
			if (LOG.isDebugEnabled()) {
				kdcConf.setProperty(MiniKdc.DEBUG, "true");
			}
			kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
			kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
			kdc.start();
			LOG.info("Started Mini KDC");

			// A single keytab is shared by all test principals (client,
			// ZooKeeper server/client, Kafka server, Hadoop service).
			File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
			testKeytab = keytabFile.getAbsolutePath();
			testZkServerPrincipal = "zookeeper/127.0.0.1";
			testZkClientPrincipal = "zk-client/127.0.0.1";
			testKafkaServerPrincipal = "kafka/" + hostName;
			hadoopServicePrincipal = "hadoop/" + hostName;
			testPrincipal = "client/" + hostName;

			kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal,
					hadoopServicePrincipal,
					testZkClientPrincipal,
					testKafkaServerPrincipal);

			// Qualify every principal with the realm of the running KDC.
			testPrincipal = testPrincipal + "@" + kdc.getRealm();
			testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
			testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
			testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
			hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();

			LOG.info("-------------------------------------------------------------------");
			LOG.info("Test Principal: {}", testPrincipal);
			LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
			LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
			LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
			LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
			LOG.info("Test Keytab: {}", testKeytab);
			LOG.info("-------------------------------------------------------------------");

			//Security Context is established to allow non hadoop applications that requires JAAS
			//based SASL/Kerberos authentication to work. However, for Hadoop specific applications
			//the context can be reinitialized with Hadoop configuration by calling
			//ctx.setHadoopConfiguration() for the UGI implementation to work properly.
			//See Yarn test case module for reference
			Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
			flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
			flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
			flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
			SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
			TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());

			populateJavaPropertyVariables();

		} catch (Exception e) {
			// Setup failures are surfaced as unchecked so JUnit fails fast.
			throw new RuntimeException("Exception occured while preparing secure environment.", e);
		}

	}
 
开发者ID:axbaretto,项目名称:flink,代码行数:66,代码来源:SecureTestEnvironment.java

示例9: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@Before
public void startMiniKdc() throws Exception {

    // Boot an in-process KDC; its realm/host/port are used below to build
    // the JAAS and krb5 configuration files.
    createMiniKdcConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();

    String localhostName = "localhost.localdomain";
    String principalServerNoRealm = "majordodo/" + localhostName;
    String principalServer = "majordodo/" + localhostName + "@" + kdc.getRealm();
    String principalClientNoRealm = "majordodoclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();

    System.out.println("adding principal: " + principalServerNoRealm);
    System.out.println("adding principal: " + principalClientNoRealm);

    File keytabClient = new File(kerberosWorkDir.getRoot(), "majordodoclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);

    File keytabServer = new File(kerberosWorkDir.getRoot(), "majordodoserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);

    // JAAS login configuration with one entry for the server and one for
    // the client, both using Krb5LoginModule with keytab-based login.
    // FIX: the keyTab values were missing their closing double quote
    // (keyTab="path<newline>), which makes the JAAS file unparseable.
    File jaas_file = new File(kerberosWorkDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaas_file)) {
        writer.write("\n"
            + "MajordodoServer {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalServer + "\";\n"
            + "};\n"
            + "\n"
            + "\n"
            + "\n"
            + "MajordodoClient {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalClient + "\";\n"
            + "};\n"
        );

    }

    // Minimal krb5.conf pointing at the MiniKdc's realm and endpoint.
    File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
            + " default_realm = " + kdc.getRealm() + "\n"
            + "\n"
            + "\n"
            + "[realms]\n"
            + " " + kdc.getRealm() + "  = {\n"
            + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
            + " }"
        );

    }

    // Make the JVM pick up the generated configuration and force any
    // previously-cached JAAS configuration to be re-read.
    System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
    javax.security.auth.login.Configuration.getConfiguration().refresh();

}
 
开发者ID:diennea,项目名称:majordodo,代码行数:68,代码来源:KerberosSimpleBrokerStatusReplicationTest.java

示例10: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@Before
public void startMiniKdc() throws Exception {

    // Boot an in-process KDC; its realm/host/port are used below to build
    // the JAAS and krb5 configuration files.
    createMiniKdcConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();

    String localhostName = "localhost.localdomain";
    String principalServerNoRealm = "blazingcache/" + localhostName;
    String principalServer = "blazingcache/" + localhostName + "@" + kdc.getRealm();
    String principalClientNoRealm = "blazingcacheclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();

    System.out.println("adding principal: " + principalServerNoRealm);
    System.out.println("adding principal: " + principalClientNoRealm);

    File keytabClient = new File(kerberosWorkDir.getRoot(), "blazingcacheclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);

    File keytabServer = new File(kerberosWorkDir.getRoot(), "blazingcacheserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);

    // JAAS login configuration with one entry for the server and one for
    // the client, both using Krb5LoginModule with keytab-based login.
    // FIX: the keyTab values were missing their closing double quote
    // (keyTab="path<newline>), which makes the JAAS file unparseable.
    File jaas_file = new File(kerberosWorkDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaas_file)) {
        writer.write("\n"
            + "BlazingCacheServer {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalServer + "\";\n"
            + "};\n"
            + "\n"
            + "\n"
            + "\n"
            + "BlazingCacheClient {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalClient + "\";\n"
            + "};\n"
        );

    }

    // Minimal krb5.conf pointing at the MiniKdc's realm and endpoint.
    File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
            + " default_realm = " + kdc.getRealm() + "\n"
            + "\n"
            + "\n"
            + "[realms]\n"
            + " " + kdc.getRealm() + "  = {\n"
            + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
            + " }"
        );

    }

    // Make the JVM pick up the generated configuration, enable verbose
    // Kerberos tracing, and force cached JAAS config to be re-read.
    System.setProperty("java.security.auth.login.config", jaas_file.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
    System.setProperty("sun.security.krb5.debug", "true");
    javax.security.auth.login.Configuration.getConfiguration().refresh();

}
 
开发者ID:diennea,项目名称:blazingcache,代码行数:69,代码来源:JAASKerberosTest.java


注:本文中的org.apache.hadoop.minikdc.MiniKdc.getRealm方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。