当前位置: 首页>>代码示例>>Java>>正文


Java MiniKdc.createPrincipal方法代码示例

本文整理汇总了Java中org.apache.hadoop.minikdc.MiniKdc.createPrincipal方法的典型用法代码示例。如果您正苦于以下问题:Java MiniKdc.createPrincipal方法的具体用法?Java MiniKdc.createPrincipal怎么用?Java MiniKdc.createPrincipal使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.minikdc.MiniKdc的用法示例。


在下文中一共展示了MiniKdc.createPrincipal方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: setUpMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void setUpMiniKdc() throws Exception {
  // Boot an in-process KDC in the test directory and write a single keytab
  // covering every principal the KMS tests authenticate as.
  File kdcDir = getTestDir();
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, kdcDir);
  kdc.start();
  keytab = new File(kdcDir, "keytab");
  List<String> principals = new ArrayList<>();
  principals.add("HTTP/localhost"); // SPNEGO principal for the KMS HTTP endpoint
  principals.add("client");
  principals.add("hdfs");
  principals.add("otheradmin");
  principals.add("client/host");
  principals.add("client1");
  // One principal per KMS ACL type so ACL-specific tests can log in as each role.
  for (KMSACLs.Type type : KMSACLs.Type.values()) {
    principals.add(type.toString());
  }
  principals.add("CREATE_MATERIAL");
  principals.add("ROLLOVER_MATERIAL");
  // toArray(new String[0]) is the idiomatic form; presizing with
  // principals.size() buys nothing and is slower on modern JVMs.
  kdc.createPrincipal(keytab, principals.toArray(new String[0]));
}
 
开发者ID:nucypher,项目名称:hadoop-oss,代码行数:23,代码来源:TestKMS.java

示例2: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void startMiniKdc() throws Exception {
  // Work directory named after the test class, under ${test.dir} (default "target").
  String baseDirName = System.getProperty("test.dir", "target");
  workDir = new File(baseDirName, TestFlumeAuthenticator.class.getSimpleName());
  flumeKeytab = new File(workDir, "flume.keytab");
  aliceKeytab = new File(workDir, "alice.keytab");
  conf = MiniKdc.createConf();

  // Start the embedded KDC before any principals can be created.
  kdc = new MiniKdc(conf, workDir);
  kdc.start();

  // Register each principal, then qualify the stored name with the KDC realm.
  kdc.createPrincipal(flumeKeytab, flumePrincipal);
  flumePrincipal = flumePrincipal + "@" + kdc.getRealm();

  kdc.createPrincipal(aliceKeytab, alicePrincipal);
  alicePrincipal = alicePrincipal + "@" + kdc.getRealm();
}
 
开发者ID:moueimei,项目名称:flume-release-1.7.0,代码行数:18,代码来源:TestFlumeAuthenticator.java

示例3: initKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void initKdc() throws Exception {
  // Fresh per-class working directory for the KDC and keytab.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // One keytab holds both the HDFS service principal and the SPNEGO principal.
  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, userName + "/localhost", "HTTP/localhost");
  // Fully-qualified principals must match the names registered above, with the
  // KDC realm appended ("name/localhost@REALM").
  hdfsPrincipal = userName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
开发者ID:naver,项目名称:hadoop,代码行数:19,代码来源:SaslDataTransferTestCase.java

示例4: setUp

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void setUp() throws Exception {
  // Start a debug-enabled MiniKdc rooted in the test data directory.
  Properties conf = MiniKdc.createConf();
  conf.put(MiniKdc.DEBUG, true);
  KDC = new MiniKdc(conf, new File(TEST_UTIL.getDataTestDir("kdc").toUri().getPath()));
  KDC.start();
  // Service principal uses the current login user's short name; HTTP principal
  // is the SPNEGO identity for the web endpoints. Both share KEYTAB_FILE.
  USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
  PRINCIPAL = USERNAME + "/" + HOST;
  HTTP_PRINCIPAL = "HTTP/" + HOST;
  KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
  TEST_UTIL.startMiniZKCluster();

  // Push the Kerberos identities into the HBase/HDFS test configuration
  // before any cluster component starts; order matters here.
  HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
  HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
  HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration());
  setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration());
  UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration());
  // TokenProvider coprocessor is required for delegation-token RPCs.
  TEST_UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
    TokenProvider.class.getName());
  TEST_UTIL.startMiniDFSCluster(1);
  Path rootdir = TEST_UTIL.getDataTestDirOnTestFS("TestGenerateDelegationToken");
  FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootdir);
  // Finally bring up a single-node local HBase cluster on the secured config.
  CLUSTER = new LocalHBaseCluster(TEST_UTIL.getConfiguration(), 1);
  CLUSTER.startup();
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:26,代码来源:TestGenerateDelegationToken.java

示例5: initKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void initKdc() throws Exception {
  // Fresh per-class working directory for the KDC and keytabs.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // Unprivileged user with a random name so repeated runs never collide.
  String userName = RandomStringUtils.randomAlphabetic(8);
  File userKeytabFile = new File(baseDir, userName + ".keytab");
  userKeyTab = userKeytabFile.getAbsolutePath();
  kdc.createPrincipal(userKeytabFile, userName + "/localhost");
  // Qualified principal must match the registration above plus the realm.
  userPrincipal = userName + "/localhost@" + kdc.getRealm();

  // Superuser keytab carries both the hdfs service and SPNEGO principals.
  String superUserName = "hdfs";
  File hdfsKeytabFile = new File(baseDir, superUserName + ".keytab");
  hdfsKeytab = hdfsKeytabFile.getAbsolutePath();
  kdc.createPrincipal(hdfsKeytabFile, superUserName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = superUserName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:25,代码来源:SaslDataTransferTestCase.java

示例6: initKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void initKdc() throws Exception {
  // Wipe and recreate the KDC working directory, then start the KDC.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // HDFS superuser keytab: service principal plus SPNEGO principal.
  File keytabFile = new File(baseDir, "hdfs.keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "hdfs/localhost", "HTTP/localhost");
  // Qualified principals: registered name + "@" + realm.
  hdfsPrincipal = "hdfs/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();

  // Separate keytab for the HDFS connector user.
  keytabFile = new File(baseDir, "connect-hdfs.keytab");
  connectorKeytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "connect-hdfs/localhost");
  connectorPrincipal = "connect-hdfs/localhost@" + kdc.getRealm();
}
 
开发者ID:qubole,项目名称:streamx,代码行数:21,代码来源:TestWithSecureMiniDFSCluster.java

示例7: setUpKerberos

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@Before
public void setUpKerberos() throws Exception {
   // Spin up a throwaway KDC under the JUnit temporary folder.
   kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
   kdc.start();

   // hard coded match, default_keytab_name in minikdc-krb5.conf template
   File keytabFile = new File("target/test.krb5.keytab");
   kdc.createPrincipal(keytabFile, "client", "amqp/localhost");

   // When debugging, crank the JDK SASL logger (and its handlers) to FINEST.
   if (debug) {
      java.util.logging.Logger saslLogger = java.util.logging.Logger.getLogger("javax.security.sasl");
      saslLogger.setLevel(java.util.logging.Level.FINEST);
      saslLogger.addHandler(new java.util.logging.ConsoleHandler());
      for (java.util.logging.Handler h : saslLogger.getHandlers()) {
         h.setLevel(java.util.logging.Level.FINEST);
      }
   }
}
 
开发者ID:apache,项目名称:activemq-artemis,代码行数:19,代码来源:JMSSaslGssapiTest.java

示例8: setUpKdcServer

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
private static void setUpKdcServer() throws Exception {
  // Debug-enabled KDC rooted in the test data directory.
  Properties kdcProps = MiniKdc.createConf();
  kdcProps.put(MiniKdc.DEBUG, true);
  File kdcDir = new File(UTIL.getDataTestDir("kdc").toUri().getPath());
  KDC = new MiniKdc(kdcProps, kdcDir);
  KDC.start();
  USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
  SERVER_PRINCIPAL = USERNAME + "/" + LOCALHOST;
  HTTP_PRINCIPAL = "HTTP/" + LOCALHOST;
  // Every test identity (server, SPNEGO, and one per ACL role) shares
  // a single keytab; order of registration is preserved.
  String[] principals = {
    SERVER_PRINCIPAL,
    HTTP_PRINCIPAL,
    USER_ADMIN + "/" + LOCALHOST,
    USER_OWNER + "/" + LOCALHOST,
    USER_RX + "/" + LOCALHOST,
    USER_RO + "/" + LOCALHOST,
    USER_XO + "/" + LOCALHOST,
    USER_NONE + "/" + LOCALHOST,
  };
  KDC.createPrincipal(KEYTAB_FILE, principals);
}
 
开发者ID:apache,项目名称:hbase,代码行数:20,代码来源:TestSecureExport.java

示例9: setupKDC

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void setupKDC() throws Exception {
  // KDC working directory named after the test class, under target/.
  String rootName = TestRMWebServicesDelegationTokens.class.getName() + "-root";
  testRootDir = new File("target", rootName);
  testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
  testMiniKDC.start();
  // One keytab shared by the SPNEGO server principal and three client principals.
  testMiniKDC.createPrincipal(httpSpnegoKeytabFile, "HTTP/localhost",
      "client", "client2", "client3");
}
 
开发者ID:naver,项目名称:hadoop,代码行数:10,代码来源:TestRMWebServicesDelegationTokens.java

示例10: setUp

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void setUp() throws Exception {
  // Debug-enabled KDC rooted in the test data directory.
  Properties kdcConf = MiniKdc.createConf();
  kdcConf.put(MiniKdc.DEBUG, true);
  File kdcDir = new File(TEST_UTIL.getDataTestDir("kdc").toUri().getPath());
  KDC = new MiniKdc(kdcConf, kdcDir);
  KDC.start();
  // Register the hbase service principal and point the Kerberos test
  // utilities at the resulting keytab and realm-qualified principal.
  PRINCIPAL = "hbase/" + HOST;
  KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL);
  HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
  HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:12,代码来源:TestUsersOperationsWithSecureHadoop.java

示例11: beforeClass

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void beforeClass() throws Exception {
  // Unique per-run test directory; KDC lives in its own subdirectory.
  testDir = new File("target", UUID.randomUUID().toString()).getAbsoluteFile();
  Assert.assertTrue(testDir.mkdirs());

  File kdcDir = new File(testDir, KDC);
  Assert.assertTrue(kdcDir.mkdirs());
  keytabFile = new File(testDir, TEST_KEYTAB);

  miniKdc = new MiniKdc(MiniKdc.createConf(), kdcDir);
  miniKdc.start();
  miniKdc.createPrincipal(keytabFile, KAFKA_BROKER_PRINCIPAL, KAFKA_CLIENT_PRINCIPAL);

  // Materialize the JAAS config with the real keytab path substituted in.
  // NOTE(review): replaceAll treats the replacement specially ('\' and '$');
  // a Windows absolute path here would corrupt the output — confirm whether
  // Matcher.quoteReplacement is needed.
  jaasConfigFile = new File(testDir, KAFKA_JAAS_CONF);
  jaasConfigFile.createNewFile();
  jaasConfigFile.setReadable(true);
  String jaasConf = JAAS_CONF.replaceAll("keyTabFile", keytabFile.getAbsolutePath());
  // try-with-resources: the original leaked the stream if IOUtils.write threw.
  try (FileOutputStream outputStream = new FileOutputStream(jaasConfigFile)) {
    IOUtils.write(jaasConf, outputStream);
  }

  plainTextPort = NetworkUtils.getRandomPort();
  securePort = NetworkUtils.getRandomPort();

  // reload configuration when getConfiguration is called next
  Configuration.setConfiguration(null);
  System.setProperty(JAVA_SECURITY_AUTH_LOGIN_CONFIG, jaasConfigFile.getAbsolutePath());

  SecureKafkaBase.beforeClass();
}
 
开发者ID:streamsets,项目名称:datacollector,代码行数:31,代码来源:TestSaslEnabledKafka.java

示例12: startKdc

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void startKdc() throws Exception {
  // Unique per-run test directory so repeated runs never collide.
  testDir = new File("target", UUID.randomUUID().toString()).getAbsoluteFile();
  Assert.assertTrue(testDir.mkdirs());
  File kdcDir = new File(testDir, "kdc");
  Assert.assertTrue(kdcDir.mkdirs());
  keytabFile = new File(testDir, "test.keytab");
  // NOTE(review): kdcDir is created above but the KDC is rooted at testDir —
  // kdcDir is otherwise unused. Looks like testDir should be kdcDir here;
  // confirm against the upstream project before changing.
  miniKdc = new MiniKdc(MiniKdc.createConf(), testDir);
  miniKdc.start();
  // Two test principals ("foo" user, "bar/localhost" service) in one keytab.
  miniKdc.createPrincipal(keytabFile, "foo", "bar/localhost");
}
 
开发者ID:streamsets,项目名称:datacollector,代码行数:12,代码来源:TestSecurityContext.java

示例13: setUp

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void setUp() throws Exception {
  try {
    // Start the KDC and register the SPNEGO HTTP principal in the keytab.
    testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
    testMiniKDC.start();
    testMiniKDC.createPrincipal(
        httpSpnegoKeytabFile, HTTP_USER + "/localhost");
  } catch (Exception e) {
    // Fail with the cause attached: the original assertTrue(msg, false)
    // discarded the exception, making KDC startup failures undiagnosable.
    throw new AssertionError("Couldn't setup MiniKDC", e);
  }
  // try-with-resources closes the writer even if write() throws
  // (the original leaked the FileWriter on failure).
  try (Writer w = new FileWriter(secretFile)) {
    w.write("secret");
  }
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:15,代码来源:TestHttpServerWithSpengo.java

示例14: setUpKerberos

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void setUpKerberos() throws Exception {
    servicePrincipal = prepareServiceName();
    LOG.info("Using service principal: " + servicePrincipal);

    // Run the KDC out of a unique temporary directory under target/.
    Path targetDir = FileSystems.getDefault().getPath("target");
    File kdcRoot = Files.createTempDirectory(targetDir, "junit.SaslGssApiIntegrationTest.").toFile();

    kdc = new MiniKdc(MiniKdc.createConf(), new File(kdcRoot, "kdc"));
    kdc.start();

    // hard coded match, default_keytab_name in minikdc-krb5.conf template
    File keytabFile = new File(KRB5_KEYTAB);
    kdc.createPrincipal(keytabFile, CLIENT_PRINCIPAL_LOGIN_CONFIG, CLIENT_PRINCIPAL_FACTORY_USERNAME,
            CLIENT_PRINCIPAL_URI_USERNAME, CLIENT_PRINCIPAL_DEFAULT_CONFIG_SCOPE, servicePrincipal);

    if (DEBUG) {
        // Dump every keytab entry, then raise the JDK SASL logging to FINEST.
        Keytab kt = Keytab.read(keytabFile);
        for (KeytabEntry entry : kt.getEntries()) {
            LOG.info("KeyTab Entry: PrincipalName:" + entry.getPrincipalName() + " ; KeyInfo:"+ entry.getKey().getKeyType());
        }

        java.util.logging.Logger saslLogger = java.util.logging.Logger.getLogger("javax.security.sasl");
        saslLogger.setLevel(java.util.logging.Level.FINEST);
        saslLogger.addHandler(new java.util.logging.ConsoleHandler());
        for (java.util.logging.Handler h : saslLogger.getHandlers()) {
            h.setLevel(java.util.logging.Level.FINEST);
        }
    }
}
 
开发者ID:apache,项目名称:qpid-jms,代码行数:32,代码来源:SaslGssApiIntegrationTest.java

示例15: init

import org.apache.hadoop.minikdc.MiniKdc; //导入方法依赖的package包/类
@BeforeClass
public static void init() throws Exception {
  // Fresh per-class working directory for the KDC, keytab, and SSL material.
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  // Switch the HDFS configuration to Kerberos and verify security is on
  // before creating any principals or clusters.
  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  // One keytab with both the HDFS service principal and the SPNEGO principal.
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  // Same identity/keytab for NameNode, DataNode, and JournalNode; SPNEGO
  // principal for the web endpoints.
  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  // SASL-protected data transfer over HTTPS-only, on ephemeral ports.
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  // Generate keystores/truststores for the HTTPS endpoints.
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:52,代码来源:TestSecureNNWithQJM.java


注:本文中的org.apache.hadoop.minikdc.MiniKdc.createPrincipal方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。