

Java MiniKdc.start Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.minikdc.MiniKdc.start. If you have been wondering what exactly MiniKdc.start does, how to call it, or where to find usage examples, the curated code samples below should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.minikdc.MiniKdc.


A total of 15 code examples of the MiniKdc.start method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
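
Before the project-specific examples, here is a minimal, self-contained sketch of the lifecycle that all of them follow: build a configuration with MiniKdc.createConf, start the KDC, export one or more principals to a keytab, and stop the KDC when done. The work directory and the "client" principal name below are illustrative assumptions, not values taken from any of the examples.

import java.io.File;
import java.nio.file.Files;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcLifecycleSketch {
  public static void main(String[] args) throws Exception {
    // Work directory for the embedded KDC (illustrative; tests usually use a per-class test dir).
    File workDir = Files.createTempDirectory("minikdc").toFile();

    // Default MiniKdc configuration; properties such as MiniKdc.DEBUG can be tweaked before start.
    Properties conf = MiniKdc.createConf();

    MiniKdc kdc = new MiniKdc(conf, workDir);
    kdc.start(); // boots the embedded KDC on a free port
    try {
      // Export the "client" principal into a keytab for later keytab-based logins.
      File keytab = new File(workDir, "client.keytab");
      kdc.createPrincipal(keytab, "client"); // full principal becomes client@<realm>
      System.out.println("KDC realm: " + kdc.getRealm());
    } finally {
      kdc.stop(); // always stop the KDC so the port and temporary files are released
    }
  }
}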

Example 1: setUpMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void setUpMiniKdc() throws Exception {
  File kdcDir = getTestDir();
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, kdcDir);
  kdc.start();
  keytab = new File(kdcDir, "keytab");
  List<String> principals = new ArrayList<String>();
  principals.add("HTTP/localhost");
  principals.add("client");
  principals.add("hdfs");
  principals.add("otheradmin");
  principals.add("client/host");
  principals.add("client1");
  for (KMSACLs.Type type : KMSACLs.Type.values()) {
    principals.add(type.toString());
  }
  principals.add("CREATE_MATERIAL");
  principals.add("ROLLOVER_MATERIAL");
  kdc.createPrincipal(keytab,
      principals.toArray(new String[principals.size()]));
}
 
Developer: nucypher, Project: hadoop-oss, Lines of code: 23, Source file: TestKMS.java

Example 2: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void startMiniKdc() throws Exception {
  workDir = new File(System.getProperty("test.dir", "target"),
          TestFlumeAuthenticator.class.getSimpleName());
  flumeKeytab = new File(workDir, "flume.keytab");
  aliceKeytab = new File(workDir, "alice.keytab");
  conf = MiniKdc.createConf();

  kdc = new MiniKdc(conf, workDir);
  kdc.start();

  kdc.createPrincipal(flumeKeytab, flumePrincipal);
  flumePrincipal = flumePrincipal + "@" + kdc.getRealm();

  kdc.createPrincipal(aliceKeytab, alicePrincipal);
  alicePrincipal = alicePrincipal + "@" + kdc.getRealm();
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines of code: 18, Source file: TestFlumeAuthenticator.java

Example 3: initKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, userName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = userName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
Developer: naver, Project: hadoop, Lines of code: 19, Source file: SaslDataTransferTestCase.java

Example 4: setUp

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void setUp() throws Exception {
  Properties conf = MiniKdc.createConf();
  conf.put(MiniKdc.DEBUG, true);
  KDC = new MiniKdc(conf, new File(TEST_UTIL.getDataTestDir("kdc").toUri().getPath()));
  KDC.start();
  USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
  PRINCIPAL = USERNAME + "/" + HOST;
  HTTP_PRINCIPAL = "HTTP/" + HOST;
  KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
  TEST_UTIL.startMiniZKCluster();

  HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
  HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
  HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration());
  setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration());
  UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration());
  TEST_UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
    TokenProvider.class.getName());
  TEST_UTIL.startMiniDFSCluster(1);
  Path rootdir = TEST_UTIL.getDataTestDirOnTestFS("TestGenerateDelegationToken");
  FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootdir);
  CLUSTER = new LocalHBaseCluster(TEST_UTIL.getConfiguration(), 1);
  CLUSTER.startup();
}
 
Developer: fengchen8086, Project: ditb, Lines of code: 26, Source file: TestGenerateDelegationToken.java

Example 5: initKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    SaslDataTransferTestCase.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  String userName = RandomStringUtils.randomAlphabetic(8);
  File userKeytabFile = new File(baseDir, userName + ".keytab");
  userKeyTab = userKeytabFile.getAbsolutePath();
  kdc.createPrincipal(userKeytabFile, userName + "/localhost");
  userPrincipal = userName + "/localhost@" + kdc.getRealm();

  String superUserName = "hdfs";
  File hdfsKeytabFile = new File(baseDir, superUserName + ".keytab");
  hdfsKeytab = hdfsKeytabFile.getAbsolutePath();
  kdc.createPrincipal(hdfsKeytabFile, superUserName + "/localhost", "HTTP/localhost");
  hdfsPrincipal = superUserName + "/localhost@" + kdc.getRealm();
  spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines of code: 25, Source file: SaslDataTransferTestCase.java

Example 6: initKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void initKdc() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  File keytabFile = new File(baseDir, "hdfs" + ".keytab");
  keytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "hdfs" + "/localhost", "HTTP/localhost");
  hdfsPrincipal = "hdfs" + "/[email protected]" + kdc.getRealm();
  spnegoPrincipal = "HTTP/[email protected]" + kdc.getRealm();

  keytabFile = new File(baseDir, "connect-hdfs" + ".keytab");
  connectorKeytab = keytabFile.getAbsolutePath();
  kdc.createPrincipal(keytabFile, "connect-hdfs/localhost");
  connectorPrincipal = "connect-hdfs/localhost@" + kdc.getRealm();
}
 
Developer: qubole, Project: streamx, Lines of code: 21, Source file: TestWithSecureMiniDFSCluster.java

Example 7: setUpKerberos

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@Before
public void setUpKerberos() throws Exception {
   kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
   kdc.start();

   // hard coded match, default_keytab_name in minikdc-krb5.conf template
   File userKeyTab = new File("target/test.krb5.keytab");
   kdc.createPrincipal(userKeyTab, "client", "amqp/localhost");

   if (debug) {
      java.util.logging.Logger logger = java.util.logging.Logger.getLogger("javax.security.sasl");
      logger.setLevel(java.util.logging.Level.FINEST);
      logger.addHandler(new java.util.logging.ConsoleHandler());
      for (java.util.logging.Handler handler : logger.getHandlers()) {
         handler.setLevel(java.util.logging.Level.FINEST);
      }
   }
}
 
Developer: apache, Project: activemq-artemis, Lines of code: 19, Source file: JMSSaslGssapiTest.java

Example 8: setUpKdcServer

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
private static void setUpKdcServer() throws Exception {
  Properties conf = MiniKdc.createConf();
  conf.put(MiniKdc.DEBUG, true);
  File kdcFile = new File(UTIL.getDataTestDir("kdc").toUri().getPath());
  KDC = new MiniKdc(conf, kdcFile);
  KDC.start();
  USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
  SERVER_PRINCIPAL = USERNAME + "/" + LOCALHOST;
  HTTP_PRINCIPAL = "HTTP/" + LOCALHOST;
  KDC.createPrincipal(KEYTAB_FILE,
    SERVER_PRINCIPAL,
    HTTP_PRINCIPAL,
    USER_ADMIN + "/" + LOCALHOST,
    USER_OWNER + "/" + LOCALHOST,
    USER_RX + "/" + LOCALHOST,
    USER_RO + "/" + LOCALHOST,
    USER_XO + "/" + LOCALHOST,
    USER_NONE + "/" + LOCALHOST);
}
 
Developer: apache, Project: hbase, Lines of code: 20, Source file: TestSecureExport.java

Example 9: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@Before
public void startMiniKdc() throws Exception {
  // This setting below is required. If not enabled, UGI will abort
  // any attempt to loginUserFromKeytab.
  Configuration conf = new Configuration();
  conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  UserGroupInformation.setConfiguration(conf);
  workDir = folder.getRoot();
  kdc = new MiniKdc(MiniKdc.createConf(), workDir);
  kdc.start();
}
 
Developer: nucypher, Project: hadoop-oss, Lines of code: 13, Source file: TestUGILoginFromKeytab.java

Example 10: startMiniKdc

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void startMiniKdc() throws Exception {
  workDir = new File(System.getProperty("test.dir", "target"));
  securityProperties = MiniKdc.createConf();
  kdc = new MiniKdc(securityProperties, workDir);
  kdc.start();
  keytab = createKeytab("foo");
  conf = new Configuration();
  conf.set(HADOOP_SECURITY_AUTHENTICATION, "KERBEROS");
}
 
Developer: nucypher, Project: hadoop-oss, Lines of code: 11, Source file: TestKDiag.java

Example 11: setupKDC

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void setupKDC() throws Exception {
  testRootDir = new File("target",
    TestRMWebServicesDelegationTokens.class.getName() + "-root");
  testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
  testMiniKDC.start();
  testMiniKDC.createPrincipal(httpSpnegoKeytabFile, "HTTP/localhost",
    "client", "client2", "client3");
}
 
Developer: naver, Project: hadoop, Lines of code: 10, Source file: TestRMWebServicesDelegationTokens.java

Example 12: setupKDCAndPrincipals

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
/**
 * Sets up the KDC and a set of principals in the JAAS file
 *
 * @throws Exception
 */
public static void setupKDCAndPrincipals() throws Exception {
  // set up the KDC
  File target = new File(System.getProperty("test.dir", "target"));
  kdcWorkDir = new File(target, "kdc");
  kdcWorkDir.mkdirs();
  if (!kdcWorkDir.mkdirs()) {
    assertTrue(kdcWorkDir.isDirectory());
  }
  kdcConf = MiniKdc.createConf();
  kdcConf.setProperty(MiniKdc.DEBUG, "true");
  kdc = new MiniKdc(kdcConf, kdcWorkDir);
  kdc.start();

  keytab_zk = createKeytab(ZOOKEEPER, "zookeeper.keytab");
  keytab_alice = createKeytab(ALICE, "alice.keytab");
  keytab_bob = createKeytab(BOB, "bob.keytab");
  zkServerPrincipal = Shell.WINDOWS ? ZOOKEEPER_1270001 : ZOOKEEPER_LOCALHOST;

  StringBuilder jaas = new StringBuilder(1024);
  jaas.append(registrySecurity.createJAASEntry(ZOOKEEPER_CLIENT_CONTEXT,
      ZOOKEEPER, keytab_zk));
  jaas.append(registrySecurity.createJAASEntry(ZOOKEEPER_SERVER_CONTEXT,
      zkServerPrincipal, keytab_zk));
  jaas.append(registrySecurity.createJAASEntry(ALICE_CLIENT_CONTEXT,
      ALICE_LOCALHOST , keytab_alice));
  jaas.append(registrySecurity.createJAASEntry(BOB_CLIENT_CONTEXT,
      BOB_LOCALHOST, keytab_bob));

  jaasFile = new File(kdcWorkDir, "jaas.txt");
  FileUtils.write(jaasFile, jaas.toString());
  LOG.info("\n"+ jaas);
  RegistrySecurity.bindJVMtoJAASFile(jaasFile);
}
 
Developer: naver, Project: hadoop, Lines of code: 39, Source file: AbstractSecureRegistryTest.java

Example 13: setUp

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void setUp() throws Exception {
  Properties conf = MiniKdc.createConf();
  conf.put(MiniKdc.DEBUG, true);
  KDC = new MiniKdc(conf, new File(TEST_UTIL.getDataTestDir("kdc").toUri().getPath()));
  KDC.start();
  PRINCIPAL = "hbase/" + HOST;
  KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL);
  HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
  HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
}
 
Developer: fengchen8086, Project: ditb, Lines of code: 12, Source file: TestUsersOperationsWithSecureHadoop.java

Example 14: setUp

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
@BeforeClass
public static void setUp() throws Exception {
  try {
    testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
    testMiniKDC.start();
    testMiniKDC.createPrincipal(
        httpSpnegoKeytabFile, HTTP_USER + "/localhost");
  } catch (Exception e) {
    assertTrue("Couldn't setup MiniKDC", false);
  }
  Writer w = new FileWriter(secretFile);
  w.write("secret");
  w.close();
}
 
Developer: hopshadoop, Project: hops, Lines of code: 15, Source file: TestHttpServerWithSpengo.java

Example 15: startKDC

import org.apache.hadoop.minikdc.MiniKdc; // import the package/class that the method depends on
protected File startKDC() throws Exception {
    File target = Files.createTempDirectory("sectest").toFile();
    File kdcWorkDir = new File(target, "kdc");
    Properties kdcConf = MiniKdc.createConf();
    kdcConf.setProperty(MiniKdc.DEBUG, "true");
    kdc = new MiniKdc(kdcConf, kdcWorkDir);
    kdc.start();

    Assert.assertNotNull(kdc.getRealm());
    return kdcWorkDir;
}
 
Developer: apache, Project: incubator-atlas, Lines of code: 12, Source file: BaseSecurityTest.java
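
All of the examples above start the KDC in a @Before/@BeforeClass method but leave out the matching teardown. A minimal sketch of the usual counterpart, assuming a static kdc field like the ones in the setups above:

@AfterClass
public static void stopMiniKdc() {
  // Stop the embedded KDC if setup managed to start it; this releases the KDC
  // port and cleans up MiniKdc's temporary Kerberos configuration.
  if (kdc != null) {
    kdc.stop();
  }
}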


Note: The org.apache.hadoop.minikdc.MiniKdc.start examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers, and the source code is copyrighted by its original authors. Please consult each project's License before distributing or using the code, and do not reproduce this article without permission.