

Java SecurityUtil.setAuthenticationMethod Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.security.SecurityUtil.setAuthenticationMethod, drawn from open-source projects. If you are wondering what SecurityUtil.setAuthenticationMethod does, how to call it, or where it is used in practice, the curated examples below should help. You can also explore the other usage examples of org.apache.hadoop.security.SecurityUtil.


The sections below present 8 code examples of SecurityUtil.setAuthenticationMethod, ordered by popularity.
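Before diving into the examples, note what the method actually does: SecurityUtil.setAuthenticationMethod(method, conf) writes the chosen authentication method into the hadoop.security.authentication property of the given Configuration. On its own that changes nothing; every example below also hands the configuration to UserGroupInformation.setConfiguration so the static UGI state picks it up. Here is a minimal sketch of that common pattern (the class name SecurityUtilDemo is ours for illustration, not taken from any of the quoted projects):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

public class SecurityUtilDemo { // hypothetical demo class, not from the quoted projects
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Writes "kerberos" into the hadoop.security.authentication property of conf
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
    // UGI caches security settings statically; it must be handed the
    // updated configuration before the new method takes effect
    UserGroupInformation.setConfiguration(conf);
    System.out.println("Security enabled: "
        + UserGroupInformation.isSecurityEnabled());
  }
}

Several of the test cases below (Examples 4 and 7) exploit this pairing to fake a secure cluster: they start the cluster under SIMPLE authentication to avoid a keytab login, then flip the same configuration to KERBEROS so client code behaves as if security were on.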

Example 1: createSecureConfig

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
private Configuration createSecureConfig(String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  // Encrypt data transfer; see https://issues.apache.org/jira/browse/HDFS-7431
  conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "true");
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 22, Source: TestWithSecureMiniDFSCluster.java

Example 2: createSecureConfig

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Developer: naver, Project: hadoop, Lines: 29, Source: SaslDataTransferTestCase.java

Example 3: setUp

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
@BeforeClass
public static void setUp() {
  conf = new Configuration();
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);    
  UserGroupInformation.setLoginUser(
      UserGroupInformation.createUserForTesting(
          "LoginUser", new String[]{"supergroup"}));
}
 
Developer: naver, Project: hadoop, Lines: 10, Source: TestWebHdfsTokens.java

Example 4: testLazyTokenFetchForWebhdfs

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
@SuppressWarnings("unchecked") // for any(Token.class)
@Test
public void testLazyTokenFetchForWebhdfs() throws Exception {
  MiniDFSCluster cluster = null;
  WebHdfsFileSystem fs = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    UserGroupInformation.setConfiguration(clusterConf);
    
    uri = DFSUtil.createUri(
        "webhdfs", cluster.getNameNode().getHttpAddress());
    validateLazyTokenFetch(clusterConf);
  } finally {
    IOUtils.cleanup(null, fs);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Developer: naver, Project: hadoop, Lines: 30, Source: TestWebHdfsTokens.java

Example 5: setUp

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
@Before
public void setUp() throws Exception {
  conf = new YarnConfiguration();
  AuthenticationMethod authMethod = AuthenticationMethod.SIMPLE;
  if (isSecurityEnabled) {
    authMethod = AuthenticationMethod.KERBEROS;
  }
  SecurityUtil.setAuthenticationMethod(authMethod, conf);
  UserGroupInformation.setConfiguration(conf);

  rmDispatcher = new DrainDispatcher();
  ContainerAllocationExpirer containerAllocationExpirer = 
      mock(ContainerAllocationExpirer.class);
  AMLivelinessMonitor amLivelinessMonitor = mock(AMLivelinessMonitor.class);
  AMLivelinessMonitor amFinishingMonitor = mock(AMLivelinessMonitor.class);
  store = mock(RMStateStore.class);
  writer = mock(RMApplicationHistoryWriter.class);
  DelegationTokenRenewer renewer = mock(DelegationTokenRenewer.class);
  RMContext realRMContext = 
      new RMContextImpl(rmDispatcher,
        containerAllocationExpirer, amLivelinessMonitor, amFinishingMonitor,
        renewer, new AMRMTokenSecretManager(conf, this.rmContext),
        new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(),
        writer);
  ((RMContextImpl)realRMContext).setStateStore(store);
  publisher = mock(SystemMetricsPublisher.class);
  ((RMContextImpl)realRMContext).setSystemMetricsPublisher(publisher);

  this.rmContext = spy(realRMContext);

  ResourceScheduler resourceScheduler = mock(ResourceScheduler.class);
  doReturn(null).when(resourceScheduler)
            .getAppResourceUsageReport((ApplicationAttemptId)Matchers.any());
  doReturn(resourceScheduler).when(rmContext).getScheduler();

  rmDispatcher.register(RMAppAttemptEventType.class,
      new TestApplicationAttemptEventDispatcher(this.rmContext));

  rmDispatcher.register(RMAppEventType.class,
      new TestApplicationEventDispatcher(rmContext));
  
  rmDispatcher.register(RMAppManagerEventType.class,
      new TestApplicationManagerEventDispatcher());
  
  schedulerDispatcher = new TestSchedulerEventDispatcher();
  rmDispatcher.register(SchedulerEventType.class,
      schedulerDispatcher);
  
  rmDispatcher.init(conf);
  rmDispatcher.start();
}
 
Developer: naver, Project: hadoop, Lines: 54, Source: TestRMAppTransitions.java

Example 6: testSecureAuthParamsInUrl

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
@Test(timeout=60000)
public void testSecureAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send user
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send user
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);
  
  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send user
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send user
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);    
}
 
Developer: naver, Project: hadoop, Lines: 80, Source: TestWebHdfsUrl.java

Example 7: testSetTokenServiceAndKind

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
@Test
public void testSetTokenServiceAndKind() throws Exception {
  MiniDFSCluster cluster = null;

  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
            .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem
            (clusterConf, "webhdfs");
    Whitebox.setInternalState(fs, "canRefreshDelegationToken", true);

    URLConnectionFactory factory = new URLConnectionFactory(new ConnectionConfigurator() {
      @Override
      public HttpURLConnection configure(HttpURLConnection conn)
              throws IOException {
        return conn;
      }
    }) {
      @Override
      public URLConnection openConnection(URL url) throws IOException {
        return super.openConnection(new URL(url + "&service=foo&kind=bar"));
      }
    };
    Whitebox.setInternalState(fs, "connectionFactory", factory);
    Token<?> token1 = fs.getDelegationToken();
    Assert.assertEquals(new Text("bar"), token1.getKind());

    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    Token<DelegationTokenIdentifier> token2 =
        fs.new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
            op, null, new RenewerParam(null)) {
          @Override
          Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json)
              throws IOException {
            return JsonUtil.toDelegationToken(json);
          }
        }.run();

    Assert.assertEquals(new Text("bar"), token2.getKind());
    Assert.assertEquals(new Text("foo"), token2.getService());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Developer: naver, Project: hadoop, Lines: 56, Source: TestWebHdfsTokens.java

Example 8: init

import org.apache.hadoop.security.SecurityUtil; // import the package/class the method depends on
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
 
Developer: naver, Project: hadoop, Lines: 52, Source: TestSecureNNWithQJM.java


Note: The org.apache.hadoop.security.SecurityUtil.setAuthenticationMethod examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective authors, who retain copyright; consult each project's license before redistributing or reusing the code, and do not republish without permission.