

Java SecurityUtil.setAuthenticationMethod Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.security.SecurityUtil.setAuthenticationMethod. If you are wondering what SecurityUtil.setAuthenticationMethod does, how to call it, or what real-world uses look like, the curated code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.security.SecurityUtil.


The following sections show all eight code examples of SecurityUtil.setAuthenticationMethod that were found, sorted by popularity by default.
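Before diving into the examples, here is a minimal, self-contained sketch of the call pattern they all share: write the desired authentication method into a Configuration, then re-initialize UserGroupInformation with that configuration so the setting takes effect. The class name DemoSetAuthenticationMethod is ours, for illustration only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

public class DemoSetAuthenticationMethod {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Writes "kerberos" into the hadoop.security.authentication key of conf.
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
    // The setting only takes effect once UGI is re-initialized with conf.
    UserGroupInformation.setConfiguration(conf);
    System.out.println("Security enabled: "
        + UserGroupInformation.isSecurityEnabled());
  }
}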

Example 1: createSecureConfig

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
private Configuration createSecureConfig(String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY,
           "true");//https://issues.apache.org/jira/browse/HDFS-7431
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Developer ID: jiangxiluning, Project: kafka-connect-hdfs, Lines of code: 22, Source: TestWithSecureMiniDFSCluster.java

Example 2: createSecureConfig

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  return conf;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 29, Source: SaslDataTransferTestCase.java

Example 3: setUp

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
@BeforeClass
public static void setUp() {
  conf = new Configuration();
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);    
  UserGroupInformation.setLoginUser(
      UserGroupInformation.createUserForTesting(
          "LoginUser", new String[]{"supergroup"}));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 10, Source: TestWebHdfsTokens.java

Example 4: testLazyTokenFetchForWebhdfs

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
@SuppressWarnings("unchecked") // for any(Token.class)
@Test
public void testLazyTokenFetchForWebhdfs() throws Exception {
  MiniDFSCluster cluster = null;
  WebHdfsFileSystem fs = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    UserGroupInformation.setConfiguration(clusterConf);
    
    uri = DFSUtil.createUri(
        "webhdfs", cluster.getNameNode().getHttpAddress());
    validateLazyTokenFetch(clusterConf);
  } finally {
    IOUtils.cleanup(null, fs);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 30, Source: TestWebHdfsTokens.java

Example 5: setUp

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
@Before
public void setUp() throws Exception {
  conf = new YarnConfiguration();
  AuthenticationMethod authMethod = AuthenticationMethod.SIMPLE;
  if (isSecurityEnabled) {
    authMethod = AuthenticationMethod.KERBEROS;
  }
  SecurityUtil.setAuthenticationMethod(authMethod, conf);
  UserGroupInformation.setConfiguration(conf);

  rmDispatcher = new DrainDispatcher();
  ContainerAllocationExpirer containerAllocationExpirer = 
      mock(ContainerAllocationExpirer.class);
  AMLivelinessMonitor amLivelinessMonitor = mock(AMLivelinessMonitor.class);
  AMLivelinessMonitor amFinishingMonitor = mock(AMLivelinessMonitor.class);
  store = mock(RMStateStore.class);
  writer = mock(RMApplicationHistoryWriter.class);
  DelegationTokenRenewer renewer = mock(DelegationTokenRenewer.class);
  RMContext realRMContext = 
      new RMContextImpl(rmDispatcher,
        containerAllocationExpirer, amLivelinessMonitor, amFinishingMonitor,
        renewer, new AMRMTokenSecretManager(conf, this.rmContext),
        new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(),
        writer);
  ((RMContextImpl)realRMContext).setStateStore(store);
  publisher = mock(SystemMetricsPublisher.class);
  ((RMContextImpl)realRMContext).setSystemMetricsPublisher(publisher);

  this.rmContext = spy(realRMContext);

  ResourceScheduler resourceScheduler = mock(ResourceScheduler.class);
  doReturn(null).when(resourceScheduler)
            .getAppResourceUsageReport((ApplicationAttemptId)Matchers.any());
  doReturn(resourceScheduler).when(rmContext).getScheduler();

  rmDispatcher.register(RMAppAttemptEventType.class,
      new TestApplicationAttemptEventDispatcher(this.rmContext));

  rmDispatcher.register(RMAppEventType.class,
      new TestApplicationEventDispatcher(rmContext));
  
  rmDispatcher.register(RMAppManagerEventType.class,
      new TestApplicationManagerEventDispatcher());
  
  schedulerDispatcher = new TestSchedulerEventDispatcher();
  rmDispatcher.register(SchedulerEventType.class,
      schedulerDispatcher);
  
  rmDispatcher.init(conf);
  rmDispatcher.start();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 54, Source: TestRMAppTransitions.java

Example 6: testSecureAuthParamsInUrl

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
@Test(timeout=60000)
public void testSecureAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send user
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send user
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);
  
  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send user
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send user
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);    
}
 
Developer ID: naver, Project: hadoop, Lines of code: 80, Source: TestWebHdfsUrl.java

Example 7: testSetTokenServiceAndKind

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
@Test
public void testSetTokenServiceAndKind() throws Exception {
  MiniDFSCluster cluster = null;

  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys
            .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem
            (clusterConf, "webhdfs");
    Whitebox.setInternalState(fs, "canRefreshDelegationToken", true);

    URLConnectionFactory factory = new URLConnectionFactory(new ConnectionConfigurator() {
      @Override
      public HttpURLConnection configure(HttpURLConnection conn)
              throws IOException {
        return conn;
      }
    }) {
      @Override
      public URLConnection openConnection(URL url) throws IOException {
        return super.openConnection(new URL(url + "&service=foo&kind=bar"));
      }
    };
    Whitebox.setInternalState(fs, "connectionFactory", factory);
    Token<?> token1 = fs.getDelegationToken();
    Assert.assertEquals(new Text("bar"), token1.getKind());

    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    Token<DelegationTokenIdentifier> token2 =
        fs.new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
            op, null, new RenewerParam(null)) {
          @Override
          Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json)
              throws IOException {
            return JsonUtil.toDelegationToken(json);
          }
        }.run();

    Assert.assertEquals(new Text("bar"), token2.getKind());
    Assert.assertEquals(new Text("foo"), token2.getService());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 56, Source: TestWebHdfsTokens.java

Example 8: init

import org.apache.hadoop.security.SecurityUtil; // import the package/class this method depends on
@BeforeClass
public static void init() throws Exception {
  baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
    TestSecureNNWithQJM.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());

  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
    baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
    UserGroupInformation.isSecurityEnabled());

  String userName = UserGroupInformation.getLoginUser().getShortUserName();
  File keytabFile = new File(baseDir, userName + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
    userName + "/" + krbInstance,
    "HTTP/" + krbInstance);
  String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();

  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
    spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(
    TestSecureNNWithQJM.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 52, Source: TestSecureNNWithQJM.java


Note: The org.apache.hadoop.security.SecurityUtil.setAuthenticationMethod examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are drawn from open-source projects contributed by the community, and copyright remains with the original authors. Consult each project's license before distributing or using the code, and do not reproduce this article without permission.