

Java Credentials.readTokenStorageStream Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.security.Credentials.readTokenStorageStream. If you are wondering how Credentials.readTokenStorageStream is used, what it does, or where to find examples of it, the hand-picked code samples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.security.Credentials.


The following presents 9 code examples of the Credentials.readTokenStorageStream method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
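Before diving into the examples, it helps to see the full serialization round trip that most of them perform: credentials are written to a DataOutputBuffer with writeTokenStorageToStream, wrapped in a ByteBuffer, and later read back with readTokenStorageStream through a DataInputByteBuffer. Below is a minimal, self-contained sketch of that round trip; the class name CredentialsRoundTripDemo and the dummy token values are invented for illustration, and it assumes hadoop-common is on the classpath.

import java.nio.ByteBuffer;

import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class CredentialsRoundTripDemo {
  public static void main(String[] args) throws Exception {
    // Build a Credentials object holding one dummy token (identifier, password,
    // kind, and service values are made up for this sketch).
    Credentials written = new Credentials();
    Token<TokenIdentifier> token = new Token<TokenIdentifier>(
        "id".getBytes(), "password".getBytes(),
        new Text("DEMO_KIND"), new Text("demo-service"));
    written.addToken(new Text("demo-service"), token);

    // Serialize the credentials the same way YARN does before placing them
    // into a container launch context.
    DataOutputBuffer dob = new DataOutputBuffer();
    written.writeTokenStorageToStream(dob);
    ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

    // Deserialize: wrap the ByteBuffer in a DataInputByteBuffer and read the
    // token storage back into a fresh Credentials object.
    Credentials read = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    tokens.rewind();
    dibb.reset(tokens);
    read.readTokenStorageStream(dibb);

    System.out.println("Tokens recovered: " + read.getAllTokens().size());
  }
}

Note that readTokenStorageStream accepts any DataInputStream, which is why the examples below can feed it either a DataInputByteBuffer wrapping a launch-context token buffer or a stream opened on a token file on disk.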

Example 1: parseCredentials

import org.apache.hadoop.security.Credentials; // import the class that declares the method
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 21, Source file: ContainerManagerImpl.java

Example 2: setupTokens

import org.apache.hadoop.security.Credentials; // import the class that declares the method
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 36, Source file: AMLauncher.java

Example 3: parseCredentials

import org.apache.hadoop.security.Credentials; // import the class that declares the method
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 12, Source file: RMAppImpl.java

Example 4: parseCredentials

import org.apache.hadoop.security.Credentials; // import the class that declares the method
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 13, Source file: RMAppManager.java

Example 5: getContainerCredentials

import org.apache.hadoop.security.Credentials; // import the class that declares the method
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 9, Source file: TestAMAuthorization.java

Example 6: addTimelineDelegationToken

import org.apache.hadoop.security.Credentials; // import the class that declares the method
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 34, Source file: YarnClientImpl.java

Example 7: runLocalization

import org.apache.hadoop.security.Credentials; // import the class that declares the method
@SuppressWarnings("deprecation")
public int runLocalization(final InetSocketAddress nmAddr)
    throws IOException, InterruptedException {
  // load credentials
  initDirs(conf, user, appId, lfs, localDirs);
  final Credentials creds = new Credentials();
  DataInputStream credFile = null;
  try {
    // assume credentials in cwd
    // TODO: Fix
    Path tokenPath =
        new Path(String.format(TOKEN_FILE_NAME_FMT, localizerId));
    credFile = lfs.open(tokenPath);
    creds.readTokenStorageStream(credFile);
    // Explicitly deleting token file.
    lfs.delete(tokenPath, false);      
  } finally  {
    if (credFile != null) {
      credFile.close();
    }
  }
  // create localizer context
  UserGroupInformation remoteUser =
    UserGroupInformation.createRemoteUser(user);
  remoteUser.addToken(creds.getToken(LocalizerTokenIdentifier.KIND));
  final LocalizationProtocol nodeManager =
      remoteUser.doAs(new PrivilegedAction<LocalizationProtocol>() {
        @Override
        public LocalizationProtocol run() {
          return getProxy(nmAddr);
        }
      });

  // create user context
  UserGroupInformation ugi =
    UserGroupInformation.createRemoteUser(user);
  for (Token<? extends TokenIdentifier> token : creds.getAllTokens()) {
    ugi.addToken(token);
  }

  ExecutorService exec = null;
  try {
    exec = createDownloadThreadPool();
    CompletionService<Path> ecs = createCompletionService(exec);
    localizeFiles(nodeManager, ecs, ugi);
    return 0;
  } catch (Throwable e) {
    // Print traces to stdout so that they can be logged by the NM address
    // space.
    e.printStackTrace(System.out);
    return -1;
  } finally {
    try {
      if (exec != null) {
        exec.shutdownNow();
      }
      LocalDirAllocator.removeContext(appCacheDirContextName);
    } finally {
      closeFileSystems(ugi);
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 63, Source file: ContainerLocalizer.java

Example 8: testAppSubmissionWithoutDelegationToken

import org.apache.hadoop.security.Credentials; // import the class that declares the method
@Test
public void testAppSubmissionWithoutDelegationToken() throws Exception {
  conf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED, true);
  // create token2
  Text userText2 = new Text("user2");
  DelegationTokenIdentifier dtId2 =
      new DelegationTokenIdentifier(new Text("user2"), new Text("renewer2"),
        userText2);
  final Token<DelegationTokenIdentifier> token2 =
      new Token<DelegationTokenIdentifier>(dtId2.getBytes(),
        "password2".getBytes(), dtId2.getKind(), new Text("service2"));
  final MockRM rm = new TestSecurityMockRM(conf, null) {
    @Override
    protected DelegationTokenRenewer createDelegationTokenRenewer() {
      return new DelegationTokenRenewer() {
        @Override
        protected Token<?>[] obtainSystemTokensForUser(String user,
            final Credentials credentials) throws IOException {
          credentials.addToken(token2.getService(), token2);
          return new Token<?>[] { token2 };
        }
      };
    }
  };
  rm.start();

  // submit an app without delegationToken
  RMApp app = rm.submitApp(200);

  // wait for the new retrieved hdfs token.
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return rm.getRMContext().getDelegationTokenRenewer()
        .getDelegationTokens().contains(token2);
    }
  }, 1000, 20000);

  // check nm can retrieve the token
  final MockNM nm1 =
      new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
  nm1.registerNode();
  NodeHeartbeatResponse response = nm1.nodeHeartbeat(true);
  ByteBuffer tokenBuffer =
      response.getSystemCredentialsForApps().get(app.getApplicationId());
  Assert.assertNotNull(tokenBuffer);
  Credentials appCredentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  tokenBuffer.rewind();
  buf.reset(tokenBuffer);
  appCredentials.readTokenStorageStream(buf);
  Assert.assertTrue(appCredentials.getAllTokens().contains(token2));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 53, Source file: TestDelegationTokenRenewer.java

Example 9: testAttemptContainerRequest

import org.apache.hadoop.security.Credentials; // import the class that declares the method
@Test
public void testAttemptContainerRequest() throws Exception {
  final Text SECRET_KEY_ALIAS = new Text("secretkeyalias");
  final byte[] SECRET_KEY = ("secretkey").getBytes();
  Map<ApplicationAccessType, String> acls =
      new HashMap<ApplicationAccessType, String>(1);
  acls.put(ApplicationAccessType.VIEW_APP, "otheruser");
  ApplicationId appId = ApplicationId.newInstance(1, 1);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  Path jobFile = mock(Path.class);

  EventHandler eventHandler = mock(EventHandler.class);
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(new InetSocketAddress("localhost", 0));

  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");

  // setup UGI for security so tokens and keys are preserved
  jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(jobConf);

  Credentials credentials = new Credentials();
  credentials.addSecretKey(SECRET_KEY_ALIAS, SECRET_KEY);
  Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
      ("tokenid").getBytes(), ("tokenpw").getBytes(),
      new Text("tokenkind"), new Text("tokenservice"));

  TaskAttemptImpl taImpl =
      new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
          mock(TaskSplitMetaInfo.class), jobConf, taListener,
          jobToken, credentials,
          new SystemClock(), null);

  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, taImpl.getID().toString());

  ContainerLaunchContext launchCtx =
      TaskAttemptImpl.createContainerLaunchContext(acls,
          jobConf, jobToken, taImpl.createRemoteTask(),
          TypeConverter.fromYarn(jobId),
          mock(WrappedJvmID.class), taListener,
          credentials);

  Assert.assertEquals("ACLs mismatch", acls, launchCtx.getApplicationACLs());
  Credentials launchCredentials = new Credentials();

  DataInputByteBuffer dibb = new DataInputByteBuffer();
  dibb.reset(launchCtx.getTokens());
  launchCredentials.readTokenStorageStream(dibb);

  // verify all tokens specified for the task attempt are in the launch context
  for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
    Token<? extends TokenIdentifier> launchToken =
        launchCredentials.getToken(token.getService());
    Assert.assertNotNull("Token " + token.getService() + " is missing",
        launchToken);
    Assert.assertEquals("Token " + token.getService() + " mismatch",
        token, launchToken);
  }

  // verify the secret key is in the launch context
  Assert.assertNotNull("Secret key missing",
      launchCredentials.getSecretKey(SECRET_KEY_ALIAS));
  Assert.assertTrue("Secret key mismatch", Arrays.equals(SECRET_KEY,
      launchCredentials.getSecretKey(SECRET_KEY_ALIAS)));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 70, Source file: TestTaskAttemptContainerRequest.java


Note: The org.apache.hadoop.security.Credentials.readTokenStorageStream examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by various developers, and copyright remains with the original authors; please consult the corresponding project's license before distributing or using the code. Do not repost without permission.