

Java Credentials.writeTokenStorageToStream Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.security.Credentials.writeTokenStorageToStream. If you have been wondering what Credentials.writeTokenStorageToStream does, how to call it, or what it looks like in real code, the hand-picked examples below may help. You can also explore further usage examples of the containing class, org.apache.hadoop.security.Credentials.


The following presents 9 code examples of Credentials.writeTokenStorageToStream, sorted by popularity by default.
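Before diving into the examples, here is a minimal sketch (not taken from any of the projects below) of the pattern they all share: serialize a Credentials object into an in-memory buffer with writeTokenStorageToStream, wrap the result in a ByteBuffer, and later restore it with readTokenStorageStream. The class and method names come from the Hadoop security and I/O APIs; the secret-key alias and value are placeholders.

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {

  // Serialize all tokens and secret keys into a ByteBuffer.
  static ByteBuffer serialize(Credentials creds) throws IOException {
    DataOutputBuffer dob = new DataOutputBuffer();
    creds.writeTokenStorageToStream(dob);
    // Wrap only the valid portion: getData() may return a backing array
    // larger than getLength().
    return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  }

  // Read the serialized form back into a fresh Credentials object.
  static Credentials deserialize(ByteBuffer tokens) throws IOException {
    Credentials creds = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.reset(tokens);
    creds.readTokenStorageStream(dibb);
    return creds;
  }

  public static void main(String[] args) throws IOException {
    Credentials creds = new Credentials();
    creds.addSecretKey(new Text("example-alias"), "example-secret".getBytes());
    Credentials restored = deserialize(serialize(creds));
    System.out.println("Secret keys restored: " + restored.numberOfSecretKeys());
  }
}

Most of the examples below perform exactly this serialize-and-wrap step before handing the buffer to a ContainerLaunchContext, a heartbeat response, or a protobuf builder.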

Example 1: createFakeCredentials

import org.apache.hadoop.security.Credentials; // import the class the method depends on
@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok)
      throws IOException {
    Credentials creds = new Credentials();
    byte[] password = new byte[20];
    Text kind = new Text();
    Text service = new Text();
    Text alias = new Text();
    for (int i = 0; i < nTok; ++i) {
      byte[] identifier = ("idef" + i).getBytes();
      r.nextBytes(password);
      kind.set("kind" + i);
      service.set("service" + i);
      alias.set("token" + i);
      Token token = new Token(identifier, password, kind, service);
      creds.addToken(alias, token);
    }
    DataOutputBuffer buf = new DataOutputBuffer();
    creds.writeTokenStorageToStream(buf);
    DataInputBuffer ret = new DataInputBuffer();
    ret.reset(buf.getData(), 0, buf.getLength());
    return ret;
  }
 
Developer: naver, Project: hadoop, Lines: 24, Source: TestContainerLocalizer.java

Example 2: testAppRecoverPath

import org.apache.hadoop.security.Credentials; // import the class the method depends on
@Test (timeout = 30000)
public void testAppRecoverPath() throws IOException {
  LOG.info("--- START: testAppRecoverPath ---");
  ApplicationSubmissionContext sub =
      Records.newRecord(ApplicationSubmissionContext.class);
  ContainerLaunchContext clc =
      Records.newRecord(ContainerLaunchContext.class);
  Credentials credentials = new Credentials();
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(securityTokens);
  sub.setAMContainerSpec(clc);
  testCreateAppSubmittedRecovery(sub);
}
 
Developer: naver, Project: hadoop, Lines: 17, Source: TestRMAppTransitions.java

Example 3: createBinaryTokenFile

import org.apache.hadoop.security.Credentials; // import the class the method depends on
private static void createBinaryTokenFile(Configuration conf) {
  // Fetch delegation tokens and store in binary token file.
  try {
    Credentials cred1 = new Credentials();
    Credentials cred2 = new Credentials();
    TokenCache.obtainTokensForNamenodesInternal(cred1, new Path[] { p1 },
        conf);
    for (Token<? extends TokenIdentifier> t : cred1.getAllTokens()) {
      cred2.addToken(new Text(DELEGATION_TOKEN_KEY), t);
    }
    DataOutputStream os = new DataOutputStream(new FileOutputStream(
        binaryTokenFileName.toString()));
    try {
      cred2.writeTokenStorageToStream(os);
    } finally {
      os.close();
    }
  } catch (IOException e) {
    Assert.fail("Exception " + e);
  }
}
 
Developer: naver, Project: hadoop, Lines: 22, Source: TestBinaryTokenFile.java

Example 4: testNodeHeartBeatResponse

import org.apache.hadoop.security.Credentials; // import the class the method depends on
@Test
public void testNodeHeartBeatResponse() throws IOException {
  NodeHeartbeatResponse record =
      Records.newRecord(NodeHeartbeatResponse.class);
  Map<ApplicationId, ByteBuffer> appCredentials =
      new HashMap<ApplicationId, ByteBuffer>();
  Credentials app1Cred = new Credentials();

  Token<DelegationTokenIdentifier> token1 =
      new Token<DelegationTokenIdentifier>();
  token1.setKind(new Text("kind1"));
  app1Cred.addToken(new Text("token1"), token1);
  Token<DelegationTokenIdentifier> token2 =
      new Token<DelegationTokenIdentifier>();
  token2.setKind(new Text("kind2"));
  app1Cred.addToken(new Text("token2"), token2);

  DataOutputBuffer dob = new DataOutputBuffer();
  app1Cred.writeTokenStorageToStream(dob);
  ByteBuffer byteBuffer1 = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  appCredentials.put(ApplicationId.newInstance(1234, 1), byteBuffer1);
  record.setSystemCredentialsForApps(appCredentials);

  NodeHeartbeatResponse proto =
      new NodeHeartbeatResponsePBImpl(
        ((NodeHeartbeatResponsePBImpl) record).getProto());
  Assert.assertEquals(appCredentials, proto.getSystemCredentialsForApps());
}
 
Developer: naver, Project: hadoop, Lines: 29, Source: TestProtocolRecords.java

Example 5: buildAppProto

import org.apache.hadoop.security.Credentials; // import the class the method depends on
private ContainerManagerApplicationProto buildAppProto(ApplicationId appId,
    String user, Credentials credentials,
    Map<ApplicationAccessType, String> appAcls,
    LogAggregationContext logAggregationContext) {

  ContainerManagerApplicationProto.Builder builder =
      ContainerManagerApplicationProto.newBuilder();
  builder.setId(((ApplicationIdPBImpl) appId).getProto());
  builder.setUser(user);

  if (logAggregationContext != null) {
    builder.setLogAggregationContext((
        (LogAggregationContextPBImpl)logAggregationContext).getProto());
  }

  builder.clearCredentials();
  if (credentials != null) {
    DataOutputBuffer dob = new DataOutputBuffer();
    try {
      credentials.writeTokenStorageToStream(dob);
      builder.setCredentials(ByteString.copyFrom(dob.getData()));
    } catch (IOException e) {
      // should not occur
      LOG.error("Cannot serialize credentials", e);
    }
  }

  builder.clearAcls();
  if (appAcls != null) {
    for (Map.Entry<ApplicationAccessType, String> acl : appAcls.entrySet()) {
      ApplicationACLMapProto p = ApplicationACLMapProto.newBuilder()
          .setAccessType(ProtoUtils.convertToProtoFormat(acl.getKey()))
          .setAcl(acl.getValue())
          .build();
      builder.addAcls(p);
    }
  }

  return builder.build();
}
 
Developer: naver, Project: hadoop, Lines: 41, Source: ContainerManagerImpl.java

Example 6: setupTokens

import org.apache.hadoop.security.Credentials; // import the class the method depends on
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
 
Developer: naver, Project: hadoop, Lines: 36, Source: AMLauncher.java

Example 7: addTimelineDelegationToken

import org.apache.hadoop.security.Credentials; // import the class the method depends on
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
 
Developer: naver, Project: hadoop, Lines: 34, Source: YarnClientImpl.java

Example 8: createCommonContainerLaunchContext

import org.apache.hadoop.security.Credentials; // import the class the method depends on
private static ContainerLaunchContext createCommonContainerLaunchContext(
    MasterService masterService, Map<ApplicationAccessType, String> applicationACLs,
    Configuration conf, final ApplicationId appid, Credentials credentials) {

  // Application resources
  Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();

  // Application environment
  Map<String, String> environment = new HashMap<String, String>();

  // Service data
  Map<String, ByteBuffer> serviceData = new HashMap<String, ByteBuffer>();

  // Tokens
  ByteBuffer taskCredentialsBuffer = ByteBuffer.wrap(new byte[] {});
  try {
    FileSystem remoteFS = FileSystem.get(conf);

    // Set up JobConf to be localized properly on the remote NM.
    Path remoteJobSubmitDir = new Path(conf.get(AngelConf.ANGEL_JOB_DIR));
    Path remoteJobConfPath = new Path(remoteJobSubmitDir, AngelConf.ANGEL_JOB_CONF_FILE);
    localResources.put(
        AngelConf.ANGEL_JOB_CONF_FILE,
        createLocalResource(remoteFS, remoteJobConfPath, LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION));
    LOG.info("The job-conf file on the remote FS is " + remoteJobConfPath.toUri().toASCIIString());

    // Setup DistributedCache
    AngelApps.setupDistributedCache(conf, localResources);

    // Setup up task credentials buffer
    LOG.info("Adding #" + credentials.numberOfTokens() + " tokens and #"
        + credentials.numberOfSecretKeys() + " secret keys for NM use for launching container");

    Credentials taskCredentials = new Credentials(credentials);

    DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
    LOG.info("Size of containertokens_dob is " + taskCredentials.numberOfTokens());
    taskCredentials.writeTokenStorageToStream(containerTokens_dob);
    taskCredentialsBuffer =
        ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength());
    containerTokens_dob.close();

    InetSocketAddress listenAddr = masterService.getRPCListenAddr();

    Apps.addToEnvironment(environment, AngelEnvironment.LISTEN_ADDR.name(), listenAddr
        .getAddress().getHostAddress());

    Apps.addToEnvironment(environment, AngelEnvironment.LISTEN_PORT.name(),
        String.valueOf(listenAddr.getPort()));

    Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), getInitialClasspath(conf));

    if (initialAppClasspath != null) {
      Apps.addToEnvironment(environment, Environment.APP_CLASSPATH.name(), initialAppClasspath);
    }

    Apps.addToEnvironment(environment, AngelEnvironment.INIT_MIN_CLOCK.name(), "0");
  } catch (IOException e) {
    throw new YarnRuntimeException(e);
  }

  // Add pwd to LD_LIBRARY_PATH, add this before adding anything else
  Apps.addToEnvironment(environment, Environment.LD_LIBRARY_PATH.name(), Environment.PWD.$());

  ContainerLaunchContext container =
      ContainerLaunchContext.newInstance(localResources, environment, null, serviceData,
          taskCredentialsBuffer, applicationACLs);

  return container;
}
 
Developer: Tencent, Project: angel, Lines: 72, Source: PSAgentAttempt.java

Example 9: testDelegationTokenRestoredInDelegationTokenRenewer

import org.apache.hadoop.security.Credentials; // import the class the method depends on
@Test (timeout = 60000)
public void testDelegationTokenRestoredInDelegationTokenRenewer()
    throws Exception {
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  UserGroupInformation.setConfiguration(conf);

  MemoryRMStateStore memStore = new MemoryRMStateStore();
  memStore.init(conf);
  RMState rmState = memStore.getState();

  Map<ApplicationId, ApplicationStateData> rmAppState =
      rmState.getApplicationState();
  MockRM rm1 = new TestSecurityMockRM(conf, memStore);
  rm1.start();

  HashSet<Token<RMDelegationTokenIdentifier>> tokenSet =
      new HashSet<Token<RMDelegationTokenIdentifier>>();

  // create an empty credential
  Credentials ts = new Credentials();

  // create tokens and add into credential
  Text userText1 = new Text("user1");
  RMDelegationTokenIdentifier dtId1 =
      new RMDelegationTokenIdentifier(userText1, new Text("renewer1"),
        userText1);
  Token<RMDelegationTokenIdentifier> token1 =
      new Token<RMDelegationTokenIdentifier>(dtId1,
        rm1.getRMContext().getRMDelegationTokenSecretManager());
  SecurityUtil.setTokenService(token1, rmAddr);
  ts.addToken(userText1, token1);
  tokenSet.add(token1);

  Text userText2 = new Text("user2");
  RMDelegationTokenIdentifier dtId2 =
      new RMDelegationTokenIdentifier(userText2, new Text("renewer2"),
        userText2);
  Token<RMDelegationTokenIdentifier> token2 =
      new Token<RMDelegationTokenIdentifier>(dtId2,
        rm1.getRMContext().getRMDelegationTokenSecretManager());
  SecurityUtil.setTokenService(token2, rmAddr);
  ts.addToken(userText2, token2);
  tokenSet.add(token2);

  // submit an app with customized credential
  RMApp app = rm1.submitApp(200, "name", "user",
      new HashMap<ApplicationAccessType, String>(), false, "default", 1, ts);

  // assert app info is saved
  ApplicationStateData appState = rmAppState.get(app.getApplicationId());
  Assert.assertNotNull(appState);

  // assert delegation tokens exist in rm1 DelegationTokenRenewr
  Assert.assertEquals(tokenSet, rm1.getRMContext()
    .getDelegationTokenRenewer().getDelegationTokens());

  // assert delegation tokens are saved
  DataOutputBuffer dob = new DataOutputBuffer();
  ts.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  securityTokens.rewind();
  Assert.assertEquals(securityTokens, appState
    .getApplicationSubmissionContext().getAMContainerSpec()
    .getTokens());

  // start new RM
  MockRM rm2 = new TestSecurityMockRM(conf, memStore);
  rm2.start();

  // Need to wait for a while as now token renewal happens on another thread
  // and is asynchronous in nature.
  waitForTokensToBeRenewed(rm2);

  // verify tokens are properly populated back to rm2 DelegationTokenRenewer
  Assert.assertEquals(tokenSet, rm2.getRMContext()
    .getDelegationTokenRenewer().getDelegationTokens());
}
 
Developer: naver, Project: hadoop, Lines: 81, Source: TestRMRestart.java


Note: The org.apache.hadoop.security.Credentials.writeTokenStorageToStream examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by various developers; the copyright of the source code belongs to its original authors. For distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.