This article collects typical usage examples of the Java class org.apache.hadoop.security.Credentials. If you are unsure what the Credentials class does or how to use it, the curated code examples below may help.
The Credentials class belongs to the org.apache.hadoop.security package. Fifteen code examples are shown below, sorted by popularity by default.
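As a quick orientation before the examples: a Credentials object is essentially two maps, secret keys looked up by alias and delegation tokens looked up by service name. Here is a minimal sketch of the secret-key side; the alias and value are made up for illustration:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsBasics {
  public static void main(String[] args) {
    Credentials creds = new Credentials();
    // Store an arbitrary secret under an alias...
    creds.addSecretKey(new Text("my.alias"), "my-secret".getBytes());
    // ...and read it back by the same alias.
    byte[] secret = creds.getSecretKey(new Text("my.alias"));
    System.out.println(new String(secret));          // my-secret
    System.out.println(creds.numberOfSecretKeys());  // 1
    System.out.println(creds.numberOfTokens());      // 0 (no tokens added)
  }
}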
Example 1: StubbedJob
import org.apache.hadoop.security.Credentials; // import the required package/class
public StubbedJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler, boolean newApiCommitter,
    String user, int numSplits, AppContext appContext) {
  super(jobId, applicationAttemptId, conf, eventHandler,
      null, new JobTokenSecretManager(), new Credentials(),
      new SystemClock(), Collections.<TaskId, TaskInfo> emptyMap(),
      MRAppMetrics.create(), null, newApiCommitter, user,
      System.currentTimeMillis(), null, appContext, null, null);
  initTransition = getInitTransition(numSplits);
  localFactory = stateMachineFactory.addTransition(JobStateInternal.NEW,
      EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
      JobEventType.JOB_INIT,
      // This is abusive.
      initTransition);

  // This "this leak" is okay because the retained pointer is in an
  // instance variable.
  localStateMachine = localFactory.make(this);
}
Example 2: testAddTokensToUGI
import org.apache.hadoop.security.Credentials; // import the required package/class
@Test
public void testAddTokensToUGI() {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("someone");
  Credentials creds = new Credentials();
  for (int i = 0; i < service.length; i++) {
    creds.addToken(service[i], token[i]);
  }
  ugi.addCredentials(creds);

  creds = ugi.getCredentials();
  for (int i = 0; i < service.length; i++) {
    assertSame(token[i], creds.getToken(service[i]));
  }
  assertEquals(service.length, creds.numberOfTokens());
}
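Example 2 merges tokens into a freshly created remote user. In application code the same call is more commonly made against the current login user, so that later RPCs under that UGI can authenticate with the added tokens. A hedged sketch, assuming a populated creds object like the one above:

// getCurrentUser() throws IOException, so call it in a throwing context.
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
// Merge the externally obtained tokens into this user's credential set.
ugi.addCredentials(creds);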
Example 3: collectDelegationTokens
import org.apache.hadoop.security.Credentials; // import the required package/class
/**
 * Recursively obtain the tokens for this FileSystem and all descendant
 * FileSystems as determined by getChildFileSystems().
 * @param renewer the user allowed to renew the delegation tokens
 * @param credentials cache in which to add the new delegation tokens
 * @param tokens list in which to add acquired tokens
 * @throws IOException
 */
private void collectDelegationTokens(final String renewer,
    final Credentials credentials,
    final List<Token<?>> tokens)
    throws IOException {
  final String serviceName = getCanonicalServiceName();
  // Collect the token of this filesystem, then those of its embedded children
  if (serviceName != null) { // fs has token, grab it
    final Text service = new Text(serviceName);
    Token<?> token = credentials.getToken(service);
    if (token == null) {
      token = getDelegationToken(renewer);
      if (token != null) {
        tokens.add(token);
        credentials.addToken(service, token);
      }
    }
  }
  // Now collect the tokens from the children
  final FileSystem[] children = getChildFileSystems();
  if (children != null) {
    for (final FileSystem fs : children) {
      fs.collectDelegationTokens(renewer, credentials, tokens);
    }
  }
}
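collectDelegationTokens is private; callers reach it through the public FileSystem#addDelegationTokens, which also returns the newly fetched tokens. A short sketch of the caller side (the renewer principal "yarn" is an assumption for illustration):

Credentials creds = new Credentials();
// Fetches only the tokens not already cached in creds,
// recursing into child filesystems such as ViewFileSystem mounts.
Token<?>[] fetched = fs.addDelegationTokens("yarn", creds);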
Example 4: testGetDelegationTokensWithCredentials
import org.apache.hadoop.security.Credentials; // import the required package/class
@Test
public void testGetDelegationTokensWithCredentials() throws IOException {
  Credentials credentials = new Credentials();
  List<Token<?>> delTokens =
      Arrays.asList(fsView.addDelegationTokens("sanjay", credentials));

  int expectedTokenCount = getExpectedDelegationTokenCountWithCredentials();

  Assert.assertEquals(expectedTokenCount, delTokens.size());
  Credentials newCredentials = new Credentials();
  for (int i = 0; i < expectedTokenCount / 2; i++) {
    Token<?> token = delTokens.get(i);
    newCredentials.addToken(token.getService(), token);
  }
  List<Token<?>> delTokens2 =
      Arrays.asList(fsView.addDelegationTokens("sanjay", newCredentials));
  Assert.assertEquals((expectedTokenCount + 1) / 2, delTokens2.size());
}
Example 5: getDelegationTokens
import org.apache.hadoop.security.Credentials; // import the required package/class
/**
 * For each archive or cache file, get the corresponding delegation token.
 * @param job the job configuration
 * @param credentials the credentials to populate with the obtained tokens
 * @throws IOException
 */
public static void getDelegationTokens(Configuration job,
    Credentials credentials) throws IOException {
  URI[] tarchives = DistributedCache.getCacheArchives(job);
  URI[] tfiles = DistributedCache.getCacheFiles(job);

  int size = (tarchives != null ? tarchives.length : 0)
      + (tfiles != null ? tfiles.length : 0);
  Path[] ps = new Path[size];

  int i = 0;
  if (tarchives != null) {
    for (i = 0; i < tarchives.length; i++) {
      ps[i] = new Path(tarchives[i].toString());
    }
  }

  if (tfiles != null) {
    for (int j = 0; j < tfiles.length; j++) {
      ps[i + j] = new Path(tfiles[j].toString());
    }
  }

  TokenCache.obtainTokensForNamenodes(credentials, ps, job);
}
Example 6: submitJob
import org.apache.hadoop.security.Credentials; // import the required package/class
@Override
public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts)
    throws IOException, InterruptedException {
  addHistoryToken(ts);

  // Construct the information needed to start the MR AM
  ApplicationSubmissionContext appContext =
      createApplicationSubmissionContext(conf, jobSubmitDir, ts);

  // Submit to ResourceManager
  try {
    ApplicationId applicationId =
        resMgrDelegate.submitApplication(appContext);

    ApplicationReport appMaster =
        resMgrDelegate.getApplicationReport(applicationId);
    String diagnostics =
        (appMaster == null ?
            "application report is null" : appMaster.getDiagnostics());
    if (appMaster == null
        || appMaster.getYarnApplicationState() == YarnApplicationState.FAILED
        || appMaster.getYarnApplicationState() == YarnApplicationState.KILLED) {
      throw new IOException("Failed to run job : " + diagnostics);
    }
    return clientCache.getClient(jobId).getJobStatus(jobId);
  } catch (YarnException e) {
    throw new IOException(e);
  }
}
Example 7: getCopyListing
import org.apache.hadoop.security.Credentials; // import the required package/class
/**
 * Public factory method with which the appropriate CopyListing implementation may be retrieved.
 *
 * @param configuration The input configuration.
 * @param credentials Credentials object on which the FS delegation tokens are cached.
 * @param options The input Options, to help choose the appropriate CopyListing implementation.
 * @return An instance of the appropriate CopyListing implementation.
 * @throws java.io.IOException Exception if any
 */
public static CopyListing getCopyListing(
    Configuration configuration,
    Credentials credentials,
    S3MapReduceCpOptions options)
    throws IOException {
  String copyListingClassName =
      configuration.get(S3MapReduceCpConstants.CONF_LABEL_COPY_LISTING_CLASS, "");
  Class<? extends CopyListing> copyListingClass;
  try {
    if (!copyListingClassName.isEmpty()) {
      copyListingClass =
          configuration.getClass(S3MapReduceCpConstants.CONF_LABEL_COPY_LISTING_CLASS,
              SimpleCopyListing.class, CopyListing.class);
    } else {
      copyListingClass = SimpleCopyListing.class;
    }
    copyListingClassName = copyListingClass.getName();
    Constructor<? extends CopyListing> constructor =
        copyListingClass.getDeclaredConstructor(Configuration.class, Credentials.class);
    return constructor.newInstance(configuration, credentials);
  } catch (Exception e) {
    throw new IOException("Unable to instantiate " + copyListingClassName, e);
  }
}
Example 8: checkSecrets
import org.apache.hadoop.security.Credentials; // import the required package/class
private static void checkSecrets(Credentials ts) {
  if (ts == null) {
    throw new RuntimeException("The credentials are not available");
    // fail the test
  }

  for (int i = 0; i < NUM_OF_KEYS; i++) {
    String secretName = "alias" + i;
    // get token storage and a key
    byte[] secretValue = ts.getSecretKey(new Text(secretName));
    System.out.println(secretValue);

    if (secretValue == null) {
      throw new RuntimeException("The key " + secretName + " is not available. ");
      // fail the test
    }

    String secretValueStr = new String(secretValue);

    if (!("password" + i).equals(secretValueStr)) {
      throw new RuntimeException("The key " + secretName +
          " is not correct. Expected value is " + ("password" + i) +
          ". Actual value is " + secretValueStr); // fail the test
    }
  }
}
Example 9: testFsWithChildTokens
import org.apache.hadoop.security.Credentials; // import the required package/class
@Test
public void testFsWithChildTokens() throws Exception {
  Credentials credentials = new Credentials();
  Text service1 = new Text("singleTokenFs1");
  Text service2 = new Text("singleTokenFs2");
  MockFileSystem fs1 = createFileSystemForServiceName(service1);
  MockFileSystem fs2 = createFileSystemForServiceName(service2);
  MockFileSystem fs3 = createFileSystemForServiceName(null);
  MockFileSystem multiFs =
      createFileSystemForServiceName(null, fs1, fs2, fs3);

  multiFs.addDelegationTokens(renewer, credentials);
  verifyTokenFetch(multiFs, false); // has no tokens of its own, only child tokens
  verifyTokenFetch(fs1, true);
  verifyTokenFetch(fs2, true);
  verifyTokenFetch(fs3, false);

  assertEquals(2, credentials.numberOfTokens());
  assertNotNull(credentials.getToken(service1));
  assertNotNull(credentials.getToken(service2));
}
Example 10: parseCredentials
import org.apache.hadoop.security.Credentials; // import the required package/class
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
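Example 10 shows the read side used on the server. The write side an application client performs, serializing Credentials into the ByteBuffer a ContainerLaunchContext carries, is roughly the mirror image; a sketch, assuming credentials and launchContext are already set up:

// Serialize the tokens into a byte buffer...
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);
ByteBuffer tokenBuffer = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
// ...and hand it to the container launch context for the NM to parse.
launchContext.setTokens(tokenBuffer);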
Example 11: testFsWithChildTokensOneExists
import org.apache.hadoop.security.Credentials; // import the required package/class
@Test
public void testFsWithChildTokensOneExists() throws Exception {
  Credentials credentials = new Credentials();
  Text service1 = new Text("singleTokenFs1");
  Text service2 = new Text("singleTokenFs2");
  Token<?> token = mock(Token.class);
  credentials.addToken(service2, token);

  MockFileSystem fs1 = createFileSystemForServiceName(service1);
  MockFileSystem fs2 = createFileSystemForServiceName(service2);
  MockFileSystem fs3 = createFileSystemForServiceName(null);
  MockFileSystem multiFs = createFileSystemForServiceName(null, fs1, fs2, fs3);

  multiFs.addDelegationTokens(renewer, credentials);
  verifyTokenFetch(multiFs, false);
  verifyTokenFetch(fs1, true);
  verifyTokenFetch(fs2, false); // we had already added its token to credentials
  verifyTokenFetch(fs3, false);

  assertEquals(2, credentials.numberOfTokens());
  assertNotNull(credentials.getToken(service1));
  assertSame(token, credentials.getToken(service2));
}
Example 12: createFakeCredentials
import org.apache.hadoop.security.Credentials; // import the required package/class
@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok)
    throws IOException {
  Credentials creds = new Credentials();
  byte[] password = new byte[20];
  Text kind = new Text();
  Text service = new Text();
  Text alias = new Text();
  for (int i = 0; i < nTok; ++i) {
    byte[] identifier = ("idef" + i).getBytes();
    r.nextBytes(password);
    kind.set("kind" + i);
    service.set("service" + i);
    alias.set("token" + i);
    Token token = new Token(identifier, password, kind, service);
    creds.addToken(alias, token);
  }
  DataOutputBuffer buf = new DataOutputBuffer();
  creds.writeTokenStorageToStream(buf);
  DataInputBuffer ret = new DataInputBuffer();
  ret.reset(buf.getData(), 0, buf.getLength());
  return ret;
}
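The buffer this helper returns can be fed straight back into a fresh Credentials instance, which is how tests would typically consume it; a small round-trip sketch:

// Parse the fake token storage back out and verify the count.
DataInputBuffer in = createFakeCredentials(new Random(), 3);
Credentials parsed = new Credentials();
parsed.readTokenStorageStream(in);
assertEquals(3, parsed.numberOfTokens());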
Example 13: testSingleTokenFetch
import org.apache.hadoop.security.Credentials; // import the required package/class
@Test
public void testSingleTokenFetch() throws Exception {
  Configuration conf = new Configuration();
  conf.set(YarnConfiguration.RM_PRINCIPAL, "mapred/host@REALM");
  String renewer = Master.getMasterPrincipal(conf);
  Credentials credentials = new Credentials();
  final MockFileSystem fs = new MockFileSystem();
  final MockFileSystem mockFs = (MockFileSystem) fs.getRawFileSystem();
  when(mockFs.getCanonicalServiceName()).thenReturn("host:0");
  when(mockFs.getUri()).thenReturn(new URI("mockfs://host:0"));

  Path mockPath = mock(Path.class);
  when(mockPath.getFileSystem(conf)).thenReturn(mockFs);

  Path[] paths = new Path[]{ mockPath, mockPath };
  when(mockFs.addDelegationTokens("me", credentials)).thenReturn(null);
  TokenCache.obtainTokensForNamenodesInternal(credentials, paths, conf);
  verify(mockFs, times(1)).addDelegationTokens(renewer, credentials);
}
Example 14: convertCredentialsFromByteBuffer
import org.apache.hadoop.security.Credentials; // import the required package/class
private static Credentials convertCredentialsFromByteBuffer(
    ByteBuffer appAttemptTokens) {
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  try {
    Credentials credentials = null;
    if (appAttemptTokens != null) {
      credentials = new Credentials();
      appAttemptTokens.rewind();
      dibb.reset(appAttemptTokens);
      credentials.readTokenStorageStream(dibb);
    }
    return credentials;
  } catch (IOException e) {
    LOG.error("Failed to convert Credentials from ByteBuffer.");
    assert false;
    return null;
  } finally {
    IOUtils.closeStream(dibb);
  }
}
Example 15: loadTokens
import org.apache.hadoop.security.Credentials; // import the required package/class
/**
 * Load the job token from a file.
 * @deprecated Use {@link Credentials#readTokenStorageFile} instead;
 * this method is included for compatibility with Hadoop 1.
 * @param jobTokenFile the local file to read tokens from
 * @param conf the job configuration
 * @throws IOException
 */
@InterfaceAudience.Private
@Deprecated
public static Credentials loadTokens(String jobTokenFile, JobConf conf)
    throws IOException {
  Path localJobTokenFile = new Path("file:///" + jobTokenFile);

  Credentials ts = Credentials.readTokenStorageFile(localJobTokenFile, conf);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Task: Loaded jobTokenFile from: " +
        localJobTokenFile.toUri().getPath() +
        "; num of sec keys = " + ts.numberOfSecretKeys() +
        " Number of tokens " + ts.numberOfTokens());
  }
  return ts;
}
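The replacement API named in the @deprecated tag also has a matching writer, Credentials#writeTokenStorageFile, so a modern round trip needs no compatibility helper at all; a sketch with an illustrative file path:

Credentials out = new Credentials();
out.addSecretKey(new Text("alias0"), "password0".getBytes());
Path file = new Path("file:///tmp/tokens.bin"); // illustrative path
out.writeTokenStorageFile(file, conf);
// Reads back the same secret keys and tokens that were written.
Credentials back = Credentials.readTokenStorageFile(file, conf);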