This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.security.TokenCache. If you are unsure what TokenCache is for or how to use it, the curated examples below may help.
The TokenCache class belongs to the org.apache.hadoop.mapreduce.security package. Fifteen code examples of the class are shown below, ordered by popularity.
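Before the individual examples, here is a minimal, self-contained sketch of the call that nearly all of them share. The job name and output path are placeholders; on a security-enabled cluster, obtainTokensForNamenodes fetches an HDFS delegation token for each path's NameNode and stores it in the job's Credentials.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.security.TokenCache;

public class TokenCacheDemo {
  public static void main(String[] args) throws IOException {
    Job job = Job.getInstance(new Configuration(), "token-cache-demo");
    // Placeholder output path on a security-enabled cluster
    Path out = new Path("hdfs:///user/demo/output");
    // Fetch a delegation token for the path's NameNode into the job credentials
    TokenCache.obtainTokensForNamenodes(job.getCredentials(),
        new Path[] { out }, job.getConfiguration());
  }
}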
Example 1: checkOutputSpecs
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
public void checkOutputSpecs(FileSystem ignored, JobConf job)
    throws FileAlreadyExistsException, InvalidJobConfException, IOException {
  // Ensure that the output directory is set and not already there
  Path outDir = getOutputPath(job);
  if (outDir == null && job.getNumReduceTasks() != 0) {
    throw new InvalidJobConfException("Output directory not set in JobConf.");
  }
  if (outDir != null) {
    FileSystem fs = outDir.getFileSystem(job);
    // normalize the output directory
    outDir = fs.makeQualified(outDir);
    setOutputPath(job, outDir);
    // get delegation token for the outDir's file system
    TokenCache.obtainTokensForNamenodes(job.getCredentials(),
        new Path[] {outDir}, job);
    // check its existence
    if (fs.exists(outDir)) {
      throw new FileAlreadyExistsException("Output directory " + outDir +
          " already exists");
    }
  }
}
Example 2: getDelegationTokens
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/**
 * For each archive or cache file - get the corresponding delegation token
 * @param job
 * @param credentials
 * @throws IOException
 */
public static void getDelegationTokens(Configuration job,
    Credentials credentials) throws IOException {
  URI[] tarchives = DistributedCache.getCacheArchives(job);
  URI[] tfiles = DistributedCache.getCacheFiles(job);
  int size = (tarchives != null ? tarchives.length : 0) +
      (tfiles != null ? tfiles.length : 0);
  Path[] ps = new Path[size];
  int i = 0;
  if (tarchives != null) {
    for (i = 0; i < tarchives.length; i++) {
      ps[i] = new Path(tarchives[i].toString());
    }
  }
  if (tfiles != null) {
    for (int j = 0; j < tfiles.length; j++) {
      ps[i + j] = new Path(tfiles[j].toString());
    }
  }
  TokenCache.obtainTokensForNamenodes(credentials, ps, job);
}
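A hypothetical driver-side counterpart to Example 2. The cache URI is a placeholder, and the getDelegationTokens call assumes the helper above is accessible from this class:

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.security.Credentials;

public class CacheTokenDemo {
  public static void main(String[] args) throws IOException, URISyntaxException {
    Configuration conf = new Configuration();
    // Register a cache file, then collect delegation tokens for it
    DistributedCache.addCacheFile(new URI("hdfs:///data/lookup.dat"), conf);
    Credentials credentials = new Credentials();
    getDelegationTokens(conf, credentials); // helper shown in Example 2
  }
}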
Example 3: checkOutputSpecs
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
public void checkOutputSpecs(JobContext job)
    throws FileAlreadyExistsException, IOException {
  // Ensure that the output directory is set and not already there
  Path outDir = getOutputPath(job);
  if (outDir == null) {
    throw new InvalidJobConfException("Output directory not set.");
  }
  // get delegation token for outDir's file system
  TokenCache.obtainTokensForNamenodes(job.getCredentials(),
      new Path[] { outDir }, job.getConfiguration());
  if (outDir.getFileSystem(job.getConfiguration()).exists(outDir)) {
    throw new FileAlreadyExistsException("Output directory " + outDir +
        " already exists");
  }
}
Example 4: checkOutputSpecs
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/** {@inheritDoc} */
@Override
public void checkOutputSpecs(JobContext context) throws IOException {
  Configuration conf = context.getConfiguration();
  if (getCommitDirectory(conf) == null) {
    throw new IllegalStateException("Commit directory not configured");
  }
  Path workingPath = getWorkingDirectory(conf);
  if (workingPath == null) {
    throw new IllegalStateException("Working directory not configured");
  }
  // get delegation token for outDir's file system
  TokenCache.obtainTokensForNamenodes(context.getCredentials(),
      new Path[] {workingPath}, conf);
}
Example 5: getSplitsFromManifest
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
private List<InputSplit> getSplitsFromManifest(JobConf job) throws IOException {
  Path[] dirs = getInputPaths(job);
  if (dirs.length == 0) {
    throw new IOException("No input path specified in job");
  } else if (dirs.length > 1) {
    throw new IOException("Will only look for manifests in a single input directory ("
        + dirs.length + " directories provided).");
  }
  TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs, job);
  Path dir = dirs[0];
  FileSystem fs = dir.getFileSystem(job);
  if (!fs.getFileStatus(dir).isDirectory()) {
    throw new IOException("Input path not a directory: " + dir);
  }
  Path manifestPath = new Path(dir, ExportManifestOutputFormat.MANIFEST_FILENAME);
  if (!fs.isFile(manifestPath)) {
    return null;
  }
  return parseManifest(fs, manifestPath, job);
}
Example 6: setTokensFor
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
public static void setTokensFor(ContainerLaunchContext amContainer,
    List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();
  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}
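A hypothetical call site for Example 6 when assembling a YARN application master container. The library path is a placeholder, and setTokensFor refers to the helper above (assumed accessible from this class):

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Records;

public class AmTokenDemo {
  public static void main(String[] args) throws IOException {
    ContainerLaunchContext amContainer =
        Records.newRecord(ContainerLaunchContext.class);
    // Paths the AM will read from HDFS; tokens for them end up in the container
    List<Path> paths = Collections.singletonList(new Path("hdfs:///apps/demo/lib"));
    setTokensFor(amContainer, paths, new YarnConfiguration()); // helper from Example 6
  }
}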
Example 7: checkOutputSpecs
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/** {@inheritDoc} */
@Override
public void checkOutputSpecs(JobContext context) throws IOException {
  Configuration conf = context.getConfiguration();
  if (getCommitDirectory(conf) == null) {
    throw new IllegalStateException("Commit directory not configured");
  }
  Path workingPath = getWorkingDirectory(conf);
  if (workingPath == null) {
    throw new IllegalStateException("Working directory not configured");
  }
  // get delegation token for outDir's file system
  TokenCache.obtainTokensForNamenodes(context.getCredentials(),
      new Path[] {workingPath}, conf);
}
Example 8: populateTokenCache
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
@SuppressWarnings("unchecked")
private void populateTokenCache(Configuration conf, Credentials credentials)
    throws IOException {
  readTokensFromFiles(conf, credentials);
  // add the delegation tokens from configuration
  String[] nameNodes = conf.getStrings(JobContext.JOB_NAMENODES);
  LOG.debug("adding the following namenodes' delegation tokens:" +
      Arrays.toString(nameNodes));
  if (nameNodes != null) {
    Path[] ps = new Path[nameNodes.length];
    for (int i = 0; i < nameNodes.length; i++) {
      ps[i] = new Path(nameNodes[i]);
    }
    TokenCache.obtainTokensForNamenodes(credentials, ps, conf);
  }
}
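For reference, JOB_NAMENODES resolves to the mapreduce.job.hdfs-servers property, a comma-separated list of additional NameNode URIs whose tokens the job needs. A driver might seed it like this (the host names are placeholders):

import org.apache.hadoop.conf.Configuration;

public class ExtraNameNodesDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Extra NameNodes whose delegation tokens the job will need (placeholders)
    conf.setStrings("mapreduce.job.hdfs-servers",
        "hdfs://nn1.example.com:8020", "hdfs://nn2.example.com:8020");
  }
}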
Example 9: localizeJobTokenFile
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/**
 * Download the job-token file from the FS and save on local fs.
 * @param user
 * @param jobId
 * @return the local file system path of the downloaded file.
 * @throws IOException
 */
private String localizeJobTokenFile(String user, JobID jobId)
    throws IOException {
  // check if the tokenJob file is there..
  Path skPath = new Path(systemDirectory,
      jobId.toString() + "/" + TokenCache.JOB_TOKEN_HDFS_FILE);
  FileStatus status = null;
  long jobTokenSize = -1;
  status = systemFS.getFileStatus(skPath); // throws FileNotFoundException
  jobTokenSize = status.getLen();
  Path localJobTokenFile =
      lDirAlloc.getLocalPathForWrite(getPrivateDirJobTokenFile(user,
          jobId.toString()), jobTokenSize, fConf);
  String localJobTokenFileStr = localJobTokenFile.toUri().getPath();
  if (LOG.isDebugEnabled()) {
    LOG.debug("localizingJobTokenFile from sd=" + skPath.toUri().getPath() +
        " to " + localJobTokenFileStr);
  }
  // Download job_token
  systemFS.copyToLocalFile(skPath, localJobTokenFile);
  return localJobTokenFileStr;
}
Example 10: getDelegationTokens
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/**
 * For each archive or cache file - get the corresponding delegation token
 * @param job
 * @param credentials
 * @throws IOException
 */
public static void getDelegationTokens(Configuration job,
    Credentials credentials) throws IOException {
  URI[] tarchives = DistributedCache.getCacheArchives(job);
  URI[] tfiles = DistributedCache.getCacheFiles(job);
  int size = (tarchives != null ? tarchives.length : 0) +
      (tfiles != null ? tfiles.length : 0);
  Path[] ps = new Path[size];
  int i = 0;
  if (tarchives != null) {
    for (i = 0; i < tarchives.length; i++) {
      ps[i] = new Path(tarchives[i].toString());
    }
  }
  if (tfiles != null) {
    for (int j = 0; j < tfiles.length; j++) {
      ps[i + j] = new Path(tfiles[j].toString());
    }
  }
  TokenCache.obtainTokensForNamenodes(credentials, ps, job);
}
Example 11: checkOutputSpecs
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
public void checkOutputSpecs(JobContext job)
    throws FileAlreadyExistsException, IOException {
  // Ensure that the output directory is set and not already there
  Path outDir = getOutputPath(job);
  if (outDir == null) {
    throw new InvalidJobConfException("Output directory not set.");
  }
  // get delegation token for outDir's file system
  TokenCache.obtainTokensForNamenodes(job.getCredentials(),
      new Path[] {outDir}, job.getConfiguration());
  if (outDir.getFileSystem(job.getConfiguration()).exists(outDir)) {
    throw new FileAlreadyExistsException("Output directory " + outDir +
        " already exists");
  }
}
Example 12: checkSrcPath
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/** Sanity check for srcPath */
private static void checkSrcPath(JobConf jobConf, List<Path> srcPaths)
    throws IOException {
  List<IOException> rslt = new ArrayList<IOException>();
  Path[] ps = new Path[srcPaths.size()];
  ps = srcPaths.toArray(ps);
  TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), ps, jobConf);
  for (Path p : srcPaths) {
    FileSystem fs = p.getFileSystem(jobConf);
    if (!fs.exists(p)) {
      rslt.add(new IOException("Input source " + p + " does not exist."));
    }
  }
  if (!rslt.isEmpty()) {
    throw new InvalidInputException(rslt);
  }
}
Example 13: populateTokenCache
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
@SuppressWarnings("unchecked")
private void populateTokenCache(Configuration conf, Credentials credentials)
    throws IOException {
  readTokensFromFiles(conf, credentials);
  // add the delegation tokens from configuration
  String[] nameNodes = conf.getStrings(MRJobConfig.JOB_NAMENODES);
  LOG.debug("adding the following namenodes' delegation tokens:" +
      Arrays.toString(nameNodes));
  if (nameNodes != null) {
    Path[] ps = new Path[nameNodes.length];
    for (int i = 0; i < nameNodes.length; i++) {
      ps[i] = new Path(nameNodes[i]);
    }
    TokenCache.obtainTokensForNamenodes(credentials, ps, conf);
  }
}
Example 14: checkSrcPath
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
/** Sanity check for srcPath */
public static void checkSrcPath(JobConf jobConf, List<Path> srcPaths)
    throws IOException {
  List<IOException> rslt = new ArrayList<IOException>();
  Path[] ps = new Path[srcPaths.size()];
  ps = srcPaths.toArray(ps);
  TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), ps, jobConf);
  for (Path p : srcPaths) {
    FileSystem fs = p.getFileSystem(jobConf);
    if (!fs.exists(p)) {
      rslt.add(new IOException("Input source " + p + " does not exist."));
    }
  }
  if (!rslt.isEmpty()) {
    throw new InvalidInputException(rslt);
  }
}
Example 15: checkOutputSpecs
import org.apache.hadoop.mapreduce.security.TokenCache; // import the required package/class
@Override
public void checkOutputSpecs(JobContext context) throws IOException {
  Path path = getOutputPath(context);
  if (path == null) {
    throw new IOException("Temporary output path is not set");
  }
  TokenCache.obtainTokensForNamenodes(
      context.getCredentials(),
      new Path[] { path },
      context.getConfiguration());
  if (path.getFileSystem(context.getConfiguration()).exists(path)) {
    throw new IOException(MessageFormat.format(
        "Output directory {0} already exists",
        path));
  }
}