

Java TokenCache.setJobToken Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.mapreduce.security.TokenCache.setJobToken, drawn from open-source projects. If you are wondering what TokenCache.setJobToken does, how to call it, or what real-world usage looks like, the curated code examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.mapreduce.security.TokenCache.


The sections below present 9 code examples of the TokenCache.setJobToken method, sorted by popularity by default.
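Before turning to the collected examples, here is a minimal, self-contained sketch of the basic API: TokenCache.setJobToken stores a token under a well-known job-token alias inside a Credentials object, and TokenCache.getJobToken looks it up again. The identifier, password, kind, and service values below are placeholders for illustration only, not taken from any of the projects quoted in this article.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class TokenCacheSketch {
  public static void main(String[] args) {
    // Credentials container that will carry the job token
    Credentials credentials = new Credentials();

    // build a token from placeholder bytes (hypothetical values)
    Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(),
        new Text("kind"), new Text("service"));

    // store the token under the job-token alias
    TokenCache.setJobToken(token, credentials);

    // the same token can later be retrieved from the Credentials
    Token<JobTokenIdentifier> retrieved = TokenCache.getJobToken(credentials);
    System.out.println("service = " + retrieved.getService());
  }
}

In Examples 1-3 below, the Credentials object is additionally serialized to the job's system directory so that tasks can read the token back at runtime.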

Example 1: generateAndStoreTokens

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * Generate the job token and save it to a file.
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException {
  Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
  Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);
  if (tokenStorage == null) {
    tokenStorage = new Credentials();
  }
  //create JobToken file and write token to it
  JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
      .toString()));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
      jobtracker.getJobTokenSecretManager());
  token.setService(identifier.getJobId());
  
  TokenCache.setJobToken(token, tokenStorage);
      
  // write TokenStorage out
  tokenStorage.writeTokenStorageFile(keysFile, jobtracker.getConf());
  LOG.info("jobToken generated and stored with users keys in "
      + keysFile.toUri().getPath());
}
 
Developer: Seagate, Project: hadoop-on-lustre, Lines: 25, Source: JobInProgress.java

Example 2: generateAndStoreTokens

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * Generate the job token and save it to a file.
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException{
  Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
  Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);

  if (tokenStorage == null) {
    tokenStorage = new Credentials();
  }
  
  //create JobToken file and write token to it
  JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
      .toString()));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
      jobtracker.getJobTokenSecretManager());
  token.setService(identifier.getJobId());
  
  TokenCache.setJobToken(token, tokenStorage);
  
  // write TokenStorage out
  tokenStorage.writeTokenStorageFile(keysFile, jobtracker.getConf());
  LOG.info("jobToken generated and stored with users keys in "
      + keysFile.toUri().getPath());
}
 
Developer: rekhajoshm, Project: mapreduce-fork, Lines: 27, Source: JobInProgress.java

Example 3: generateAndStoreTokens

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * Generate the job token and save it to a file.
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException {
  Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
  Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);
  if (tokenStorage == null) {
    tokenStorage = new Credentials();
  }
  //create JobToken file and write token to it
  JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
      .toString()));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
      jobtracker.getJobTokenSecretManager());
  token.setService(identifier.getJobId());
  
  TokenCache.setJobToken(token, tokenStorage);
      
  // write TokenStorage out
  FileSystem fs = keysFile.getFileSystem(jobtracker.getConf());
  FSDataOutputStream os = null;
  try {
    os = fs.createNonRecursive(keysFile, true,
        jobtracker.getConf().getInt("io.file.buffer.size", 4096),
        fs.getDefaultReplication(keysFile),
        fs.getDefaultBlockSize(keysFile), null);
    tokenStorage.writeTokenStorageToStream(os);
  } finally {
    IOUtils.closeStream(os);
  }
  LOG.info("jobToken generated and stored with users keys in "
      + keysFile.toUri().getPath());
}
 
Developer: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 35, Source: JobInProgress.java

Example 4: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * test PipesMapRunner: verify the data transferred from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdOut,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: naver, Project: hadoop, Lines: 70, Source: TestPipeApplication.java

Example 5: testPipesReduser

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * test org.apache.hadoop.mapred.pipes.PipesReducer:
 * verify the transfer of keys and values
 *
 * @throws Exception
 */
@Test
public void testPipesReduser() throws Exception {

  File[] psw = cleanTokenPasswordFile();
  JobConf conf = new JobConf();
  try {
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token, conf.getCredentials());

    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());

    PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
    reducer.configure(conf);
    BooleanWritable bw = new BooleanWritable(true);

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
    initStdOut(conf);
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    Reporter reporter = new TestTaskReporter();
    List<Text> texts = new ArrayList<Text>();
    texts.add(new Text("first"));
    texts.add(new Text("second"));
    texts.add(new Text("third"));

    reducer.reduce(bw, texts.iterator(), output, reporter);
    reducer.close();
    String stdOut = readStdOut(conf);
    // test data: key
    assertTrue(stdOut.contains("reducer key :true"));
    // and values
    assertTrue(stdOut.contains("reduce value  :first"));
    assertTrue(stdOut.contains("reduce value  :second"));
    assertTrue(stdOut.contains("reduce value  :third"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }

}
 
Developer: naver, Project: hadoop, Lines: 56, Source: TestPipeApplication.java

Example 6: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * test PipesMapRunner: verify the data transferred from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.initialize(FsConstants.LOCAL_FS_URI, conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdOut,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 70, Source: TestPipeApplication.java

Example 7: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * test PipesMapRunner: verify the data transferred from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs,
            new Path(workSpace + File.separator + "outfile"), IntWritable.class,
            Text.class, null, null);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdOut,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 70, Source: TestPipeApplication.java

Example 8: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * test PipesMapRunner: verify the data transferred from the reader
 *
 * @throws Exception
 */
@Ignore // also times out on Apache Hadoop
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdOut,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: hopshadoop, Project: hops, Lines: 71, Source: TestPipeApplication.java

Example 9: testPipesReduser

import org.apache.hadoop.mapreduce.security.TokenCache; // import the class that provides the method
/**
 * test org.apache.hadoop.mapred.pipes.PipesReducer:
 * verify the transfer of keys and values
 *
 * @throws Exception
 */
@Ignore // also times out on Apache Hadoop
@Test
public void testPipesReduser() throws Exception {

  File[] psw = cleanTokenPasswordFile();
  JobConf conf = new JobConf();
  try {
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token, conf.getCredentials());

    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());

    PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
    reducer.configure(conf);
    BooleanWritable bw = new BooleanWritable(true);

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
    initStdOut(conf);
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    Reporter reporter = new TestTaskReporter();
    List<Text> texts = new ArrayList<Text>();
    texts.add(new Text("first"));
    texts.add(new Text("second"));
    texts.add(new Text("third"));

    reducer.reduce(bw, texts.iterator(), output, reporter);
    reducer.close();
    String stdOut = readStdOut(conf);
    // test data: key
    assertTrue(stdOut.contains("reducer key :true"));
    // and values
    assertTrue(stdOut.contains("reduce value  :first"));
    assertTrue(stdOut.contains("reduce value  :second"));
    assertTrue(stdOut.contains("reduce value  :third"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }

}
 
Developer: hopshadoop, Project: hops, Lines: 57, Source: TestPipeApplication.java
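
To complement Examples 1-3, which persist the Credentials with writeTokenStorageFile, the following sketch shows how the token can be read back on the task side. This is an illustrative reconstruction, not code from the projects above; the file path is a placeholder for the job's token file.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class JobTokenReaderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // placeholder path; in Examples 1-3 this is <job system dir>/TokenCache.JOB_TOKEN_HDFS_FILE
    Path keysFile = new Path("/tmp/jobTokenFile");

    // deserialize the Credentials that generateAndStoreTokens wrote out
    Credentials credentials = Credentials.readTokenStorageFile(keysFile, conf);

    // look up the job token stored earlier via TokenCache.setJobToken
    Token<JobTokenIdentifier> jobToken = TokenCache.getJobToken(credentials);
    System.out.println("job token service = " + jobToken.getService());
  }
}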


Note: The org.apache.hadoop.mapreduce.security.TokenCache.setJobToken examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their developers; copyright remains with the original authors, and distribution and use are governed by each project's License. Do not reproduce without permission.