

Java TokenCache.setJobToken Method Code Examples

This article collects and summarizes typical usage examples of the Java method org.apache.hadoop.mapreduce.security.TokenCache.setJobToken. If you have been wondering what TokenCache.setJobToken does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.mapreduce.security.TokenCache.


The following presents 9 code examples of TokenCache.setJobToken, sorted by popularity by default. You can upvote any example you like or find useful; your ratings help the system recommend better Java code examples.
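Before the examples, here is a minimal, self-contained sketch of the basic call pattern: store a token as the job token in a Credentials object, then read it back. Note that the identifier, password, kind, and service values below are illustrative placeholders, not values required by the API; only the TokenCache.setJobToken and TokenCache.getJobToken calls are the API under discussion, and setJobToken is an internal Hadoop API, which is why the examples below all come from Hadoop code bases.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenCacheSketch {
  public static void main(String[] args) {
    // A Credentials object serves as the in-memory token store for a job.
    Credentials credentials = new Credentials();

    // Build a generic token. The identifier, password, kind, and service
    // used here are placeholders chosen for illustration only.
    Token<? extends TokenIdentifier> token = new Token<TokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(),
        new Text("kind"), new Text("service"));

    // Register the token as the job token inside the credentials.
    TokenCache.setJobToken(token, credentials);

    // Later (e.g., in a task) the same token can be read back.
    Token<? extends TokenIdentifier> jobToken =
        TokenCache.getJobToken(credentials);
    System.out.println("Job token kind: " + jobToken.getKind());
  }
}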

Example 1: generateAndStoreTokens

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Generate the job token and save it to a file.
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException {
  Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
  Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);
  if (tokenStorage == null) {
    tokenStorage = new Credentials();
  }
  //create JobToken file and write token to it
  JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
      .toString()));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
      jobtracker.getJobTokenSecretManager());
  token.setService(identifier.getJobId());
  
  TokenCache.setJobToken(token, tokenStorage);
      
  // write TokenStorage out
  tokenStorage.writeTokenStorageFile(keysFile, jobtracker.getConf());
  LOG.info("jobToken generated and stored with users keys in "
      + keysFile.toUri().getPath());
}
 
Developer: Seagate, Project: hadoop-on-lustre, Lines: 25, Source: JobInProgress.java

Example 2: generateAndStoreTokens

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Generate the job token and save it to a file.
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException {
  Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
  Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);

  if (tokenStorage == null) {
    tokenStorage = new Credentials();
  }
  
  //create JobToken file and write token to it
  JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
      .toString()));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
      jobtracker.getJobTokenSecretManager());
  token.setService(identifier.getJobId());
  
  TokenCache.setJobToken(token, tokenStorage);
  
  // write TokenStorage out
  tokenStorage.writeTokenStorageFile(keysFile, jobtracker.getConf());
  LOG.info("jobToken generated and stored with users keys in "
      + keysFile.toUri().getPath());
}
 
Developer: rekhajoshm, Project: mapreduce-fork, Lines: 27, Source: JobInProgress.java

Example 3: generateAndStoreTokens

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Generate the job token and save it to a file.
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException {
  Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
  Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);
  if (tokenStorage == null) {
    tokenStorage = new Credentials();
  }
  //create JobToken file and write token to it
  JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
      .toString()));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
      jobtracker.getJobTokenSecretManager());
  token.setService(identifier.getJobId());
  
  TokenCache.setJobToken(token, tokenStorage);
      
  // write TokenStorage out
  FileSystem fs = keysFile.getFileSystem(jobtracker.getConf());
  FSDataOutputStream os = null;
  try {
    os = fs.createNonRecursive(keysFile, true,
        jobtracker.getConf().getInt("io.file.buffer.size", 4096),
        fs.getDefaultReplication(keysFile),
        fs.getDefaultBlockSize(keysFile), null);
    tokenStorage.writeTokenStorageToStream(os);
  } finally {
    IOUtils.closeStream(os);
  }
  LOG.info("jobToken generated and stored with users keys in "
      + keysFile.toUri().getPath());
}
 
Developer: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 35, Source: JobInProgress.java

Example 4: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Test PipesMapRunner: verify the data transferred from the reader.
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdout,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: naver, Project: hadoop, Lines: 70, Source: TestPipeApplication.java

Example 5: testPipesReduser

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Test org.apache.hadoop.mapred.pipes.PipesReducer:
 * verify the transfer of key and value data.
 *
 * @throws Exception
 */
@Test
public void testPipesReduser() throws Exception {

  File[] psw = cleanTokenPasswordFile();
  JobConf conf = new JobConf();
  try {
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token, conf.getCredentials());

    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());

    PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
    reducer.configure(conf);
    BooleanWritable bw = new BooleanWritable(true);

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
    initStdOut(conf);
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    Reporter reporter = new TestTaskReporter();
    List<Text> texts = new ArrayList<Text>();
    texts.add(new Text("first"));
    texts.add(new Text("second"));
    texts.add(new Text("third"));

    reducer.reduce(bw, texts.iterator(), output, reporter);
    reducer.close();
    String stdOut = readStdOut(conf);
    // test data: key
    assertTrue(stdOut.contains("reducer key :true"));
    // and values
    assertTrue(stdOut.contains("reduce value  :first"));
    assertTrue(stdOut.contains("reduce value  :second"));
    assertTrue(stdOut.contains("reduce value  :third"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }

}
 
Developer: naver, Project: hadoop, Lines: 56, Source: TestPipeApplication.java

Example 6: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Test PipesMapRunner: verify the data transferred from the reader.
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.initialize(FsConstants.LOCAL_FS_URI, conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdout,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 70, Source: TestPipeApplication.java

Example 7: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Test PipesMapRunner: verify the data transferred from the reader.
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs,
            new Path(workSpace + File.separator + "outfile"), IntWritable.class,
            Text.class, null, null);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdout,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 70, Source: TestPipeApplication.java

Example 8: testRunner

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Test PipesMapRunner: verify the data transferred from the reader.
 *
 * @throws Exception
 */
@Ignore // also times out on Apache Hadoop
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // check part of the transferred data via the client's stdout,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }

  }
}
 
Developer: hopshadoop, Project: hops, Lines: 71, Source: TestPipeApplication.java

Example 9: testPipesReduser

import org.apache.hadoop.mapreduce.security.TokenCache; // import the package/class this method depends on
/**
 * Test org.apache.hadoop.mapred.pipes.PipesReducer:
 * verify the transfer of key and value data.
 *
 * @throws Exception
 */
@Ignore // also times out on Apache Hadoop
@Test
public void testPipesReduser() throws Exception {

  File[] psw = cleanTokenPasswordFile();
  JobConf conf = new JobConf();
  try {
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token, conf.getCredentials());

    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());

    PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
    reducer.configure(conf);
    BooleanWritable bw = new BooleanWritable(true);

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
    initStdOut(conf);
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    Reporter reporter = new TestTaskReporter();
    List<Text> texts = new ArrayList<Text>();
    texts.add(new Text("first"));
    texts.add(new Text("second"));
    texts.add(new Text("third"));

    reducer.reduce(bw, texts.iterator(), output, reporter);
    reducer.close();
    String stdOut = readStdOut(conf);
    // test data: key
    assertTrue(stdOut.contains("reducer key :true"));
    // and values
    assertTrue(stdOut.contains("reduce value  :first"));
    assertTrue(stdOut.contains("reduce value  :second"));
    assertTrue(stdOut.contains("reduce value  :third"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }

}
 
Developer: hopshadoop, Project: hops, Lines: 57, Source: TestPipeApplication.java


Note: The org.apache.hadoop.mapreduce.security.TokenCache.setJobToken examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by the community; copyright remains with the original authors. For distribution and use, refer to the corresponding project's license; do not reproduce without permission.