當前位置: 首頁>>代碼示例>>Java>>正文


Java NullOutputFormat類代碼示例

本文整理匯總了Java中org.apache.hadoop.mapreduce.lib.output.NullOutputFormat的典型用法代碼示例。如果您正苦於以下問題:Java NullOutputFormat類的具體用法?Java NullOutputFormat怎麽用?Java NullOutputFormat使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。


NullOutputFormat類屬於org.apache.hadoop.mapreduce.lib.output包,在下文中一共展示了NullOutputFormat類的14個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。

示例1: createJob

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Builds the credentials test job: sleep-style map tasks feeding a single
 * reducer, with all output discarded via {@link NullOutputFormat}.
 *
 * @return the fully configured (but not yet submitted) Job
 * @throws IOException if the job or its input path cannot be set up
 */
public Job createJob() 
throws IOException {
  Configuration conf = getConf();
  conf.setInt(MRJobConfig.NUM_MAPS, 1);
  Job job = Job.getInstance(conf, "test");
  // Fix: the original set the reduce-task count twice with the same value;
  // one call is sufficient.
  job.setNumReduceTasks(1);
  job.setJarByClass(CredentialsTestJob.class);
  job.setMapperClass(CredentialsTestJob.CredentialsTestMapper.class);
  job.setMapOutputKeyClass(IntWritable.class);
  job.setMapOutputValueClass(NullWritable.class);
  job.setReducerClass(CredentialsTestJob.CredentialsTestReducer.class);
  job.setInputFormatClass(SleepJob.SleepInputFormat.class);
  job.setPartitionerClass(SleepJob.SleepJobPartitioner.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setSpeculativeExecution(false);
  job.setJobName("test job");
  // FileInputFormat requires an input path; the "ignored" name suggests
  // SleepInputFormat does not read it — TODO confirm.
  FileInputFormat.addInputPath(job, new Path("ignored"));
  return job;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:21,代碼來源:CredentialsTestJob.java

示例2: createJob

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Configures a job whose map and/or reduce tasks deliberately fail,
 * controlled by the two boolean flags stored into the job configuration.
 *
 * @param failMappers  when true, map tasks are made to fail
 * @param failReducers when true, reduce tasks are made to fail
 * @param inputFile    text input that drives the map tasks
 * @return the configured Job (not submitted)
 * @throws IOException if job setup fails
 */
public Job createJob(boolean failMappers, boolean failReducers, Path inputFile) 
    throws IOException {
  Configuration failConf = getConf();
  failConf.setBoolean(FAIL_MAP, failMappers);
  failConf.setBoolean(FAIL_REDUCE, failReducers);

  Job failJob = Job.getInstance(failConf, "fail");
  failJob.setJarByClass(FailJob.class);
  failJob.setJobName("Fail job");
  failJob.setSpeculativeExecution(false);

  // Map/reduce pipeline: failing mapper and reducer implementations.
  failJob.setMapperClass(FailMapper.class);
  failJob.setMapOutputKeyClass(LongWritable.class);
  failJob.setMapOutputValueClass(NullWritable.class);
  failJob.setReducerClass(FailReducer.class);

  // Text in, nothing out.
  failJob.setInputFormatClass(TextInputFormat.class);
  failJob.setOutputFormatClass(NullOutputFormat.class);
  FileInputFormat.addInputPath(failJob, inputFile);
  return failJob;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:19,代碼來源:FailJob.java

示例3: runTest

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Runs the given map-collection job under the local framework with a single
 * map task and a single reducer, asserting that it completes successfully.
 *
 * @param name label used only for logging
 * @param job  the job to configure and run
 * @throws Exception if the job cannot be run
 */
private static void runTest(String name, Job job) throws Exception {
  // Force the local runner and a single map task; the large sort factor
  // keeps the merge phase to one pass.
  job.getConfiguration().set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
  job.getConfiguration().set("fs.defaultFS", "file:///");
  job.getConfiguration().setInt(MRJobConfig.IO_SORT_FACTOR, 1000);
  job.getConfiguration().setInt("test.mapcollection.num.maps", 1);
  job.setNumReduceTasks(1);

  // Synthetic input, discarded output; the interesting work happens in the
  // spill/sort path exercised by SpillReducer and VariableComparator.
  job.setInputFormatClass(FakeIF.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setMapperClass(Mapper.class);
  job.setReducerClass(SpillReducer.class);
  job.setMapOutputKeyClass(KeyWritable.class);
  job.setMapOutputValueClass(ValWritable.class);
  job.setSortComparatorClass(VariableComparator.class);

  LOG.info("Running " + name);
  boolean succeeded = job.waitForCompletion(false);
  assertTrue("Job failed!", succeeded);
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:18,代碼來源:TestMapCollection.java

示例4: createJob

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Builds a sleep job: each map task sleeps {@code mapSleepTime} ms for
 * {@code mapSleepCount} iterations, and likewise for reduce tasks.
 *
 * @param numMapper        number of map tasks
 * @param numReducer       number of reduce tasks
 * @param mapSleepTime     per-iteration map sleep in milliseconds
 * @param mapSleepCount    sleep iterations per map task
 * @param reduceSleepTime  per-iteration reduce sleep in milliseconds
 * @param reduceSleepCount sleep iterations per reduce task
 * @return the configured Job (not submitted)
 * @throws IOException if job setup fails
 */
public Job createJob(int numMapper, int numReducer, 
                     long mapSleepTime, int mapSleepCount, 
                     long reduceSleepTime, int reduceSleepCount) 
    throws IOException {
  Configuration sleepConf = getConf();
  sleepConf.setLong(MAP_SLEEP_TIME, mapSleepTime);
  sleepConf.setLong(REDUCE_SLEEP_TIME, reduceSleepTime);
  sleepConf.setInt(MAP_SLEEP_COUNT, mapSleepCount);
  sleepConf.setInt(REDUCE_SLEEP_COUNT, reduceSleepCount);
  sleepConf.setInt(MRJobConfig.NUM_MAPS, numMapper);

  Job sleepJob = Job.getInstance(sleepConf, "sleep");
  sleepJob.setJarByClass(SleepJob.class);
  sleepJob.setJobName("Sleep job");
  sleepJob.setNumReduceTasks(numReducer);
  sleepJob.setSpeculativeExecution(false);

  sleepJob.setMapperClass(SleepMapper.class);
  sleepJob.setMapOutputKeyClass(IntWritable.class);
  sleepJob.setMapOutputValueClass(NullWritable.class);
  sleepJob.setReducerClass(SleepReducer.class);
  sleepJob.setOutputFormatClass(NullOutputFormat.class);
  sleepJob.setInputFormatClass(SleepInputFormat.class);
  sleepJob.setPartitionerClass(SleepJobPartitioner.class);

  // FileInputFormat requires a path even though the "ignored" name suggests
  // SleepInputFormat does not read it — TODO confirm.
  FileInputFormat.addInputPath(sleepJob, new Path("ignored"));
  return sleepJob;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:26,代碼來源:SleepJob.java

示例5: call

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Configures and submits the distributed-cache data-generation job as the
 * login user, then returns the (already submitted, not awaited) job.
 *
 * NOTE(review): the job is configured and submitted inside the doAs block so
 * that submission runs with the login user's credentials; the Job reference
 * returned is the same outer {@code job} field mutated inside run().
 *
 * @return the submitted Job
 * @throws IOException            if job setup fails
 * @throws InterruptedException   if the doAs action is interrupted
 * @throws ClassNotFoundException if a job class cannot be resolved
 */
@Override
public Job call() throws IOException, InterruptedException,
                         ClassNotFoundException {
  UserGroupInformation ugi = UserGroupInformation.getLoginUser();
  ugi.doAs( new PrivilegedExceptionAction <Job>() {
     public Job run() throws IOException, ClassNotFoundException,
                             InterruptedException {
      // Map-only job: mappers generate the data, no reduce phase, output
      // discarded via NullOutputFormat.
      job.setMapperClass(GenDCDataMapper.class);
      job.setNumReduceTasks(0);
      job.setMapOutputKeyClass(NullWritable.class);
      job.setMapOutputValueClass(BytesWritable.class);
      job.setInputFormatClass(GenDCDataFormat.class);
      job.setOutputFormatClass(NullOutputFormat.class);
      job.setJarByClass(GenerateDistCacheData.class);
      try {
        // Best-effort: a failure to add the placeholder path is logged, not
        // propagated, and submission is still attempted.
        FileInputFormat.addInputPath(job, new Path("ignored"));
      } catch (IOException e) {
        LOG.error("Error while adding input path ", e);
      }
      job.submit();
      return job;
    }
  });
  return job;
}
 
開發者ID:naver,項目名稱:hadoop,代碼行數:26,代碼來源:GenerateDistCacheData.java

示例6: testInputFormat

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Runs a map-only verification job with the given input format and checks
 * the counters recorded by ExampleVerifier: row/family/value counts for the
 * filtered-for row "aaa" and the filtered-out row "bbb".
 *
 * @param clazz the InputFormat implementation under test
 */
void testInputFormat(Class<? extends InputFormat> clazz)
    throws IOException, InterruptedException, ClassNotFoundException {
  final Job job = MapreduceTestingShim.createJob(UTIL.getConfiguration());
  job.setMapperClass(ExampleVerifier.class);
  job.setInputFormatClass(clazz);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setNumReduceTasks(0);

  LOG.debug("submitting job.");
  assertTrue("job failed!", job.waitForCompletion(true));

  // All counters live under groups named "<test class>:<kind>".
  final String base = TestTableInputFormat.class.getName();
  assertEquals("Saw the wrong number of instances of the filtered-for row.", 2,
      job.getCounters().findCounter(base + ":row", "aaa").getValue());
  assertEquals("Saw any instances of the filtered out row.", 0,
      job.getCounters().findCounter(base + ":row", "bbb").getValue());
  assertEquals("Saw the wrong number of instances of columnA.", 1,
      job.getCounters().findCounter(base + ":family", "columnA").getValue());
  assertEquals("Saw the wrong number of instances of columnB.", 1,
      job.getCounters().findCounter(base + ":family", "columnB").getValue());
  assertEquals("Saw the wrong count of values for the filtered-for row.", 2,
      job.getCounters().findCounter(base + ":value", "value aaa").getValue());
  assertEquals("Saw the wrong count of values for the filtered-out row.", 0,
      job.getCounters().findCounter(base + ":value", "value bbb").getValue());
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:24,代碼來源:TestTableInputFormat.java

示例7: runTestOnTable

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Runs a map-only time-range scan over the HBase-backed graph table and
 * waits for completion, cleaning up the job's temp directory afterwards.
 *
 * NOTE(review): IOException is caught and printed rather than propagated —
 * preserved from the original (auto-generated catch block).
 */
private void runTestOnTable() throws InterruptedException, ClassNotFoundException {
    Job scanJob = null;
    try {
        Configuration hbaseConf = graph.configuration().toHBaseConfiguration();
        scanJob = Job.getInstance(hbaseConf, "test123");
        scanJob.setOutputFormatClass(NullOutputFormat.class);
        scanJob.setNumReduceTasks(0);

        // Restrict the scan to one column within [MINSTAMP, MAXSTAMP),
        // keeping every cell version.
        Scan timeRangeScan = new Scan();
        timeRangeScan.addColumn(FAMILY_NAME, COLUMN_NAME);
        timeRangeScan.setTimeRange(MINSTAMP, MAXSTAMP);
        timeRangeScan.setMaxVersions();

        TableMapReduceUtil.initTableMapperJob(TABLE_NAME.getNameAsString(),
                timeRangeScan, ProcessTimeRangeMapper.class, Text.class, Text.class,
                scanJob, true, TableInputFormat.class);
        scanJob.waitForCompletion(true);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        if (scanJob != null) {
            FileUtil.fullyDelete(
                    new File(scanJob.getConfiguration().get("hadoop.tmp.dir")));
        }
    }
}
 
開發者ID:rayokota,項目名稱:hgraphdb,代碼行數:26,代碼來源:TableInputFormatTest.java

示例8: runTestOnTable

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Runs a map-only time-range scan job on a one-node mini MapReduce cluster
 * and waits for completion; the cluster is always shut down and the job's
 * temp directory deleted in the finally block.
 *
 * NOTE(review): IOException is caught and printed rather than propagated —
 * the auto-generated catch block was left in place by the original author.
 */
private void runTestOnTable()
throws IOException, InterruptedException, ClassNotFoundException {
  UTIL.startMiniMapReduceCluster(1);
  Job job = null;
  try {
    job = new Job(UTIL.getConfiguration(), "test123");
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);
    // Scan a single column within [MINSTAMP, MAXSTAMP), keeping all versions.
    Scan scan = new Scan();
    scan.addColumn(FAMILY_NAME, COLUMN_NAME);
    scan.setTimeRange(MINSTAMP, MAXSTAMP);
    scan.setMaxVersions();
    TableMapReduceUtil.initTableMapperJob(Bytes.toString(TABLE_NAME),
      scan, ProcessTimeRangeMapper.class, Text.class, Text.class, job);
    job.waitForCompletion(true);
  } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  } finally {
    // Shut the cluster down before deleting the temp dir it may still use.
    UTIL.shutdownMiniMapReduceCluster();
    if (job != null) {
      FileUtil.fullyDelete(
        new File(job.getConfiguration().get("hadoop.tmp.dir")));
    }
  }
}
 
開發者ID:fengchen8086,項目名稱:LCIndex-HBase-0.94.16,代碼行數:27,代碼來源:TestTimeRangeMapRed.java

示例9: testOneRemoteJT

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Starts a one-tracker mini Corona cluster, forces the job tracker into
 * remote mode, runs a map-only test job pinned to tracker-1, and asserts
 * the job succeeds.
 */
public void testOneRemoteJT() throws Exception {
  LOG.info("Starting testOneRemoteJT");
  String[] rackNames = "/rack-1".split(",");
  String[] trackerNames = "tracker-1".split(",");
  corona = new MiniCoronaCluster.Builder().numTaskTrackers(1).racks(rackNames)
      .hosts(trackerNames).build();

  Configuration coronaConf = corona.createJobConf();
  coronaConf.set("mapred.job.tracker", "corona");
  coronaConf.set("mapred.job.tracker.class", CoronaJobTracker.class.getName());
  // Pin task placement to the single tracker and force the remote job tracker.
  coronaConf.set("test.locations", "tracker-1");
  coronaConf.setBoolean("mapred.coronajobtracker.forceremote", true);

  Job remoteJob = new Job(coronaConf);
  remoteJob.setMapperClass(TstJob.TestMapper.class);
  remoteJob.setInputFormatClass(TstJob.TestInputFormat.class);
  remoteJob.setOutputFormatClass(NullOutputFormat.class);
  remoteJob.setNumReduceTasks(0);
  remoteJob.getConfiguration().set("io.sort.record.pct", "0.50");
  remoteJob.getConfiguration().set("io.sort.mb", "25");

  boolean completed = remoteJob.waitForCompletion(true);
  assertTrue("Job did not succeed", completed);
}
 
開發者ID:rhli,項目名稱:hadoop-EAR,代碼行數:23,代碼來源:TestMiniCoronaFederatedJT.java

示例10: getVertexJobWithDefaultMapper

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Builds a map-only vertex-scan job over the Cassandra input format with all
 * key/value classes set to NullWritable and output discarded.
 *
 * @param c the Hadoop configuration to base the job on
 * @return the configured Job (not submitted)
 * @throws IOException if job creation fails
 */
private Job getVertexJobWithDefaultMapper(org.apache.hadoop.conf.Configuration c) throws IOException {
  Job vertexScanJob = Job.getInstance(c);
  vertexScanJob.setJarByClass(HadoopScanMapper.class);
  vertexScanJob.setJobName("testPartitionedVertexScan");
  vertexScanJob.setNumReduceTasks(0);

  // No meaningful keys or values flow through this job.
  vertexScanJob.setOutputKeyClass(NullWritable.class);
  vertexScanJob.setOutputValueClass(NullWritable.class);
  vertexScanJob.setMapOutputKeyClass(NullWritable.class);
  vertexScanJob.setMapOutputValueClass(NullWritable.class);

  vertexScanJob.setInputFormatClass(CassandraInputFormat.class);
  vertexScanJob.setOutputFormatClass(NullOutputFormat.class);
  return vertexScanJob;
}
 
開發者ID:graben1437,項目名稱:titan1withtp3.1,代碼行數:17,代碼來源:CassandraScanJobIT.java

示例11: createJob

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Builds a sleep job: each map task sleeps {@code mapSleepTime} ms for
 * {@code mapSleepCount} iterations, and likewise for reduce tasks.
 *
 * @param numMapper        number of map tasks
 * @param numReducer       number of reduce tasks
 * @param mapSleepTime     per-iteration map sleep in milliseconds
 * @param mapSleepCount    sleep iterations per map task
 * @param reduceSleepTime  per-iteration reduce sleep in milliseconds
 * @param reduceSleepCount sleep iterations per reduce task
 * @return the configured Job (not submitted)
 * @throws IOException if job setup fails
 */
public Job createJob(int numMapper, int numReducer, 
                     long mapSleepTime, int mapSleepCount, 
                     long reduceSleepTime, int reduceSleepCount) 
    throws IOException {
  Configuration conf = getConf();
  conf.setLong(MAP_SLEEP_TIME, mapSleepTime);
  conf.setLong(REDUCE_SLEEP_TIME, reduceSleepTime);
  conf.setInt(MAP_SLEEP_COUNT, mapSleepCount);
  conf.setInt(REDUCE_SLEEP_COUNT, reduceSleepCount);
  conf.setInt(MRJobConfig.NUM_MAPS, numMapper);
  Job job = Job.getInstance(conf, "sleep");
  // Fix: the original called setNumReduceTasks(numReducer) twice; once is enough.
  job.setNumReduceTasks(numReducer);
  job.setJarByClass(SleepJob.class);
  job.setMapperClass(SleepMapper.class);
  job.setMapOutputKeyClass(IntWritable.class);
  job.setMapOutputValueClass(NullWritable.class);
  job.setReducerClass(SleepReducer.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setInputFormatClass(SleepInputFormat.class);
  job.setPartitionerClass(SleepJobPartitioner.class);
  job.setSpeculativeExecution(false);
  job.setJobName("Sleep job");
  // FileInputFormat requires a path even though the "ignored" name suggests
  // SleepInputFormat does not read it — TODO confirm.
  FileInputFormat.addInputPath(job, new Path("ignored"));
  return job;
}
 
開發者ID:ict-carch,項目名稱:hadoop-plus,代碼行數:27,代碼來源:SleepJob.java

示例12: main

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Entry point: runs the "Top popular airports" job over the input at
 * args[0] with 20 reducers, discarding formatted output via
 * NullOutputFormat, and exits with 0 on success, 1 on failure.
 *
 * @param args args[0] is the input path
 */
public static void main(String[] args) throws Exception {
    Job airportJob = getInstance(new Configuration(), "Top popular airports");
    airportJob.setJarByClass(TopPopularAirportAll.class);

    airportJob.setMapperClass(TokenizerMapper.class);
    airportJob.setReducerClass(SumReducer.class);
    airportJob.setNumReduceTasks(20);

    airportJob.setOutputKeyClass(Text.class);
    airportJob.setOutputValueClass(IntWritable.class);
    airportJob.setOutputFormatClass(NullOutputFormat.class);

    addInputPath(airportJob, new Path(args[0]));
    exit(airportJob.waitForCompletion(true) ? 0 : 1);
}
 
開發者ID:kgrodzicki,項目名稱:cloud-computing-specialization,代碼行數:18,代碼來源:TopPopularAirportAll.java

示例13: runTestOnTable

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Runs a map-only time-range scan job on a mini MapReduce cluster and waits
 * for completion; the cluster is always shut down and the job's temp
 * directory deleted in the finally block.
 *
 * NOTE(review): IOException is caught and printed rather than propagated —
 * the auto-generated catch block was left in place by the original author.
 */
private void runTestOnTable()
throws IOException, InterruptedException, ClassNotFoundException {
  UTIL.startMiniMapReduceCluster();
  Job job = null;
  try {
    job = new Job(UTIL.getConfiguration(), "test123");
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);
    // Scan a single column within [MINSTAMP, MAXSTAMP), keeping all versions.
    Scan scan = new Scan();
    scan.addColumn(FAMILY_NAME, COLUMN_NAME);
    scan.setTimeRange(MINSTAMP, MAXSTAMP);
    scan.setMaxVersions();
    TableMapReduceUtil.initTableMapperJob(TABLE_NAME,
      scan, ProcessTimeRangeMapper.class, Text.class, Text.class, job);
    job.waitForCompletion(true);
  } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  } finally {
    // Shut the cluster down before deleting the temp dir it may still use.
    UTIL.shutdownMiniMapReduceCluster();
    if (job != null) {
      FileUtil.fullyDelete(
        new File(job.getConfiguration().get("hadoop.tmp.dir")));
    }
  }
}
 
開發者ID:grokcoder,項目名稱:pbase,代碼行數:27,代碼來源:TestTimeRangeMapRed.java

示例14: runTestOnTable

import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; //導入依賴的package包/類
/**
 * Runs a map-only time-range scan job on a mini MapReduce cluster and waits
 * for completion, always shutting the cluster down and deleting the job's
 * temp directory afterwards.
 *
 * NOTE(review): IOException is caught and printed rather than propagated —
 * preserved from the original (auto-generated catch block).
 */
private void runTestOnTable()
throws IOException, InterruptedException, ClassNotFoundException {
  UTIL.startMiniMapReduceCluster();
  Job timeRangeJob = null;
  try {
    timeRangeJob = new Job(UTIL.getConfiguration(), "test123");
    timeRangeJob.setOutputFormatClass(NullOutputFormat.class);
    timeRangeJob.setNumReduceTasks(0);

    // Restrict the scan to one column within [MINSTAMP, MAXSTAMP),
    // keeping every cell version.
    Scan timeRangeScan = new Scan();
    timeRangeScan.addColumn(FAMILY_NAME, COLUMN_NAME);
    timeRangeScan.setTimeRange(MINSTAMP, MAXSTAMP);
    timeRangeScan.setMaxVersions();

    TableMapReduceUtil.initTableMapperJob(Bytes.toString(TABLE_NAME),
      timeRangeScan, ProcessTimeRangeMapper.class, Text.class, Text.class,
      timeRangeJob);
    timeRangeJob.waitForCompletion(true);
  } catch (IOException ioe) {
    // TODO Auto-generated catch block
    ioe.printStackTrace();
  } finally {
    // Shut the cluster down before deleting the temp dir it may still use.
    UTIL.shutdownMiniMapReduceCluster();
    if (timeRangeJob != null) {
      FileUtil.fullyDelete(
        new File(timeRangeJob.getConfiguration().get("hadoop.tmp.dir")));
    }
  }
}
 
開發者ID:tenggyut,項目名稱:HIndex,代碼行數:27,代碼來源:TestTimeRangeMapRed.java


注:本文中的org.apache.hadoop.mapreduce.lib.output.NullOutputFormat類示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。