

Java Tool Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.util.Tool. If you are looking for concrete answers to questions such as how the Tool class is used and what real-world code that uses it looks like, the selected class examples below should help.


The Tool class belongs to the org.apache.hadoop.util package. The sections below show 15 code examples of the Tool class, sorted by popularity by default.
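As context for the examples that follow, here is a minimal sketch of the standard Tool/ToolRunner pattern. It is not taken from any of the listed projects; the class name MyTool is hypothetical. A driver class extends Configured and implements Tool, and ToolRunner.run parses the generic Hadoop options (-D, -conf, -fs, ...) before delegating the remaining arguments to run(String[]).

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyTool extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    // Application logic goes here; getConf() returns the Configuration
    // injected by ToolRunner after the generic options have been parsed.
    System.out.println("remaining args: " + java.util.Arrays.toString(args));
    return 0; // 0 signals success, non-zero signals failure
  }

  public static void main(String[] args) throws Exception {
    // ToolRunner strips the generic options and passes the rest to run().
    int exitCode = ToolRunner.run(new MyTool(), args);
    System.exit(exitCode);
  }
}

Most of the examples below follow a variant of this pattern: they either instantiate a concrete Tool implementation and call run() directly, or hand it to ToolRunner.run() together with a Configuration and an argument array.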

Example 1: testSplitRamdom

import org.apache.hadoop.util.Tool; // import the required package/class
@Test
@SuppressWarnings("unchecked")
public void testSplitRamdom() throws Exception {
  Tool tool = new InputSampler<Object,Object>(new Configuration());
  int result = tool.run(new String[] { "-r", Integer.toString(NUM_REDUCES),
      // Use 0.999 probability to reduce the flakiness of the test because
      // the test will fail if the number of samples is less than (number of reduces + 1).
      "-splitRandom", "0.999f", "20", "100",
      input1, input2, output });
  assertEquals(0, result);
  Object[] partitions = readPartitions(output);
  // must be 3 split points since NUM_REDUCES = 4:
  assertEquals(3, partitions.length);
  // check that the partition array is sorted:
  Object[] sortedPartitions = Arrays.copyOf(partitions, partitions.length);
  Arrays.sort(sortedPartitions, new LongWritable.Comparator());
  assertArrayEquals(sortedPartitions, partitions);
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines: 19, Source: TestInputSamplerTool.java

Example 2: run

import org.apache.hadoop.util.Tool; // import the required package/class
@Override
public int run(String[] args) throws Exception {
  //get the class, run with the conf
  if (args.length < 1) {
    return printUsage();
  }
  Tool tool = null;
  if (args[0].equals("Generator")) {
    tool = new Generator();
  } else if (args[0].equals("Verify")) {
    tool = new Verify();
  } else if (args[0].equals("Loop")) {
    tool = new Loop();
  } else if (args[0].equals("Walker")) {
    tool = new Walker();
  } else if (args[0].equals("Print")) {
    tool = new Print();
  } else if (args[0].equals("Delete")) {
    tool = new Delete();
  } else {
    return printUsage();
  }

  args = Arrays.copyOfRange(args, 1, args.length);
  return ToolRunner.run(getConf(), tool, args);
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines: 27, Source: IntegrationTestBigLinkedList.java

Example 3: runTestFromCommandLine

import org.apache.hadoop.util.Tool; // import the required package/class
@Override
public int runTestFromCommandLine() throws Exception {

  Tool tool = null;
  if (toRun.equals("Generator")) {
    tool = new Generator();
  } else if (toRun.equals("Verify")) {
    tool = new Verify();
  } else if (toRun.equals("Loop")) {
    Loop loop = new Loop();
    loop.it = this;
    tool = loop;
  } else if (toRun.equals("Walker")) {
    tool = new Walker();
  } else if (toRun.equals("Print")) {
    tool = new Print();
  } else if (toRun.equals("Delete")) {
    tool = new Delete();
  } else {
    usage();
    throw new RuntimeException("Unknown arg");
  }

  return ToolRunner.run(getConf(), tool, otherArgs);
}
 
Developer: tenggyut, Project: HIndex, Lines: 26, Source: IntegrationTestBigLinkedList.java

Example 4: parseInputAndOutputParentDirectory

import org.apache.hadoop.util.Tool; // import the required package/class
public static Job parseInputAndOutputParentDirectory(Tool tool,
		Configuration conf, String[] args) throws IOException {
	if (args.length != 2) {
		printUsage(tool, "<input> <output>");
		return null;
	}
	Job job = new Job(conf);
	job.setJarByClass(tool.getClass());
	FileSystem fs = FileSystem.get(conf);
	FileStatus[] status = fs.listStatus(new Path(args[0]));
	for (int i = 0; i < status.length; i++) {
		if(status[i].isDir()) {
			FileInputFormat.addInputPath(job, status[i].getPath());
		}
	}

	FileOutputFormat.setOutputPath(job, new Path(args[1]));
	return job;
}
 
Developer: quhfus, Project: DoSeR, Lines: 20, Source: JobBuilder.java

Example 5: resolveClient

import org.apache.hadoop.util.Tool; // import the required package/class
private static Tool resolveClient(Configuration conf, ClassDescription client) {
    try {
        Class<?> aClass = client.resolve(conf.getClassLoader());
        if (Tool.class.isAssignableFrom(aClass) == false) {
            throw new IllegalArgumentException(MessageFormat.format(
                    "MapReduce client class must implement Tool interface: {0}",
                    client.getClassName()));
        }
        Tool tool = ReflectionUtils.newInstance(aClass.asSubclass(Tool.class), conf);
        return tool;
    } catch (ReflectiveOperationException e) {
        throw new IllegalArgumentException(MessageFormat.format(
                "failed to resolve MapReduce client class: {0}",
                client.getClassName()));
    }
}
 
Developer: asakusafw, Project: asakusafw-compiler, Lines: 17, Source: MapReduceRunner.java

Example 6: testRun_l2t10

import org.apache.hadoop.util.Tool; // import the required package/class
@Test
public void testRun_l2t10() throws Exception {
  // gen rowkeys file for later test
  Configuration conf = TEST_UTIL.getConfiguration();
  String outputPath = "/run_b2t3";
  Tool tool = new GetRandomRowsByRegions(conf);
  int status = tool.run(new String[] { "-b", "2", "-t", "3", VERTEX_TABLE, outputPath });
  Assert.assertEquals(0, status);

  // merge content
  File tf = mergeResults(conf, outputPath, "rowkeys-1");

  // run test
  File tPath = tf.getParentFile();
  tPath = new File(tPath, "performanceTestResults_" + System.currentTimeMillis());
  FileUtils.forceMkdir(tPath);

  tool = new HGraphClientPerformanceTest(conf);
  status =
      tool.run(new String[] { "-l", "2", "-t", "10", VERTEX_TABLE, EDGE_TABLE,
          tf.getAbsolutePath(), tPath.getAbsolutePath() });
  Assert.assertEquals(0, status);

  // verify test results
  outputTestResults(tPath);
}
 
Developer: trendmicro, Project: HGraph, Lines: 27, Source: HGraphClientPerformanceTestTest.java

Example 7: testRun_ml2t10

import org.apache.hadoop.util.Tool; // import the required package/class
@Test
public void testRun_ml2t10() throws Exception {
  // gen rowkeys file for later test
  Configuration conf = TEST_UTIL.getConfiguration();
  String outputPath = "/run_ml2t10";
  Tool tool = new GetRandomRowsByRegions(conf);
  int status = tool.run(new String[] { "-b", "2", "-t", "3", VERTEX_TABLE, outputPath });
  Assert.assertEquals(0, status);

  // merge content
  File tf = mergeResults(conf, outputPath, "rowkeys-2");

  // run test
  File tPath = tf.getParentFile();
  tPath = new File(tPath, "performanceTestResults_" + System.currentTimeMillis());
  FileUtils.forceMkdir(tPath);

  tool = new HGraphClientPerformanceTest(conf);
  status =
      tool.run(new String[] { "-m", "-l", "2", "-t", "10", VERTEX_TABLE, EDGE_TABLE,
          tf.getAbsolutePath(), tPath.getAbsolutePath() });
  Assert.assertEquals(0, status);

  // verify test results
  outputTestResults(tPath);
}
 
Developer: trendmicro, Project: HGraph, Lines: 27, Source: HGraphClientPerformanceTestTest.java

Example 8: testRun_i2000l2t10

import org.apache.hadoop.util.Tool; // import the required package/class
@Test
public void testRun_i2000l2t10() throws Exception {
  // gen rowkeys file for later test
  Configuration conf = TEST_UTIL.getConfiguration();
  String outputPath = "/run_i2000l2t10";
  Tool tool = new GetRandomRowsByRegions(conf);
  int status = tool.run(new String[] { "-b", "2", "-t", "3", VERTEX_TABLE, outputPath });
  Assert.assertEquals(0, status);

  // merge content
  File tf = mergeResults(conf, outputPath, "rowkeys-2");

  // run test
  File tPath = tf.getParentFile();
  tPath = new File(tPath, "performanceTestResults_" + System.currentTimeMillis());
  FileUtils.forceMkdir(tPath);

  tool = new HGraphClientPerformanceTest(conf);
  status =
      tool.run(new String[] { "-i", "2000", "-l", "2", "-t", "10", VERTEX_TABLE, EDGE_TABLE,
          tf.getAbsolutePath(), tPath.getAbsolutePath() });
  Assert.assertEquals(0, status);

  // verify test results
  outputTestResults(tPath);
}
 
Developer: trendmicro, Project: HGraph, Lines: 27, Source: HGraphClientPerformanceTestTest.java

Example 9: runTool

import org.apache.hadoop.util.Tool; // import the required package/class
private void runTool(Tool tool, Batch batch, ResultCollector resultCollector) throws
                                                                              IOException,
                                                                              InterruptedException,
                                                                              TransformerException {
    //create the input as a file on the cluster
    Configuration conf = new Configuration();
    getProperties().setProperty(ConfigConstants.ITERATOR_USE_FILESYSTEM, "False");
    propertiesToHadoopConfiguration(conf, getProperties());

    conf.set(ConfigConstants.BATCH_ID, batch.getFullID());

    String user = conf.get(ConfigConstants.HADOOP_USER, "newspapr");
    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);


    FileSystem fs = FileSystem.get(FileSystem.getDefaultUri(conf), conf, user);
    long time = System.currentTimeMillis();
    String jobFolder = getProperties().getProperty(ConfigConstants.JOB_FOLDER);
    Path inputFile = createInputFile(batch, fs, time, jobFolder);
    Path outDir = new Path(
            jobFolder, "output_" + batch.getFullID() + "_" + time);


    runJob(tool, batch, resultCollector, conf, inputFile, outDir, user);

}
 
Developer: statsbiblioteket, Project: newspaper-batch-event-framework, Lines: 27, Source: AbstractHadoopRunnableComponent.java

Example 10: runTool

import org.apache.hadoop.util.Tool; // import the required package/class
public static int runTool(Configuration conf, Tool tool, String[] args,
    OutputStream out) throws Exception {
  PrintStream oldOut = System.out;
  PrintStream newOut = new PrintStream(out, true);
  try {
    System.setOut(newOut);
    return ToolRunner.run(conf, tool, args);
  } finally {
    System.setOut(oldOut);
  }
}
 
Developer: naver, Project: hadoop, Lines: 12, Source: TestMRJobClient.java

Example 11: executeTool

import org.apache.hadoop.util.Tool; // import the required package/class
private void executeTool(String toolMessage, Tool tool, String[] args, int expectedResult)
    throws Exception {
  LOG.info("Starting " + toolMessage);
  int res = ToolRunner.run(getConf(), tool, new String[] { "--upgrade" });
  if (res != expectedResult) {
    LOG.error(toolMessage + "returned " + res + ", expected " + expectedResult);
    throw new Exception("Unexpected return code from " + toolMessage);
  }
  LOG.info("Successfully completed " + toolMessage);
}
 
Developer: fengchen8086, Project: ditb, Lines: 11, Source: UpgradeTo96.java

Example 12: doMROnTableTest

import org.apache.hadoop.util.Tool; // import the required package/class
protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
    String[] args, int valueMultiplier) throws Exception {
  TableName table = TableName.valueOf(args[args.length - 1]);
  Configuration conf = new Configuration(util.getConfiguration());

  // populate input file
  FileSystem fs = FileSystem.get(conf);
  Path inputPath = fs.makeQualified(new Path(util
      .getDataTestDirOnTestFS(table.getNameAsString()), "input.dat"));
  FSDataOutputStream op = fs.create(inputPath, true);
  op.write(Bytes.toBytes(data));
  op.close();
  LOG.debug(String.format("Wrote test data to file: %s", inputPath));

  if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
    LOG.debug("Forcing combiner.");
    conf.setInt("mapreduce.map.combine.minspills", 1);
  }

  // run the import
  List<String> argv = new ArrayList<String>(Arrays.asList(args));
  argv.add(inputPath.toString());
  Tool tool = new ImportTsv();
  LOG.debug("Running ImportTsv with arguments: " + argv);
  try {
    // Job will fail if observer rejects entries without TTL
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(args)));
  } finally {
    // Clean up
    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table.getNameAsString());
    }
  }

  return tool;
}
 
Developer: fengchen8086, Project: ditb, Lines: 38, Source: TestImportTSVWithTTLs.java

Example 13: doMROnTableTest

import org.apache.hadoop.util.Tool; // import the required package/class
/**
 * Run an ImportTsv job and perform basic validation on the results. Returns
 * the ImportTsv <code>Tool</code> instance so that other tests can inspect it
 * for further validation as necessary. This method is static to insure
 * non-reliance on instance's util/conf facilities.
 * 
 * @param args
 *          Any arguments to pass BEFORE inputFile path is appended.
 * @param dataAvailable
 * @return The Tool instance used to run the test.
 */
private Tool doMROnTableTest(HBaseTestingUtility util, String family, String data, String[] args,
    int valueMultiplier, boolean dataAvailable) throws Exception {
  String table = args[args.length - 1];
  Configuration conf = new Configuration(util.getConfiguration());

  // populate input file
  FileSystem fs = FileSystem.get(conf);
  Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
  FSDataOutputStream op = fs.create(inputPath, true);
  op.write(Bytes.toBytes(data));
  op.close();
  LOG.debug(String.format("Wrote test data to file: %s", inputPath));

  if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
    LOG.debug("Forcing combiner.");
    conf.setInt("mapreduce.map.combine.minspills", 1);
  }

  // run the import
  List<String> argv = new ArrayList<String>(Arrays.asList(args));
  argv.add(inputPath.toString());
  Tool tool = new ImportTsv();
  LOG.debug("Running ImportTsv with arguments: " + argv);
  assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(args)));

  validateTable(conf, TableName.valueOf(table), family, valueMultiplier, dataAvailable);

  if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
    LOG.debug("Deleting test subdirectory");
    util.cleanupDataTestDirOnTestFS(table);
  }
  return tool;
}
 
Developer: fengchen8086, Project: ditb, Lines: 45, Source: TestImportTSVWithOperationAttributes.java

Example 14: testGenerateAndLoad

import org.apache.hadoop.util.Tool; // import the required package/class
@Test
public void testGenerateAndLoad() throws Exception {
  LOG.info("Running test testGenerateAndLoad.");
  TableName table = TableName.valueOf(NAME + "-" + UUID.randomUUID());
  String cf = "d";
  Path hfiles = new Path(
      util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");

  String[] args = {
      format("-D%s=%s", ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles),
      format("-D%s=HBASE_ROW_KEY,HBASE_TS_KEY,%s:c1,%s:c2",
        ImportTsv.COLUMNS_CONF_KEY, cf, cf),
      // configure the test harness to NOT delete the HFiles after they're
      // generated. We need those for doLoadIncrementalHFiles
      format("-D%s=false", TestImportTsv.DELETE_AFTER_LOAD_CONF),
      table.getNameAsString()
  };

  // run the job, complete the load.
  util.createTable(table, new String[]{cf});
  Tool t = TestImportTsv.doMROnTableTest(util, cf, simple_tsv, args);
  doLoadIncrementalHFiles(hfiles, table);

  // validate post-conditions
  validateDeletedPartitionsFile(t.getConf());

  // clean up after ourselves.
  util.deleteTable(table);
  util.cleanupDataTestDirOnTestFS(table.getNameAsString());
  LOG.info("testGenerateAndLoad completed successfully.");
}
 
Developer: fengchen8086, Project: ditb, Lines: 32, Source: IntegrationTestImportTsv.java

Example 15: runTestFromCommandLine

import org.apache.hadoop.util.Tool; // import the required package/class
@Override
public int runTestFromCommandLine() throws Exception {
  Tool tool = null;
  Loop loop = new VisibilityLoop();
  loop.it = this;
  tool = loop;
  return ToolRunner.run(getConf(), tool, otherArgs);
}
 
Developer: fengchen8086, Project: ditb, Lines: 9, Source: IntegrationTestBigLinkedListWithVisibility.java


Note: The org.apache.hadoop.util.Tool class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. Please consult the corresponding project's License before distributing or using the code; do not reproduce without permission.