本文整理匯總了Java中org.apache.hadoop.util.GenericOptionsParser.getRemainingArgs方法的典型用法代碼示例。如果您正苦於以下問題:Java GenericOptionsParser.getRemainingArgs方法的具體用法?Java GenericOptionsParser.getRemainingArgs怎麽用?Java GenericOptionsParser.getRemainingArgs使用的例子?那麽,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.hadoop.util.GenericOptionsParser的用法示例。
在下文中一共展示了GenericOptionsParser.getRemainingArgs方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: main
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Entry point: strips generic Hadoop options, then either formats the
 * RM state store (flag {@code -format-state-store}) or starts the
 * ResourceManager normally with a shutdown hook registered.
 */
public static void main(String[] argv) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ResourceManager.class, argv, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    // Consume generic options (-D, -fs, ...) before inspecting our own flag.
    argv = new GenericOptionsParser(conf, argv).getRemainingArgs();
    boolean formatRequested = argv.length == 1 && "-format-state-store".equals(argv[0]);
    if (formatRequested) {
      // Wipe the RMStateStore instead of starting the daemon.
      deleteRMStateStore(conf);
    } else {
      ResourceManager rm = new ResourceManager();
      ShutdownHookManager.get().addShutdownHook(
          new CompositeServiceShutdownHook(rm), SHUTDOWN_HOOK_PRIORITY);
      rm.init(conf);
      rm.start();
    }
  } catch (Throwable t) {
    LOG.fatal("Error starting ResourceManager", t);
    System.exit(-1);
  }
}
示例2: parseArgs
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Parses the command line: after generic Hadoop options are consumed,
 * exactly three args are expected (table name and two region keys in
 * binary-string form). Populates {@code tableName}, {@code region1},
 * {@code region2}.
 *
 * @return 0 when the args name two distinct regions of the table,
 *         -1 on usage error or validation failure
 */
private int parseArgs(String[] args) throws IOException {
  String[] rest = new GenericOptionsParser(getConf(), args).getRemainingArgs();
  if (rest.length != 3) {
    usage();
    return -1;
  }
  tableName = TableName.valueOf(rest[0]);
  region1 = Bytes.toBytesBinary(rest[1]);
  region2 = Bytes.toBytesBinary(rest[2]);
  // Both regions must belong to the table.
  if (notInTable(tableName, region1) || notInTable(tableName, region2)) {
    return -1;
  }
  // A region cannot be merged with itself.
  if (Bytes.equals(region1, region2)) {
    LOG.error("Can't merge a region with itself");
    return -1;
  }
  return 0;
}
示例3: testJobConfigurationsWithTsvImporterTextMapper
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Verifies the job wiring produced by ImportTsv when TsvImporterTextMapper
 * is configured together with a bulk-load output directory: the mapper,
 * reducer and map-output value class must match the text path.
 */
@Test
public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
  String table = "test-" + UUID.randomUUID();
  Path hfileDir = new Path(util.getDataTestDirOnTestFS(table),"hfiles");
  String inputFile = "InputFile1.csv";
  // Command line for ImportTsv: mapper, columns, separator, bulk output, table, input.
  String[] args = {
      "-D" + ImportTsv.MAPPER_CONF_KEY
          + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
      "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
      "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
      "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfileDir.toString(),
      table,
      inputFile
  };
  args = new GenericOptionsParser(util.getConfiguration(), args).getRemainingArgs();
  Job job = ImportTsv.createSubmittableJob(util.getConfiguration(), args);
  assertTrue(job.getMapperClass().equals(TsvImporterTextMapper.class));
  assertTrue(job.getReducerClass().equals(TextSortReducer.class));
  assertTrue(job.getMapOutputValueClass().equals(Text.class));
}
示例4: instantiateDataNode
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Instantiate a single datanode object, along with its secure resources.
 * This must be run by invoking {@link DataNode#runDatanodeDaemon()}
 * subsequently.
 *
 * @param args command-line args; generic Hadoop options are stripped first
 * @param conf configuration to use; a fresh HdfsConfiguration when null
 * @param resources secure resources for privileged port binding
 * @return the datanode instance, or null when the args fail to parse
 */
public static DataNode instantiateDataNode(String[] args, Configuration conf,
    SecureResources resources) throws IOException {
  if (conf == null) {
    conf = new HdfsConfiguration();
  }
  if (args != null) {
    // Let the generic parser consume -D/-fs/... before datanode-specific parsing.
    args = new GenericOptionsParser(conf, args).getRemainingArgs();
  }
  if (!parseArguments(args, conf)) {
    printUsage(System.err);
    return null;
  }
  Collection<StorageLocation> locations = getStorageLocations(conf);
  UserGroupInformation.setConfiguration(conf);
  // Kerberos login with the datanode keytab/principal, resolved for this host.
  SecurityUtil.login(conf, DFS_DATANODE_KEYTAB_FILE_KEY,
      DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, getHostName(conf));
  return makeInstance(locations, conf, resources);
}
示例5: main
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * ResourceManager entry point. After generic Hadoop options are removed,
 * a single {@code -format-state-store} flag deletes the RMStateStore;
 * anything else starts the daemon with a composite-service shutdown hook.
 */
public static void main(String[] argv) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ResourceManager.class, argv, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    GenericOptionsParser parser = new GenericOptionsParser(conf, argv);
    argv = parser.getRemainingArgs();
    if (argv.length == 1 && "-format-state-store".equals(argv[0])) {
      // Maintenance mode: clear persisted RM state and exit.
      deleteRMStateStore(conf);
    } else {
      // Normal startup.
      ResourceManager resourceManager = new ResourceManager();
      ShutdownHookManager.get().addShutdownHook(
          new CompositeServiceShutdownHook(resourceManager),
          SHUTDOWN_HOOK_PRIORITY);
      resourceManager.init(conf);
      resourceManager.start();
    }
  } catch (Throwable t) {
    LOG.fatal("Error starting ResourceManager", t);
    System.exit(-1);
  }
}
示例6: instantiateDataNode
import org.apache.hadoop.util.GenericOptionsParser; // package/class this method depends on
/**
 * Instantiate a single datanode object, along with its secure resources.
 * This must be run by invoking {@link DataNode#runDatanodeDaemon()}
 * subsequently.
 *
 * @param args command-line args; generic Hadoop options are stripped first
 * @param conf configuration to use; a fresh HdfsConfiguration when null
 * @param resources secure resources for privileged port binding
 * @return the datanode instance, or null when the args fail to parse
 */
public static DataNode instantiateDataNode(String[] args, Configuration conf,
    SecureResources resources) throws IOException {
  if (conf == null) {
    conf = new HdfsConfiguration();
  }
  if (args != null) {
    // Strip generic hadoop options (-D, -fs, ...) first.
    args = new GenericOptionsParser(conf, args).getRemainingArgs();
  }
  if (!parseArguments(args, conf)) {
    printUsage(System.err);
    return null;
  }
  Collection<StorageLocation> locations = getStorageLocations(conf);
  UserGroupInformation.setConfiguration(conf);
  // Kerberos login with the datanode keytab and principal keys.
  SecurityUtil.login(conf, DFS_DATANODE_KEYTAB_FILE_KEY,
      DFS_DATANODE_KERBEROS_PRINCIPAL_KEY);
  return makeInstance(locations, conf, resources);
}
示例7: main
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * CLI entry point for the Hazelcast data loader.
 * <p>
 * Usage: {@code HazelcastLoader <input-file> <type[csv|parquet]> <key-index>}.
 * Runs a map-only MapReduce job whose output format writes records into
 * Hazelcast; the key column index is passed to mappers via the job
 * configuration under the {@code key-index} key.
 */
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  GenericOptionsParser optionParser = new GenericOptionsParser(conf, args);
  String[] remainingArgs = optionParser.getRemainingArgs();
  if (remainingArgs.length < 3) {
    // Bug fix: usage text previously read "<key-indemvnx>"; the third
    // positional argument is the key index (see conf.set("key-index", ...)).
    logger.info("Usage: HazelcastLoader <input-file> <type[csv|parquet]> <key-index>");
    System.exit(2);
  }
  String filePath = remainingArgs[0];
  String type = remainingArgs[1];
  String keyIndex = remainingArgs[2];
  conf.set("key-index", keyIndex);
  Job job = Job.getInstance(conf, "Hazelcast Data Loader");
  job.setJarByClass(HazelcastLoader.class);
  job.setNumReduceTasks(0); // map-only job: the output format does the writing
  job.setOutputFormatClass(HazelcastOutputFormat.class);
  Path path = new Path(filePath);
  // Input format and mapper depend on the file type (csv or parquet).
  setMapperAndInputFormatClass(job, path, type);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
示例8: parseGenericOptions
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
* Parse arguments in 'args' via the GenericOptionsParser and
* embed the results in the supplied configuration.
* @param conf the configuration to populate with generic options.
* @param args the arguments to process.
* @return the unused args to be passed to the application itself.
*/
/**
 * Parse arguments in 'args' via the GenericOptionsParser and
 * embed the results in the supplied configuration.
 *
 * This wrapper exists because the Apache Hadoop constructor can throw
 * an IOException while the CDH variant does not; declaring IOException
 * here makes callers handle both.
 *
 * @param conf the configuration to populate with generic options.
 * @param args the arguments to process.
 * @return the unused args to be passed to the application itself.
 */
public static String[] parseGenericOptions(
    Configuration conf, String[] args) throws IOException {
  return new GenericOptionsParser(conf, args).getRemainingArgs();
}
示例9: parseArguments
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Two-stage argument parsing: generic Hadoop options are consumed into
 * {@code conf} first, then the leftover args are matched against the
 * application's commons-cli {@code options}.
 *
 * @return the parsed command line for the application-specific options
 */
private static CommandLine parseArguments(Configuration conf, Options options, String[] args)
    throws ParseException, IOException {
  String[] appArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  return new PosixParser().parse(options, appArgs);
}
示例10: runCount
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Runs a CellCounter job for the given args and reports whether it
 * succeeded. A copy of the test configuration is used so each run gets
 * its own temp dirs.
 */
private boolean runCount(String[] args) throws IOException, InterruptedException,
    ClassNotFoundException {
  // Copy the configuration so concurrent/sequential runs don't share temp dirs.
  Configuration confCopy = new Configuration(UTIL.getConfiguration());
  GenericOptionsParser parser = new GenericOptionsParser(confCopy, args);
  Job job = CellCounter.createSubmittableJob(
      parser.getConfiguration(), parser.getRemainingArgs());
  job.waitForCompletion(false);
  return job.isSuccessful();
}
示例11: runExport
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Runs an export job with the specified command line args
 * @param args
 * @return true if job completed successfully
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
boolean runExport(String[] args)
    throws IOException, InterruptedException, ClassNotFoundException {
  // Copy the configuration so different runs use different temp dirs.
  GenericOptionsParser parser =
      new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
  Configuration jobConf = parser.getConfiguration();
  Job job = Export.createSubmittableJob(jobConf, parser.getRemainingArgs());
  job.waitForCompletion(false);
  return job.isSuccessful();
}
示例12: runImport
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Runs an import job with the specified command line args
 * @param args
 * @return true if job completed successfully
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
boolean runImport(String[] args)
    throws IOException, InterruptedException, ClassNotFoundException {
  // Copy the configuration so different runs use different temp dirs.
  GenericOptionsParser parser =
      new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
  Configuration jobConf = parser.getConfiguration();
  Job job = Import.createSubmittableJob(jobConf, parser.getRemainingArgs());
  job.waitForCompletion(false);
  return job.isSuccessful();
}
示例13: runRowCount
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Run the RowCounter map reduce job and verify the row count.
 *
 * @param args the command line arguments to be used for rowcounter job.
 * @param expectedCount the expected row count (result of map reduce job).
 * @throws Exception
 */
private void runRowCount(String[] args, int expectedCount)
    throws Exception {
  GenericOptionsParser parser = new GenericOptionsParser(TEST_UTIL.getConfiguration(), args);
  Job job = RowCounter.createSubmittableJob(
      parser.getConfiguration(), parser.getRemainingArgs());
  job.waitForCompletion(true);
  assertTrue(job.isSuccessful());
  // The ROWS counter holds the number of rows the mappers saw.
  Counter rows = job.getCounters().findCounter(RowCounterMapper.Counters.ROWS);
  assertEquals(expectedCount, rows.getValue());
}
示例14: runCopy
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * Runs a CopyTable job for the given args on a copy of the test
 * configuration and reports whether the job succeeded.
 */
private boolean runCopy(String[] args) throws IOException, InterruptedException,
    ClassNotFoundException {
  // Fresh Configuration copy: keeps temp dirs isolated between runs.
  Configuration confCopy = new Configuration(TEST_UTIL.getConfiguration());
  GenericOptionsParser parser = new GenericOptionsParser(confCopy, args);
  Job job = new CopyTable(parser.getConfiguration())
      .createSubmittableJob(parser.getRemainingArgs());
  job.waitForCompletion(false);
  return job.isSuccessful();
}
示例15: main
import org.apache.hadoop.util.GenericOptionsParser; //導入方法依賴的package包/類
/**
 * ResourceManager entry point with state-store maintenance flags.
 * After generic Hadoop options are removed:
 * no args starts the daemon; {@code -format-state-store} deletes the
 * RMStateStore; {@code -remove-application-from-state-store <appId>}
 * removes one application; anything else prints usage.
 */
public static void main(String[] argv) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(ResourceManager.class, argv, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    argv = new GenericOptionsParser(conf, argv).getRemainingArgs();
    if (argv.length == 0) {
      // Normal startup path.
      ResourceManager rm = new ResourceManager();
      ShutdownHookManager.get().addShutdownHook(
          new CompositeServiceShutdownHook(rm), SHUTDOWN_HOOK_PRIORITY);
      rm.init(conf);
      rm.start();
    } else if ("-format-state-store".equals(argv[0])) {
      deleteRMStateStore(conf);
    } else if ("-remove-application-from-state-store".equals(argv[0])
        && argv.length == 2) {
      removeApplication(conf, argv[1]);
    } else {
      printUsage(System.err);
    }
  } catch (Throwable t) {
    LOG.fatal("Error starting ResourceManager", t);
    System.exit(-1);
  }
}