This article collects typical usage examples of the Java method org.apache.hadoop.tools.DistCpOptions.setBlocking. If you are wondering what DistCpOptions.setBlocking does, or how and when to call it, the curated code samples below should help. You can also explore the enclosing class, org.apache.hadoop.tools.DistCpOptions, for further context.
Three code examples of DistCpOptions.setBlocking are shown below, ordered by popularity.
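Before the project examples, here is a minimal, self-contained sketch of the pattern they all rely on. It assumes the mutable, Hadoop 2.x-era DistCpOptions API used in the examples below; the class name and the source and target paths are hypothetical placeholders. With setBlocking(false), DistCp.execute() submits the MapReduce job and returns immediately, leaving it to the caller to decide when to wait for completion.

import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.tools.DistCp;
import org.apache.hadoop.tools.DistCpOptions;

public class NonBlockingDistCpSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical source and target paths, for illustration only.
    DistCpOptions options = new DistCpOptions(
        Collections.singletonList(new Path("hdfs:///tmp/source")),
        new Path("hdfs:///tmp/target"));

    // Non-blocking: execute() submits the job and returns the running Job handle.
    options.setBlocking(false);
    Job job = new DistCp(conf, options).execute();

    // The caller blocks on completion explicitly.
    if (!job.waitForCompletion(true)) {
      throw new IllegalStateException("DistCp job " + job.getJobID() + " failed");
    }
  }
}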
Example 1: copy
import org.apache.hadoop.tools.DistCpOptions; // import the package/class the method depends on
@Override
public Metrics copy() throws CircusTrainException {
  LOG.info("Copying table data.");
  LOG.debug("Invoking DistCp: {} -> {}", sourceDataBaseLocation, replicaDataLocation);
  DistCpOptions distCpOptions = parseCopierOptions(copierOptions);
  LOG.debug("Invoking DistCp with options: {}", distCpOptions);
  CircusTrainCopyListing.setAsCopyListingClass(conf);
  CircusTrainCopyListing.setRootPath(conf, sourceDataBaseLocation);
  try {
    loadHComS3AFileSystem();
    distCpOptions.setBlocking(false);
    Job job = executor.exec(conf, distCpOptions);
    String counter = String.format("%s_BYTES_WRITTEN", replicaDataLocation.toUri().getScheme().toUpperCase());
    registerRunningJobMetrics(job, counter);
    if (!job.waitForCompletion(true)) {
      throw new IOException(
          "DistCp failure: Job " + job.getJobID() + " has failed: " + job.getStatus().getFailureInfo());
    }
    return new JobMetrics(job, FileSystemCounter.class.getName(), counter);
  } catch (Exception e) {
    cleanUpReplicaDataLocation();
    throw new CircusTrainException("Unable to copy file(s)", e);
  }
}
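Note how this copier calls setBlocking(false), presumably so that executor.exec(...) hands back the submitted Job without waiting on it. That lets the copier register a bytes-written counter metric against the running job (registerRunningJobMetrics) and then block explicitly with waitForCompletion, wrapping any failure in a CircusTrainException after cleaning up the replica data location.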
Example 2: getDistCpOptions
import org.apache.hadoop.tools.DistCpOptions; // import the package/class the method depends on
public DistCpOptions getDistCpOptions(String[] args) throws ParseException {
    Options options = new Options();
    Option opt;
    opt = new Option("maxMaps", true, "max number of maps to use for this copy");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("sourcePaths", true, "comma separated list of source paths to be copied");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("targetPath", true, "target path");
    opt.setRequired(true);
    options.addOption(opt);
    CommandLine cmd = new GnuParser().parse(options, args);
    String[] paths = cmd.getOptionValue("sourcePaths").trim().split(",");
    List<Path> srcPaths = getPaths(paths);
    String trgPath = cmd.getOptionValue("targetPath").trim();
    DistCpOptions distcpOptions = new DistCpOptions(srcPaths, new Path(trgPath));
    distcpOptions.setSyncFolder(true);
    distcpOptions.setBlocking(true);
    distcpOptions.setMaxMaps(Integer.valueOf(cmd.getOptionValue("maxMaps")));
    return distcpOptions;
}
Example 3: getDistCpOptions
import org.apache.hadoop.tools.DistCpOptions; // import the package/class the method depends on
public DistCpOptions getDistCpOptions(String[] args) throws ParseException {
    Options options = new Options();
    Option opt;
    opt = new Option("update", false, "specify update for synching folders");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("blocking", true, "should DistCp be run in blocking mode");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("maxMaps", true, "max number of maps to use for this copy");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("sourcePaths", true, "comma separated list of source paths to be copied");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("targetPath", true, "target path");
    opt.setRequired(true);
    options.addOption(opt);
    CommandLine cmd = new GnuParser().parse(options, args);
    String[] paths = cmd.getOptionValue("sourcePaths").trim().split(",");
    List<Path> srcPaths = getPaths(paths);
    String trgPath = cmd.getOptionValue("targetPath").trim();
    DistCpOptions distcpOptions = new DistCpOptions(srcPaths, new Path(trgPath));
    distcpOptions.setSyncFolder(true);
    distcpOptions.setBlocking(Boolean.valueOf(cmd.getOptionValue("blocking")));
    distcpOptions.setMaxMaps(Integer.valueOf(cmd.getOptionValue("maxMaps")));
    return distcpOptions;
}
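Examples 2 and 3 both build DistCpOptions from command-line arguments with Commons CLI; the difference is how the blocking flag is chosen. Example 2 hard-codes setBlocking(true), so a subsequent DistCp execution will not return until the copy job finishes, whereas example 3 reads a required blocking option and passes it through Boolean.valueOf, letting the caller pick blocking or asynchronous execution at invocation time.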