This article collects typical usage examples of the Java method org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat.setRanges. If you are unsure what AccumuloInputFormat.setRanges does or how to call it, the curated examples below should help. You can also read more about the enclosing class, org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat.
Four code examples of AccumuloInputFormat.setRanges are shown below, ordered by popularity.
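Before diving into the examples, here is a minimal sketch of where setRanges fits in a typical MapReduce job setup. The instance name, ZooKeeper hosts, table name, credentials, and row keys below are placeholder values, and the surrounding job wiring is an assumption modeled on the examples that follow rather than taken from any of them.

import java.util.Collections;

import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class SetRangesSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "setRanges example");

        // Connection and table settings (placeholder values).
        AccumuloInputFormat.setConnectorInfo(job, "root", new PasswordToken("secret"));
        AccumuloInputFormat.setZooKeeperInstance(job, "myInstance", "zk1:2181");
        AccumuloInputFormat.setInputTableName(job, "myTable");
        AccumuloInputFormat.setScanAuthorizations(job, new Authorizations());

        // Restrict the scan to a single row range; the input format turns the
        // configured ranges into input splits for the mappers.
        AccumuloInputFormat.setRanges(job, Collections.singleton(new Range("row_000", "row_999")));

        job.setInputFormatClass(AccumuloInputFormat.class);
        // Mapper, reducer, and output configuration would follow here.
    }
}

Passing multiple Range objects is just as common; the configured ranges drive how the input format creates splits, so their granularity influences mapper parallelism.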
Example 1: getExpectedLoadJob
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; // import the class this method depends on
public Job getExpectedLoadJob(String inst, String zookeepers, String user,
        String password, String table, String start, String end,
        Authorizations authorizations,
        List<Pair<Text, Text>> columnFamilyColumnQualifierPairs)
        throws IOException {
    Collection<Range> ranges = new LinkedList<Range>();
    ranges.add(new Range(start, end));

    Job expected = new Job(new Configuration());

    try {
        AccumuloInputFormat.setConnectorInfo(expected, user,
                new PasswordToken(password));
    } catch (AccumuloSecurityException e) {
        Assert.fail(e.getMessage());
    }

    AccumuloInputFormat.setInputTableName(expected, table);
    AccumuloInputFormat.setScanAuthorizations(expected, authorizations);
    AccumuloInputFormat.setZooKeeperInstance(expected, inst, zookeepers);
    AccumuloInputFormat.fetchColumns(expected, columnFamilyColumnQualifierPairs);
    AccumuloInputFormat.setRanges(expected, ranges);

    return expected;
}
Example 2: setLocation
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; // import the class this method depends on
@Override
public void setLocation(final String location, final Job job) throws IOException {
    if (logger.isDebugEnabled()) {
        logger.debug("Set Location[" + location + "] for job[" + job.getJobName() + "]");
    }
    conf = job.getConfiguration();
    setLocationFromUri(location, job);

    if (!ConfiguratorBase.isConnectorInfoSet(AccumuloInputFormat.class, conf)) {
        try {
            AccumuloInputFormat.setConnectorInfo(job, user,
                    new PasswordToken(userP.getBytes(StandardCharsets.UTF_8)));
        } catch (final AccumuloSecurityException e) {
            throw new RuntimeException(e);
        }
        AccumuloInputFormat.setInputTableName(job, table);
        AccumuloInputFormat.setScanAuthorizations(job, authorizations);
        if (!mock) {
            AccumuloInputFormat.setZooKeeperInstance(job, inst, zookeepers);
        } else {
            AccumuloInputFormat.setMockInstance(job, inst);
        }
    }

    if (columnFamilyColumnQualifierPairs.size() > 0) {
        AccumuloInputFormat.fetchColumns(job, columnFamilyColumnQualifierPairs);
    }

    logger.info("Set ranges[" + ranges + "] for job[" + job.getJobName() + "] on table[" + table + "] "
            + "for columns[" + columnFamilyColumnQualifierPairs + "] with authorizations[" + authorizations + "]");

    if (ranges.size() == 0) {
        throw new IOException("Accumulo Range must be specified");
    }

    AccumuloInputFormat.setRanges(job, ranges);
}
Example 3: run
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; // import the class this method depends on
/**
 * cloudbase props
 */
@Override
public int run(final String[] strings) throws Exception {
    conf.set(MRUtils.JOB_NAME_PROP, "Gather Evaluation Statistics");

    // initialize
    init();

    final Job job = new Job(conf);
    job.setJarByClass(AccumuloRdfCountTool.class);
    setupAccumuloInput(job);
    AccumuloInputFormat.setRanges(job,
            Lists.newArrayList(new Range(new Text(new byte[]{}), new Text(new byte[]{Byte.MAX_VALUE}))));

    // set input and output of the particular job
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Mutation.class);

    // set mapper and reducer classes
    job.setMapperClass(CountPiecesMapper.class);
    job.setCombinerClass(CountPiecesCombiner.class);
    job.setReducerClass(CountPiecesReducer.class);

    final String outputTable = MRUtils.getTablePrefix(conf) + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX;
    setupAccumuloOutput(job, outputTable);

    // Submit the job
    final Date startTime = new Date();
    System.out.println("Job started: " + startTime);
    final int exitCode = job.waitForCompletion(true) ? 0 : 1;

    if (exitCode == 0) {
        final Date end_time = new Date();
        System.out.println("Job ended: " + end_time);
        System.out.println("The job took "
                + (end_time.getTime() - startTime.getTime()) / 1000
                + " seconds.");
        return 0;
    } else {
        System.out.println("Job Failed!!!");
    }
    return -1;
}
Example 4: setLocation
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; // import the class this method depends on
@Override
public void setLocation(String location, Job job) throws IOException {
    setLocationFromUri(location);
    loadDependentJars(job.getConfiguration());

    Map<String, String> entries = getInputFormatEntries(job.getConfiguration());
    unsetEntriesFromConfiguration(job.getConfiguration(), entries);

    try {
        AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(password));
    } catch (AccumuloSecurityException e) {
        throw new IOException(e);
    }

    AccumuloInputFormat.setInputTableName(job, table);
    AccumuloInputFormat.setScanAuthorizations(job, authorizations);
    AccumuloInputFormat.setZooKeeperInstance(job, inst, zookeepers);

    List<Pair<Text, Text>> inputFormatColumns = new LinkedList<Pair<Text, Text>>();
    int colfamPrefix = 0;
    for (Column c : columns) {
        switch (c.getType()) {
            case LITERAL:
                // Pull the colf[:colq] individually
                inputFormatColumns.add(makePair(c.getColumnFamily(), c.getColumnQualifier()));
                break;
            case COLFAM_PREFIX:
                // Some colfams
                colfamPrefix++;
                break;
            case COLQUAL_PREFIX:
                // Some colquals in a given colfam
                inputFormatColumns.add(makePair(c.getColumnFamily(), null));
                break;
            default:
                log.info("Ignoring unhandled column type");
                break;
        }
    }

    // If we have colfam prefixes, we have to pull all columns and filter on the client side
    // TODO Create an iterator that lets us push down *all* of the filter logic
    if (0 == colfamPrefix && !inputFormatColumns.isEmpty()) {
        AccumuloInputFormat.fetchColumns(job, inputFormatColumns);
    }

    Collection<Range> ranges = Collections.singleton(new Range(start, end));
    log.info("Scanning Accumulo for " + ranges + " for table " + table);
    AccumuloInputFormat.setRanges(job, ranges);

    configureInputFormat(job);
}