This article collects typical usage examples of the org.apache.spark.rdd.PairRDDFunctions class in Java. If you are wondering how to use PairRDDFunctions from Java, or are looking for concrete examples of it in real code, the curated samples below may help.
The PairRDDFunctions class belongs to the org.apache.spark.rdd package. Three code examples of the class are shown below, sorted by popularity by default.
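All three examples share one detail worth noting up front: PairRDDFunctions is a Scala class, so constructing it from Java means supplying the implicit ClassTag arguments (and the optional key Ordering) explicitly. The following minimal sketch shows that construction pattern in isolation; the class and method names here are hypothetical and not taken from the examples below.

import org.apache.spark.rdd.PairRDDFunctions;
import org.apache.spark.rdd.RDD;
import scala.Tuple2;
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;

// Hypothetical helper class; names are illustrative only.
public final class PairRDDFunctionsSketch {
    public static PairRDDFunctions<String, Integer> asPairFunctions(
            final RDD<Tuple2<String, Integer>> pairs) {
        // PairRDDFunctions is a Scala class, so the implicit ClassTag
        // parameters must be passed explicitly from Java; the last argument
        // is an optional Ordering for the key (null when none is needed).
        final ClassTag<String> keyTag = ClassTag$.MODULE$.apply(String.class);
        final ClassTag<Integer> valueTag = ClassTag$.MODULE$.apply(Integer.class);
        return new PairRDDFunctions<String, Integer>(pairs, keyTag, valueTag, null);
    }
}

In Scala this wrapping happens automatically through an implicit conversion; Java callers must do it by hand, which is exactly what each example below does.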
Example 1: convert
import org.apache.spark.rdd.PairRDDFunctions; // import of the dependent package/class

@Override
public RDD<Tuple> convert(List<RDD<Tuple>> predecessors,
        PODistinct poDistinct) throws IOException {
    SparkUtil.assertPredecessorSize(predecessors, poDistinct, 1);
    RDD<Tuple> rdd = predecessors.get(0);

    // Map each Tuple to a (Tuple, null) pair so the tuple itself acts as the key.
    ClassTag<Tuple2<Tuple, Object>> tuple2ClassManifest = SparkUtil
            .<Tuple, Object>getTuple2Manifest();
    RDD<Tuple2<Tuple, Object>> rddPairs = rdd.map(TO_KEY_VALUE_FUNCTION,
            tuple2ClassManifest);

    // Wrap the pair RDD so reduceByKey becomes available; the trailing null
    // is the optional key Ordering.
    PairRDDFunctions<Tuple, Object> pairRDDFunctions =
            new PairRDDFunctions<Tuple, Object>(
                    rddPairs, SparkUtil.getManifest(Tuple.class),
                    SparkUtil.getManifest(Object.class), null);

    // Collapse duplicate keys, then map back to plain Tuples.
    int parallelism = SparkUtil.getParallelism(predecessors, poDistinct);
    return pairRDDFunctions.reduceByKey(MERGE_VALUES_FUNCTION, parallelism)
            .map(TO_VALUE_FUNCTION, SparkUtil.getManifest(Tuple.class));
}
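This converter (note the PODistinct operator, from Pig's Spark backend) implements DISTINCT on top of PairRDDFunctions: each Tuple becomes a (Tuple, null) key/value pair, reduceByKey collapses duplicate keys at the requested parallelism, and the result is mapped back to plain Tuples.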
Example 2: convert
import org.apache.spark.rdd.PairRDDFunctions; // import of the dependent package/class

@Override
public RDD<Tuple2<Text, Tuple>> convert(List<RDD<Tuple>> predecessors,
        POStore physicalOperator) throws IOException {
    SparkUtil.assertPredecessorSize(predecessors, physicalOperator, 1);
    RDD<Tuple> rdd = predecessors.get(0);

    // Convert back to key/value pairs.
    JavaRDD<Tuple2<Text, Tuple>> rddPairs = rdd.toJavaRDD().map(FROM_TUPLE_FUNCTION);
    PairRDDFunctions<Text, Tuple> pairRDDFunctions = new PairRDDFunctions<Text, Tuple>(
            rddPairs.rdd(), SparkUtil.getManifest(Text.class),
            SparkUtil.getManifest(Tuple.class), null);

    // Configure the Pig storer on a JobConf, then write the pairs out
    // through the new Hadoop output API using PigOutputFormat.
    JobConf storeJobConf = SparkUtil.newJobConf(pigContext);
    POStore poStore = configureStorer(storeJobConf, physicalOperator);
    pairRDDFunctions.saveAsNewAPIHadoopFile(poStore.getSFile()
            .getFileName(), Text.class, Tuple.class, PigOutputFormat.class,
            storeJobConf);
    return rddPairs.rdd();
}
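Here PairRDDFunctions is used for its saveAsNewAPIHadoopFile method: the tuples are converted back to (Text, Tuple) pairs, a Pig storer is configured on the JobConf, and the pairs are written via PigOutputFormat, while the pair RDD itself is returned unchanged to the caller.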
Example 3: prepareKeyValues
import org.apache.spark.rdd.PairRDDFunctions; // import of the dependent package/class

@Override
protected void prepareKeyValues(final ImportKeyValuePairRDDToAccumulo operation,
        final AccumuloKeyRangePartitioner partitioner) throws OperationException {
    // Repartition by Accumulo key range, sort within each partition, then
    // write the sorted pairs out in Accumulo's file format.
    final OrderedRDDFunctions orderedRDDFunctions = new OrderedRDDFunctions(
            operation.getInput(), ORDERING_CLASS_TAG, KEY_CLASS_TAG, VALUE_CLASS_TAG,
            scala.reflect.ClassTag$.MODULE$.apply(Tuple2.class));
    final PairRDDFunctions pairRDDFunctions = new PairRDDFunctions(
            orderedRDDFunctions.repartitionAndSortWithinPartitions(partitioner),
            KEY_CLASS_TAG, VALUE_CLASS_TAG, ORDERING_CLASS_TAG);
    pairRDDFunctions.saveAsNewAPIHadoopFile(operation.getOutputPath(), Key.class,
            Value.class, AccumuloFileOutputFormat.class, getConfiguration(operation));
}
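This example combines PairRDDFunctions with OrderedRDDFunctions: repartitionAndSortWithinPartitions partitions the key/value pairs by Accumulo key range and sorts them within each partition, and saveAsNewAPIHadoopFile then writes them with AccumuloFileOutputFormat, producing files laid out for an Accumulo bulk import.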