本文整理汇总了Java中scala.reflect.ClassTag类的典型用法代码示例。如果您正苦于以下问题:Java ClassTag类的具体用法?Java ClassTag怎么用?Java ClassTag使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
ClassTag类属于scala.reflect包,在下文中一共展示了ClassTag类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: matrixObjectToRDDStringIJV
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Convert a {@code MatrixObject} to a {@code RDD<String>} in IJV format.
 *
 * @param matrixObject
 *            the {@code MatrixObject}
 * @return the {@code MatrixObject} converted to a {@code RDD<String>}
 */
public static RDD<String> matrixObjectToRDDStringIJV(MatrixObject matrixObject) {
	// Going through the JavaSparkContext and JavaRDD.toRDD() works when
	// invoked from plain Java, but currently breaks under the Spark Shell
	// once collect() is called on the resulting RDD<String>:
	//
	//   JavaRDD<String> javaRDD = jsc.parallelize(list);
	//   RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// So we build the Scala RDD by calling parallelize() on the
	// SparkContext directly instead.
	List<String> ijvLines = matrixObjectToListStringIJV(matrixObject);
	ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(
			JavaConversions.asScalaBuffer(ijvLines), sc().defaultParallelism(), stringTag);
}
示例2: frameObjectToRDDStringIJV
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Convert a {@code FrameObject} to a {@code RDD<String>} in IJV format.
 *
 * @param frameObject
 *            the {@code FrameObject}
 * @return the {@code FrameObject} converted to a {@code RDD<String>}
 */
public static RDD<String> frameObjectToRDDStringIJV(FrameObject frameObject) {
	// Going through the JavaSparkContext and JavaRDD.toRDD() works when
	// invoked from plain Java, but currently breaks under the Spark Shell
	// once collect() is called on the resulting RDD<String>:
	//
	//   JavaRDD<String> javaRDD = jsc.parallelize(list);
	//   RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// So we build the Scala RDD by calling parallelize() on the
	// SparkContext directly instead.
	List<String> ijvLines = frameObjectToListStringIJV(frameObject);
	ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(
			JavaConversions.asScalaBuffer(ijvLines), sc().defaultParallelism(), stringTag);
}
示例3: matrixObjectToRDDStringCSV
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Convert a {@code MatrixObject} to a {@code RDD<String>} in CSV format.
 *
 * @param matrixObject
 *            the {@code MatrixObject}
 * @return the {@code MatrixObject} converted to a {@code RDD<String>}
 */
public static RDD<String> matrixObjectToRDDStringCSV(MatrixObject matrixObject) {
	// Going through the JavaSparkContext and JavaRDD.toRDD() works when
	// invoked from plain Java, but currently breaks under the Spark Shell
	// once collect() is called on the resulting RDD<String>:
	//
	//   JavaRDD<String> javaRDD = jsc.parallelize(list);
	//   RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// So we build the Scala RDD by calling parallelize() on the
	// SparkContext directly instead.
	List<String> csvLines = matrixObjectToListStringCSV(matrixObject);
	ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(
			JavaConversions.asScalaBuffer(csvLines), sc().defaultParallelism(), stringTag);
}
示例4: frameObjectToRDDStringCSV
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Convert a {@code FrameObject} to a {@code RDD<String>} in CSV format.
 *
 * @param frameObject
 *            the {@code FrameObject}
 * @param delimiter
 *            the delimiter
 * @return the {@code FrameObject} converted to a {@code RDD<String>}
 */
public static RDD<String> frameObjectToRDDStringCSV(FrameObject frameObject, String delimiter) {
	// Going through the JavaSparkContext and JavaRDD.toRDD() works when
	// invoked from plain Java, but currently breaks under the Spark Shell
	// once collect() is called on the resulting RDD<String>:
	//
	//   JavaRDD<String> javaRDD = jsc.parallelize(list);
	//   RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	//
	// So we build the Scala RDD by calling parallelize() on the
	// SparkContext directly instead.
	List<String> csvLines = frameObjectToListStringCSV(frameObject, delimiter);
	ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
	return sc().parallelize(
			JavaConversions.asScalaBuffer(csvLines), sc().defaultParallelism(), stringTag);
}
示例5: create
import scala.reflect.ClassTag; //导入依赖的package包/类
// Builds the JavaStreamingContext and wires a direct (receiver-less) Kafka /
// MapR Streams DStream of raw byte[] key/value pairs into the Driver loop.
@Override
@SuppressWarnings("unchecked")
public JavaStreamingContext create() {
// Cap per-partition ingest rate; must be set before the context is created.
sparkConf.set("spark.streaming.kafka.maxRatePerPartition", String.valueOf(maxRatePerPartition));
JavaStreamingContext result = new JavaStreamingContext(sparkConf, new Duration(duration));
Map<String, String> props = new HashMap<>();
if (!autoOffsetValue.isEmpty()) {
props.put(AbstractStreamingBinding.AUTO_OFFSET_RESET, autoOffsetValue);
}
logMessage("topic list " + topic, isRunningInMesos);
logMessage("Auto offset reset is set to " + autoOffsetValue, isRunningInMesos);
// Merge in user-supplied consumer settings and log each for diagnostics.
props.putAll(extraKafkaConfigs);
for (Map.Entry<String, String> map : props.entrySet()) {
logMessage(Utils.format("Adding extra kafka config, {}:{}", map.getKey(), map.getValue()), isRunningInMesos);
}
// Keys and values are consumed as raw bytes; decoding happens downstream.
props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
JavaPairInputDStream<byte[], byte[]> dStream;
if (offsetHelper.isSDCCheckPointing()) {
// Resuming from SDC-managed offsets: this createDirectStream overload takes
// explicit per-partition starting offsets and a message-handler function.
// It yields a Scala-typed stream, so we re-wrap it as a Java pair stream
// using a byte[] ClassTag for both key and value.
JavaInputDStream stream =
KafkaUtils.createDirectStream(
result,
byte[].class,
byte[].class,
Tuple2.class,
props,
MaprStreamsOffsetManagerImpl.get().getOffsetForDStream(topic, numberOfPartitions),
MESSAGE_HANDLER_FUNCTION
);
ClassTag<byte[]> byteClassTag = scala.reflect.ClassTag$.MODULE$.apply(byte[].class);
dStream = JavaPairInputDStream.fromInputDStream(stream.inputDStream(), byteClassTag, byteClassTag);
} else {
// No saved offsets: subscribe to the comma-separated topic list and let the
// auto-offset-reset policy (set above, if any) decide where to start.
dStream =
KafkaUtils.createDirectStream(result, byte[].class, byte[].class,
props, new HashSet<>(Arrays.asList(topic.split(","))));
}
// Register per-batch processing plus offset management on the raw DStream.
Driver$.MODULE$.foreach(dStream.dstream(), MaprStreamsOffsetManagerImpl.get());
return result;
}
示例6: convert
import scala.reflect.ClassTag; //导入依赖的package包/类
// Implements DISTINCT over the single predecessor RDD: pair each tuple with a
// dummy value, reduce by key so duplicate tuples collapse, then map back to
// the tuple. ClassTags/manifests are required because we drive the Scala RDD
// API directly from Java.
@Override
public RDD<Tuple> convert(List<RDD<Tuple>> predecessors,
PODistinct poDistinct) throws IOException {
SparkUtil.assertPredecessorSize(predecessors, poDistinct, 1);
RDD<Tuple> rdd = predecessors.get(0);
ClassTag<Tuple2<Tuple, Object>> tuple2ClassManifest = SparkUtil
.<Tuple, Object> getTuple2Manifest();
// Presumably TO_KEY_VALUE_FUNCTION keys each tuple by itself — verify in its definition.
RDD<Tuple2<Tuple, Object>> rddPairs = rdd.map(TO_KEY_VALUE_FUNCTION,
tuple2ClassManifest);
// null partitioner: reduceByKey falls back to hash partitioning with the
// parallelism computed below.
PairRDDFunctions<Tuple, Object> pairRDDFunctions
= new PairRDDFunctions<Tuple, Object>(
rddPairs, SparkUtil.getManifest(Tuple.class),
SparkUtil.getManifest(Object.class), null);
int parallelism = SparkUtil.getParallelism(predecessors, poDistinct);
// One pair survives per distinct key; project the keys back out as tuples.
return pairRDDFunctions.reduceByKey(MERGE_VALUES_FUNCTION, parallelism)
.map(TO_VALUE_FUNCTION, SparkUtil.getManifest(Tuple.class));
}
示例7: MizoRDD
import scala.reflect.ClassTag; //导入依赖的package包/类
public MizoRDD(SparkContext context, IMizoRDDConfig config, ClassTag<TReturn> classTag) {
super(context, new ArrayBuffer<>(), classTag);
if (!Strings.isNullOrEmpty(config.logConfigPath())) {
PropertyConfigurator.configure(config.logConfigPath());
}
this.config = config;
this.regionsPaths = getRegionsPaths(config.regionDirectoriesPath());
this.relationTypes = loadRelationTypes(config.titanConfigPath());
}
示例8: deserialize
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Deserializes the buffer's contents with a pooled Kryo instance, resolving
 * classes through the supplied class loader.
 *
 * @param byteBuffer  buffer backed by an accessible array containing the bytes
 * @param classLoader loader used by Kryo to resolve the written class
 * @param classTag    unused here; Kryo reads the class id from the stream
 * @return the deserialized object
 */
@Override
public <T> T deserialize(final ByteBuffer byteBuffer, final ClassLoader classLoader, final ClassTag<T> classTag) {
	this.input.setBuffer(byteBuffer.array());
	return this.gryoSerializer.getGryoPool().readWithKryo(k -> {
		k.setClassLoader(classLoader);
		return (T) k.readClassAndObject(this.input);
	});
}
示例9: getPartitionOffset
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Computes per-partition offsets for the given stream.
 *
 * Bridges the Scala {@code DStream} into its Java wrapper so the
 * {@code JavaDStream}-based overload can do the actual work, then unwraps
 * the result back to a Scala {@code DStream}.
 *
 * @param unionStreams the stream of messages with metadata
 * @param props        configuration properties passed through to the overload
 * @return a stream of (partition, offsets) pairs
 */
public static <T> DStream<Tuple2<Integer, Iterable<Long>>> getPartitionOffset(
    DStream<MessageAndMetadata<T>> unionStreams, Properties props) {
	ClassTag<MessageAndMetadata<T>> tag = ScalaUtil.<T>getMessageAndMetadataClassTag();
	JavaDStream<MessageAndMetadata<T>> wrapped = new JavaDStream<>(unionStreams, tag);
	return getPartitionOffset(wrapped, props).dstream();
}
示例10: persists
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Persists the per-partition offsets carried by the stream.
 *
 * Wraps the Scala {@code DStream} as a {@code JavaDStream} and, for every
 * micro-batch, collects the (partition, offsets) pairs to the driver and
 * hands them to {@code doPersists}.
 *
 * @param partitonOffset stream of (partition, offsets) pairs
 * @param props          configuration passed through to {@code doPersists}
 */
@SuppressWarnings("deprecation")
public static void persists(DStream<Tuple2<Integer, Iterable<Long>>> partitonOffset, Properties props) {
	ClassTag<Tuple2<Integer, Iterable<Long>>> tag =
			ScalaUtil.<Integer, Iterable<Long>>getTuple2ClassTag();
	JavaDStream<Tuple2<Integer, Iterable<Long>>> offsetStream =
			new JavaDStream<>(partitonOffset, tag);
	offsetStream.foreachRDD(new VoidFunction<JavaRDD<Tuple2<Integer, Iterable<Long>>>>() {
		@Override
		public void call(JavaRDD<Tuple2<Integer, Iterable<Long>>> batch) throws Exception {
			// collect() pulls the batch to the driver before persisting.
			doPersists(batch.collect(), props);
		}
	});
}
示例11: MatrixMultiplicationRDD
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Creates the RDD from a single parent, delegating entirely to the superclass.
 *
 * @param oneParent  the sole parent RDD this RDD is derived from
 * @param evidence$2 runtime class evidence for the {@code MatrixStore<N>} element type
 */
public MatrixMultiplicationRDD(final RDD<?> oneParent, final ClassTag<MatrixStore<N>> evidence$2) {
super(oneParent, evidence$2);
}
示例12: NumberRDD
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Creates the RDD from a single parent, delegating entirely to the superclass.
 *
 * @param oneParent  the sole parent RDD this RDD is derived from
 * @param evidence$2 runtime class evidence for the {@code N} element type
 */
public NumberRDD(final RDD<?> oneParent, final ClassTag<N> evidence$2) {
super(oneParent, evidence$2);
}
示例13: OtherBlockMatrixRDD
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Creates the RDD from a single parent, delegating entirely to the superclass.
 *
 * @param oneParent  the sole parent RDD this RDD is derived from
 * @param evidence$2 runtime class evidence for the {@code MatrixStore<N>} element type
 */
public OtherBlockMatrixRDD(final RDD<?> oneParent, final ClassTag<MatrixStore<N>> evidence$2) {
super(oneParent, evidence$2);
}
示例14: serialize
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Serializes {@code t} into the shared output buffer with a pooled Kryo
 * instance and returns the bytes wrapped in a {@code ByteBuffer}.
 *
 * @param t        the object to serialize
 * @param classTag unused here; Kryo writes the class id with the object
 * @return a buffer wrapping the output's backing byte array
 */
@Override
public <T> ByteBuffer serialize(final T t, final ClassTag<T> classTag) {
	this.gryoSerializer.getGryoPool().writeWithKryo(k -> k.writeClassAndObject(this.output, t));
	return ByteBuffer.wrap(this.output.getBuffer());
}
示例15: writeObject
import scala.reflect.ClassTag; //导入依赖的package包/类
/**
 * Writes {@code t} to this stream's output buffer with a pooled Kryo
 * instance and returns this stream to allow call chaining.
 *
 * @param t        the object to write
 * @param classTag unused here; Kryo writes the class id with the object
 * @return this serialization stream
 */
@Override
public <T> SerializationStream writeObject(final T t, final ClassTag<T> classTag) {
	this.gryoSerializer.getGryoPool().writeWithKryo(k -> k.writeClassAndObject(this.output, t));
	return this;
}