This article collects typical usage examples of the Java class org.apache.spark.streaming.StreamingContext. If you are asking how the StreamingContext class is used, or what it looks like in practice, the curated examples below should help.
The StreamingContext class belongs to the org.apache.spark.streaming package. Twelve code examples of the class are shown below, sorted by popularity by default.
Example 1: WatermarkSyncedDStream
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public WatermarkSyncedDStream(final Queue<JavaRDD<WindowedValue<T>>> rdds,
                              final Long batchDuration,
                              final StreamingContext ssc) {
  super(ssc, JavaSparkContext$.MODULE$.<WindowedValue<T>>fakeClassTag());
  this.rdds = rdds;
  this.batchDuration = batchDuration;
}
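For context, a hedged sketch of how such a queue-backed stream might be constructed; the queue contents, the 1000 ms batch duration, and the surrounding JavaStreamingContext jssc are placeholders, since the real wiring is done inside the Beam Spark runner:
// Placeholder setup; the rdds queue is actually populated by the runner.
Queue<JavaRDD<WindowedValue<T>>> rdds = new LinkedList<>();
// Unwrap the Scala StreamingContext from an existing JavaStreamingContext.
StreamingContext ssc = jssc.ssc();
WatermarkSyncedDStream<T> stream = new WatermarkSyncedDStream<>(rdds, 1000L, ssc);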
Example 2: SourceDStream
import org.apache.spark.streaming.StreamingContext; // import the required package/class
SourceDStream(
    StreamingContext ssc,
    UnboundedSource<T, CheckpointMarkT> unboundedSource,
    SerializablePipelineOptions options,
    Long boundMaxRecords) {
  super(ssc, JavaSparkContext$.MODULE$.<scala.Tuple2<Source<T>, CheckpointMarkT>>fakeClassTag());
  this.unboundedSource = unboundedSource;
  this.options = options;
  SparkPipelineOptions sparkOptions = options.get().as(SparkPipelineOptions.class);
  // Reader cache expiration interval: 50% of the batch interval is added to accommodate latency.
  this.readerCacheInterval = 1.5 * sparkOptions.getBatchIntervalMillis();
  this.boundReadDuration = boundReadDuration(sparkOptions.getReadTimePercentage(),
                                             sparkOptions.getMinReadTimeMillis());
  // Set the initial parallelism once.
  this.initialParallelism = ssc().sparkContext().defaultParallelism();
  checkArgument(this.initialParallelism > 0, "Number of partitions must be greater than zero.");
  this.boundMaxRecords = boundMaxRecords;
  try {
    this.numPartitions = createMicrobatchSource().split(sparkOptions).size();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
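The helper boundReadDuration is not shown in the snippet. A plausible reconstruction, assuming it caps the per-microbatch read time at a percentage of the batch interval with a configured floor and returns a joda-time Duration; the real Beam implementation may differ:
// Hedged sketch of the helper called above; not the verbatim Beam source.
private Duration boundReadDuration(double readTimePercentage, long minReadTimeMillis) {
  SparkPipelineOptions sparkOptions = options.get().as(SparkPipelineOptions.class);
  // Read for at most readTimePercentage of the batch interval...
  long proportionalMillis = Math.round(sparkOptions.getBatchIntervalMillis() * readTimePercentage);
  // ...but never for less than the configured minimum read time.
  return new Duration(Math.max(proportionalMillis, minReadTimeMillis));
}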
Example 3: FacebookInputDStream
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public FacebookInputDStream(StreamingContext ssc, String accessToken,
                            BatchRequestBuilder[] batchRequestBuilders,
                            StorageLevel storageLevel) {
  super(ssc, scala.reflect.ClassTag$.MODULE$.apply(String.class));
  this.accessToken = accessToken;
  this.storageLevel = storageLevel;
  this.batchRequestBuilders = batchRequestBuilders;
}
Example 4: SparkScheduler
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public SparkScheduler(JobQueue queue) {
  SparkConf conf = new SparkConf();
  conf.setMaster(System.getProperty("resource.runner.spark.host", "local"));
  conf.setAppName("OODT Spark Job");
  URL location = SparkScheduler.class.getResource(
      '/' + SparkScheduler.class.getName().replace('.', '/') + ".class");
  conf.setJars(new String[] {"../lib/cas-resource-0.8-SNAPSHOT.jar"});
  sc = new SparkContext(conf);
  ssc = new StreamingContext(sc, new Duration(10000));
  this.queue = queue;
}
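Once a StreamingContext exists, the standard lifecycle applies regardless of how it was built. A minimal sketch using only the core Spark Streaming API, with the two-argument constructor (existing SparkContext plus batch Duration) seen above:
SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("demo");
SparkContext sc = new SparkContext(conf);
// Ten-second batches, matching the Duration(10000) used above.
StreamingContext ssc = new StreamingContext(sc, new Duration(10000));
// ... define input streams and transformations here ...
ssc.start();             // begin receiving and processing data
ssc.awaitTermination();  // block until ssc.stop() is called or an error occurs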
Example 5: PubsubInputDStream
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public PubsubInputDStream(final StreamingContext _ssc, final String _subscription, final Integer _batchSize,
                          final boolean _decodeData) {
  super(_ssc, new PubsubReceiver(_subscription, _batchSize, _decodeData), STRING_CLASS_TAG);
}
Example 6: PubsubReceiverInputDStream
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public PubsubReceiverInputDStream(final StreamingContext _ssc, final String _subscription, final Integer _batchSize,
                                  final boolean _decodeData) {
  super(new PubsubInputDStream(_ssc, _subscription, _batchSize, _decodeData), STRING_CLASS_TAG);
}
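Examples 5 and 6 together wrap a PubsubReceiver in a receiver-based input stream. A hedged usage sketch; the context setup, subscription name, and batch size are invented placeholders:
StreamingContext ssc = new StreamingContext(conf, new Duration(2000));  // placeholder context
PubsubReceiverInputDStream stream = new PubsubReceiverInputDStream(
    ssc, "projects/my-project/subscriptions/my-sub", 100, true);  // placeholder subscription
stream.print();  // show a sample of received messages each batch
ssc.start();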
Example 7: createStream
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public static ReceiverInputDStream<String> createStream(StreamingContext ssc, String accessToken,
                                                        BatchRequestBuilder[] batchRequestBuilders) {
  return new FacebookInputDStream(ssc, accessToken, batchRequestBuilders, StorageLevel.MEMORY_AND_DISK_2());
}
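This factory pairs with the constructor from Example 3 and pins the storage level to MEMORY_AND_DISK_2. A hedged usage sketch; the token and request builders are placeholders:
String accessToken = "<facebook-access-token>";  // placeholder credential
BatchRequestBuilder[] requests = new BatchRequestBuilder[] { /* placeholder requests */ };
ReceiverInputDStream<String> posts = createStream(ssc, accessToken, requests);
posts.print();  // inspect a sample of responses per batch
ssc.start();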
Example 8: launch
import org.apache.spark.streaming.StreamingContext; // import the required package/class
public static <E> DStream<MessageAndMetadata<E>> launch(
    StreamingContext ssc, Properties pros, int numberOfReceivers,
    StorageLevel storageLevel, KafkaMessageHandler<E> messageHandler) {
  JavaStreamingContext jsc = new JavaStreamingContext(ssc);
  return createStream(jsc, pros, numberOfReceivers, storageLevel, messageHandler).dstream();
}
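The wrapping trick here is worth noting on its own: a Scala StreamingContext can be adapted to the Java API and back without copying any state. A minimal sketch:
// Wrap the Scala context for use with the Java-friendly API...
JavaStreamingContext jsc = new JavaStreamingContext(ssc);
// ...and recover the underlying Scala context when a Scala API is required.
StreamingContext scalaSsc = jsc.ssc();
// Similarly, JavaDStream.dstream() unwraps back to the Scala DStream, as above.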
Example 9: setStreamingContext
import org.apache.spark.streaming.StreamingContext; // import the required package/class
@Override
public void setStreamingContext(StreamingContext context) {
  this.ssc = new JavaStreamingContext(context);
}
Example 10: asStreamOf
import org.apache.spark.streaming.StreamingContext; // import the required package/class
/**
 * @param ssc the (Scala-based) Spark Streaming Context
 * @return a Spark stream, belonging to the provided context, that will collect NATS messages
 */
public ReceiverInputDStream<R> asStreamOf(StreamingContext ssc) {
  return ssc.receiverStream(this, scala.reflect.ClassTag$.MODULE$.apply(String.class));
}
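A hedged usage sketch; connector stands for whatever object defines asStreamOf and is a placeholder:
ReceiverInputDStream<String> messages = connector.asStreamOf(ssc);  // 'connector' is hypothetical
messages.print();  // inspect incoming NATS payloads per batch
ssc.start();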
Example 11: asStreamOfKeyValue
import org.apache.spark.streaming.StreamingContext; // import the required package/class
/**
 * @param ssc the (Scala-based) Spark Streaming Context
 * @return a Spark stream, belonging to the provided context,
 *         that will collect NATS messages as tuples of (NATS subject, NATS payload)
 */
public ReceiverInputDStream<Tuple2<String, R>> asStreamOfKeyValue(StreamingContext ssc) {
  return ssc.receiverStream(this.storedAsKeyValue(), scala.reflect.ClassTag$.MODULE$.apply(Tuple2.class));
}
Example 12: setStreamingContext
import org.apache.spark.streaming.StreamingContext; // import the required package/class
/**
 * Set the streaming context to run with.
 * @param context the Spark StreamingContext to use
 */
public void setStreamingContext(StreamingContext context);
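Example 9 shows one concrete implementation of this interface method. A minimal implementing class might look like the sketch below; the class and interface names are hypothetical:
// Hypothetical names; only the setStreamingContext signature comes from the example.
public class MyStreamingJob implements StreamingContextAware {
  private JavaStreamingContext ssc;

  @Override
  public void setStreamingContext(StreamingContext context) {
    // Wrap the Scala context with the Java API, as in Example 9.
    this.ssc = new JavaStreamingContext(context);
  }
}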