

Java Durations.milliseconds Method Code Examples

This article collects typical usage examples of the Java method org.apache.spark.streaming.Durations.milliseconds. If you are wondering what exactly Durations.milliseconds does, how to use it, or where to find examples of it, the curated code samples below may help. You can also explore further usage examples of the containing class, org.apache.spark.streaming.Durations.


A total of 7 code examples of Durations.milliseconds are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
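Before diving into the examples, here is a minimal sketch of the method's typical role: Durations.milliseconds(long) builds a Duration, which is most often passed to a JavaStreamingContext as the batch interval. The class name, master URL, and app name below are placeholders, not taken from any of the projects cited here.

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

public class DurationsMillisecondsSketch {
    public static void main(String[] args) {
        // Durations.milliseconds(long) builds a Duration from a millisecond count
        Duration batchInterval = Durations.milliseconds(500);

        // the Duration is typically used as the batch interval of a streaming context
        SparkConf conf = new SparkConf()
                .setMaster("local[2]")                  // placeholder master URL
                .setAppName("DurationsSketch");         // placeholder app name
        JavaStreamingContext jssc = new JavaStreamingContext(conf, batchInterval);

        // ... define input DStreams and transformations here, then jssc.start() ...
    }
}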

Example 1: main

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
public static void main(String[] args) throws InterruptedException {

		System.setProperty("hadoop.home.dir", "C:\\softwares\\Winutils");

		SparkSession sparkSession = SparkSession.builder().master("local[*]").appName("Stateful Streaming Example")
				.config("spark.sql.warehouse.dir", "file:////C:/Users/sgulati/spark-warehouse").getOrCreate();

		JavaStreamingContext jssc = new JavaStreamingContext(new JavaSparkContext(sparkSession.sparkContext()),
				Durations.milliseconds(1000)); // 1-second batch interval
		JavaReceiverInputDStream<String> inStream = jssc.socketTextStream("10.204.136.223", 9999);
		jssc.checkpoint("C:\\Users\\sgulati\\spark-checkpoint");

		JavaDStream<FlightDetails> flightDetailsStream = inStream.map(x -> {
			ObjectMapper mapper = new ObjectMapper();
			return mapper.readValue(x, FlightDetails.class);
		});

		JavaPairDStream<String, FlightDetails> flightDetailsPairStream = flightDetailsStream
				.mapToPair(f -> new Tuple2<String, FlightDetails>(f.getFlightId(), f));

		// stateful mapping function: accumulates the FlightDetails seen so far for
		// each flightId and clears the state once the flight has landed
		Function3<String, Optional<FlightDetails>, State<List<FlightDetails>>, Tuple2<String, Double>> mappingFunc = (
				flightId, curFlightDetail, state) -> {
			List<FlightDetails> details = state.exists() ? state.get() : new ArrayList<>();

			boolean isLanded = false;

			if (curFlightDetail.isPresent()) {
				details.add(curFlightDetail.get());
				if (curFlightDetail.get().isLanded()) {
					isLanded = true;
				}
			}
			// average of the accumulated readings; note that the original variable
			// is named avgSpeed, but the value averaged is the temperature field
			Double avgSpeed = details.stream().mapToDouble(FlightDetails::getTemperature).average().orElse(0.0);

			if (isLanded) {
				state.remove();
			} else {
				state.update(details);
			}
			return new Tuple2<String, Double>(flightId, avgSpeed);
		};

		// mapWithState keeps per-flight state; idle keys time out after 5 minutes
		JavaMapWithStateDStream<String, FlightDetails, List<FlightDetails>, Tuple2<String, Double>> streamWithState = flightDetailsPairStream
				.mapWithState(StateSpec.function(mappingFunc).timeout(Durations.minutes(5)));
		
		streamWithState.print();
		jssc.start();
		jssc.awaitTermination();
	}
 
Author: PacktPublishing, Project: Apache-Spark-2x-for-Java-Developers, Lines: 52, Source: StateFulProcessingExample.java

Example 2: SparkOperatorCreater

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
public SparkOperatorCreater(String appName) throws IOException {
    super(appName);
    properties = new Properties();
    properties.load(this.getClass().getClassLoader().getResourceAsStream("spark-cluster.properties"));
    SparkConf conf = new SparkConf().setMaster(this.getMaster()).setAppName(appName);
    conf.set("spark.streaming.ui.retainedBatches", "2000");
    // the batch interval is read from spark-cluster.properties via getDurationsMilliseconds()
    jssc = new JavaStreamingContext(conf, Durations.milliseconds(this.getDurationsMilliseconds()));
}
 
Author: wangyangjun, Project: StreamBench, Lines: 9, Source: SparkOperatorCreater.java

Example 3: timeDurationsToSparkDuration

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
public static Duration timeDurationsToSparkDuration(TimeDurations timeDurations) {
    // fall back to a 1-second duration if the unit is not matched below
    Duration duration = Durations.seconds(1);
    switch (timeDurations.getUnit()) {
        case MILLISECONDS:
            duration = Durations.milliseconds(timeDurations.getLength());
            break;
        case SECONDS:
            duration = Durations.seconds(timeDurations.getLength());
            break;
        case MINUTES:
            duration = Durations.minutes(timeDurations.getLength());
            break;
    }
    return duration;
}
 
Author: wangyangjun, Project: StreamBench, Lines: 16, Source: Utils.java

Example 4: setBatchDuration

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
private void setBatchDuration(org.joda.time.Duration duration) {
  Long durationMillis = duration.getMillis();
  // validate window size
  if (durationMillis < SPARK_MIN_WINDOW.milliseconds()) {
    throw new IllegalArgumentException("Windowing of size " + durationMillis +
        " msec is not supported!");
  }
  // choose the smallest duration to be Spark's batch duration; larger ones will be
  // handled as window functions over the batched stream
  if (!windowing || this.batchDuration.milliseconds() > durationMillis) {
    this.batchDuration = Durations.milliseconds(durationMillis);
  }
  windowing = true;
}
 
Author: shakamunyi, Project: spark-dataflow, Lines: 15, Source: StreamingWindowPipelineDetector.java

Example 5: setUp

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
@Before
public void setUp() {
    SparkConf sparkConf = new SparkConf()
            .setMaster("local[4]").setAppName(this.getClass().getSimpleName());
    ssc = new JavaStreamingContext(sparkConf, Durations.milliseconds(200));
    talosTestUtils = new TalosTestUtils(new HashMap<String, String>() {{
        put("auto.offset.reset", "smallest");
    }});
    talosTestUtils.deleteTopic(topic);
    talosTestUtils.createTopic(topic, 8);
}
 
Author: XiaoMi, Project: galaxy-sdk-java, Lines: 12, Source: JavaDirectTalosStreamSuite.java

Example 6: main

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
public static void main(String[] args) {

//        String inputFile = StreamKMeans.class.getClassLoader().getResource("centroids.txt").getFile();
        SparkConf sparkConf = new SparkConf().setMaster("spark://master:7077").setAppName("JavaKMeans");

        JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, Durations.milliseconds(1000));

        HashSet<String> topicsSet = new HashSet<>();
        topicsSet.add("KMeans");
        HashMap<String, String> kafkaParams = new HashMap<>();
//        kafkaParams.put("metadata.broker.list", "kafka1:9092,kafka2:9092,kafka3:9092");
        kafkaParams.put("metadata.broker.list", "localhost:9092");
        kafkaParams.put("auto.offset.reset", "largest");
        kafkaParams.put("zookeeper.connect", "zoo1:2181");
        kafkaParams.put("group.id", "spark");

        // Create direct kafka stream with brokers and topics
        JavaPairInputDStream<String, String> lines = KafkaUtils.createDirectStream(
                jssc,
                String.class,
                String.class,
                StringDecoder.class,
                StringDecoder.class,
                kafkaParams,
                topicsSet
        );

        JavaDStream<Vector> points = lines.map(new ParseKafkaString()).map(new ParsePoint());

        // loadInitCentroids() is defined elsewhere in this class (see the
        // commented-out centroids.txt line above); 96 clusters, uniform weights
        Vector[] initCentroids = loadInitCentroids();
        double[] weights = new double[96];
        for (int i = 0; i < 96; i++) {
            weights[i] = 1.0 / 96;
        }

        final StreamingKMeans model = new StreamingKMeans()
                .setK(96)
                .setDecayFactor(0)
                .setInitialCenters(initCentroids, weights);

        model.trainOn(points);

        // after each batch, log the first two coordinates of each cluster center
        // (logger is a field of the enclosing class, not shown in this snippet)
        points.foreachRDD(new Function2<JavaRDD<Vector>, Time, Void>() {
            @Override
            public Void call(JavaRDD<Vector> vectorJavaRDD, Time time) throws Exception {
                Vector[] vector = model.latestModel().clusterCenters();
                for (int i = 0; i < vector.length; i++) {
                    logger.warn(vector[i].toArray()[0] + "\t" + vector[i].toArray()[1]);
                }
                return null;
            }
        });

        jssc.addStreamingListener(new PerformanceStreamingListener());
        jssc.start();
        jssc.awaitTermination();
    }
 
Author: wangyangjun, Project: StreamBench, Lines: 58, Source: StreamKMeans.java

Example 7: main

import org.apache.spark.streaming.Durations; // import the package/class this method depends on
public static void main(String[] args) throws InterruptedException {

		System.setProperty("hadoop.home.dir", "C:\\softwares\\Winutils");

		SparkSession sparkSession = SparkSession.builder().master("local[*]").appName("stateless Streaming Example")
				.config("spark.sql.warehouse.dir", "file:////C:/Users/sgulati/spark-warehouse").getOrCreate();

		JavaStreamingContext jssc = new JavaStreamingContext(new JavaSparkContext(sparkSession.sparkContext()),
				Durations.milliseconds(1000));
		JavaReceiverInputDStream<String> inStream = jssc.socketTextStream("10.204.136.223", 9999);

		JavaDStream<FlightDetails> flightDetailsStream = inStream.map(x -> {
			ObjectMapper mapper = new ObjectMapper();
			return mapper.readValue(x, FlightDetails.class);
		});

		//flightDetailsStream.print();

		//flightDetailsStream.foreachRDD((VoidFunction<JavaRDD<FlightDetails>>) rdd -> rdd.saveAsTextFile("hdfs://namenode:port/path"));

		// 5-minute sliding window, recomputed every minute
		JavaDStream<FlightDetails> window = flightDetailsStream.window(Durations.minutes(5), Durations.minutes(1));

		// average temperature per flight over the window: sum and count per key, then divide
		JavaPairDStream<String, Double> transformedWindow = window
				.mapToPair(f -> new Tuple2<String, Double>(f.getFlightId(), f.getTemperature()))
				.mapValues(t -> new Tuple2<Double, Integer>(t, 1))
				.reduceByKey((t1, t2) -> new Tuple2<Double, Integer>(t1._1() + t2._1(), t1._2() + t2._2()))
				.mapValues(t -> t._1() / t._2());
		transformedWindow.cache();
		transformedWindow.print();
	    
		jssc.start();
		jssc.awaitTermination();
	}
 
Author: PacktPublishing, Project: Apache-Spark-2x-for-Java-Developers, Lines: 34, Source: StateLessProcessingExample.java


Note: The org.apache.spark.streaming.Durations.milliseconds examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. Please consult the corresponding project's license before redistributing or using the code; do not reproduce without permission.