Java ParameterTool.getLong Method Code Examples

This article compiles typical usage examples of the Java method org.apache.flink.api.java.utils.ParameterTool.getLong, drawn from open-source projects. If you are wondering what ParameterTool.getLong does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the enclosing class, org.apache.flink.api.java.utils.ParameterTool.


Below are 10 code examples of the ParameterTool.getLong method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java examples.
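All of the examples follow the same basic pattern: build a ParameterTool from the command-line arguments, then read long-valued options with getLong, either requiring the key or supplying a default. As a warm-up, here is a minimal, self-contained sketch; the class name GetLongDemo is ours for illustration and does not come from any of the projects below.

import org.apache.flink.api.java.utils.ParameterTool;

public class GetLongDemo {
    public static void main(String[] args) {
        // e.g. args = {"--windowSize", "5000"}
        ParameterTool params = ParameterTool.fromArgs(args);

        // getLong(key) throws a RuntimeException if the key is missing
        // or its value cannot be parsed as a long
        long windowSize = params.getLong("windowSize");

        // getLong(key, defaultValue) falls back to the default when the key is absent
        long rate = params.getLong("rate", 3L);

        System.out.println("windowSize=" + windowSize + ", rate=" + rate);
    }
}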

Example 1: configure

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
@Override
public void configure(ParameterTool parameterTool) {
	if (hasDefaultValue && !parameterTool.has(name)) {
		// skip checks for min and max when using default value
		value = defaultValue;
	} else {
		value = parameterTool.getLong(name);

		if (hasMinimumValue) {
			Util.checkParameter(value >= minimumValue,
				name + " must be greater than or equal to " + minimumValue);
		}

		if (hasMaximumValue) {
			Util.checkParameter(value <= maximumValue,
				name + " must be less than or equal to " + maximumValue);
		}
	}
}
 
Developer: axbaretto, Project: flink, Lines: 20, Source: LongParameter.java
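Example 1 comes from the parameter framework of Flink's Gelly drivers: configure() is called once with the job's ParameterTool, and the range checks are skipped only when the parameter is absent and a default exists. A rough usage sketch follows; the fluent setMinimumValue/setMaximumValue setters and the owner constructor argument are assumptions inferred from the method above, not verified against the actual class.

// Hypothetical wiring for the LongParameter whose configure() is shown above.
// 'this' stands for the enclosing parameterized driver (assumed constructor argument);
// the fluent setters are assumed to exist alongside hasMinimumValue/hasMaximumValue.
LongParameter port = new LongParameter(this, "port")
    .setMinimumValue(1024)
    .setMaximumValue(65535);

ParameterTool params = ParameterTool.fromArgs(new String[] {"--port", "8080"});
port.configure(params);  // 8080 is within [1024, 65535], so the checks pass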

Example 2: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    FlinkPravegaParams helper = new FlinkPravegaParams(params);
    StreamId stream = helper.createStreamFromParam("input", "examples/turbineHeatTest");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    // 1. read and decode the sensor events from a Pravega stream
    long startTime = params.getLong("start", 0L);
    FlinkPravegaReader<String> reader = helper.newReader(stream, startTime, String.class);
    DataStream<SensorEvent> events = env.addSource(reader, "input").map(new SensorMapper()).name("events");

    // 2. extract timestamp information to support 'event-time' processing
    SingleOutputStreamOperator<SensorEvent> timestamped = events.assignTimestampsAndWatermarks(
            new BoundedOutOfOrdernessTimestampExtractor<SensorEvent>(Time.seconds(10)) {
        @Override
        public long extractTimestamp(SensorEvent element) {
            return element.getTimestamp();
        }
    });
    timestamped.print();

    // 3. summarize the temperature data for each sensor
    SingleOutputStreamOperator<SensorAggregate> summaries = timestamped
            .keyBy("sensorId")
            .window(TumblingEventTimeWindows.of(Time.days(1), Time.hours(8)))
            .fold(null, new SensorAggregator()).name("summaries");

    // 4. save to HDFS and print to stdout.  Refer to the TaskManager's 'Stdout' view in the Flink UI.
    summaries.print().name("stdout");
    if (params.has("output")) {
        summaries.writeAsCsv(params.getRequired("output"), FileSystem.WriteMode.OVERWRITE);
    }

    env.execute("TurbineHeatProcessor_" + stream);
}
 
Developer: pravega, Project: pravega-samples, Lines: 38, Source: TurbineHeatProcessor.java

Example 3: SummarizationJobParameters

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
public SummarizationJobParameters(ParameterTool params) {
    timelyHostname = params.getRequired("timelyHostname");
    timelyTcpPort = params.getInt("timelyTcpPort", 4241);
    timelyHttpsPort = params.getInt("timelyHttpsPort", 4242);
    timelyWssPort = params.getInt("timelyWssPort", 4243);
    doLogin = params.getBoolean("doLogin", false);
    timelyUsername = params.get("timelyUsername", null);
    timelyPassword = params.get("timelyPassword", null);
    keyStoreFile = params.getRequired("keyStoreFile");
    keyStoreType = params.get("keyStoreType", "JKS");
    keyStorePass = params.getRequired("keyStorePass");
    trustStoreFile = params.getRequired("trustStoreFile");
    trustStoreType = params.get("trustStoreType", "JKS");
    trustStorePass = params.getRequired("trustStorePass");
    hostVerificationEnabled = params.getBoolean("hostVerificationEnabled", true);
    bufferSize = params.getInt("bufferSize", 10485760);
    String metricNames = params.getRequired("metrics");
    // getRequired() throws a RuntimeException when "metrics" is absent,
    // so this null check is purely defensive and the else branch is unreachable
    if (null != metricNames) {
        metrics = metricNames.split(",");
    } else {
        metrics = null;
    }
    startTime = params.getLong("startTime", 0L);
    endTime = params.getLong("endTime", 0L);
    interval = params.getRequired("interval");
    intervalUnits = params.getRequired("intervalUnits");
}
 
Developer: NationalSecurityAgency, Project: timely, Lines: 28, Source: SummarizationJobParameters.java

Example 4: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
public static void main(String[] args) throws Exception {
	// parse the parameters
	final ParameterTool params = ParameterTool.fromArgs(args);
	final long windowSize = params.getLong("windowSize", 2000);
	final long rate = params.getLong("rate", 3L);

	System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate);
	System.out.println("To customize example, use: WindowJoin [--windowSize <window-size-in-millis>] [--rate <elements-per-second>]");

	// obtain execution environment, run this example in "ingestion time"
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	// make parameters available in the web interface
	env.getConfig().setGlobalJobParameters(params);

	// create the data sources for both grades and salaries
	DataStream<Tuple2<String, Integer>> grades = GradeSource.getSource(env, rate);
	DataStream<Tuple2<String, Integer>> salaries = SalarySource.getSource(env, rate);

	// run the actual window join program
	// for testability, this functionality is in a separate method.
	DataStream<Tuple3<String, Integer, Integer>> joinedStream = runWindowJoin(grades, salaries, windowSize);

	// print the results with a single thread, rather than in parallel
	joinedStream.print().setParallelism(1);

	// execute program
	env.execute("Windowed Join Example");
}
 
Developer: axbaretto, Project: flink, Lines: 31, Source: WindowJoin.java

Example 5: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
/**
 * The program main method.
 * @param args the command line arguments.
 */
public static void main(String[] args) throws Exception {

  // CONFIGURATION
  ParameterTool parameter = ParameterTool.fromArgs(args);
  final int port = Integer.valueOf(parameter.getRequired("port"));
  final Path outputPath = FileSystems.getDefault().getPath(parameter.get("output", PROGRAM_NAME + ".out"));
  final long windowSize = parameter.getLong("windowSize", 10);
  final TimeUnit windowUnit = TimeUnit.valueOf(parameter.get("windowUnit", "SECONDS"));
  final int parallelism = parameter.getInt("parallelism", 1);

  // ENVIRONMENT
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

  // CONFIGURATION RESUME
  System.out.println("############################################################################");
  System.out.printf("%s\n", PROGRAM_NAME);
  System.out.println("----------------------------------------------------------------------------");
  System.out.printf("%s\n", PROGRAM_DESCRIPTION);
  System.out.println("****************************************************************************");
  System.out.println("Port: " + port);
  System.out.println("Output: " + outputPath);
  System.out.println("Window: " + windowSize + " " + windowUnit);
  System.out.println("Parallelism: " + parallelism);
  System.out.println("############################################################################");

  // TOPOLOGY
  DataStream<String> text = env.socketTextStream("localhost", port, "\n");

  DataStream<WordWithCount> windowCounts = text
      .flatMap(new WordTokenizer())
      .keyBy("word")
      .timeWindow(Time.of(windowSize, windowUnit))
      .reduce(new WordCountReducer())
      .setParallelism(parallelism);

  windowCounts.writeAsText(outputPath.toAbsolutePath().toString(), FileSystem.WriteMode.OVERWRITE);

  // EXECUTION
  env.execute(PROGRAM_NAME);
}
 
Developer: gmarciani, Project: flink-scaffolding, Lines: 46, Source: TopologyQuery1.java

Example 6: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
/**
 * The program main method.
 * @param args the command line arguments.
 */
public static void main(String[] args) throws Exception {
  // CONFIGURATION
  ParameterTool parameter = ParameterTool.fromArgs(args);
  final int port = Integer.valueOf(parameter.getRequired("port"));
  final Path outputPath = FileSystems.getDefault().getPath(parameter.get("output", PROGRAM_NAME + ".out"));
  final long windowSize = parameter.getLong("windowSize", 10);
  final TimeUnit windowUnit = TimeUnit.valueOf(parameter.get("windowUnit", "SECONDS"));
  final int rankSize = parameter.getInt("rankSize", 3);
  final int parallelism = parameter.getInt("parallelism", 1);

  // ENVIRONMENT
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

  // CONFIGURATION RESUME
  System.out.println("############################################################################");
  System.out.printf("%s\n", PROGRAM_NAME);
  System.out.println("----------------------------------------------------------------------------");
  System.out.printf("%s\n", PROGRAM_DESCRIPTION);
  System.out.println("****************************************************************************");
  System.out.println("Port: " + port);
  System.out.println("Output: " + outputPath);
  System.out.println("Window: " + windowSize + " " + windowUnit);
  System.out.println("Rank Size: " + rankSize);
  System.out.println("Parallelism: " + parallelism);
  System.out.println("############################################################################");

  // TOPOLOGY
  DataStream<TimedWord> timedWords = env.addSource(new StoppableTimedWordSocketSource("localhost", port))
      .assignTimestampsAndWatermarks(new EventTimestampExtractor());

  DataStream<WindowWordWithCount> windowCounts = timedWords
      .keyBy(new WordKeySelector())
      .timeWindow(Time.of(windowSize, windowUnit))
      .aggregate(new TimedWordCounterAggregator(), new TimedWordCounterWindowFunction())
      .setParallelism(parallelism);

  DataStream<WindowWordRanking> ranking = windowCounts.timeWindowAll(Time.of(windowSize, windowUnit))
      .apply(new WordRankerWindowFunction(rankSize));

  ranking.writeAsText(outputPath.toAbsolutePath().toString(), FileSystem.WriteMode.OVERWRITE);

  // EXECUTION
  env.execute(PROGRAM_NAME);
}
 
Developer: gmarciani, Project: flink-scaffolding, Lines: 50, Source: TopologyQuery2.java

Example 7: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
/**
 * The program main method.
 * @param args the command line arguments.
 */
public static void main(String[] args) throws Exception {
  // CONFIGURATION
  ParameterTool parameter = ParameterTool.fromArgs(args);
  final String kafkaZookeeper = parameter.get("kafka.zookeeper", "localhost:2181");
  final String kafkaBootstrap = parameter.get("kafka.bootstrap", "localhost:9092");
  final String kafkaTopic = parameter.get("kafka.topic", "topic-query-3");
  final Path outputPath = FileSystems.getDefault().getPath(parameter.get("output", PROGRAM_NAME + ".out"));
  final String elasticsearch = parameter.get("elasticsearch", null);
  final long windowSize = parameter.getLong("windowSize", 10);
  final TimeUnit windowUnit = TimeUnit.valueOf(parameter.get("windowUnit", "SECONDS"));
  final int rankSize = parameter.getInt("rankSize", 3);
  final long tsEnd = parameter.getLong("tsEnd", 100000L);
  final Set<String> ignoredWords = Sets.newHashSet(parameter.get("ignoredWords", "")
      .trim().split(","));
  final int parallelism = parameter.getInt("parallelism", 1);

  // ENVIRONMENT
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
  env.setParallelism(parallelism);
  final KafkaProperties kafkaProps = new KafkaProperties(kafkaBootstrap, kafkaZookeeper);
  final ESProperties elasticsearchProps = ESProperties.fromPropString(elasticsearch);

  // CONFIGURATION RESUME
  System.out.println("############################################################################");
  System.out.printf("%s\n", PROGRAM_NAME);
  System.out.println("----------------------------------------------------------------------------");
  System.out.printf("%s\n", PROGRAM_DESCRIPTION);
  System.out.println("****************************************************************************");
  System.out.println("Kafka Zookeeper: " + kafkaZookeeper);
  System.out.println("Kafka Bootstrap: " + kafkaBootstrap);
  System.out.println("Kafka Topic: " + kafkaTopic);
  System.out.println("Output: " + outputPath);
  System.out.println("Elasticsearch: " + elasticsearch);
  System.out.println("Window: " + windowSize + " " + windowUnit);
  System.out.println("Rank Size: " + rankSize);
  System.out.println("Timestamp End: " + tsEnd);
  System.out.println("Ignored Words: " + ignoredWords);
  System.out.println("Parallelism: " + parallelism);
  System.out.println("############################################################################");

  // TOPOLOGY
  DataStream<TimedWord> timedWords = env.addSource(new StoppableTimedWordKafkaSource(kafkaTopic, kafkaProps, tsEnd));

  DataStream<TimedWord> filteredTimedWords = timedWords.filter(new TimedWordFilter(ignoredWords))
      .assignTimestampsAndWatermarks(new EventTimestampExtractor());

  DataStream<WindowWordWithCount> windowCounts = filteredTimedWords
      .keyBy(new WordKeySelector())
      .timeWindow(Time.of(windowSize, windowUnit))
      .aggregate(new TimedWordCounterAggregator(), new TimedWordCounterWindowFunction());

  DataStream<WindowWordRanking> ranking = windowCounts.timeWindowAll(Time.of(windowSize, windowUnit))
      .apply(new WordRankerWindowFunction(rankSize));

  ranking.writeAsText(outputPath.toAbsolutePath().toString(), FileSystem.WriteMode.OVERWRITE);

  if (elasticsearch != null) {
    ranking.addSink(new ESSink<>(elasticsearchProps,
        new MyESSinkFunction(elasticsearchProps.getIndexName(), elasticsearchProps.getTypeName()))
    );
  }

  // EXECUTION
  env.execute(PROGRAM_NAME);
}
 
Developer: gmarciani, Project: flink-scaffolding, Lines: 71, Source: TopologyQuery3.java

Example 8: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
/**
 * The program main method.
 * @param args the command line arguments.
 */
public static void main(String[] args) throws Exception {

  // CONFIGURATION
  ParameterTool parameter = ParameterTool.fromArgs(args);
  final String kafkaZookeeper = parameter.get("kafka.zookeeper", "localhost:2181");
  final String kafkaBootstrap = parameter.get("kafka.bootstrap", "localhost:9092");
  final String kafkaTopic = parameter.get("kafka.topic", "socstream");
  final Path outputPath = FileSystems.getDefault().getPath(parameter.get("output", PROGRAM_NAME + ".out"));
  final String elasticsearch = parameter.get("elasticsearch", null);
  final Path metadataPath = FileSystems.getDefault().getPath(parameter.get("metadata", "./metadata.yml"));
  final long windowSize = parameter.getLong("windowSize", 70);
  final TimeUnit windowUnit = TimeUnit.valueOf(parameter.get("windowUnit", "MINUTES"));
  final long matchStart = parameter.getLong("match.start", 10753295594424116L);
  final long matchEnd = parameter.getLong("match.end", 14879639146403495L);
  final long matchIntervalStart = parameter.getLong("match.interval.start", 12557295594424116L);
  final long matchIntervalEnd = parameter.getLong("match.interval.end", 13086639146403495L);
  final int parallelism = parameter.getInt("parallelism", 1);
  final Match match = MatchService.fromYamlFile(metadataPath);
  final Set<Long> ignoredSensors = MatchService.collectIgnoredSensors(match);
  final Map<Long,Long> sid2Pid = MatchService.collectSid2Pid(match);

  // ENVIRONMENT
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
  final KafkaProperties kafkaProps = new KafkaProperties(kafkaBootstrap);
  final ESProperties elasticsearchProps = ESProperties.fromPropString(elasticsearch);

  // CONFIGURATION RESUME
  System.out.println("############################################################################");
  System.out.printf("%s\n", PROGRAM_NAME);
  System.out.println("----------------------------------------------------------------------------");
  System.out.printf("%s\n", PROGRAM_DESCRIPTION);
  System.out.println("****************************************************************************");
  System.out.println("Kafka Zookeeper: " + kafkaZookeeper);
  System.out.println("Kafka Bootstrap: " + kafkaBootstrap);
  System.out.println("Kafka Topic: " + kafkaTopic);
  System.out.println("Output: " + outputPath);
  System.out.println("Elasticsearch: " + elasticsearch);
  System.out.println("Metadata: " + metadataPath);
  System.out.println("Window: " + windowSize + " " + windowUnit);
  System.out.println("Match Start: " + matchStart);
  System.out.println("Match End: " + matchEnd);
  System.out.println("Match Interval Start: " + matchIntervalStart);
  System.out.println("Match Interval End: " + matchIntervalEnd);
  System.out.println("Ignored Sensors: " + ignoredSensors);
  System.out.println("Parallelism: " + parallelism);
  System.out.println("############################################################################");

  // TOPOLOGY
  DataStream<RichSensorEvent> sensorEvents = env.addSource(
      new RichSensorEventKafkaSource(kafkaTopic, kafkaProps, matchStart, matchEnd,
          matchIntervalStart, matchIntervalEnd, ignoredSensors, sid2Pid
      )
  ).assignTimestampsAndWatermarks(new RichSensorEventTimestampExtractor()).setParallelism(parallelism);

  DataStream<PlayerRunningStatistics> statistics = sensorEvents.keyBy(new RichSensorEventKeyer())
      .timeWindow(Time.of(windowSize, windowUnit))
      .aggregate(new PlayerRunningStatisticsCalculatorAggregator(), new PlayerRunningStatisticsCalculatorWindowFunction())
      .setParallelism(parallelism);

  statistics.writeAsText(outputPath.toAbsolutePath().toString(), FileSystem.WriteMode.OVERWRITE).setParallelism(1);

  if (elasticsearch != null) {
    statistics.addSink(new ESSink<>(elasticsearchProps,
        new PlayerRunningStatisticsESSinkFunction(elasticsearchProps.getIndexName(), elasticsearchProps.getTypeName()))
    ).setParallelism(1);
  }

  // EXECUTION
  env.execute(PROGRAM_NAME);
}
 
Developer: braineering, Project: socstream, Lines: 76, Source: TopologyQuery1.java

Example 9: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
/**
 * The program main method.
 * @param args the command line arguments.
 */
public static void main(String[] args) throws Exception {

  // CONFIGURATION
  ParameterTool parameter = ParameterTool.fromArgs(args);
  final String kafkaZookeeper = parameter.get("kafka.zookeeper", "localhost:2181");
  final String kafkaBootstrap = parameter.get("kafka.bootstrap", "localhost:9092");
  final String kafkaTopic = parameter.get("kafka.topic", "socstream");
  final Path outputPath = FileSystems.getDefault().getPath(parameter.get("output", PROGRAM_NAME + ".out"));
  final String elasticsearch = parameter.get("elasticsearch", null);
  final Path metadataPath = FileSystems.getDefault().getPath(parameter.get("metadata", "./metadata.yml"));
  final long windowSize = parameter.getLong("windowSize", 70);
  final TimeUnit windowUnit = TimeUnit.valueOf(parameter.get("windowUnit", "MINUTES"));
  final int rankSize = parameter.getInt("rankSize", 5);
  final long matchStart = parameter.getLong("match.start", 10753295594424116L);
  final long matchEnd = parameter.getLong("match.end", 14879639146403495L);
  final long matchIntervalStart = parameter.getLong("match.interval.start", 12557295594424116L);
  final long matchIntervalEnd = parameter.getLong("match.interval.end", 13086639146403495L);
  final int parallelism = parameter.getInt("parallelism", 1);

  final Match match = MatchService.fromYamlFile(metadataPath);
  final Set<Long> ignoredSensors = MatchService.collectIgnoredSensors(match);
  final Map<Long,Long> sid2Pid = MatchService.collectSid2Pid(match);

  // ENVIRONMENT
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
  final KafkaProperties kafkaProps = new KafkaProperties(kafkaBootstrap);
  final ESProperties elasticsearchProps = ESProperties.fromPropString(elasticsearch);

  // CONFIGURATION RESUME
  System.out.println("############################################################################");
  System.out.printf("%s\n", PROGRAM_NAME);
  System.out.println("----------------------------------------------------------------------------");
  System.out.printf("%s\n", PROGRAM_DESCRIPTION);
  System.out.println("****************************************************************************");
  System.out.println("Kafka Zookeeper: " + kafkaZookeeper);
  System.out.println("Kafka Bootstrap: " + kafkaBootstrap);
  System.out.println("Kafka Topic: " + kafkaTopic);
  System.out.println("Output: " + outputPath);
  System.out.println("Elasticsearch: " + elasticsearch);
  System.out.println("Metadata: " + metadataPath);
  System.out.println("Window: " + windowSize + " " + windowUnit);
  System.out.println("Rank Size: " + rankSize);
  System.out.println("Match Start: " + matchStart);
  System.out.println("Match End: " + matchEnd);
  System.out.println("Match Interval Start: " + matchIntervalStart);
  System.out.println("Match Interval End: " + matchIntervalEnd);
  System.out.println("Ignored Sensors: " + ignoredSensors);
  System.out.println("Parallelism: " + parallelism);
  System.out.println("############################################################################");

  // TOPOLOGY
  DataStream<SpeedSensorEvent> sensorEvents = env.addSource(
      new SpeedSensorEventKafkaSource(kafkaTopic, kafkaProps, matchStart, matchEnd,
          matchIntervalStart, matchIntervalEnd, ignoredSensors, sid2Pid
      )
  ).assignTimestampsAndWatermarks(new SpeedSensorEventTimestampExtractor()).setParallelism(parallelism);

  DataStream<PlayerSpeedStatistics> statistics = sensorEvents.keyBy(new SpeedSensorEventKeyer())
      .timeWindow(Time.of(windowSize, windowUnit))
      .aggregate(new PlayerSpeedStatisticsCalculatorAggregator(), new PlayerSpeedStatisticsCalculatorWindowFunction())
      .setParallelism(parallelism);

  DataStream<PlayersSpeedRanking> ranking = statistics.timeWindowAll(Time.of(windowSize, windowUnit))
      .apply(new GlobalRankerWindowFunction(rankSize));

  ranking.writeAsText(outputPath.toAbsolutePath().toString(), FileSystem.WriteMode.OVERWRITE).setParallelism(1);

  if (elasticsearch != null) {
    ranking.addSink(new ESSink<>(elasticsearchProps,
        new PlayerSpeedRankingESSinkFunction(elasticsearchProps.getIndexName(), elasticsearchProps.getTypeName()))
    ).setParallelism(1);
  }

  // EXECUTION
  env.execute(PROGRAM_NAME);
}
 
Developer: braineering, Project: socstream, Lines: 82, Source: TopologyQuery2.java

Example 10: main

import org.apache.flink.api.java.utils.ParameterTool; // import the package/class the method depends on
/**
 * The program main method.
 * @param args the command line arguments.
 */
public static void main(String[] args) throws Exception {

  // CONFIGURATION
  ParameterTool parameter = ParameterTool.fromArgs(args);
  final String kafkaZookeeper = parameter.get("kafka.zookeeper", "localhost:2181");
  final String kafkaBootstrap = parameter.get("kafka.bootstrap", "localhost:9092");
  final String kafkaTopic = parameter.get("kafka.topic", "socstream");
  final Path outputPath = FileSystems.getDefault().getPath(parameter.get("output", PROGRAM_NAME + ".out"));
  final String elasticsearch = parameter.get("elasticsearch", null);
  final Path metadataPath = FileSystems.getDefault().getPath(parameter.get("metadata", "./metadata.yml"));
  final long windowSize = parameter.getLong("windowSize", 70);
  final TimeUnit windowUnit = TimeUnit.valueOf(parameter.get("windowUnit", "MINUTES"));
  final long matchStart = parameter.getLong("match.start", 10753295594424116L);
  final long matchEnd = parameter.getLong("match.end", 14879639146403495L);
  final long matchIntervalStart = parameter.getLong("match.interval.start", 12557295594424116L);
  final long matchIntervalEnd = parameter.getLong("match.interval.end", 13086639146403495L);
  final int parallelism = parameter.getInt("parallelism", 1);

  final Match match = MatchService.fromYamlFile(metadataPath);
  final Set<Long> ignoredSensors = MatchService.collectIgnoredSensors(match);
  final Map<Long,Long> sid2Pid = MatchService.collectSid2Pid(match);

  // ENVIRONMENT
  final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
  final KafkaProperties kafkaProps = new KafkaProperties(kafkaBootstrap);
  final ESProperties elasticsearchProps = ESProperties.fromPropString(elasticsearch);

  // CONFIGURATION RESUME
  System.out.println("############################################################################");
  System.out.printf("%s\n", PROGRAM_NAME);
  System.out.println("----------------------------------------------------------------------------");
  System.out.printf("%s\n", PROGRAM_DESCRIPTION);
  System.out.println("****************************************************************************");
  System.out.println("Kafka Zookeeper: " + kafkaZookeeper);
  System.out.println("Kafka Bootstrap: " + kafkaBootstrap);
  System.out.println("Kafka Topic: " + kafkaTopic);
  System.out.println("Output: " + outputPath);
  System.out.println("Elasticsearch: " + elasticsearch);
  System.out.println("Metadata: " + metadataPath);
  System.out.println("Window: " + windowSize + " " + windowUnit);
  System.out.println("Match Start: " + matchStart);
  System.out.println("Match End: " + matchEnd);
  System.out.println("Match Interval Start: " + matchIntervalStart);
  System.out.println("Match Interval End: " + matchIntervalEnd);
  System.out.println("Ignored Sensors: " + ignoredSensors);
  System.out.println("Parallelism: " + parallelism);
  System.out.println("############################################################################");

  // TOPOLOGY
  DataStream<PositionSensorEvent> sensorEvents = env.addSource(
      new PositionSensorEventKafkaSource(kafkaTopic, kafkaProps, matchStart, matchEnd,
          matchIntervalStart, matchIntervalEnd, ignoredSensors, sid2Pid
      ).assignTimestampsAndWatermarks(new PositionSensorEventTimestampExtractor())).setParallelism(1);

  DataStream<PlayerGridStatistics> statistics = sensorEvents.keyBy(new PositionSensorEventKeyer())
      .timeWindow(Time.of(windowSize, windowUnit))
      .aggregate(new PlayerOnGridStatisticsCalculatorAggregator(), new PlayerOnGridStatisticsCalculatorWindowFunction())
      .setParallelism(parallelism);

  statistics.writeAsText(outputPath.toAbsolutePath().toString(), FileSystem.WriteMode.OVERWRITE).setParallelism(1);

  if (elasticsearch != null) {
    statistics.addSink(new ESSink<>(elasticsearchProps,
        new PlayerGridStatisticsESSinkFunction(elasticsearchProps.getIndexName(), elasticsearchProps.getTypeName()))
    ).setParallelism(1);
  }

  // EXECUTION
  env.execute(PROGRAM_NAME);
}
 
Developer: braineering, Project: socstream, Lines: 76, Source: TopologyQuery3.java


Note: the org.apache.flink.api.java.utils.ParameterTool.getLong examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.