

Java StreamExecutionEnvironment.readTextFile Method Code Examples

This article collects typical usage examples of the Java method org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.readTextFile, gathered from open-source projects. If you are unsure what StreamExecutionEnvironment.readTextFile does, how to call it, or what real-world usage looks like, the selected examples below should help. You can also explore further usage examples of the enclosing class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.


The following shows 7 code examples of the StreamExecutionEnvironment.readTextFile method, ordered by popularity.
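For orientation, here is a minimal, self-contained sketch of the call pattern that all of the examples below share: obtain a StreamExecutionEnvironment, read a text file line by line with readTextFile, attach a sink, and call execute. The class name ReadTextFileSketch and the input path file:///tmp/input.txt are placeholders chosen for illustration, not taken from any of the projects cited below.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ReadTextFileSketch {

	public static void main(String[] args) throws Exception {
		// set up the streaming execution environment
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// read the file line by line; each line becomes one String record in the stream
		// (the path below is a placeholder)
		DataStream<String> lines = env.readTextFile("file:///tmp/input.txt");

		// write every line to stdout
		lines.print();

		// submit and run the job
		env.execute("readTextFile sketch");
	}
}

Because readTextFile returns a DataStream<String> with one record per line, every example below immediately applies a transformation such as flatMap or select to that stream before writing or printing the result.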

Example 1: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
@SuppressWarnings("Convert2Lambda")
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<String> dataStream = streamExecutionEnvironment.readTextFile("file:///tmp/flink-esper-input");
    
    EsperStream<String> esperStream = Esper.pattern(dataStream, "select bytes from String");

    DataStream<String> result = esperStream.select(new EsperSelectFunction<String>() {
        private static final long serialVersionUID = 7093943872082195786L;

        @Override
        public String select(EventBean eventBean) throws Exception {
            return new String((byte[]) eventBean.get("bytes"));
        }
    });

    result.writeAsText("file:///tmp/flink-esper-output");

    streamExecutionEnvironment.execute("Simple Flink Esper Example");
}
 
Developer: phil3k3, Project: flink-esper, Lines of code: 21, Source: FlinkTestClass.java

Example 2: getTextDataStream

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
private static DataStream<String> getTextDataStream(StreamExecutionEnvironment env) {
	if (fileOutput) {
		// read the text file from given input path
		return env.readTextFile(textPath);
	} else {
		// get default test text data
		return env.fromElements(WordCountData.WORDS);
	}
}
 
Developer: axbaretto, Project: flink, Lines of code: 10, Source: WordCount.java

Example 3: getTextDataStream

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
private static DataStream<String> getTextDataStream(final StreamExecutionEnvironment env) {
	if (fileOutput) {
		// read the text file from given input path
		return env.readTextFile(textPath);
	}

	return env.fromElements(WordCountData.WORDS);
}
 
Developer: axbaretto, Project: flink, Lines of code: 9, Source: ExclamationWithBolt.java

Example 4: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
public static void main(String[] args) throws Exception {

		// Checking input parameters
		final ParameterTool params = ParameterTool.fromArgs(args);

		// set up the execution environment
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// make parameters available in the web interface
		env.getConfig().setGlobalJobParameters(params);

		// get input data
		DataStream<String> text;
		if (params.has("input")) {
			// read the text file from given input path
			text = env.readTextFile(params.get("input"));
		} else {
			System.out.println("Executing WordCountPojo example with default input data set.");
			System.out.println("Use --input to specify file input.");
			// get default test text data
			text = env.fromElements(WordCountData.WORDS);
		}

		DataStream<Word> counts =
			// split up the lines into Word objects
			text.flatMap(new Tokenizer())
			// group by the field word and sum up the frequency
			.keyBy("word").sum("frequency");

		if (params.has("output")) {
			counts.writeAsText(params.get("output"));
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");
			counts.print();
		}

		// execute program
		env.execute("WordCount Pojo Example");
	}
 
Developer: axbaretto, Project: flink, Lines of code: 40, Source: PojoExample.java

Example 5: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
public static void main(String[] args) throws Exception {

		// Checking input parameters
		final ParameterTool params = ParameterTool.fromArgs(args);

		// set up the execution environment
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// make parameters available in the web interface
		env.getConfig().setGlobalJobParameters(params);

		// get input data
		DataStream<String> text;
		if (params.has("input")) {
			// read the text file from given input path
			text = env.readTextFile(params.get("input"));
		} else {
			System.out.println("Executing WordCount example with default input data set.");
			System.out.println("Use --input to specify file input.");
			// get default test text data
			text = env.fromElements(WordCountData.WORDS);
		}

		DataStream<Tuple2<String, Integer>> counts =
			// split up the lines in pairs (2-tuples) containing: (word,1)
			text.flatMap(new Tokenizer())
			// group by the tuple field "0" and sum up tuple field "1"
			.keyBy(0).sum(1);

		// emit result
		if (params.has("output")) {
			counts.writeAsText(params.get("output"));
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");
			counts.print();
		}

		// execute program
		env.execute("Streaming WordCount");
	}
 
Developer: axbaretto, Project: flink, Lines of code: 41, Source: WordCount.java

Example 6: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
public static void main(String[] args) throws Exception {

		// get the execution environment
		final StreamExecutionEnvironment job = StreamExecutionEnvironment.getExecutionEnvironment();
		String inputPath, outputPath = null;
		try {
			final ParameterTool params = ParameterTool.fromArgs(args);
			inputPath = params.get("input");

			if (params.has("output")) {
				outputPath = params.get("output");
			}
			// make parameters available in the web interface
			job.getConfig().setGlobalJobParameters(params);
		} catch (Exception e) {
			System.err.println("No input specified. Please run '" + FileWordCount.class.getSimpleName() +
				" --input <file-path>', where 'input' is the path to a text file");
			return;
		}

		DataServiceFacade dataService = new DataServiceFacade(DataEntityType.WORD_COUNT);

		dataService.setUpEmbeddedCassandra();
		dataService.setUpDataModel();

		LOG.info("Example starts!");

		// get input data by reading content from file
		DataStream<String> text = job.readTextFile(inputPath);

		DataStream<Tuple2<String, Long>> result =
			// split up the lines in pairs (2-tuples) containing: (word,1)
			text.flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {

				@Override
				public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
					// normalize and split the line
					String[] words = value.toLowerCase().split("\\W+");

					// emit the pairs
					for (String word : words) {
						//Do not accept empty word, since word is defined as primary key in C* table
						if (!word.isEmpty()) {
							out.collect(new Tuple2<String, Long>(word, 1L));
						}
					}
				}
			})
				// group by the tuple field "0" and sum up tuple field "1"
				.keyBy(0)
				.sum(1);

		//Update the results to C* sink
		CassandraSink.addSink(result)
				.setQuery("INSERT INTO " + WordCount.CQL_KEYSPACE_NAME + "." + WordCount.CQL_TABLE_NAME + "(word, count) " +
						"values (?, ?);")
				.setHost("127.0.0.1")
				.build();

		// emit result
		if (outputPath != null) {
			result.writeAsText(outputPath);
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");

			CQLPrintSinkFunction<Tuple2<String, Long>, WordCount> func = new CQLPrintSinkFunction<>();
			func.setDataModel(dataService, 10);
			result.addSink(func).setParallelism(1);
		}

		// execute program
		job.execute("[STREAM] FileWordCount w/ C* Sink");
	}
 
Developer: mcfongtw, Project: flink-cassandra-connector-examples, Lines of code: 74, Source: FileWordCount.java

Example 7: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import of the class that provides the readTextFile method
public static void main(String[] args) throws Exception {

		final ParameterTool params = ParameterTool.fromArgs(args);

		// set up the execution environment
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// get input data
		DataStream<String> text;
		if (params.has("input")) {
			// read the text file from given input path
			text = env.readTextFile(params.get("input"));
		} else {
			System.out.println("Executing WindowWordCount example with default input data set.");
			System.out.println("Use --input to specify file input.");
			// get default test text data
			text = env.fromElements(WordCountData.WORDS);
		}

		// make parameters available in the web interface
		env.getConfig().setGlobalJobParameters(params);

		final int windowSize = params.getInt("window", 10);
		final int slideSize = params.getInt("slide", 5);

		DataStream<Tuple2<String, Integer>> counts =
			// split up the lines in pairs (2-tuples) containing: (word,1)
			text.flatMap(new WordCount.Tokenizer())
			// create windows of windowSize records, sliding every slideSize records
			.keyBy(0)
			.countWindow(windowSize, slideSize)
			// group by the tuple field "0" and sum up tuple field "1"
			.sum(1);

		// emit result
		if (params.has("output")) {
			counts.writeAsText(params.get("output"));
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");
			counts.print();
		}

		// execute program
		env.execute("WindowWordCount");
	}
 
Developer: axbaretto, Project: flink, Lines of code: 46, Source: WindowWordCount.java


Note: The org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.readTextFile examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects; copyright remains with the original authors, and distribution or reuse should follow the corresponding project's license. Do not reproduce this article without permission.