

Java StreamExecutionEnvironment.fromElements Method Code Examples

This article collects and summarizes typical usage examples of the Java method org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.fromElements. If you are wondering what exactly StreamExecutionEnvironment.fromElements does, how to call it, or what code that uses it looks like in practice, the selected examples below should help. You can also explore further usage examples of the enclosing class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.


The 15 code examples of StreamExecutionEnvironment.fromElements shown below are ordered by popularity by default.
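Before the collected examples, here is a minimal, self-contained sketch of the method's typical role: fromElements turns a fixed set of in-memory values into a finite DataStream, which makes it convenient for tests and small demos like those that follow. This sketch is not taken from any of the projects below; the class name FromElementsExample, the sample strings, and the job name are illustrative only.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class FromElementsExample {

	public static void main(String[] args) throws Exception {
		// Obtain the streaming execution environment, as every example below does.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// fromElements builds a finite DataStream from the given values;
		// the element type (here String) is inferred from the arguments.
		DataStream<String> words = env.fromElements("hello", "flink", "stream");

		// A trivial transformation, only to show the stream being consumed.
		DataStream<Integer> lengths = words.map(new MapFunction<String, Integer>() {
			@Override
			public Integer map(String value) {
				return value.length();
			}
		});

		lengths.print();

		// Trigger execution; the job name is arbitrary.
		env.execute("fromElements example");
	}
}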

Example 1: testPOJOWithNestedArrayNoHashCodeKeyRejection

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
public void testPOJOWithNestedArrayNoHashCodeKeyRejection() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<POJOWithHashCode> input = env.fromElements(
			new POJOWithHashCode(new int[] {1, 2}));

	TypeInformation<?> expectedTypeInfo = new TupleTypeInfo<Tuple1<int[]>>(
			PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO);

	// adjust the rule
	expectedException.expect(InvalidProgramException.class);
	expectedException.expectMessage(new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key."));

	input.keyBy("id");
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: DataStreamTest.java

Example 2: testFoldProcessingTime

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testFoldProcessingTime() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DataStream<Tuple3<String, String, Integer>> window = source
			.windowAll(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.fold(new Tuple3<>("", "", 0), new DummyFolder());

	OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
			(OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String,  Integer>>) window.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 24, Source: AllWindowTranslationTest.java

Example 3: testSetupOfKeyGroupPartitioner

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
/**
 * Tests that the KeyGroupStreamPartitioner is properly set up with the correct value of
 * maximum parallelism.
 */
@Test
public void testSetupOfKeyGroupPartitioner() {
	int maxParallelism = 42;
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().setMaxParallelism(maxParallelism);

	DataStream<Integer> source = env.fromElements(1, 2, 3);

	DataStream<Integer> keyedResult = source.keyBy(new KeySelector<Integer, Integer>() {
		private static final long serialVersionUID = 9205556348021992189L;

		@Override
		public Integer getKey(Integer value) throws Exception {
			return value;
		}
	}).map(new NoOpIntMap());

	keyedResult.addSink(new DiscardingSink<Integer>());

	StreamGraph graph = env.getStreamGraph();

	StreamNode keyedResultNode = graph.getStreamNode(keyedResult.getId());

	// retrieve the partitioner configured on the keyed input edge
	StreamPartitioner<?> streamPartitioner = keyedResultNode.getInEdges().get(0).getPartitioner();
}
 
Developer: axbaretto, Project: flink, Lines: 30, Source: StreamGraphGeneratorTest.java

Example 4: testTupleNestedArrayKeyRejection

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
public void testTupleNestedArrayKeyRejection() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Tuple2<Integer[], String>> input = env.fromElements(
			new Tuple2<>(new Integer[] {1, 2}, "test-test"));

	TypeInformation<?> expectedTypeInfo = new TupleTypeInfo<Tuple2<Integer[], String>>(
			BasicArrayTypeInfo.INT_ARRAY_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	// adjust the rule
	expectedException.expect(InvalidProgramException.class);
	expectedException.expectMessage(new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key."));

	input.keyBy(new KeySelector<Tuple2<Integer[], String>, Tuple2<Integer[], String>>() {
		@Override
		public Tuple2<Integer[], String> getKey(Tuple2<Integer[], String> value) throws Exception {
			return value;
		}
	});
}
 
Developer: axbaretto, Project: flink, Lines: 22, Source: DataStreamTest.java

Example 5: testReduceWithCustomTrigger

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testReduceWithCustomTrigger() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DummyReducer reducer = new DummyReducer();

	DataStream<Tuple2<String, Integer>> window1 = source
			.keyBy(0)
			.window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.trigger(CountTrigger.of(1))
			.reduce(reducer);

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof CountTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: WindowTranslationTest.java

Example 6: testReduceEventTime

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testReduceEventTime() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DataStream<Tuple2<String, Integer>> window1 = source
			.windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.reduce(new DummyReducer());

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 23, Source: AllWindowTranslationTest.java

Example 7: testFoldWithRichFolderFails

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
/**
 * .fold() does not support RichFoldFunction, since the fold function is used internally
 * in a {@code FoldingState}.
 */
@Test(expected = UnsupportedOperationException.class)
public void testFoldWithRichFolderFails() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
	env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	source
			.keyBy(0)
			.window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.fold(new Tuple2<>("", 0), new RichFoldFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

				@Override
				public Tuple2<String, Integer> fold(Tuple2<String, Integer> value1,
						Tuple2<String, Integer> value2) throws Exception {
					return null;
				}
			});

	fail("exception was not thrown");
}
 
Developer: axbaretto, Project: flink, Lines: 26, Source: WindowTranslationTest.java

Example 8: testSources

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
public void testSources() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	SourceFunction<Integer> srcFun = new SourceFunction<Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public void run(SourceContext<Integer> ctx) throws Exception {
		}

		@Override
		public void cancel() {
		}
	};
	DataStreamSource<Integer> src1 = env.addSource(srcFun);
	src1.addSink(new DiscardingSink<Integer>());
	assertEquals(srcFun, getFunctionFromDataSource(src1));

	List<Long> list = Arrays.asList(0L, 1L, 2L);

	DataStreamSource<Long> src2 = env.generateSequence(0, 2);
	assertTrue(getFunctionFromDataSource(src2) instanceof StatefulSequenceSource);

	DataStreamSource<Long> src3 = env.fromElements(0L, 1L, 2L);
	assertTrue(getFunctionFromDataSource(src3) instanceof FromElementsFunction);

	DataStreamSource<Long> src4 = env.fromCollection(list);
	assertTrue(getFunctionFromDataSource(src4) instanceof FromElementsFunction);
}
 
Developer: axbaretto, Project: flink, Lines: 31, Source: StreamExecutionEnvironmentTest.java

Example 9: shouldSelectFromStreamUsingAnonymousClassSelect

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStreamUsingAnonymousClassSelect() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    executionEnvironment.setParallelism(1);

    DataStream<TestEvent> dataStream = executionEnvironment.fromElements(new TestEvent("peter", 10), new TestEvent("alex", 25), new TestEvent("maria", 30));

    EsperStream<TestEvent> esperStream = Esper.query(dataStream, "select name, age from TestEvent");

    DataStream<TestEvent> resultStream = esperStream.select(new EsperSelectFunction<TestEvent>() {
        private static final long serialVersionUID = 8802852465465541287L;

        @Override
        public TestEvent select(EventBean eventBean) throws Exception {
            String name = (String) eventBean.get("name");
            int age = (int) eventBean.get("age");
            return new TestEvent(name, age);
        }
    });

    resultStream.addSink(new SinkFunction<TestEvent>() {

        private static final long serialVersionUID = -8260794084029816089L;

        @Override
        public void invoke(TestEvent testEvent) throws Exception {
            System.err.println(testEvent);
            result.add(testEvent);
        }
    });

    executionEnvironment.execute("test-2");

    assertThat(result, is(notNullValue()));
    assertThat(result.size(), is(3));
}
 
Developer: phil3k3, Project: flink-esper, Lines: 38, Source: EsperQueryTest.java

Example 10: shouldSelectFromStreamUsingLambdaSelect

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStreamUsingLambdaSelect() throws Exception {

    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    executionEnvironment.setParallelism(1);

    DataStream<TestEvent> dataStream = executionEnvironment.fromElements(new TestEvent("peter1", 10), new TestEvent("alex1", 25), new TestEvent("maria1", 30));

    EsperStream<TestEvent> esperStream = Esper.query(dataStream, "select name, age from TestEvent");

    DataStream<TestEvent> resultStream = esperStream.select((EsperSelectFunction<TestEvent>) collector -> {
        String name = (String) collector.get("name");
        int age = (int) collector.get("age");
        return new TestEvent(name, age);
    });

    resultStream.addSink(new SinkFunction<TestEvent>() {

        private static final long serialVersionUID = 5588530728493738002L;

        @Override
        public void invoke(TestEvent testEvent) throws Exception {
            result.add(testEvent);
        }
    });

    executionEnvironment.execute("test-1");

    assertThat(result, is(notNullValue()));
    assertThat(result.size(), is(3));
}
 
Developer: phil3k3, Project: flink-esper, Lines: 33, Source: EsperQueryTest.java

Example 11: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	DataStream<TemperatureEvent> inputEventStream = env.fromElements(new TemperatureEvent("xyz", 22.0),
			new TemperatureEvent("xyz", 20.1), new TemperatureEvent("xyz", 21.1), new TemperatureEvent("xyz", 22.2),
			new TemperatureEvent("xyz", 29.1), new TemperatureEvent("xyz", 22.3), new TemperatureEvent("xyz", 22.1),
			new TemperatureEvent("xyz", 22.4), new TemperatureEvent("xyz", 22.7),
			new TemperatureEvent("xyz", 27.0));

	Pattern<TemperatureEvent, ?> warningPattern = Pattern.<TemperatureEvent> begin("first")
			.subtype(TemperatureEvent.class).where(new FilterFunction<TemperatureEvent>() {
				private static final long serialVersionUID = 1L;

				public boolean filter(TemperatureEvent value) {
					if (value.getTemperature() >= 26.0) {
						return true;
					}
					return false;
				}
			}).within(Time.seconds(10));

	DataStream<Alert> patternStream = CEP.pattern(inputEventStream, warningPattern)
			.select(new PatternSelectFunction<TemperatureEvent, Alert>() {
				private static final long serialVersionUID = 1L;

				public Alert select(Map<String, TemperatureEvent> event) throws Exception {

					return new Alert("Temperature Rise Detected");
				}

			});

	patternStream.print();
	env.execute("CEP on Temperature Sensor");
}
 
Developer: PacktPublishing, Project: Mastering-Apache-Flink, Lines: 35, Source: App.java

Example 12: testAggregateWithEvictor

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
public void testAggregateWithEvictor() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DataStream<Tuple2<String, Integer>> window1 = source
			.windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.evictor(CountEvictor.of(100))
			.aggregate(new DummyAggregationFunction());

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform =
			(OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();

	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();

	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
			(WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;

	Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

	processElementAndEnsureOutput(
			winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 29, Source: AllWindowTranslationTest.java

Example 13: testAggregateWithRichFunctionFails

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
/**
 * .aggregate() does not support RichAggregateFunction, since the AggregateFunction is used internally
 * in an {@code AggregatingState}.
 */
@Test(expected = UnsupportedOperationException.class)
public void testAggregateWithRichFunctionFails() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
	env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	source
			.windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.aggregate(new DummyRichAggregationFunction<Tuple2<String, Integer>>());

	fail("exception was not thrown");
}
 
Developer: axbaretto, Project: flink, Lines: 18, Source: AllWindowTranslationTest.java

Example 14: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
public static void main(String[] args) throws Exception {

		// Checking input parameters
		final ParameterTool params = ParameterTool.fromArgs(args);

		// set up the execution environment
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// make parameters available in the web interface
		env.getConfig().setGlobalJobParameters(params);

		// get input data
		DataStream<String> text;
		if (params.has("input")) {
			// read the text file from the given input path
			text = env.readTextFile(params.get("input"));
		} else {
			System.out.println("Executing WordCountPojo example with default input data set.");
			System.out.println("Use --input to specify file input.");
			// get default test text data
			text = env.fromElements(WordCountData.WORDS);
		}

		DataStream<Word> counts =
		// split up the lines into Word objects
		text.flatMap(new Tokenizer())
		// group by the field word and sum up the frequency
				.keyBy("word").sum("frequency");

		if (params.has("output")) {
			counts.writeAsText(params.get("output"));
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");
			counts.print();
		}

		// execute program
		env.execute("WordCount Pojo Example");
	}
 
Developer: axbaretto, Project: flink, Lines: 40, Source: PojoExample.java

Example 15: testProgram

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Override
protected void testProgram() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<String> text = env.fromElements(WordCountData.TEXT);

	DataStream<Tuple2<String, Integer>> counts = text
			.flatMap(new Tokenizer())
			.keyBy(0).sum(1);

	counts.writeAsText(resultPath);

	env.execute("WriteAsTextTest");
}
 
Developer: axbaretto, Project: flink, Lines: 15, Source: TextOutputFormatITCase.java


Note: The org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.fromElements examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their authors, and the copyright remains with the original owners; consult each project's license before redistributing or reusing the code, and do not republish this article without permission.