

Java StreamExecutionEnvironment.fromElements Method Code Examples

This article collects typical usage examples of the Java method org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.fromElements. If you are wondering what StreamExecutionEnvironment.fromElements does, how to call it, or what its usage looks like in practice, the curated code examples below should help. You can also explore further usage examples of its containing class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.


The following presents 15 code examples of the StreamExecutionEnvironment.fromElements method, sorted by popularity by default.
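Before the collected examples, here is a minimal, self-contained sketch of the method (assuming a standard Flink streaming project with flink-streaming-java on the classpath; the class name FromElementsExample is chosen purely for illustration). fromElements turns a fixed set of values into a finite DataStream, which is convenient for tests and quick experiments.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class FromElementsExample {

	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// fromElements builds a finite DataStream directly from the given values;
		// the element type is inferred from the arguments.
		DataStream<String> words = env.fromElements("hello", "flink", "fromElements");

		words.print();

		env.execute("fromElements example");
	}
}

fromElements infers the element type from the first value; when that inference is not possible, the overload that takes an explicit Class, or env.fromCollection with an explicit TypeInformation, can be used instead.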

Example 1: testPOJOWithNestedArrayNoHashCodeKeyRejection

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testPOJOWithNestedArrayNoHashCodeKeyRejection() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<POJOWithHashCode> input = env.fromElements(
			new POJOWithHashCode(new int[] {1, 2}));

	TypeInformation<?> expectedTypeInfo = new TupleTypeInfo<Tuple1<int[]>>(
			PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO);

	// adjust the rule
	expectedException.expect(InvalidProgramException.class);
	expectedException.expectMessage(new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key."));

	input.keyBy("id");
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: DataStreamTest.java

Example 2: testFoldProcessingTime

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
@SuppressWarnings("rawtypes")
public void testFoldProcessingTime() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DataStream<Tuple3<String, String, Integer>> window = source
			.windowAll(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.fold(new Tuple3<>("", "", 0), new DummyFolder());

	OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
			(OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String,  Integer>>) window.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 24, Source: AllWindowTranslationTest.java

Example 3: testSetupOfKeyGroupPartitioner

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
/**
 * Tests that the KeyGroupStreamPartitioner is properly set up with the correct value of
 * maximum parallelism.
 */
@Test
public void testSetupOfKeyGroupPartitioner() {
	int maxParallelism = 42;
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().setMaxParallelism(maxParallelism);

	DataStream<Integer> source = env.fromElements(1, 2, 3);

	DataStream<Integer> keyedResult = source.keyBy(new KeySelector<Integer, Integer>() {
		private static final long serialVersionUID = 9205556348021992189L;

		@Override
		public Integer getKey(Integer value) throws Exception {
			return value;
		}
	}).map(new NoOpIntMap());

	keyedResult.addSink(new DiscardingSink<Integer>());

	StreamGraph graph = env.getStreamGraph();

	StreamNode keyedResultNode = graph.getStreamNode(keyedResult.getId());

	StreamPartitioner<?> streamPartitioner = keyedResultNode.getInEdges().get(0).getPartitioner();
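	// (the snippet shown here ends before the assertions; the original test presumably goes on
	// to verify that this partitioner is a KeyGroupStreamPartitioner configured with maxParallelism)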
}
 
Developer: axbaretto, Project: flink, Lines: 30, Source: StreamGraphGeneratorTest.java

Example 4: testTupleNestedArrayKeyRejection

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testTupleNestedArrayKeyRejection() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Tuple2<Integer[], String>> input = env.fromElements(
			new Tuple2<>(new Integer[] {1, 2}, "test-test"));

	TypeInformation<?> expectedTypeInfo = new TupleTypeInfo<Tuple2<Integer[], String>>(
			BasicArrayTypeInfo.INT_ARRAY_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	// adjust the rule
	expectedException.expect(InvalidProgramException.class);
	expectedException.expectMessage(new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key."));

	input.keyBy(new KeySelector<Tuple2<Integer[], String>, Tuple2<Integer[], String>>() {
		@Override
		public Tuple2<Integer[], String> getKey(Tuple2<Integer[], String> value) throws Exception {
			return value;
		}
	});
}
 
Developer: axbaretto, Project: flink, Lines: 22, Source: DataStreamTest.java

Example 5: testReduceWithCustomTrigger

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
@SuppressWarnings("rawtypes")
public void testReduceWithCustomTrigger() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DummyReducer reducer = new DummyReducer();

	DataStream<Tuple2<String, Integer>> window1 = source
			.keyBy(0)
			.window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.trigger(CountTrigger.of(1))
			.reduce(reducer);

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof CountTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: WindowTranslationTest.java

Example 6: testReduceEventTime

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
@SuppressWarnings("rawtypes")
public void testReduceEventTime() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DataStream<Tuple2<String, Integer>> window1 = source
			.windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.reduce(new DummyReducer());

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 23, Source: AllWindowTranslationTest.java

Example 7: testFoldWithRichFolderFails

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
/**
 * .fold() does not support RichFoldFunction, since the fold function is used internally
 * in a {@code FoldingState}.
 */
@Test(expected = UnsupportedOperationException.class)
public void testFoldWithRichFolderFails() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
	env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	source
			.keyBy(0)
			.window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.fold(new Tuple2<>("", 0), new RichFoldFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

				@Override
				public Tuple2<String, Integer> fold(Tuple2<String, Integer> value1,
						Tuple2<String, Integer> value2) throws Exception {
					return null;
				}
			});

	fail("exception was not thrown");
}
 
Developer: axbaretto, Project: flink, Lines: 26, Source: WindowTranslationTest.java

Example 8: testSources

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testSources() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	SourceFunction<Integer> srcFun = new SourceFunction<Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public void run(SourceContext<Integer> ctx) throws Exception {
		}

		@Override
		public void cancel() {
		}
	};
	DataStreamSource<Integer> src1 = env.addSource(srcFun);
	src1.addSink(new DiscardingSink<Integer>());
	assertEquals(srcFun, getFunctionFromDataSource(src1));

	List<Long> list = Arrays.asList(0L, 1L, 2L);

	DataStreamSource<Long> src2 = env.generateSequence(0, 2);
	assertTrue(getFunctionFromDataSource(src2) instanceof StatefulSequenceSource);

	DataStreamSource<Long> src3 = env.fromElements(0L, 1L, 2L);
	assertTrue(getFunctionFromDataSource(src3) instanceof FromElementsFunction);

	DataStreamSource<Long> src4 = env.fromCollection(list);
	assertTrue(getFunctionFromDataSource(src4) instanceof FromElementsFunction);
}
 
Developer: axbaretto, Project: flink, Lines: 31, Source: StreamExecutionEnvironmentTest.java

Example 9: shouldSelectFromStreamUsingAnonymousClassSelect

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStreamUsingAnonymousClassSelect() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    executionEnvironment.setParallelism(1);

    DataStream<TestEvent> dataStream = executionEnvironment.fromElements(new TestEvent("peter", 10), new TestEvent("alex", 25), new TestEvent("maria", 30));

    EsperStream<TestEvent> esperStream = Esper.query(dataStream, "select name, age from TestEvent");

    DataStream<TestEvent> resultStream = esperStream.select(new EsperSelectFunction<TestEvent>() {
        private static final long serialVersionUID = 8802852465465541287L;

        @Override
        public TestEvent select(EventBean eventBean) throws Exception {
            String name = (String) eventBean.get("name");
            int age = (int) eventBean.get("age");
            return new TestEvent(name, age);
        }
    });

    resultStream.addSink(new SinkFunction<TestEvent>() {

        private static final long serialVersionUID = -8260794084029816089L;

        @Override
        public void invoke(TestEvent testEvent) throws Exception {
            System.err.println(testEvent);
            result.add(testEvent);
        }
    });

    executionEnvironment.execute("test-2");

    assertThat(result, is(notNullValue()));
    assertThat(result.size(), is(3));
}
 
Developer: phil3k3, Project: flink-esper, Lines: 38, Source: EsperQueryTest.java

Example 10: shouldSelectFromStreamUsingLambdaSelect

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStreamUsingLambdaSelect() throws Exception {

    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    executionEnvironment.setParallelism(1);

    DataStream<TestEvent> dataStream = executionEnvironment.fromElements(new TestEvent("peter1", 10), new TestEvent("alex1", 25), new TestEvent("maria1", 30));

    EsperStream<TestEvent> esperStream = Esper.query(dataStream, "select name, age from TestEvent");

    DataStream<TestEvent> resultStream = esperStream.select((EsperSelectFunction<TestEvent>) collector -> {
        String name = (String) collector.get("name");
        int age = (int) collector.get("age");
        return new TestEvent(name, age);
    });

    resultStream.addSink(new SinkFunction<TestEvent>() {

        private static final long serialVersionUID = 5588530728493738002L;

        @Override
        public void invoke(TestEvent testEvent) throws Exception {
            result.add(testEvent);
        }
    });

    executionEnvironment.execute("test-1");

    assertThat(result, is(notNullValue()));
    assertThat(result.size(), is(3));
}
 
Developer: phil3k3, Project: flink-esper, Lines: 33, Source: EsperQueryTest.java

Example 11: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
public static void main(String[] args) throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	DataStream<TemperatureEvent> inputEventStream = env.fromElements(new TemperatureEvent("xyz", 22.0),
			new TemperatureEvent("xyz", 20.1), new TemperatureEvent("xyz", 21.1), new TemperatureEvent("xyz", 22.2),
			new TemperatureEvent("xyz", 29.1), new TemperatureEvent("xyz", 22.3), new TemperatureEvent("xyz", 22.1),
			new TemperatureEvent("xyz", 22.4), new TemperatureEvent("xyz", 22.7),
			new TemperatureEvent("xyz", 27.0));

	Pattern<TemperatureEvent, ?> warningPattern = Pattern.<TemperatureEvent> begin("first")
			.subtype(TemperatureEvent.class).where(new FilterFunction<TemperatureEvent>() {
				private static final long serialVersionUID = 1L;

				public boolean filter(TemperatureEvent value) {
					if (value.getTemperature() >= 26.0) {
						return true;
					}
					return false;
				}
			}).within(Time.seconds(10));

	DataStream<Alert> patternStream = CEP.pattern(inputEventStream, warningPattern)
			.select(new PatternSelectFunction<TemperatureEvent, Alert>() {
				private static final long serialVersionUID = 1L;

				public Alert select(Map<String, TemperatureEvent> event) throws Exception {

					return new Alert("Temperature Rise Detected");
				}

			});

	patternStream.print();
	env.execute("CEP on Temperature Sensor");
}
 
Developer: PacktPublishing, Project: Mastering-Apache-Flink, Lines: 35, Source: App.java

Example 12: testAggregateWithEvictor

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testAggregateWithEvictor() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DataStream<Tuple2<String, Integer>> window1 = source
			.windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.evictor(CountEvictor.of(100))
			.aggregate(new DummyAggregationFunction());

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform =
			(OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();

	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();

	Assert.assertTrue(operator instanceof WindowOperator);
	WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
			(WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;

	Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

	processElementAndEnsureOutput(
			winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Developer: axbaretto, Project: flink, Lines: 29, Source: AllWindowTranslationTest.java

Example 13: testAggregateWithRichFunctionFails

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
/**
 * .aggregate() does not support RichAggregateFunction, since the AggregateFunction is used internally
 * in an {@code AggregatingState}.
 */
@Test(expected = UnsupportedOperationException.class)
public void testAggregateWithRichFunctionFails() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
	env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	source
			.windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.aggregate(new DummyRichAggregationFunction<Tuple2<String, Integer>>());

	fail("exception was not thrown");
}
 
Developer: axbaretto, Project: flink, Lines: 18, Source: AllWindowTranslationTest.java

Example 14: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
public static void main(String[] args) throws Exception {

		// Checking input parameters
		final ParameterTool params = ParameterTool.fromArgs(args);

		// set up the execution environment
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// make parameters available in the web interface
		env.getConfig().setGlobalJobParameters(params);

		// get input data
		DataStream<String> text;
		if (params.has("input")) {
			// read the text file from given input path
			text = env.readTextFile(params.get("input"));
		} else {
			System.out.println("Executing WordCountPojo example with default input data set.");
			System.out.println("Use --input to specify file input.");
			// get default test text data
			text = env.fromElements(WordCountData.WORDS);
		}

		DataStream<Word> counts =
		// split up the lines into Word objects
		text.flatMap(new Tokenizer())
		// group by the field word and sum up the frequency
				.keyBy("word").sum("frequency");

		if (params.has("output")) {
			counts.writeAsText(params.get("output"));
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");
			counts.print();
		}

		// execute program
		env.execute("WordCount Pojo Example");
	}
 
Developer: axbaretto, Project: flink, Lines: 40, Source: PojoExample.java

Example 15: testProgram

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Override
protected void testProgram() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<String> text = env.fromElements(WordCountData.TEXT);

	DataStream<Tuple2<String, Integer>> counts = text
			.flatMap(new Tokenizer())
			.keyBy(0).sum(1);

	counts.writeAsText(resultPath);

	env.execute("WriteAsTextTest");
}
 
Developer: axbaretto, Project: flink, Lines: 15, Source: TextOutputFormatITCase.java


Note: The org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.fromElements method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please consult the corresponding project's License before distributing or using the code, and do not reproduce this article without permission.