

Java StreamExecutionEnvironment.fromCollection Method Code Examples

This article collects typical usage examples of the Java method org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.fromCollection. If you are unsure what StreamExecutionEnvironment.fromCollection does or how to use it, the curated examples below should help. You can also browse further usage examples of the enclosing class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.


Below are 15 code examples of the StreamExecutionEnvironment.fromCollection method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
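Before the project-specific examples, here is a minimal, self-contained sketch of the basic fromCollection pattern. It is not taken from any of the projects cited below; the class name and sample data are illustrative only.

import java.util.Arrays;
import java.util.List;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class FromCollectionExample {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// fromCollection turns an in-memory, non-empty Collection into a bounded DataStream;
		// the element type is inferred from the first element of the collection.
		List<String> words = Arrays.asList("hello", "flink", "from", "collection");
		DataStream<String> stream = env.fromCollection(words);

		stream.print();

		env.execute("fromCollection-example");
	}
}

The collection is shipped with the job and must fit in memory, so fromCollection is mainly useful for tests and small bounded inputs; for real sources, addSource with a SourceFunction (as in Example 1 below) is the usual choice.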

Example 1: getEvents

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
public static DataStream<EventCommentFriendshipLike> getEvents(StreamExecutionEnvironment env, AppConfiguration config) {
	String commentSource = config.getComments();
	String friendshipSource = config.getFriendships();
	String likeSource = config.getLikes();		
	
	DataStream<EventCommentFriendshipLike> events = null;
	
	if (commentSource == null || friendshipSource == null || likeSource == null) {
		List<EventCommentFriendshipLike> list = EventCommentFriendshipLikeStreamgen.getDefault();
		events = env.fromCollection(list); 
	} else {
		events = env.addSource(new EventCommentFriendshipLikeSource(commentSource, friendshipSource, likeSource), "events-cfl-source");
	}			
	
	// assignTimestampsAndWatermarks returns a new stream, so reassign it for the timestamps to take effect
	events = events.assignTimestampsAndWatermarks(new AscendingTimestamper<EventCommentFriendshipLike>());
	
	return events;		
}
 
Developer: 3Cores, Project: sostream, Lines of code: 19, Source: EventCommentFriendshipLikeStreamgen.java

Example 2: get5TupleDataStream

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
public static DataStream<Tuple5<Integer, Long, Integer, String, Long>> get5TupleDataStream(StreamExecutionEnvironment env) {

		List<Tuple5<Integer, Long, Integer, String, Long>> data = new ArrayList<>();
		data.add(new Tuple5<>(1, 1L, 0, "Hallo", 1L));
		data.add(new Tuple5<>(2, 2L, 1, "Hallo Welt", 2L));
		data.add(new Tuple5<>(2, 3L, 2, "Hallo Welt wie", 1L));
		data.add(new Tuple5<>(3, 4L, 3, "Hallo Welt wie gehts?", 2L));
		data.add(new Tuple5<>(3, 5L, 4, "ABC", 2L));
		data.add(new Tuple5<>(3, 6L, 5, "BCD", 3L));
		data.add(new Tuple5<>(4, 7L, 6, "CDE", 2L));
		data.add(new Tuple5<>(4, 8L, 7, "DEF", 1L));
		data.add(new Tuple5<>(4, 9L, 8, "EFG", 1L));
		data.add(new Tuple5<>(4, 10L, 9, "FGH", 2L));
		data.add(new Tuple5<>(5, 11L, 10, "GHI", 1L));
		data.add(new Tuple5<>(5, 12L, 11, "HIJ", 3L));
		data.add(new Tuple5<>(5, 13L, 12, "IJK", 3L));
		data.add(new Tuple5<>(5, 15L, 14, "KLM", 2L));
		data.add(new Tuple5<>(5, 14L, 13, "JKL", 2L));
		return env.fromCollection(data);
	}
 
Developer: axbaretto, Project: flink, Lines of code: 21, Source: StreamTestData.java

Example 3: main

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
public static void main(String[] args) throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStreamSource<Tuple2<String, Integer>> source = env.fromCollection(collection);

	CassandraSink.addSink(source)
		.setQuery(INSERT)
		.setClusterBuilder(new ClusterBuilder() {
			@Override
			protected Cluster buildCluster(Builder builder) {
				return builder.addContactPoint("127.0.0.1").build();
			}
		})
		.build();

	env.execute("WriteTupleIntoCassandra");
}
 
Developer: axbaretto, Project: flink, Lines of code: 18, Source: CassandraTupleSinkExample.java

Example 4: testAppendTableSink

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testAppendTableSink() throws IOException {
	JDBCAppendTableSink sink = JDBCAppendTableSink.builder()
		.setDrivername("foo")
		.setDBUrl("bar")
		.setQuery("insert into %s (id) values (?)")
		.setParameterTypes(FIELD_TYPES)
		.build();

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Row> ds = env.fromCollection(Collections.singleton(Row.of("foo")), ROW_TYPE);
	sink.emitDataStream(ds);

	Collection<Integer> sinkIds = env.getStreamGraph().getSinkIDs();
	assertEquals(1, sinkIds.size());
	int sinkId = sinkIds.iterator().next();

	StreamSink planSink = (StreamSink) env.getStreamGraph().getStreamNode(sinkId).getOperator();
	assertTrue(planSink.getUserFunction() instanceof JDBCSinkFunction);

	JDBCSinkFunction sinkFunction = (JDBCSinkFunction) planSink.getUserFunction();
	assertSame(sink.getOutputFormat(), sinkFunction.outputFormat);
}
 
Developer: axbaretto, Project: flink, Lines of code: 25, Source: JDBCAppendTableSinkTest.java
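Example 4 above passes a second argument (ROW_TYPE) to fromCollection. A hedged sketch of why that overload exists: Row does not carry its field types, so Flink cannot infer them from the elements and the TypeInformation must be supplied explicitly. The class name, sample rows, and field types below are illustrative, not those of the original test.

import java.util.Arrays;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.Row;

public class RowFromCollectionSketch {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// Row erases its field types, so the two-argument overload of fromCollection is used;
		// Types.ROW(...) builds a RowTypeInfo describing each field.
		DataStream<Row> rows = env.fromCollection(
				Arrays.asList(Row.of("alice", 1), Row.of("bob", 2)),
				Types.ROW(Types.STRING, Types.INT));

		rows.print();
		env.execute("row-fromCollection-sketch");
	}
}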

Example 5: testStreaming

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testStreaming() throws Exception {

	StreamExecutionEnvironment env = new DummyStreamExecutionEnvironment();
	env.setParallelism(1);

	DataStream<String> input = env.fromCollection(inputData);
	input
			.flatMap(new NotifyingMapper())
			.writeUsingOutputFormat(new NotifyingOutputFormat()).disableChaining();

	jobGraph = env.getStreamGraph().getJobGraph();
	jobID = jobGraph.getJobID();

	verifyResults();
}
 
Developer: axbaretto, Project: flink, Lines of code: 17, Source: AccumulatorLiveITCase.java

Example 6: shouldSelectFromStringDataStream

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStringDataStream() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    executionEnvironment.setParallelism(1);

    List<String> expectedValues = Arrays.asList("first", "second");
    DataStream<String> dataStream = executionEnvironment.fromCollection(expectedValues);

    EsperStream<String> esperStream = Esper.query(dataStream, "select bytes from String");

    DataStream<String> resultStream = esperStream.select((EsperSelectFunction<String>) collector -> {
        byte[] bytes = (byte[]) collector.get("bytes");
        return new String(bytes);
    });

    resultStream.addSink(new SinkFunction<String>() {

        private static final long serialVersionUID = 284955963055337762L;

        @Override
        public void invoke(String testEvent) throws Exception {
            System.err.println(testEvent);
            stringResult.add(testEvent);
        }
    });

    executionEnvironment.execute("test-2");

    assertThat(stringResult, is(notNullValue()));
    assertThat(stringResult.size(), is(2));
    assertThat(stringResult, is(expectedValues));
}
 
Developer: phil3k3, Project: flink-esper, Lines of code: 34, Source: EsperQueryTest.java

Example 7: testEsperPattern

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testEsperPattern() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    executionEnvironment.setParallelism(1);

    List<ComplexEvent> expectedValues = Lists.newArrayList();
    ComplexEvent complexEvent = new ComplexEvent(Event.start(), Event.end());
    expectedValues.add(complexEvent);

    List<Event> events = Arrays.asList(complexEvent.getStartEvent(), complexEvent.getEndEvent());
    DataStream<Event> dataStream = executionEnvironment.fromCollection(events);

    EsperStream<Event> esperStream = Esper.pattern(dataStream, "every (A=Event(type='start') -> B=Event(type='end'))");

    DataStream<ComplexEvent> complexEventDataStream = esperStream.select(new EsperSelectFunction<ComplexEvent>() {
        @Override
        public ComplexEvent select(EventBean eventBean) throws Exception {
            return new ComplexEvent((Event) eventBean.get("A"), (Event) eventBean.get("B"));
        }
    });

    complexEventDataStream.addSink(new SinkFunction<ComplexEvent>() {
        @Override
        public void invoke(ComplexEvent value) throws Exception {
            System.err.println(value);
            resultingEvents.add(value);
        }
    });

    executionEnvironment.execute("test-2");

    assertThat(resultingEvents, is(expectedValues));
}
 
Developer: phil3k3, Project: flink-esper, Lines of code: 34, Source: EsperPatternTest.java

Example 8: dummyTest

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
	public void dummyTest() throws Exception {
		DateTime now = new DateTime();
		Collection<TaxiRide> taxiRides = new ArrayList<>();
		TaxiRide taxiRideNYC_1 = new TaxiRide(1, true, now, now, (float)GeoUtils.LON_EAST,
				(float)GeoUtils.LAT_NORTH, (float)GeoUtils.LON_WEST, (float)GeoUtils.LAT_SOUTH, (short)3);
		taxiRides.add(taxiRideNYC_1);

		TaxiRide taxiRideNYC_2 = new TaxiRide(2, true, now, now, (float)GeoUtils.LON_EAST,
				(float)GeoUtils.LAT_NORTH, (float)GeoUtils.LON_WEST, (float)GeoUtils.LAT_SOUTH, (short)3);
		taxiRides.add(taxiRideNYC_2);

		TaxiRide taxiRideNotInNYC_1 = new TaxiRide(2, true, now, now, (float)GeoUtils.LON_EAST + 1,
				(float)GeoUtils.LAT_NORTH, (float)GeoUtils.LON_WEST, (float)GeoUtils.LAT_SOUTH, (short)3);
		taxiRides.add(taxiRideNotInNYC_1);

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);
		DataStream<TaxiRide> rides = env.fromCollection(taxiRides);

		TaxiRideCleansing taxiRideCleansing = new TaxiRideCleansing();

		DataStream<TaxiRide> filteredRides = taxiRideCleansing.execute(rides);

		Collection<TaxiRide> RESULTS = new ArrayList<>();
		// And perform an Identity map, because we want to write all values of this day to the Database:
		filteredRides.addSink(new ResultsSinkFunction(RESULTS));

		env.execute("Running Taxi Ride Cleansing");

//		Assert.assertEquals(2, RESULTS.size());
		Assert.assertTrue(true);
	}
 
Developer: dineshtrivedi, Project: flink-java-project, Lines of code: 34, Source: TaxiRideCleansingTest.java

Example 9: testCassandraTupleAtLeastOnceSink

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testCassandraTupleAtLeastOnceSink() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(1);

	DataStream<Tuple3<String, Integer, Integer>> source = env.fromCollection(collection);
	source.addSink(new CassandraTupleSink<Tuple3<String, Integer, Integer>>(INSERT_DATA_QUERY, builder));

	env.execute();

	ResultSet rs = session.execute(SELECT_DATA_QUERY);
	Assert.assertEquals(20, rs.all().size());
}
 
Developer: axbaretto, Project: flink, Lines of code: 14, Source: CassandraConnectorITCase.java

Example 10: getSmall3TupleDataSet

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
public static DataStream<Tuple3<Integer, Long, String>> getSmall3TupleDataSet(StreamExecutionEnvironment env) {

		List<Tuple3<Integer, Long, String>> data = new ArrayList<>();
		data.add(new Tuple3<>(1, 1L, "Hi"));
		data.add(new Tuple3<>(2, 2L, "Hello"));
		data.add(new Tuple3<>(3, 2L, "Hello world"));

		Collections.shuffle(data);

		return env.fromCollection(data);
	}
 
Developer: axbaretto, Project: flink, Lines of code: 12, Source: StreamTestData.java

Example 11: testCassandraTableSink

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testCassandraTableSink() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);
	StreamTableEnvironment tEnv = StreamTableEnvironment.getTableEnvironment(env);

	DataStreamSource<Row> source = env.fromCollection(rowCollection);

	tEnv.registerDataStreamInternal("testFlinkTable", source);

	tEnv.sql("select * from testFlinkTable").writeToSink(
		new CassandraAppendTableSink(builder, injectTableName(INSERT_DATA_QUERY)));

	env.execute();
	ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));

	// validate that all input was correctly written to Cassandra
	List<Row> input = new ArrayList<>(rowCollection);
	List<com.datastax.driver.core.Row> output = rs.all();
	for (com.datastax.driver.core.Row o : output) {
		Row cmp = new Row(3);
		cmp.setField(0, o.getString(0));
		cmp.setField(1, o.getInt(2));
		cmp.setField(2, o.getInt(1));
		Assert.assertTrue("Row " + cmp + " was written to Cassandra but not in input.", input.remove(cmp));
	}
	Assert.assertTrue("The input data was not completely written to Cassandra", input.isEmpty());
}
 
Developer: axbaretto, Project: flink, Lines of code: 29, Source: CassandraConnectorITCase.java

Example 12: testNestedPojoFieldAccessor

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testNestedPojoFieldAccessor() throws Exception {
	StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
	see.getConfig().disableObjectReuse();
	see.setParallelism(4);

	DataStream<Data> dataStream = see.fromCollection(elements);

	DataStream<Data> summedStream = dataStream
		.keyBy("aaa")
		.sum("stats.count")
		.keyBy("aaa")
		.flatMap(new FlatMapFunction<Data, Data>() {
			Data[] first = new Data[3];
			@Override
			public void flatMap(Data value, Collector<Data> out) throws Exception {
				if (first[value.aaa] == null) {
					first[value.aaa] = value;
					if (value.stats.count != 123) {
						throw new RuntimeException("Expected stats.count to be 123");
					}
				} else {
					if (value.stats.count != 2 * 123) {
						throw new RuntimeException("Expected stats.count to be 2 * 123");
					}
				}
			}
		});

	summedStream.print();

	see.execute();
}
 
Developer: axbaretto, Project: flink, Lines of code: 34, Source: DataStreamPojoITCase.java

Example 13: testSources

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testSources() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	SourceFunction<Integer> srcFun = new SourceFunction<Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public void run(SourceContext<Integer> ctx) throws Exception {
		}

		@Override
		public void cancel() {
		}
	};
	DataStreamSource<Integer> src1 = env.addSource(srcFun);
	src1.addSink(new DiscardingSink<Integer>());
	assertEquals(srcFun, getFunctionFromDataSource(src1));

	List<Long> list = Arrays.asList(0L, 1L, 2L);

	DataStreamSource<Long> src2 = env.generateSequence(0, 2);
	assertTrue(getFunctionFromDataSource(src2) instanceof StatefulSequenceSource);

	DataStreamSource<Long> src3 = env.fromElements(0L, 1L, 2L);
	assertTrue(getFunctionFromDataSource(src3) instanceof FromElementsFunction);

	DataStreamSource<Long> src4 = env.fromCollection(list);
	assertTrue(getFunctionFromDataSource(src4) instanceof FromElementsFunction);
}
 
Developer: axbaretto, Project: flink, Lines of code: 31, Source: StreamExecutionEnvironmentTest.java

Example 14: testSideOutputWithMultipleConsumersWithObjectReuse

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testSideOutputWithMultipleConsumersWithObjectReuse() throws Exception {
	final OutputTag<String> sideOutputTag = new OutputTag<String>("side"){};

	TestListResultSink<String> sideOutputResultSink1 = new TestListResultSink<>();
	TestListResultSink<String> sideOutputResultSink2 = new TestListResultSink<>();
	TestListResultSink<Integer> resultSink = new TestListResultSink<>();

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().enableObjectReuse();
	env.setParallelism(3);

	DataStream<Integer> dataStream = env.fromCollection(elements);

	SingleOutputStreamOperator<Integer> passThroughtStream = dataStream
			.process(new ProcessFunction<Integer, Integer>() {
				private static final long serialVersionUID = 1L;

				@Override
				public void processElement(
						Integer value, Context ctx, Collector<Integer> out) throws Exception {
					out.collect(value);
					ctx.output(sideOutputTag, "sideout-" + String.valueOf(value));
				}
			});

	passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink1);
	passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink2);
	passThroughtStream.addSink(resultSink);
	env.execute();

	assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink1.getSortedResult());
	assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink2.getSortedResult());
	assertEquals(Arrays.asList(1, 2, 3, 4, 5), resultSink.getSortedResult());
}
 
Developer: axbaretto, Project: flink, Lines of code: 36, Source: SideOutputITCase.java

Example 15: testDifferentSideOutputTypes

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class the method depends on
@Test
public void testDifferentSideOutputTypes() throws Exception {
	final OutputTag<String> sideOutputTag1 = new OutputTag<String>("string"){};
	final OutputTag<Integer> sideOutputTag2 = new OutputTag<Integer>("int"){};

	TestListResultSink<String> sideOutputResultSink1 = new TestListResultSink<>();
	TestListResultSink<Integer> sideOutputResultSink2 = new TestListResultSink<>();
	TestListResultSink<Integer> resultSink = new TestListResultSink<>();

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().enableObjectReuse();
	env.setParallelism(3);

	DataStream<Integer> dataStream = env.fromCollection(elements);

	SingleOutputStreamOperator<Integer> passThroughtStream = dataStream
			.process(new ProcessFunction<Integer, Integer>() {
				private static final long serialVersionUID = 1L;

				@Override
				public void processElement(
						Integer value, Context ctx, Collector<Integer> out) throws Exception {
					out.collect(value);
					ctx.output(sideOutputTag1, "sideout-" + String.valueOf(value));
					ctx.output(sideOutputTag2, 13);
				}
			});

	passThroughtStream.getSideOutput(sideOutputTag1).addSink(sideOutputResultSink1);
	passThroughtStream.getSideOutput(sideOutputTag2).addSink(sideOutputResultSink2);
	passThroughtStream.addSink(resultSink);
	env.execute();

	assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink1.getSortedResult());
	assertEquals(Arrays.asList(13, 13, 13, 13, 13), sideOutputResultSink2.getSortedResult());
	assertEquals(Arrays.asList(1, 2, 3, 4, 5), resultSink.getSortedResult());
}
 
Developer: axbaretto, Project: flink, Lines of code: 38, Source: SideOutputITCase.java


Note: The org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.fromCollection examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors, and the copyright of the source code remains with the original authors. Please consult the corresponding project's license before redistributing or using the code; do not republish without permission.