本文整理汇总了Java中org.apache.flink.api.java.ExecutionEnvironment.createCollectionsEnvironment方法的典型用法代码示例。如果您正苦于以下问题:Java ExecutionEnvironment.createCollectionsEnvironment方法的具体用法?Java ExecutionEnvironment.createCollectionsEnvironment怎么用?Java ExecutionEnvironment.createCollectionsEnvironment使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.flink.api.java.ExecutionEnvironment
的用法示例。
在下文中一共展示了ExecutionEnvironment.createCollectionsEnvironment方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testAccumulator
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testAccumulator() {
    try {
        final int elementCount = 100;

        // Run a tiny pipeline on the collection-based environment; the sink
        // discards records — only the accumulator side effect matters here.
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        DataSet<Long> sequence = env.generateSequence(1, elementCount);
        sequence
            .map(new CountingMapper())
            .output(new DiscardingOutputFormat<Long>());

        JobExecutionResult executionResult = env.execute();

        // A runtime must be reported, and the mapper must have counted every element.
        assertTrue(executionResult.getNetRuntime() >= 0);
        assertEquals(elementCount, (int) executionResult.getAccumulatorResult(ACCUMULATOR_NAME));
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
示例2: testBulkIteration
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testBulkIteration() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        // Ten supersteps over a single element; each superstep adds its number.
        IterativeDataSet<Integer> loop = env.fromElements(1).iterate(10);
        DataSet<Integer> stepped = loop.map(new AddSuperstepNumberMapper());
        DataSet<Integer> iterated = loop.closeWith(stepped);

        List<Integer> sink = new ArrayList<Integer>();
        iterated.output(new LocalCollectionOutputFormat<Integer>(sink));
        env.execute();

        // 1 + (1 + 2 + ... + 10) = 56
        assertEquals(1, sink.size());
        assertEquals(56, sink.get(0).intValue());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
示例3: testUnaryOp
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testUnaryOp() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        // Deliver the suffix to the mapper via a broadcast set.
        DataSet<String> broadcastSet = env.fromElements(SUFFIX);

        List<String> sink = new ArrayList<String>();
        env.fromElements(TEST_DATA)
            .map(new SuffixAppender())
            .withBroadcastSet(broadcastSet, BC_VAR_NAME)
            .output(new LocalCollectionOutputFormat<String>(sink));
        env.execute();

        // Every input element must come back, each with the suffix appended
        // somewhere after its first character.
        assertEquals(TEST_DATA.length, sink.size());
        for (String element : sink) {
            assertTrue(element.indexOf(SUFFIX) > 0);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
示例4: testBinaryOp
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testBinaryOp() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        DataSet<String> broadcastSet = env.fromElements(SUFFIX);
        DataSet<String> input = env.fromElements(TEST_DATA);

        // Cross the input with itself; the broadcast variable feeds the cross function.
        List<String> sink = new ArrayList<String>();
        input.cross(input)
            .with(new SuffixCross())
            .withBroadcastSet(broadcastSet, BC_VAR_NAME)
            .output(new LocalCollectionOutputFormat<String>(sink));
        env.execute();

        // A full cross product pairs every element with every element.
        assertEquals(TEST_DATA.length * TEST_DATA.length, sink.size());
        for (String element : sink) {
            assertTrue(element.indexOf(SUFFIX) == 2);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
示例5: main
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
public static void main(String[] args) throws Exception {
    // Collection-based environment: executes the program on local Java collections.
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env);

    DataSet<WC> wordCounts = env.fromElements(
        new WC("Hello", 1),
        new WC("Ciao", 1),
        new WC("Hello", 1));

    // Sum frequencies per word, then keep only words that occurred exactly twice.
    Table aggregated = tableEnv.fromDataSet(wordCounts)
        .groupBy("word")
        .select("word, frequency.sum as frequency");
    Table filtered = aggregated.filter("frequency = 2");

    DataSet<WC> result = tableEnv.toDataSet(filtered, WC.class);
    result.print();
}
示例6: createBipartiteGraph
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
private BipartiteGraph<Integer, Integer, String, String, String> createBipartiteGraph() {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

    // Top partition: vertices 4-6.
    DataSet<Vertex<Integer, String>> top = env.fromCollection(Arrays.asList(
        new Vertex<>(4, "top4"),
        new Vertex<>(5, "top5"),
        new Vertex<>(6, "top6")
    ));

    // Bottom partition: vertices 1-3.
    DataSet<Vertex<Integer, String>> bottom = env.fromCollection(Arrays.asList(
        new Vertex<>(1, "bottom1"),
        new Vertex<>(2, "bottom2"),
        new Vertex<>(3, "bottom3")
    ));

    // Edges connect a top-partition id to a bottom-partition id.
    DataSet<BipartiteEdge<Integer, Integer, String>> links = env.fromCollection(Arrays.asList(
        new BipartiteEdge<>(4, 1, "4-1"),
        new BipartiteEdge<>(5, 1, "5-1"),
        new BipartiteEdge<>(5, 2, "5-2"),
        new BipartiteEdge<>(6, 2, "6-2"),
        new BipartiteEdge<>(6, 3, "6-3")
    ));

    return BipartiteGraph.fromDataSet(top, bottom, links, env);
}
示例7: setup
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Before
public void setup() {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

    // Build a 10-edge chain: vertex l carries value l+1, edge (l -> l+1) carries l+2.
    final int vertexCount = 10;
    List<Vertex<LongValue, LongValue>> vertices = new LinkedList<>();
    List<Edge<LongValue, LongValue>> edges = new LinkedList<>();
    for (long id = 0; id < vertexCount; id++) {
        LongValue source = new LongValue(id);
        LongValue target = new LongValue(id + 1);
        LongValue weight = new LongValue(id + 2);
        vertices.add(new Vertex<>(source, target));
        edges.add(new Edge<>(source, target, weight));
    }
    graph = Graph.fromCollection(vertices, edges, env);
}
示例8: testProgram
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testProgram() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

    // Start from the shared test graph with every vertex value mapped to one.
    Graph<Long, Long, Long> graph = Graph.fromCollection(
        TestGraphUtils.getLongLongVertices(),
        TestGraphUtils.getLongLongEdges(),
        env).mapVertices(new AssignOneMapper());

    // Run 10 scatter-gather supersteps; the materialized vertices are discarded,
    // the test only checks that the program executes.
    Graph<Long, Long, Long> result = graph.runScatterGatherIteration(
        new MessageFunction(), new UpdateFunction(), 10);

    result.getVertices()
        .map(new VertexToTuple2Map<>())
        .output(new DiscardingOutputFormat<>());

    env.execute();
}
示例9: main
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
public static void main(String[] args) throws Exception {
    // Initialize a new collection-based execution environment.
    final ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

    // The users and emails to be joined.
    User[] userRecords = { new User(1, "Peter"), new User(2, "John"), new User(3, "Bill") };
    EMail[] emailRecords = {
        new EMail(1, "Re: Meeting", "How about 1pm?"),
        new EMail(1, "Re: Meeting", "Sorry, I'm not availble"),
        new EMail(3, "Re: Re: Project proposal", "Give me a few more days to think about it.")};

    // Turn the arrays into DataSets.
    DataSet<User> users = env.fromElements(userRecords);
    DataSet<EMail> emails = env.fromElements(emailRecords);

    // Join users with their emails on the user id fields.
    DataSet<Tuple2<User, EMail>> joined = users.join(emails)
        .where("userIdentifier").equalTo("userId");

    // Materialize the joined pairs into a local List and print each one.
    List<Tuple2<User, EMail>> result = joined.collect();
    for (Tuple2<User, EMail> pair : result) {
        System.err.println("Result = " + pair);
    }
}
示例10: testBulkIterationWithTerminationCriterion
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testBulkIterationWithTerminationCriterion() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        // Iterate for at most 100 supersteps; each superstep adds the superstep
        // number to the single running value.
        IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);
        DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

        // Termination criterion: the iteration stops once this data set becomes
        // empty, i.e. once the running value reaches 50.
        DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
            @Override // was missing; other anonymous functions in these tests declare it
            public boolean filter(Integer value) {
                return value < 50;
            }
        });

        List<Integer> collected = new ArrayList<Integer>();
        iteration.closeWith(iterationResult, terminationCriterion)
            .output(new LocalCollectionOutputFormat<Integer>(collected));
        env.execute();

        // After superstep 10 the value is 1 + (1 + 2 + ... + 10) = 56 >= 50,
        // so the criterion empties and the iteration terminates with 56.
        assertEquals(1, collected.size());
        assertEquals(56, collected.get(0).intValue());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
示例11: MockOperator
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
/**
 * Constructor: delegates to the superclass with a collection-based execution
 * environment and {@code ValueTypeInfo.NULL_VALUE_TYPE_INFO} as the result type,
 * i.e. the mock carries no meaningful result type.
 */
public MockOperator() {
super(ExecutionEnvironment.createCollectionsEnvironment(), ValueTypeInfo.NULL_VALUE_TYPE_INFO);
}
示例12: testDeltaIteration
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Test
public void testDeltaIteration() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        // Solution set: (id, counter) pairs with every counter starting at zero.
        @SuppressWarnings("unchecked")
        DataSet<Tuple2<Integer, Integer>> initialSolution = env.fromElements(
            new Tuple2<Integer, Integer>(1, 0),
            new Tuple2<Integer, Integer>(2, 0),
            new Tuple2<Integer, Integer>(3, 0),
            new Tuple2<Integer, Integer>(4, 0));

        // Initial workset: every id is still active.
        @SuppressWarnings("unchecked")
        DataSet<Tuple1<Integer>> initialWorkset = env.fromElements(
            new Tuple1<Integer>(1),
            new Tuple1<Integer>(2),
            new Tuple1<Integer>(3),
            new Tuple1<Integer>(4));

        // Delta iteration: an id stays in the workset while its counter (second
        // field) is smaller than its id (first field). At convergence both
        // fields must be equal.
        DeltaIteration<Tuple2<Integer, Integer>, Tuple1<Integer>> deltaIteration =
            initialSolution.iterateDelta(initialWorkset, 10, 0);

        // Per superstep: increment the counter of every id still in the workset.
        DataSet<Tuple2<Integer, Integer>> solutionUpdates = deltaIteration.getSolutionSet().join(
            deltaIteration.getWorkset()).where(0).equalTo(0).with(
                new JoinFunction<Tuple2<Integer, Integer>, Tuple1<Integer>, Tuple2<Integer, Integer>>() {
                    @Override
                    public Tuple2<Integer, Integer> join(Tuple2<Integer, Integer> solutionEntry,
                            Tuple1<Integer> worksetEntry) throws Exception {
                        return new Tuple2<Integer, Integer>(solutionEntry.f0, solutionEntry.f1 + 1);
                    }
                });

        // Next workset: only the ids whose counter has not yet caught up.
        DataSet<Tuple1<Integer>> remainingWork = solutionUpdates.flatMap(
            new FlatMapFunction<Tuple2<Integer, Integer>, Tuple1<Integer>>() {
                @Override
                public void flatMap(Tuple2<Integer, Integer> entry,
                        Collector<Tuple1<Integer>> collector) throws Exception {
                    if (entry.f1 < entry.f0) {
                        collector.collect(new Tuple1<Integer>(entry.f0));
                    }
                }
            });

        List<Tuple2<Integer, Integer>> resolved = new ArrayList<Tuple2<Integer, Integer>>();
        deltaIteration.closeWith(solutionUpdates, remainingWork)
            .output(new LocalCollectionOutputFormat<Tuple2<Integer, Integer>>(resolved));
        env.execute();

        // Verify that both tuple fields are now the same.
        for (Tuple2<Integer, Integer> entry : resolved) {
            assertEquals(entry.f0, entry.f1);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
示例13: setup
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Before
public void setup() {
// Fresh collection-based execution environment for every test case.
env = ExecutionEnvironment.createCollectionsEnvironment();
}
示例14: setup
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Before
public void setup() throws Exception {
// Fresh collection-based execution environment for every test case,
// with object reuse enabled on its configuration.
env = ExecutionEnvironment.createCollectionsEnvironment();
env.getConfig().enableObjectReuse();
}
示例15: setup
import org.apache.flink.api.java.ExecutionEnvironment; //导入方法依赖的package包/类
@Before
public void setup() throws Exception {
    env = ExecutionEnvironment.createCollectionsEnvironment();
    env.getConfig().enableObjectReuse();

    // a "fish" graph, given as (source, target) vertex-id pairs
    int[][] endpoints = {
        {0, 1},
        {0, 2},
        {2, 1},
        {2, 3},
        {3, 1},
        {3, 4},
        {5, 3},
    };

    List<Edge<IntValue, NullValue>> fishEdges = new LinkedList<>();
    for (int[] pair : endpoints) {
        fishEdges.add(new Edge<>(new IntValue(pair[0]), new IntValue(pair[1]), NullValue.getInstance()));
    }
    directedSimpleGraph = Graph.fromCollection(fishEdges, env);
    undirectedSimpleGraph = directedSimpleGraph.getUndirected();

    // complete graph
    completeGraph = new CompleteGraph(env, completeGraphVertexCount).generate();

    // empty graph with vertices but no edges
    emptyGraphWithVertices = new EmptyGraph(env, emptyGraphVertexCount).generate();

    // empty graph with no vertices or edges
    emptyGraphWithoutVertices = new EmptyGraph(env, 0).generate();

    // star graph
    starGraph = new StarGraph(env, starGraphVertexCount).generate();
}