本文整理汇总了Java中org.apache.flink.streaming.api.datastream.DataStream.iterate方法的典型用法代码示例。如果您正苦于以下问题:Java DataStream.iterate方法的具体用法?Java DataStream.iterate怎么用?Java DataStream.iterate使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.flink.streaming.api.datastream.DataStream
的用法示例。
在下文中一共展示了DataStream.iterate方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testImmutabilityWithCoiteration
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test
public void testImmutabilityWithCoiteration() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // No-op map forces a rebalance in front of the iteration.
    DataStream<Integer> source = env.fromElements(1, 10).map(noOpIntMap);
    IterativeStream<Integer> intIteration = source.iterate();
    // withFeedbackType must leave the original iteration untouched and
    // hand back a brand-new co-iteration instead.
    ConnectedIterativeStreams<Integer, String> coIteration = intIteration.withFeedbackType(String.class);
    intIteration.closeWith(intIteration.map(noOpIntMap)).print();
    coIteration.closeWith(coIteration.map(noOpCoMap)).print();
    StreamGraph graph = env.getStreamGraph();
    // Two independent iterations -> two source/sink pairs in the graph.
    assertEquals(2, graph.getIterationSourceSinkPairs().size());
    for (Tuple2<StreamNode, StreamNode> pair : graph.getIterationSourceSinkPairs()) {
        // Each iteration source must feed directly into its matching sink.
        assertEquals(pair.f0.getOutEdges().get(0).getTargetVertex(), pair.f1.getInEdges().get(0).getSourceVertex());
    }
}
示例2: testIncorrectParallelism
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test(expected = UnsupportedOperationException.class)
public void testIncorrectParallelism() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // fromElements is a non-parallel source; no rebalancing map in between,
    // so closing the loop with a differently-parallel feedback must fail.
    IterativeStream<Integer> iteration = env.fromElements(1, 10).iterate();
    iteration.closeWith(iteration.map(noOpIntMap)).print();
}
示例3: testSimpleIteration
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@SuppressWarnings("rawtypes")
@Test
public void testSimpleIteration() throws Exception {
    // Retry with an exponentially growing iteration timeout to absorb
    // slow-CI flakiness; rethrow only once all attempts are exhausted.
    int numRetries = 5;
    int timeoutScale = 1;
    for (int attempt = 0; attempt < numRetries; attempt++) {
        try {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            iterated = new boolean[parallelism];
            DataStream<Boolean> source =
                    env.fromCollection(Collections.nCopies(parallelism * 2, false))
                            .map(noOpBoolMap).name("ParallelizeMap");
            IterativeStream<Boolean> iteration = source.iterate(3000 * timeoutScale);
            DataStream<Boolean> feedback = iteration.flatMap(new IterationHead()).map(noOpBoolMap);
            // One branch observes the head, the other closes the loop.
            iteration.map(noOpBoolMap).addSink(new ReceiveCheckNoOpSink());
            iteration.closeWith(feedback).addSink(new ReceiveCheckNoOpSink());
            env.execute();
            // Every parallel subtask must have seen at least one record.
            for (boolean sawRecord : iterated) {
                assertTrue(sawRecord);
            }
            return; // success — no further attempts needed
        } catch (Throwable t) {
            LOG.info("Run " + (attempt + 1) + "/" + numRetries + " failed", t);
            if (attempt >= numRetries - 1) {
                throw t;
            }
            timeoutScale *= 2;
        }
    }
}
示例4: testClosingFromOutOfLoop
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test(expected = UnsupportedOperationException.class)
public void testClosingFromOutOfLoop() throws Exception {
    // An iteration may only be closed by a stream that has that iteration
    // among its predecessors; anything else must be rejected.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Dummy mapper brings the source up to the expected parallelism.
    DataStream<Integer> rebalanced = env.fromElements(1, 10).map(noOpIntMap);
    IterativeStream<Integer> loopA = rebalanced.iterate();
    IterativeStream<Integer> loopB = rebalanced.iterate();
    // Feedback descends from loopA, not loopB -> must throw.
    loopB.closeWith(loopA.map(noOpIntMap));
}
示例5: testCoIterClosingFromOutOfLoop
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test(expected = UnsupportedOperationException.class)
public void testCoIterClosingFromOutOfLoop() throws Exception {
    // A co-iteration may only be closed by a stream that has the iteration
    // among its predecessors; anything else must be rejected.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Dummy mapper brings the source up to the expected parallelism.
    DataStream<Integer> rebalanced = env.fromElements(1, 10).map(noOpIntMap);
    IterativeStream<Integer> plainLoop = rebalanced.iterate();
    ConnectedIterativeStreams<Integer, Integer> coLoop = rebalanced.iterate().withFeedbackType(
            Integer.class);
    // Feedback descends from plainLoop, not coLoop -> must throw.
    coLoop.closeWith(plainLoop.map(noOpIntMap));
}
示例6: testDoubleClosing
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test
public void testDoubleClosing() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Dummy mapper brings the source up to the expected parallelism.
    DataStream<Integer> rebalanced = env.fromElements(1, 10).map(noOpIntMap);
    IterativeStream<Integer> iteration = rebalanced.iterate();
    // Closing the same iteration twice must be tolerated — the test
    // passes simply by not throwing.
    iteration.closeWith(iteration.map(noOpIntMap));
    iteration.closeWith(iteration.map(noOpIntMap));
}
示例7: testDifferingParallelism
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test(expected = UnsupportedOperationException.class)
public void testDifferingParallelism() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Dummy mapper brings the source up to the expected parallelism.
    DataStream<Integer> rebalanced = env.fromElements(1, 10).map(noOpIntMap);
    IterativeStream<Integer> iteration = rebalanced.iterate();
    // Feedback edge runs at half the head's parallelism -> must throw.
    iteration.closeWith(iteration.map(noOpIntMap).setParallelism(parallelism / 2));
}
示例8: testExecutionWithEmptyIteration
import org.apache.flink.streaming.api.datastream.DataStream; //导入方法依赖的package包/类
@Test(expected = IllegalStateException.class)
public void testExecutionWithEmptyIteration() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Build an iteration head but never call closeWith(...).
    IterativeStream<Integer> iteration = env.fromElements(1, 10).map(noOpIntMap).iterate();
    iteration.map(noOpIntMap).print();
    // Executing a job with an unclosed iteration must fail.
    env.execute();
}