This article collects typical usage examples of TimeWindow.Serializer from the Java class org.apache.flink.streaming.api.windowing.windows.TimeWindow. If you are wondering what TimeWindow.Serializer is for or how to use it (it is the nested TypeSerializer implementation used to serialize and deserialize TimeWindow instances), the curated examples below should help. You can also look at the enclosing class org.apache.flink.streaming.api.windowing.windows.TimeWindow for further usage examples.
The following 15 code examples of TimeWindow.Serializer are listed, sorted by popularity by default.
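Before the examples, here is a minimal sketch of what TimeWindow.Serializer does: it writes a TimeWindow (its start and end timestamps) to Flink's binary format and reads it back. This is not taken from the examples below; it assumes a Flink streaming dependency on the classpath and uses the flink-core stream wrappers purely for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

public class TimeWindowSerializerRoundTrip {
    public static void main(String[] args) throws Exception {
        TimeWindow.Serializer serializer = new TimeWindow.Serializer();
        TimeWindow window = new TimeWindow(0, 2);

        // write the window's start/end timestamps into a byte buffer
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        serializer.serialize(window, new DataOutputViewStreamWrapper(bytes));

        // read it back; the round trip preserves both bounds
        TimeWindow restored = serializer.deserialize(
                new DataInputViewStreamWrapper(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(restored.getStart() + " .. " + restored.getEnd()); // expect: 0 .. 2
    }
}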
Example 1: testSlidingEventTimeWindowsApply
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
@SuppressWarnings("unchecked")
public void testSlidingEventTimeWindowsApply() throws Exception {
closeCalled.set(0);
final int windowSize = 3;
final int windowSlide = 1;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
SlidingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS), Time.of(windowSlide, TimeUnit.SECONDS)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>()),
EventTimeTrigger.create(),
0,
null /* late data output tag */);
testSlidingEventTimeWindows(operator);
// we close once in the rest...
Assert.assertEquals("Close was not called.", 2, closeCalled.get());
}
Example 2: testMergingWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testMergingWindows() throws Exception {
TriggerTestHarness<Object, TimeWindow> testHarness =
new TriggerTestHarness<>(EventTimeTrigger.create(), new TimeWindow.Serializer());
assertTrue(EventTimeTrigger.create().canMerge());
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(2, testHarness.numEventTimeTimers());
assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(1, testHarness.numEventTimeTimers());
assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 4)));
assertEquals(TriggerResult.FIRE, testHarness.advanceWatermark(4, new TimeWindow(0, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(0, testHarness.numEventTimeTimers());
}
Example 3: testMergingWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testMergingWindows() throws Exception {
TriggerTestHarness<Object, TimeWindow> testHarness =
new TriggerTestHarness<>(ProcessingTimeTrigger.create(), new TimeWindow.Serializer());
assertTrue(ProcessingTimeTrigger.create().canMerge());
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numEventTimeTimers());
assertEquals(2, testHarness.numProcessingTimeTimers());
assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(2, 4)));
testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numEventTimeTimers());
assertEquals(1, testHarness.numProcessingTimeTimers());
assertEquals(0, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
assertEquals(0, testHarness.numProcessingTimeTimers(new TimeWindow(2, 4)));
assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(0, 4)));
assertEquals(TriggerResult.FIRE, testHarness.advanceProcessingTime(4, new TimeWindow(0, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(0, testHarness.numEventTimeTimers());
}
Example 4: testClear
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
/**
* Verify that clear() does not leak across windows.
*/
@Test
public void testClear() throws Exception {
TriggerTestHarness<Object, TimeWindow> testHarness =
new TriggerTestHarness<>(EventTimeTrigger.create(), new TimeWindow.Serializer());
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(2, testHarness.numEventTimeTimers());
assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
testHarness.clearTriggerState(new TimeWindow(2, 4));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(1, testHarness.numEventTimeTimers());
assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
testHarness.clearTriggerState(new TimeWindow(0, 2));
assertEquals(0, testHarness.numStateEntries());
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(0, testHarness.numEventTimeTimers());
}
Example 5: testMergingWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testMergingWindows() throws Exception {
TriggerTestHarness<Object, TimeWindow> testHarness =
new TriggerTestHarness<>(CountTrigger.<TimeWindow>of(3), new TimeWindow.Serializer());
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(4, 6)));
// shouldn't have any timers
assertEquals(0, testHarness.numProcessingTimeTimers());
assertEquals(0, testHarness.numEventTimeTimers());
assertEquals(3, testHarness.numStateEntries());
assertEquals(1, testHarness.numStateEntries(new TimeWindow(0, 2)));
assertEquals(1, testHarness.numStateEntries(new TimeWindow(2, 4)));
assertEquals(1, testHarness.numStateEntries(new TimeWindow(4, 6)));
testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));
assertEquals(2, testHarness.numStateEntries());
assertEquals(0, testHarness.numStateEntries(new TimeWindow(0, 2)));
assertEquals(0, testHarness.numStateEntries(new TimeWindow(2, 4)));
assertEquals(1, testHarness.numStateEntries(new TimeWindow(0, 4)));
assertEquals(1, testHarness.numStateEntries(new TimeWindow(4, 6)));
assertEquals(TriggerResult.FIRE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 4)));
assertEquals(1, testHarness.numStateEntries());
assertEquals(0, testHarness.numStateEntries(new TimeWindow(0, 4)));
assertEquals(1, testHarness.numStateEntries(new TimeWindow(4, 6)));
assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(4, 6)));
assertEquals(TriggerResult.FIRE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(4, 6)));
assertEquals(0, testHarness.numStateEntries());
}
Example 6: testCleanupTimeOverflow
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testCleanupTimeOverflow() throws Exception {
final int windowSize = 1000;
final long lateness = 2000;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
new SumReducer(),
inputType.createSerializer(new ExecutionConfig()));
TumblingEventTimeWindows windowAssigner = TumblingEventTimeWindows.of(Time.milliseconds(windowSize));
final WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
new WindowOperator<>(
windowAssigner,
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
EventTimeTrigger.create(),
lateness,
null /* late data output tag */);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
long timestamp = Long.MAX_VALUE - 1750;
Collection<TimeWindow> windows = windowAssigner.assignWindows(new Tuple2<>("key2", 1), timestamp, new WindowAssigner.WindowAssignerContext() {
@Override
public long getCurrentProcessingTime() {
return operator.windowAssignerContext.getCurrentProcessingTime();
}
});
TimeWindow window = Iterables.getOnlyElement(windows);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), timestamp));
// the garbage collection timer would wrap-around
Assert.assertTrue(window.maxTimestamp() + lateness < window.maxTimestamp());
// and it would prematurely fire with watermark (Long.MAX_VALUE - 1500)
Assert.assertTrue(window.maxTimestamp() + lateness < Long.MAX_VALUE - 1500);
// if we don't correctly prevent wrap-around in the garbage collection
// timers this watermark will clean our window state for the just-added
// element/window
testHarness.processWatermark(new Watermark(Long.MAX_VALUE - 1500));
// this watermark is before the end timestamp of our only window
Assert.assertTrue(Long.MAX_VALUE - 1500 < window.maxTimestamp());
Assert.assertTrue(window.maxTimestamp() < Long.MAX_VALUE);
// push in a watermark that will trigger computation of our window
testHarness.processWatermark(new Watermark(window.maxTimestamp()));
expected.add(new Watermark(Long.MAX_VALUE - 1500));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), window.maxTimestamp()));
expected.add(new Watermark(window.maxTimestamp()));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.close();
}
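The test above hinges on how the window operator computes its garbage-collection (cleanup) time: window.maxTimestamp() + allowedLateness can overflow past Long.MAX_VALUE, and the operator must clamp the result instead of registering a timer in the distant past. A minimal sketch of that overflow guard, mirroring the idea in WindowOperator (the method name and sample values here are illustrative):

public class CleanupTimeSketch {

    // overflow-safe cleanup time: if maxTimestamp + allowedLateness wraps around,
    // fall back to Long.MAX_VALUE so the cleanup timer never fires too early
    static long cleanupTime(long windowMaxTimestamp, long allowedLateness) {
        long cleanup = windowMaxTimestamp + allowedLateness;
        return cleanup >= windowMaxTimestamp ? cleanup : Long.MAX_VALUE;
    }

    public static void main(String[] args) {
        long maxTimestamp = Long.MAX_VALUE - 1000; // illustrative value near the overflow boundary
        long lateness = 2000;

        System.out.println(maxTimestamp + lateness < maxTimestamp); // true: plain addition overflows
        System.out.println(cleanupTime(maxTimestamp, lateness));    // clamped to Long.MAX_VALUE
    }
}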
Example 7: testLateness
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testLateness() throws Exception {
final int windowSize = 2;
final long lateness = 500;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
new SumReducer(),
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
new WindowOperator<>(
TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
PurgingTrigger.of(EventTimeTrigger.create()),
lateness,
lateOutputTag);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
ConcurrentLinkedQueue<Object> lateExpected = new ConcurrentLinkedQueue<>();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 500));
testHarness.processWatermark(new Watermark(1500));
expected.add(new Watermark(1500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1300));
testHarness.processWatermark(new Watermark(2300));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 2), 1999));
expected.add(new Watermark(2300));
// this will not be sideoutput because window.maxTimestamp() + allowedLateness > currentWatermark
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1997));
testHarness.processWatermark(new Watermark(6000));
// this is 1 and not 3 because the trigger fires and purges
expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
expected.add(new Watermark(6000));
// this will be side output because window.maxTimestamp() + allowedLateness < currentWatermark
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processWatermark(new Watermark(7000));
lateExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
expected.add(new Watermark(7000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
TestHarnessUtil.assertOutputEqualsSorted(
"SideOutput was not correct.",
lateExpected,
(Iterable) testHarness.getSideOutput(lateOutputTag),
new Tuple2ResultSortComparator());
testHarness.close();
}
Example 8: testCleanupTimerWithEmptyListStateForSessionWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testCleanupTimerWithEmptyListStateForSessionWindows() throws Exception {
final int gapSize = 3;
final long lateness = 10;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ListStateDescriptor<Tuple2<String, Integer>> windowStateDesc =
new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator =
new WindowOperator<>(
EventTimeSessionWindows.withGap(Time.seconds(gapSize)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
windowStateDesc,
new InternalIterableWindowFunction<>(new PassThroughFunction()),
EventTimeTrigger.create(),
lateness,
null /* late data output tag */);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(4998));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
expected.add(new Watermark(4998));
testHarness.processWatermark(new Watermark(14600));
expected.add(new Watermark(14600));
ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
testHarness.close();
}
Example 9: getWindowSerializer
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Override
public TypeSerializer<TimeWindow> getWindowSerializer(ExecutionConfig executionConfig) {
return new TimeWindow.Serializer();
}
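Example 9 shows the typical home of new TimeWindow.Serializer(): a WindowAssigner that produces TimeWindows reports this serializer so the window operator can persist window metadata in state. A hypothetical custom assigner might look like the sketch below; the class FixedMinuteWindowAssigner and its fixed one-minute bucketing are illustrative and not part of the examples on this page.

import java.util.Collection;
import java.util.Collections;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner;
import org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

// Hypothetical assigner that puts every element into a fixed one-minute event-time bucket.
public class FixedMinuteWindowAssigner extends WindowAssigner<Object, TimeWindow> {

    private static final long SIZE = 60_000L;

    @Override
    public Collection<TimeWindow> assignWindows(Object element, long timestamp, WindowAssignerContext context) {
        long start = timestamp - (timestamp % SIZE);
        return Collections.singletonList(new TimeWindow(start, start + SIZE));
    }

    @Override
    public Trigger<Object, TimeWindow> getDefaultTrigger(StreamExecutionEnvironment env) {
        return EventTimeTrigger.create();
    }

    @Override
    public TypeSerializer<TimeWindow> getWindowSerializer(ExecutionConfig executionConfig) {
        // same serializer the built-in time-based assigners return
        return new TimeWindow.Serializer();
    }

    @Override
    public boolean isEventTime() {
        return true;
    }
}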
Example 10: writeReducingEventTimeWindowsSnapshot
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeReducingEventTimeWindowsSnapshot() throws Exception {
final int windowSize = 3;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
new SumReducer(),
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
EventTimeTrigger.create(),
0,
null /* late data output tag */);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(999));
expectedOutput.add(new Watermark(999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(1999));
expectedOutput.add(new Watermark(1999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
// do snapshot and save to file
OperatorStateHandles snapshot = testHarness.snapshot(0, 0);
OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/win-op-migration-test-reduce-event-time-flink1.2-snapshot");
testHarness.close();
}
Example 11: testCleanupTimerWithEmptyReduceStateForSessionWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testCleanupTimerWithEmptyReduceStateForSessionWindows() throws Exception {
final int gapSize = 3;
final long lateness = 10;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
new SumReducer(),
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator =
new WindowOperator<>(
EventTimeSessionWindows.withGap(Time.seconds(gapSize)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
EventTimeTrigger.create(),
lateness,
null /* late data output tag */);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(4998));
expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 4000L), 3999));
expected.add(new Watermark(4998));
testHarness.processWatermark(new Watermark(14600));
expected.add(new Watermark(14600));
ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
testHarness.close();
}
Example 12: testRestoreReducingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
@Test
public void testRestoreReducingProcessingTimeWindows() throws Exception {
final int windowSize = 3;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
new SumReducer(),
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
ProcessingTimeTrigger.create(),
0,
null /* late data output tag */);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.readStateHandle(
OperatorSnapshotUtil.getResourceFilename("win-op-migration-test-reduce-processing-time-flink1.2-snapshot")));
testHarness.open();
testHarness.setProcessingTime(3020);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3)));
testHarness.setProcessingTime(6000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), 5999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key3", 1), 5999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.close();
}
Example 13: testTimeEvictorEvictBefore
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
/**
* Tests TimeEvictor evictBefore behavior.
* @throws Exception
*/
@Test
public void testTimeEvictorEvictBefore() throws Exception {
AtomicInteger closeCalled = new AtomicInteger(0);
final int triggerCount = 2;
final int windowSize = 4;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
@SuppressWarnings({"unchecked", "rawtypes"})
TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(inputType.createSerializer(new ExecutionConfig()));
ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
new ListStateDescriptor<>("window-contents", streamRecordSerializer);
EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new EvictingWindowOperator<>(
TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>(closeCalled)),
CountTrigger.of(triggerCount),
TimeEvictor.of(Time.seconds(2)),
0,
null /* late data output tag */);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
long initialTime = 0L;
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 5999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2001));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1001));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 3999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 6500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1002));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 7999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.close();
Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
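Example 13 configures TimeEvictor.of(Time.seconds(2)), which evicts elements older than two seconds (relative to the newest element in the window) before the window function runs, i.e. the evictBefore path the test exercises. For comparison, a small sketch of both factory variants; the two-argument overload with the doEvictAfter flag is assumed to be available in your Flink version.

import org.apache.flink.streaming.api.windowing.evictors.TimeEvictor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

public class TimeEvictorFlavors {
    public static void main(String[] args) {
        // default: evict too-old elements BEFORE the window function is applied
        TimeEvictor<TimeWindow> evictBefore = TimeEvictor.of(Time.seconds(2));

        // same age check, but applied AFTER the window function has run
        TimeEvictor<TimeWindow> evictAfter = TimeEvictor.of(Time.seconds(2), true);

        System.out.println(evictBefore + " / " + evictAfter);
    }
}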
Example 14: writeSessionWindowsWithCountTriggerSnapshot
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeSessionWindowsWithCountTriggerSnapshot() throws Exception {
final int sessionSize = 3;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalIterableWindowFunction<>(new SessionWindowFunction()),
PurgingTrigger.of(CountTrigger.of(4)),
0,
null /* late data output tag */);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
// do snapshot and save to file
OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(
snapshot,
"src/test/resources/win-op-migration-test-session-with-stateful-trigger-flink" + flinkGenerateSavepointVersion + "-snapshot");
testHarness.close();
}
Example 15: writeReducingProcessingTimeWindowsSnapshot
import org.apache.flink.streaming.api.windowing.windows.TimeWindow; // import the package/class this method depends on
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeReducingProcessingTimeWindowsSnapshot() throws Exception {
final int windowSize = 3;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
new SumReducer(),
inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
stateDesc,
new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
ProcessingTimeTrigger.create(),
0,
null /* late data output tag */);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.setProcessingTime(10);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
testHarness.setProcessingTime(3010);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key3", 1)));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 1), 2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 2999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
// do snapshot and save to file
OperatorStateHandles snapshot = testHarness.snapshot(0, 0);
OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/win-op-migration-test-reduce-processing-time-flink1.2-snapshot");
testHarness.close();
}