This article collects typical usage examples of the Java method org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.setStreamTimeCharacteristic. If you are wondering what StreamExecutionEnvironment.setStreamTimeCharacteristic does, how to call it, or want to see it used in context, the curated code samples below should help. You can also explore further usage of the enclosing class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.
The 15 code examples below show StreamExecutionEnvironment.setStreamTimeCharacteristic in use; by default they are ordered by popularity.
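Before the examples, here is a minimal, self-contained sketch of the typical call pattern. It is not taken from any of the examples below; the class name TimeCharacteristicSketch and the toy fromElements source are illustrative placeholders.
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class TimeCharacteristicSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Decide how the whole job interprets time: ProcessingTime (wall clock),
        // IngestionTime (timestamp assigned at the source), or EventTime
        // (timestamps carried by the records themselves).
        env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

        // Toy bounded source just so the job has something to run.
        DataStream<Integer> numbers = env.fromElements(1, 2, 3);
        numbers.print();

        env.execute("time-characteristic sketch");
    }
}
The characteristic should be set before the operators that depend on it are added; with EventTime, real sources additionally need timestamps and watermarks, as Example 6 shows with BoundedOutOfOrdernessTimestampExtractor.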
Example 1: main
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
public static void main(String[] args) throws IOException {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.createLocalEnvironment();
    StreamTableEnvironment env = StreamTableEnvironment.getTableEnvironment(execEnv);
    execEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    CompilationResult res = new CompilationResult();
    try {
        JobDescriptor job = getJobConf(System.in);
        res.jobGraph(new JobCompiler(env, job).getJobGraph());
    } catch (Throwable e) {
        res.remoteThrowable(e);
    }
    try (OutputStream out = chooseOutputStream(args)) {
        out.write(res.serialize());
    }
}
Example 2: testReduceWithRichReducerFails
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
/**
* .reduce() does not support RichReduceFunction, since the reduce function is used internally
* in a {@code ReducingState}.
*/
@Test(expected = UnsupportedOperationException.class)
public void testReduceWithRichReducerFails() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    source
        .keyBy(0)
        .window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
        .reduce(new RichReduceFunction<Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1,
                    Tuple2<String, Integer> value2) throws Exception {
                return null;
            }
        });

    fail("exception was not thrown");
}
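As the Javadoc above says, a RichReduceFunction is rejected because the reduce function runs incrementally inside a ReducingState, where there is no open()/close() lifecycle. A sketch of the accepted pattern with a plain ReduceFunction follows; it is illustrative and not part of the test class, and IngestionTime is used so the event-time windows can fire without a custom timestamp extractor.
import java.util.concurrent.TimeUnit;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class PlainReduceSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // IngestionTime assigns timestamps at the source, so the event-time
        // windows below can fire without assignTimestampsAndWatermarks().
        env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

        DataStream<Tuple2<String, Integer>> source =
                env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

        // A plain (non-rich) ReduceFunction is what the windowed reduce() accepts.
        source
                .keyBy(0)
                .window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> a, Tuple2<String, Integer> b) {
                        return Tuple2.of(a.f0, a.f1 + b.f1);
                    }
                })
                .print();

        env.execute("plain reduce sketch");
    }
}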
Example 3: testFoldProcessingTime
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testFoldProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window = source
        .keyBy(new TupleKeySelector())
        .window(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
        .fold(new Tuple3<>("", "", 0), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
        (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
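A side note on the assertion above: in later Flink releases, fold() and FoldingStateDescriptor are deprecated in favor of aggregate() with an AggregateFunction (Example 11 uses that path). A hypothetical AggregateFunction playing roughly the role of the DummyFolder could look like the sketch below; CountingAggregate is not part of the test code.
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;

// Accumulates Tuple2<String, Integer> elements into a Tuple3<String, String, Integer>,
// roughly what the DummyFolder in these tests produces.
public class CountingAggregate
        implements AggregateFunction<Tuple2<String, Integer>, Tuple3<String, String, Integer>, Tuple3<String, String, Integer>> {

    @Override
    public Tuple3<String, String, Integer> createAccumulator() {
        return new Tuple3<>("", "", 0);
    }

    @Override
    public Tuple3<String, String, Integer> add(Tuple2<String, Integer> value, Tuple3<String, String, Integer> acc) {
        return new Tuple3<>(value.f0, value.f0, acc.f2 + value.f1);
    }

    @Override
    public Tuple3<String, String, Integer> getResult(Tuple3<String, String, Integer> acc) {
        return acc;
    }

    @Override
    public Tuple3<String, String, Integer> merge(Tuple3<String, String, Integer> a, Tuple3<String, String, Integer> b) {
        return new Tuple3<>(a.f0, a.f1, a.f2 + b.f2);
    }
}
It would be plugged in with .window(...).aggregate(new CountingAggregate()) in place of the fold() call.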
Example 4: testFoldEventTime
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testFoldEventTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window1 = source
        .windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
        .fold(new Tuple3<>("", "", 1), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
        (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example 5: testFoldEventTimeWindows
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testFoldEventTimeWindows() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(
        Tuple2.of("hello", 1),
        Tuple2.of("hello", 2));

    DataStream<Tuple2<String, Integer>> window1 = source
        .keyBy(0)
        .timeWindow(Time.of(1000, TimeUnit.MILLISECONDS), Time.of(100, TimeUnit.MILLISECONDS))
        .fold(new Tuple2<>("", 1), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform1 = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator1 = transform1.getOperator();
    Assert.assertTrue(operator1 instanceof WindowOperator);
    WindowOperator winOperator1 = (WindowOperator) operator1;
    Assert.assertTrue(winOperator1.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator1.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator1.getStateDescriptor() instanceof FoldingStateDescriptor);
}
Example 6: main
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    FlinkPravegaParams helper = new FlinkPravegaParams(params);
    StreamId stream = helper.createStreamFromParam("input", "examples/turbineHeatTest");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    // 1. read and decode the sensor events from a Pravega stream
    long startTime = params.getLong("start", 0L);
    FlinkPravegaReader<String> reader = helper.newReader(stream, startTime, String.class);
    DataStream<SensorEvent> events = env.addSource(reader, "input").map(new SensorMapper()).name("events");

    // 2. extract timestamp information to support 'event-time' processing
    SingleOutputStreamOperator<SensorEvent> timestamped = events.assignTimestampsAndWatermarks(
        new BoundedOutOfOrdernessTimestampExtractor<SensorEvent>(Time.seconds(10)) {
            @Override
            public long extractTimestamp(SensorEvent element) {
                return element.getTimestamp();
            }
        });
    timestamped.print();

    // 3. summarize the temperature data for each sensor
    SingleOutputStreamOperator<SensorAggregate> summaries = timestamped
        .keyBy("sensorId")
        .window(TumblingEventTimeWindows.of(Time.days(1), Time.hours(8)))
        .fold(null, new SensorAggregator()).name("summaries");

    // 4. save to HDFS and print to stdout. Refer to the TaskManager's 'Stdout' view in the Flink UI.
    summaries.print().name("stdout");
    if (params.has("output")) {
        summaries.writeAsCsv(params.getRequired("output"), FileSystem.WriteMode.OVERWRITE);
    }

    env.execute("TurbineHeatProcessor_" + stream);
}
Example 7: main
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
    TaxiRideCleansingParameterParser params = new TaxiRideCleansingParameterParser();
    // TODO: refactor this method
    if (!params.parseParams(args)) {
        final String dataFilePath = params.getDataFilePath();

        // get an ExecutionEnvironment
        StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment();
        // configure event-time processing
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // get the taxi ride data stream
        DataStream<TaxiRide> rides = env.addSource(
            new TaxiRideSource(dataFilePath, MAX_EVENT_DELAY_DEFAULT, SERVING_SPEED_FACTOR_DEFAULT));

        TaxiRideCleansing taxiRideCleansing = new TaxiRideCleansing();
        DataStream<TaxiRide> filteredRides = taxiRideCleansing.execute(rides);

        filteredRides.addSink(new FlinkKafkaProducer010<>(
            "localhost:9092",      // Kafka broker host:port
            "cleansedRides",       // Topic to write to
            new TaxiRideSchema())  // Serializer (provided as util)
        );
        // filteredRides.print();

        env.execute("Running Taxi Ride Cleansing");
    }
}
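Note that the guard if (!params.parseParams(args)) builds and runs the pipeline only when the parser reports failure, which reads inverted; together with the TODO comment, this suggests the method was still awaiting refactoring in the original project.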
Example 8: main
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    final String nycTaxiRidesPath = params.getRequired("nycTaxiRidesPath");
    final int servingSpeedFactor = 600; // events of 10 minutes are served in 1 second

    // set up streaming execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // operate in Event-time
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    // ===============================================================================
    //   1. we want to persist our incrementally trained model, even on failure,
    //      so you must enable checkpointing! (set to a 5 second interval)
    // ===============================================================================

    // start the data generator
    DataStream<TaxiRide> rides = env.addSource(
        new CheckpointedTaxiRideSource(nycTaxiRidesPath, servingSpeedFactor));

    DataStream<Tuple2<Long, Integer>> predictions = rides
        // filter out rides that do not start or stop in NYC
        .filter(new NYCFilter())
        // map taxi ride events to the grid cell of the destination
        .map(new DestinationGridCellMatcher())
        // organize stream by destination
        .keyBy(0)
        // ===============================================================================
        //   2. the PredictionModel flatMap function is not implemented yet;
        //      finish its implementation down below!
        // ===============================================================================
        .flatMap(new PredictionModel());

    // print the predictions
    predictions.print();

    // run the prediction pipeline
    env.execute("Taxi Ride Prediction");
}
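The banner comment in this example asks for checkpointing at a 5 second interval but leaves the actual call as an exercise. The standard way to satisfy it would be a single line right after the time characteristic is set (a sketch of the intended solution, not code from the original):
// Checkpoint the job state (including PredictionModel's keyed state) every 5 seconds.
env.enableCheckpointing(5000);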
Example 9: testReduceWithProcessWindowFunctionProcessingTime
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testReduceWithProcessWindowFunctionProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window = source
        .windowAll(TumblingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
        .reduce(new DummyReducer(), new ProcessAllWindowFunction<Tuple2<String, Integer>, Tuple3<String, String, Integer>, TimeWindow>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void process(
                    Context ctx,
                    Iterable<Tuple2<String, Integer>> values,
                    Collector<Tuple3<String, String, Integer>> out) throws Exception {
                for (Tuple2<String, Integer> in : values) {
                    out.collect(new Tuple3<>(in.f0, in.f0, in.f1));
                }
            }
        });

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
        (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);

    processElementAndEnsureOutput(operator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example 10: testOperatorChainedToSource
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
/**
* Note: this test fails if we don't check for exceptions in the source contexts and do not
* synchronize in the source contexts.
*/
@Test
public void testOperatorChainedToSource() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());
    source.transform("Custom Operator", BasicTypeInfo.STRING_TYPE_INFO, new TimerOperator(ChainingStrategy.ALWAYS));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example 11: testAggregateWithWindowFunctionEventTime
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
public void testAggregateWithWindowFunctionEventTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple3<String, String, Integer>> source = env.fromElements(
        Tuple3.of("hello", "hallo", 1),
        Tuple3.of("hello", "hallo", 2));

    DummyReducer reducer = new DummyReducer();

    DataStream<String> window = source
        .keyBy(new Tuple3KeySelector())
        .window(TumblingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
        .aggregate(new DummyAggregationFunction(), new TestWindowFunction());

    final OneInputTransformation<Tuple3<String, String, Integer>, String> transform =
        (OneInputTransformation<Tuple3<String, String, Integer>, String>) window.getTransformation();
    final OneInputStreamOperator<Tuple3<String, String, Integer>, String> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple3<String, String, Integer>, ?, ?, ?> winOperator =
        (WindowOperator<String, Tuple3<String, String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof AggregatingStateDescriptor);

    processElementAndEnsureOutput(
        operator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple3<>("hello", "hallo", 1));
}
Example 12: testMergingWindowsWithEvictor
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testMergingWindowsWithEvictor() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Integer> source = env.fromElements(1, 2);

    DataStream<String> window1 = source
        .keyBy(new KeySelector<Integer, String>() {
            @Override
            public String getKey(Integer value) throws Exception {
                return value.toString();
            }
        })
        .window(EventTimeSessionWindows.withGap(Time.seconds(5)))
        .evictor(CountEvictor.of(5))
        .process(new TestProcessWindowFunction());

    final OneInputTransformation<Integer, String> transform = (OneInputTransformation<Integer, String>) window1.getTransformation();
    final OneInputStreamOperator<Integer, String> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Integer, ?, ?, ?> winOperator = (WindowOperator<String, Integer, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof EventTimeSessionWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, 1);
}
Example 13: testAlignedWindowDeprecation
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
/**
* Verifies that calls to timeWindow() instantiate a regular
* windowOperator instead of an aligned one.
*/
@Test
public void testAlignedWindowDeprecation() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DummyReducer reducer = new DummyReducer();

    DataStream<Tuple2<String, Integer>> window1 = source
        .keyBy(0)
        .timeWindow(Time.of(1000, TimeUnit.MILLISECONDS), Time.of(100, TimeUnit.MILLISECONDS))
        .reduce(reducer);

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform1 = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator1 = transform1.getOperator();
    Assert.assertTrue(operator1 instanceof WindowOperator);

    DataStream<Tuple2<String, Integer>> window2 = source
        .keyBy(0)
        .timeWindow(Time.of(1000, TimeUnit.MILLISECONDS))
        .apply(new WindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple, TimeWindow>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void apply(Tuple tuple,
                    TimeWindow window,
                    Iterable<Tuple2<String, Integer>> values,
                    Collector<Tuple2<String, Integer>> out) throws Exception {
            }
        });

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform2 = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window2.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator2 = transform2.getOperator();
    Assert.assertTrue(operator2 instanceof WindowOperator);
}
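The timeWindow() shortcut used in this test picks its window assigner from the time characteristic configured on the environment: with ProcessingTime it builds processing-time windows, otherwise event-time windows. For this test's configuration the explicit equivalents would be the following (an illustrative sketch reusing the source stream from the test, not part of the test itself):
// With TimeCharacteristic.ProcessingTime, each pair below builds the same windows.
source.keyBy(0).timeWindow(Time.of(1000, TimeUnit.MILLISECONDS), Time.of(100, TimeUnit.MILLISECONDS));
source.keyBy(0).window(SlidingProcessingTimeWindows.of(Time.of(1000, TimeUnit.MILLISECONDS), Time.of(100, TimeUnit.MILLISECONDS)));

source.keyBy(0).timeWindow(Time.of(1000, TimeUnit.MILLISECONDS));
source.keyBy(0).window(TumblingProcessingTimeWindows.of(Time.of(1000, TimeUnit.MILLISECONDS)));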
Example 14: testFoldWithEvictorAndProcessFunction
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testFoldWithEvictorAndProcessFunction() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window1 = source
        .windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
        .evictor(CountEvictor.of(100))
        .fold(
            new Tuple3<>("", "", 1),
            new DummyFolder(),
            new ProcessAllWindowFunction<Tuple3<String, String, Integer>, Tuple3<String, String, Integer>, TimeWindow>() {
                @Override
                public void process(
                        Context context,
                        Iterable<Tuple3<String, String, Integer>> elements,
                        Collector<Tuple3<String, String, Integer>> out) throws Exception {
                    for (Tuple3<String, String, Integer> in : elements) {
                        out.collect(in);
                    }
                }
            });

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
        (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof EvictingWindowOperator);
    EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?> winOperator = (EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getEvictor() instanceof CountEvictor);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

    winOperator.setOutputType((TypeInformation) window1.getType(), new ExecutionConfig());
    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example 15: testApplyWithEvictor
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; // import the package/class this method depends on
@Test
@SuppressWarnings("rawtypes")
public void testApplyWithEvictor() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple2<String, Integer>> window1 = source
        .windowAll(TumblingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
        .trigger(CountTrigger.of(1))
        .evictor(TimeEvictor.of(Time.of(100, TimeUnit.MILLISECONDS)))
        .apply(new AllWindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void apply(
                    TimeWindow window,
                    Iterable<Tuple2<String, Integer>> values,
                    Collector<Tuple2<String, Integer>> out) throws Exception {
                for (Tuple2<String, Integer> in : values) {
                    out.collect(in);
                }
            }
        });

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof EvictingWindowOperator);
    EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?> winOperator = (EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof CountTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingEventTimeWindows);
    Assert.assertTrue(winOperator.getEvictor() instanceof TimeEvictor);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}