本文整理汇总了Java中org.apache.flink.streaming.api.functions.sink.SinkFunction类的典型用法代码示例。如果您正苦于以下问题:Java SinkFunction类的具体用法?Java SinkFunction怎么用?Java SinkFunction使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
SinkFunction类属于org.apache.flink.streaming.api.functions.sink包,在下文中一共展示了SinkFunction类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: createConsumerTopology
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Builds a consume-and-verify topology: reads strings from ActiveMQ and throws
// SuccessException (terminating the test job as "passed") once MESSAGES_NUM
// distinct numeric payloads have been observed.
private void createConsumerTopology(StreamExecutionEnvironment env, AMQSourceConfig<String> config) {
    AMQSource<String> source = new AMQSource<>(config);
    env.addSource(source)
        .addSink(new SinkFunction<String>() {
            // Distinct ids seen so far; assumes sink parallelism of 1 so a single
            // instance sees every message — TODO confirm in the enclosing test setup.
            final HashSet<Integer> set = new HashSet<>();

            @Override
            public void invoke(String value) throws Exception {
                // Messages are expected in the form "<prefix>-<number>"; parse the numeric part.
                int val = Integer.parseInt(value.split("-")[1]);
                set.add(val);
                if (set.size() == MESSAGES_NUM) {
                    throw new SuccessException();
                }
            }
        });
}
示例2: testKafkaTableSink
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Verifies that emitDataStream() attaches the expected Kafka producer sink and
// constructs it with the configured topic, properties, serialization schema and
// partitioner. Uses mocks throughout — no Flink/Kafka runtime is started.
@SuppressWarnings("unchecked")
@Test
public void testKafkaTableSink() throws Exception {
    // Mocked stream: addSink() must return a (mock) DataStreamSink for the chain to work.
    DataStream dataStream = mock(DataStream.class);
    when(dataStream.addSink(any(SinkFunction.class))).thenReturn(mock(DataStreamSink.class));

    // Spy so the createKafkaProducer(...) factory call can be verified below.
    KafkaTableSink kafkaTableSink = spy(createTableSink());
    kafkaTableSink.emitDataStream(dataStream);

    // verify correct producer class
    verify(dataStream).addSink(any(getProducerClass()));

    // verify correctly configured producer
    verify(kafkaTableSink).createKafkaProducer(
        eq(TOPIC),
        eq(PROPERTIES),
        any(getSerializationSchemaClass()),
        eq(PARTITIONER));
}
示例3: testProgram
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
/**
 * Runs the following program.
 * <pre>
 *     [ (source)->(filter) ]-s->[ (map) ] -> [ (map) ] -> [ (groupBy/count)->(sink) ]
 * </pre>
 */
@Override
public void testProgram(StreamExecutionEnvironment env) {
    DataStream<String> stream = env.addSource(new StringGeneratingSourceFunction(NUM_STRINGS));

    stream
        // -------------- first vertex, chained to the source ----------------
        .filter(new StringRichFilterFunction())
        .shuffle()

        // -------------- second vertex - the stateful one that also fails ----------------
        .map(new StringPrefixCountRichMapFunction())
        .startNewChain()
        .map(new StatefulCounterFunction())

        // -------------- third vertex - counter and the sink ----------------
        .keyBy("prefix")
        .map(new OnceFailingPrefixCounter(NUM_STRINGS))
        .addSink(new SinkFunction<PrefixCount>() {
            @Override
            public void invoke(PrefixCount value) throws Exception {
                // Do nothing here: the test presumably validates via the counting
                // functions upstream, not via sink output — TODO confirm.
            }
        });
}
示例4: addSink
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
/**
 * Attaches {@code sinkFunction} as a sink of this stream. A program is only
 * executed for streams that have at least one sink attached when
 * {@link StreamExecutionEnvironment#execute()} is invoked.
 *
 * @param sinkFunction the function invoked for each element of this stream
 * @return the resulting (terminal) {@link DataStreamSink}
 */
public DataStreamSink<T> addSink(SinkFunction<T> sinkFunction) {
    // Touch the output type first so a MissingTypeInfo error surfaces here
    // instead of at a less obvious point later on.
    transformation.getOutputType();

    // Functions that need to know their input type get it pushed in before wrapping.
    if (sinkFunction instanceof InputTypeConfigurable) {
        ((InputTypeConfigurable) sinkFunction).setInputType(getType(), getExecutionConfig());
    }

    StreamSink<T> operator = new StreamSink<>(clean(sinkFunction));
    DataStreamSink<T> streamSink = new DataStreamSink<>(this, operator);
    getExecutionEnvironment().addOperator(streamSink.getTransformation());
    return streamSink;
}
示例5: main
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
/**
 * Entry point: runs a keyed sliding time window (2500ms size, 500ms slide)
 * with an incremental reduce over generated (key, value) pairs, discarding
 * the results in a no-op sink.
 */
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
    environment.setParallelism(4);

    DataStream<Tuple2<Long, Long>> source = environment.addSource(new DataSource());

    source
        .keyBy(0)
        .timeWindow(Time.of(2500, MILLISECONDS), Time.of(500, MILLISECONDS))
        .reduce(new SummingReducer())

        // alternative: use an apply function which does not pre-aggregate
        // .keyBy(new FirstFieldKeyExtractor<Tuple2<Long, Long>, Long>())
        // .window(Time.of(2500, MILLISECONDS), Time.of(500, MILLISECONDS))
        // .apply(new SummingWindowFunction())

        .addSink(new SinkFunction<Tuple2<Long, Long>>() {
            @Override
            public void invoke(Tuple2<Long, Long> value) {
                // Results are intentionally dropped; this job only exercises the pipeline.
            }
        });

    environment.execute();
}
示例6: shouldSelectFromStreamUsingAnonymousClassSelect
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// End-to-end test: runs a pass-through Esper query over three events and maps
// each result row back into a TestEvent via an anonymous select function.
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStreamUsingAnonymousClassSelect() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    // Parallelism 1: a single task instance processes every element, so the shared
    // `result` collection (presumably a static field of the test class) sees all of them.
    executionEnvironment.setParallelism(1);

    DataStream<TestEvent> dataStream = executionEnvironment.fromElements(new TestEvent("peter", 10), new TestEvent("alex", 25), new TestEvent("maria", 30));
    EsperStream<TestEvent> esperStream = Esper.query(dataStream, "select name, age from TestEvent");

    // Rebuild a TestEvent from the Esper result row's "name"/"age" properties.
    DataStream<TestEvent> resultStream = esperStream.select(new EsperSelectFunction<TestEvent>() {
        private static final long serialVersionUID = 8802852465465541287L;

        @Override
        public TestEvent select(EventBean eventBean) throws Exception {
            String name = (String) eventBean.get("name");
            int age = (int) eventBean.get("age");
            return new TestEvent(name, age);
        }
    });

    // Collect results for the assertions below.
    resultStream.addSink(new SinkFunction<TestEvent>() {
        private static final long serialVersionUID = -8260794084029816089L;

        @Override
        public void invoke(TestEvent testEvent) throws Exception {
            System.err.println(testEvent);
            result.add(testEvent);
        }
    });

    executionEnvironment.execute("test-2");

    // All three input events should survive the pass-through query.
    assertThat(result, is(notNullValue()));
    assertThat(result.size(), is(3));
}
示例7: shouldSelectFromStreamUsingLambdaSelect
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Same scenario as the anonymous-class variant, but the select function is
// supplied as a lambda (cast to EsperSelectFunction for type inference).
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStreamUsingLambdaSelect() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    // Parallelism 1 so the shared `result` collection observes every element.
    executionEnvironment.setParallelism(1);

    DataStream<TestEvent> dataStream = executionEnvironment.fromElements(new TestEvent("peter1", 10), new TestEvent("alex1", 25), new TestEvent("maria1", 30));
    EsperStream<TestEvent> esperStream = Esper.query(dataStream, "select name, age from TestEvent");

    // Lambda select: rebuild a TestEvent from the row's "name"/"age" properties.
    DataStream<TestEvent> resultStream = esperStream.select((EsperSelectFunction<TestEvent>) collector -> {
        String name = (String) collector.get("name");
        int age = (int) collector.get("age");
        return new TestEvent(name, age);
    });

    // Collect results for the assertions below.
    resultStream.addSink(new SinkFunction<TestEvent>() {
        private static final long serialVersionUID = 5588530728493738002L;

        @Override
        public void invoke(TestEvent testEvent) throws Exception {
            result.add(testEvent);
        }
    });

    executionEnvironment.execute("test-1");

    // All three input events should survive the pass-through query.
    assertThat(result, is(notNullValue()));
    assertThat(result.size(), is(3));
}
示例8: shouldSelectFromStringDataStream
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Verifies that Esper can query a plain String stream: selects the "bytes"
// property of each String event and reconstructs the original values.
@Test
@SuppressWarnings("Convert2Lambda")
public void shouldSelectFromStringDataStream() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    // Parallelism 1 preserves element order and funnels all output to one sink instance.
    executionEnvironment.setParallelism(1);

    List<String> expectedValues = Arrays.asList("first", "second");
    DataStream<String> dataStream = executionEnvironment.fromCollection(expectedValues);
    EsperStream<String> esperStream = Esper.query(dataStream, "select bytes from String");

    // Rebuild each String from its raw bytes as exposed by the Esper event.
    // NOTE(review): new String(bytes) uses the platform default charset — acceptable
    // for ASCII test data, but worth confirming for anything non-ASCII.
    DataStream<String> resultStream = esperStream.select((EsperSelectFunction<String>) collector -> {
        byte[] bytes = (byte[]) collector.get("bytes");
        return new String(bytes);
    });

    // Collect results into the shared `stringResult` collection for the assertions below.
    resultStream.addSink(new SinkFunction<String>() {
        private static final long serialVersionUID = 284955963055337762L;

        @Override
        public void invoke(String testEvent) throws Exception {
            System.err.println(testEvent);
            stringResult.add(testEvent);
        }
    });

    executionEnvironment.execute("test-2");

    // Output must match the input values exactly (and in order).
    assertThat(stringResult, is(notNullValue()));
    assertThat(stringResult.size(), is(2));
    assertThat(stringResult, is(expectedValues));
}
示例9: testEsperPattern
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Verifies Esper pattern matching: a start event followed by an end event must
// produce exactly one ComplexEvent combining the two.
@Test
public void testEsperPattern() throws Exception {
    StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    // Parallelism 1 keeps the start/end events in order on a single task,
    // which the sequential pattern below depends on.
    executionEnvironment.setParallelism(1);

    List<ComplexEvent> expectedValues = Lists.newArrayList();
    ComplexEvent complexEvent = new ComplexEvent(Event.start(), Event.end());
    expectedValues.add(complexEvent);

    List<Event> events = Arrays.asList(complexEvent.getStartEvent(), complexEvent.getEndEvent());
    DataStream<Event> dataStream = executionEnvironment.fromCollection(events);

    // Pattern: every start event followed by an end event; Esper binds them to A and B.
    EsperStream<Event> esperStream = Esper.pattern(dataStream, "every (A=Event(type='start') -> B=Event(type='end'))");

    DataStream<ComplexEvent> complexEventDataStream = esperStream.select(new EsperSelectFunction<ComplexEvent>() {
        @Override
        public ComplexEvent select(EventBean eventBean) throws Exception {
            // "A"/"B" are the tag names bound in the pattern expression above.
            return new ComplexEvent((Event) eventBean.get("A"), (Event) eventBean.get("B"));
        }
    });

    // Collect matches into the shared `resultingEvents` collection for the assertion below.
    complexEventDataStream.addSink(new SinkFunction<ComplexEvent>() {
        @Override
        public void invoke(ComplexEvent value) throws Exception {
            System.err.println(value);
            resultingEvents.add(value);
        }
    });

    executionEnvironment.execute("test-2");

    assertThat(resultingEvents, is(expectedValues));
}
示例10: TimestampValidatingOperator
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Installs a sink that must never receive an element: any invocation fails the
// job with a RuntimeException. Presumably the surrounding test validates
// timestamps before elements could reach this sink — TODO confirm.
public TimestampValidatingOperator() {
    super(new SinkFunction<Long>() {
        private static final long serialVersionUID = -6676565693361786524L;

        @Override
        public void invoke(Long value) throws Exception {
            throw new RuntimeException("Unexpected");
        }
    });
}
示例11: addSink
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
/**
 * Adds the given sink to this keyed stream, additionally carrying the stream's
 * key selector and key type over to the sink's transformation (so the sink
 * operator can use keyed state).
 */
@Override
public DataStreamSink<T> addSink(SinkFunction<T> sinkFunction) {
    // Delegate to the plain DataStream implementation first ...
    DataStreamSink<T> sink = super.addSink(sinkFunction);
    // ... then propagate the key information onto the resulting transformation.
    sink.getTransformation().setStateKeySelector(keySelector);
    sink.getTransformation().setStateKeyType(keyType);
    return sink;
}
示例12: getSimpleJob
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
/**
 * Builds a trivial job topology: a 1..10,000,000 sequence source feeding a
 * no-op sink. Returns the environment without executing it.
 */
private static StreamExecutionEnvironment getSimpleJob() {
    StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

    environment
        .generateSequence(1, 10000000)
        .addSink(new SinkFunction<Long>() {
            @Override
            public void invoke(Long value) {
                // Discard every element — only the job graph itself matters here.
            }
        });

    return environment;
}
示例13: TimestampValidatingOperator
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Installs a sink that must never receive an element: any invocation fails the
// job with a RuntimeException. Presumably the surrounding test validates
// timestamps before elements could reach this sink — TODO confirm.
public TimestampValidatingOperator() {
    super(new SinkFunction<Long>() {
        // Pin serialVersionUID explicitly: SinkFunction is Serializable, and relying on
        // the compiler-generated id is fragile across recompiles. The analogous
        // TimestampValidatingOperator variant elsewhere in this code pins it as well.
        private static final long serialVersionUID = 1L;

        @Override
        public void invoke(Long value) throws Exception {
            throw new RuntimeException("Unexpected");
        }
    });
}
示例14: invoke
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
/**
 * Writes one data point to InfluxDB: measurement name, millisecond timestamp
 * and a single field; when the element is a {@link KeyedDataPoint}, its key is
 * attached as the "key" tag.
 */
@Override
public void invoke(T dataPoint, SinkFunction.Context context) throws Exception {
    Point.Builder builder = Point.measurement(measurement)
        .time(dataPoint.getTimeStampMs(), TimeUnit.MILLISECONDS)
        .addField(this.fieldName, dataPoint.getValue());

    boolean isKeyed = dataPoint instanceof KeyedDataPoint;
    if (isKeyed) {
        builder.tag("key", ((KeyedDataPoint) dataPoint).getKey());
    }

    // "autogen" is InfluxDB's default retention policy name.
    influxDB.write(DEFAULT_DATABASE_NAME, "autogen", builder.build());
}
示例15: sink
import org.apache.flink.streaming.api.functions.sink.SinkFunction; //导入依赖的package包/类
// Terminates the stream with a latency-measuring sink. The commented-out print
// sink is kept as a debugging alternative.
@Override
public void sink() {
    // this.dataStream.print();
    this.dataStream.addSink(new SinkFunction<Tuple2<K, V>>() {
        // Per-element latency recorder, labelled "sink".
        LatencyLog latency = new LatencyLog("sink");

        @Override
        public void invoke(Tuple2<K, V> value) throws Exception {
            // Assumes the tuple's second field (Scala-style _2()) carries a
            // WithTime-wrapped payload whose timestamp the latency logger reads
            // — TODO confirm against upstream producers of this stream.
            latency.execute((WithTime<? extends Object>) value._2());
        }
    });
}