This article collects typical usage examples of the Java class org.apache.flink.api.common.functions.MapFunction. If you have been wondering what MapFunction is for, how to use it, or what working code looks like, the curated samples below should help.
The MapFunction class belongs to the org.apache.flink.api.common.functions package. Fifteen code examples are shown below, ordered by popularity.
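Before the examples, a quick orientation: MapFunction<T, O> is Flink's simplest user function. It declares a single method, O map(T value), which is invoked once per element and must return exactly one output element. A minimal, self-contained sketch of that contract (the class name, sample data, and length mapping here are illustrative, not taken from the examples below):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class MapFunctionSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> words = env.fromElements("flink", "map", "function");

        // Map each word to its length: one input element in, one output element out.
        DataStream<Integer> lengths = words.map(new MapFunction<String, Integer>() {
            @Override
            public Integer map(String value) throws Exception {
                return value.length();
            }
        });

        lengths.print();
        env.execute("MapFunction sketch");
    }
}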
Example 1: testSimpleWriteAndRead
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testSimpleWriteAndRead() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Event> input = env.fromElements(
        Event.of(1, "start", 1.0),
        Event.of(2, "middle", 2.0),
        Event.of(3, "end", 3.0),
        Event.of(4, "start", 4.0),
        Event.of(5, "middle", 5.0),
        Event.of(6, "end", 6.0)
    );

    String path = tempFolder.newFile().toURI().toString();

    input.transform("transformer", TypeInformation.of(Event.class), new StreamMap<>(new MapFunction<Event, Event>() {
        @Override
        public Event map(Event event) throws Exception {
            return event;
        }
    })).writeAsText(path);

    env.execute();

    Assert.assertEquals(6, getLineCount(path));
}
Example 2: main
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();

    Properties properties = new Properties();
    properties.load(new FileInputStream("src/main/resources/application.properties"));

    Properties mqttProperties = new Properties();

    // client id = a:<Organization_ID>:<App_Id>
    mqttProperties.setProperty(MQTTSource.CLIENT_ID,
        String.format("a:%s:%s",
            properties.getProperty("Org_ID"),
            properties.getProperty("App_Id")));

    // mqtt server url = tcp://<Org_ID>.messaging.internetofthings.ibmcloud.com:1883
    mqttProperties.setProperty(MQTTSource.URL,
        String.format("tcp://%s.messaging.internetofthings.ibmcloud.com:1883",
            properties.getProperty("Org_ID")));

    // topic = iot-2/type/<Device_Type>/id/<Device_ID>/evt/<Event_Id>/fmt/json
    mqttProperties.setProperty(MQTTSource.TOPIC,
        String.format("iot-2/type/%s/id/%s/evt/%s/fmt/json",
            properties.getProperty("Device_Type"),
            properties.getProperty("Device_ID"),
            properties.getProperty("EVENT_ID")));

    mqttProperties.setProperty(MQTTSource.USERNAME, properties.getProperty("API_Key"));
    mqttProperties.setProperty(MQTTSource.PASSWORD, properties.getProperty("APP_Authentication_Token"));

    MQTTSource mqttSource = new MQTTSource(mqttProperties);
    DataStreamSource<String> temperatureDataSource = env.addSource(mqttSource);
    DataStream<String> stream = temperatureDataSource.map((MapFunction<String, String>) s -> s);
    stream.print();

    env.execute("Temperature Analysis");
}
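Example 2 loads its IBM Watson IoT connection settings from src/main/resources/application.properties. A hypothetical properties file matching the keys the code reads (all values are placeholders, not real credentials):

Org_ID=myorg
App_Id=temperature-analysis
Device_Type=raspberrypi
Device_ID=sensor-01
EVENT_ID=temperature
API_Key=a-myorg-0000000000
APP_Authentication_Token=0000000000000000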
Example 3: testSerializeWithAvro
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testSerializeWithAvro() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableForceAvro();

    Path in = new Path(inFile.getAbsoluteFile().toURI());

    AvroInputFormat<User> users = new AvroInputFormat<User>(in, User.class);
    DataSet<User> usersDS = env.createInput(users)
        .map(new MapFunction<User, User>() {
            @Override
            public User map(User value) throws Exception {
                Map<CharSequence, Long> ab = new HashMap<CharSequence, Long>(1);
                ab.put("hehe", 12L);
                value.setTypeMap(ab);
                return value;
            }
        });

    usersDS.writeAsText(resultPath);

    env.execute("Simple Avro read job");

    expected = "{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null, \"type_long_test\": null, \"type_double_test\": 123.45, \"type_null_test\": null, \"type_bool_test\": true, \"type_array_string\": [\"ELEMENT 1\", \"ELEMENT 2\"], \"type_array_boolean\": [true, false], \"type_nullable_array\": null, \"type_enum\": \"GREEN\", \"type_map\": {\"hehe\": 12}, \"type_fixed\": null, \"type_union\": null, \"type_nested\": {\"num\": 239, \"street\": \"Baker Street\", \"city\": \"London\", \"state\": \"London\", \"zip\": \"NW1 6XE\"}}\n" +
        "{\"name\": \"Charlie\", \"favorite_number\": null, \"favorite_color\": \"blue\", \"type_long_test\": 1337, \"type_double_test\": 1.337, \"type_null_test\": null, \"type_bool_test\": false, \"type_array_string\": [], \"type_array_boolean\": [], \"type_nullable_array\": null, \"type_enum\": \"RED\", \"type_map\": {\"hehe\": 12}, \"type_fixed\": null, \"type_union\": null, \"type_nested\": {\"num\": 239, \"street\": \"Baker Street\", \"city\": \"London\", \"state\": \"London\", \"zip\": \"NW1 6XE\"}}\n";
}
Example 4: testSimpleWriteAndRead
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testSimpleWriteAndRead() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Event> input = env.fromElements(
        Event.of(1, "start", 1.0),
        Event.of(2, "middle", 2.0),
        Event.of(3, "end", 3.0),
        Event.of(4, "start", 4.0),
        Event.of(5, "middle", 5.0),
        Event.of(6, "end", 6.0)
    );

    String path = tempFolder.newFile().toURI().toString();

    input.transform("transformer", TypeInformation.of(Event.class), new StreamMap<>((MapFunction<Event, Event>) event -> event)).writeAsText(path);

    env.execute();

    Assert.assertEquals(6, getLineCount(path));
}
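Examples 1 and 4 are the same test; example 4 only replaces the anonymous class with a lambda cast to MapFunction<Event, Event>. The cast supplies the lambda's target type, and the output type is still passed explicitly as TypeInformation.of(Event.class), since Java's type erasure prevents Flink from always extracting generic types from lambdas.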
Example 5: getEdgesDataSet
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@SuppressWarnings("serial")
private static DataSet<Edge<Long, NullValue>> getEdgesDataSet(ExecutionEnvironment env) {
    if (fileOutput) {
        return env.readCsvFile(edgeInputPath)
            .ignoreComments("#")
            .fieldDelimiter("\t")
            .lineDelimiter("\n")
            .types(Long.class, Long.class)
            .map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
                @Override
                public Edge<Long, NullValue> map(Tuple2<Long, Long> value) throws Exception {
                    return new Edge<>(value.f0, value.f1, NullValue.getInstance());
                }
            });
    } else {
        return ConnectedComponentsDefaultData.getDefaultEdgeDataSet(env);
    }
}
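Example 5 reads a tab-separated edge list and skips lines starting with #, per ignoreComments("#") and fieldDelimiter("\t"). A hypothetical input file in that format (columns separated by tab characters):

# source	target
1	2
2	3
3	1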
Example 6: testExecuteAfterGetExecutionPlan
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testExecuteAfterGetExecutionPlan() {
    ExecutionEnvironment env = new LocalEnvironment();
    env.getConfig().disableSysoutLogging();

    DataSet<Integer> baseSet = env.fromElements(1, 2);

    DataSet<Integer> result = baseSet.map(new MapFunction<Integer, Integer>() {
        @Override
        public Integer map(Integer value) throws Exception {
            return value * 2;
        }
    });
    result.output(new DiscardingOutputFormat<Integer>());

    try {
        env.getExecutionPlan();
        env.execute();
    }
    catch (Exception e) {
        e.printStackTrace();
        fail("Cannot run both #getExecutionPlan and #execute.");
    }
}
Example 7: testNestedSerializable
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testNestedSerializable() throws Exception {
    MapCreator creator = new NestedSerializableMapCreator(1);
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.clean(map, true);
    ClosureCleaner.ensureSerializable(map);

    int result = map.map(3);
    Assert.assertEquals(4, result);
}
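MapCreator and NestedSerializableMapCreator are test helpers defined outside this snippet. Below is a minimal sketch consistent with the assertion above (constructed with 1, so map(3) returns 4); the names come from the example, but the bodies are assumed:

// Hypothetical reconstruction of the test helpers.
interface MapCreator {
    MapFunction<Integer, Integer> getMap();
}

class NestedSerializableMapCreator implements MapCreator, java.io.Serializable {
    private final int add;

    NestedSerializableMapCreator(int add) {
        this.add = add;
    }

    @Override
    public MapFunction<Integer, Integer> getMap() {
        // The anonymous function closes over this serializable outer instance;
        // ClosureCleaner.clean(...) then verifies the closure can be serialized.
        return new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer value) throws Exception {
                return value + add;
            }
        };
    }
}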
Example 8: main
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
public static void main(String[] args) throws Exception {
    Configuration config = new Configuration();
    config.setString(AkkaOptions.ASK_TIMEOUT, "5 s");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStream<Integer> data = env.createInput(new CustomInputFormat());

    data.map(new MapFunction<Integer, Tuple2<Integer, Double>>() {
        @Override
        public Tuple2<Integer, Double> map(Integer value) throws Exception {
            return new Tuple2<Integer, Double>(value, value * 0.5);
        }
    }).addSink(new NoOpSink());

    env.execute();
}
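CustomInputFormat and NoOpSink are helpers defined elsewhere in the test suite. A minimal sketch of a discarding sink compatible with the map output; the name is taken from the example, but the body is assumed:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

// Hypothetical sink that drops every record, used only to terminate the pipeline.
class NoOpSink implements SinkFunction<Tuple2<Integer, Double>> {
    @Override
    public void invoke(Tuple2<Integer, Double> value) throws Exception {
        // intentionally empty
    }
}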
Example 9: testPartialReduceWithIdenticalInputOutputType
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testPartialReduceWithIdenticalInputOutputType() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // data
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);

    DataSet<Tuple2<Long, Tuple3<Integer, Long, String>>> dsWrapped = ds
        // wrap values as key/value pairs with the grouping key as key
        .map(new Tuple3KvWrapper());

    List<Tuple3<Integer, Long, String>> result = dsWrapped
        .groupBy(0)
        // reduce partially
        .combineGroup(new Tuple3toTuple3GroupReduce())
        .groupBy(0)
        // reduce fully to check result
        .reduceGroup(new Tuple3toTuple3GroupReduce())
        // unwrap
        .map(new MapFunction<Tuple2<Long, Tuple3<Integer, Long, String>>, Tuple3<Integer, Long, String>>() {
            @Override
            public Tuple3<Integer, Long, String> map(Tuple2<Long, Tuple3<Integer, Long, String>> value) throws Exception {
                return value.f1;
            }
        }).collect();

    String expected = "1,1,combined\n" +
        "5,4,combined\n" +
        "15,9,combined\n" +
        "34,16,combined\n" +
        "65,25,combined\n" +
        "111,36,combined\n";

    compareResultAsTuples(result, expected);
}
Example 10: testCreatePlanAfterGetExecutionPlan
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testCreatePlanAfterGetExecutionPlan() {
    ExecutionEnvironment env = new LocalEnvironment();

    DataSet<Integer> baseSet = env.fromElements(1, 2);

    DataSet<Integer> result = baseSet.map(new MapFunction<Integer, Integer>() {
        @Override
        public Integer map(Integer value) throws Exception {
            return value * 2;
        }
    });
    result.output(new DiscardingOutputFormat<Integer>());

    try {
        env.getExecutionPlan();
        env.createProgramPlan();
    } catch (Exception e) {
        e.printStackTrace();
        fail("Cannot run both #getExecutionPlan and #createProgramPlan. Message: " + e.getMessage());
    }
}
Example 11: testZipWithUniqueId
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testZipWithUniqueId() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    long expectedSize = 100L;
    DataSet<Long> numbers = env.generateSequence(1L, expectedSize);

    DataSet<Long> ids = DataSetUtils.zipWithUniqueId(numbers).map(new MapFunction<Tuple2<Long, Long>, Long>() {
        @Override
        public Long map(Tuple2<Long, Long> value) throws Exception {
            return value.f0;
        }
    });

    Set<Long> result = new HashSet<>(ids.collect());
    Assert.assertEquals(expectedSize, result.size());
}
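DataSetUtils.zipWithUniqueId pairs each element with a unique long identifier in field f0 of the resulting Tuple2, so the map above projects out the ids and the test confirms uniqueness by checking that all 100 ids survive deduplication into the HashSet.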
Example 12: mapVertices
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
/**
 * Apply a function to the attribute of each vertex in the graph.
 *
 * @param mapper the map function to apply.
 * @return a new graph
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <NV> Graph<K, NV, EV> mapVertices(final MapFunction<Vertex<K, VV>, NV> mapper) {
    TypeInformation<K> keyType = ((TupleTypeInfo<?>) vertices.getType()).getTypeAt(0);

    TypeInformation<NV> valueType;
    if (mapper instanceof ResultTypeQueryable) {
        valueType = ((ResultTypeQueryable) mapper).getProducedType();
    } else {
        valueType = TypeExtractor.createTypeInfo(MapFunction.class, mapper.getClass(), 1, vertices.getType(), null);
    }

    TypeInformation<Vertex<K, NV>> returnType = (TypeInformation<Vertex<K, NV>>) new TupleTypeInfo(
        Vertex.class, keyType, valueType);

    return mapVertices(mapper, returnType);
}
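As a usage sketch of this API (the graph's type parameters and the length mapping are illustrative assumptions, not from the example):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;

class MapVerticesSketch {
    // Replaces each String vertex value with its length, yielding a Graph<Long, Integer, Double>.
    static Graph<Long, Integer, Double> vertexValueLengths(Graph<Long, String, Double> graph) {
        return graph.mapVertices(new MapFunction<Vertex<Long, String>, Integer>() {
            @Override
            public Integer map(Vertex<Long, String> vertex) throws Exception {
                return vertex.getValue().length();
            }
        });
    }
}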
Example 13: testSimpleAvroRead
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@Test
public void testSimpleAvroRead() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Path in = new Path(inFile.getAbsoluteFile().toURI());

    AvroInputFormat<User> users = new AvroInputFormat<User>(in, User.class);
    DataSet<User> usersDS = env.createInput(users)
        // null map type because the order changes in different JVMs (hard to test)
        .map(new MapFunction<User, User>() {
            @Override
            public User map(User value) throws Exception {
                value.setTypeMap(null);
                return value;
            }
        });

    usersDS.writeAsText(resultPath);

    env.execute("Simple Avro read job");

    expected = "{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null, \"type_long_test\": null, \"type_double_test\": 123.45, \"type_null_test\": null, \"type_bool_test\": true, \"type_array_string\": [\"ELEMENT 1\", \"ELEMENT 2\"], \"type_array_boolean\": [true, false], \"type_nullable_array\": null, \"type_enum\": \"GREEN\", \"type_map\": null, \"type_fixed\": null, \"type_union\": null, \"type_nested\": {\"num\": 239, \"street\": \"Baker Street\", \"city\": \"London\", \"state\": \"London\", \"zip\": \"NW1 6XE\"}}\n" +
        "{\"name\": \"Charlie\", \"favorite_number\": null, \"favorite_color\": \"blue\", \"type_long_test\": 1337, \"type_double_test\": 1.337, \"type_null_test\": null, \"type_bool_test\": false, \"type_array_string\": [], \"type_array_boolean\": [], \"type_nullable_array\": null, \"type_enum\": \"RED\", \"type_map\": null, \"type_fixed\": null, \"type_union\": null, \"type_nested\": {\"num\": 239, \"street\": \"Baker Street\", \"city\": \"London\", \"state\": \"London\", \"zip\": \"NW1 6XE\"}}\n";
}
Example 14: testEnumType
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testEnumType() {
    MapFunction<?, ?> mf = new MapFunction<MyEnum, MyEnum>() {
        private static final long serialVersionUID = 1L;

        @Override
        public MyEnum map(MyEnum value) throws Exception {
            return null;
        }
    };

    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes((MapFunction) mf, new EnumTypeInfo(MyEnum.class));
    Assert.assertTrue(ti instanceof EnumTypeInfo);
    Assert.assertEquals(MyEnum.class, ti.getTypeClass());
}
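MyEnum is declared elsewhere in the test class; any ordinary enum satisfies the check, for instance (a hypothetical declaration):

// Hypothetical enum; the specific constants are irrelevant to the type-extraction assertion.
enum MyEnum { ONE, TWO, THREE }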
Example 15: main
import org.apache.flink.api.common.functions.MapFunction; // import the required package/class
public static void main(String[] args) throws Exception {
    Properties properties = new Properties();
    properties.setProperty("bootstrap.servers", "localhost:9092");
    properties.setProperty("zookeeper.connect", "localhost:2181");
    properties.setProperty("group.id", "test");
    properties.setProperty("auto.offset.reset", "latest");

    FlinkKafkaConsumer08<String> flinkKafkaConsumer08 = new FlinkKafkaConsumer08<>("flink-test",
        new SimpleStringSchema(), properties);

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<String> messageStream = env.addSource(flinkKafkaConsumer08);

    // print() writes the contents of the stream to the TaskManager's standard out.
    // The rebalance() call repartitions the data so that all machines see the
    // messages (for example when "num kafka partitions" < "num flink operators").
    messageStream.rebalance().map(new MapFunction<String, String>() {
        private static final long serialVersionUID = -6867736771747690202L;

        @Override
        public String map(String value) throws Exception {
            return "Kafka and Flink says: " + value;
        }
    }).print();

    env.execute();
}
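To feed this job during local testing, one would typically publish lines to the flink-test topic, for instance with Kafka's console producer against the local 0.8.x broker assumed above: bin/kafka-console-producer.sh --broker-list localhost:9092 --topic flink-test. Each line then appears on the TaskManager's standard out prefixed with "Kafka and Flink says: ".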
Author ID: PacktPublishing, Project: Practical-Real-time-Processing-and-Analytics, Lines of code: 30, Source: FlinkKafkaSourceExample.java