本文整理汇总了Java中org.apache.flink.table.api.Table.select方法的典型用法代码示例。如果您正苦于以下问题:Java Table.select方法的具体用法?Java Table.select怎么用?Java Table.select使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.flink.table.api.Table
的用法示例。
在下文中一共展示了Table.select方法的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testAggregationWithTwoCount
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testAggregationWithTwoCount() throws Exception {
    // Two input rows, so counting either field must yield 2 for both columns.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSource<Tuple2<Float, String>> source =
        execEnv.fromElements(new Tuple2<>(1f, "Hello"), new Tuple2<>(2f, "Ciao"));

    // Apply a count aggregate to each field of the implicit f0/f1 schema.
    final Table counted = tEnv.fromDataSet(source).select("f0.count, f1.count");

    final List<Row> actual = tEnv.toDataSet(counted, Row.class).collect();
    compareResultAsText(actual, "2,2");
}
示例2: testSimpleRegister
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testSimpleRegister() throws Exception {
    // Registering a DataSet under a name must make it reachable via scan().
    final String tableName = "MyTable";
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    tEnv.registerDataSet(tableName, CollectionDataSets.get3TupleDataSet(execEnv));

    // Project the first two tuple fields of the registered table.
    final Table projected = tEnv.scan(tableName).select("f0, f1");

    final List<Row> actual = tEnv.toDataSet(projected, Row.class).collect();
    final String expected = "1,1\n" + "2,2\n" + "3,2\n" + "4,3\n" + "5,3\n" + "6,3\n" + "7,4\n" +
        "8,4\n" + "9,4\n" + "10,4\n" + "11,5\n" + "12,5\n" + "13,5\n" + "14,5\n" + "15,5\n" +
        "16,6\n" + "17,6\n" + "18,6\n" + "19,6\n" + "20,6\n" + "21,6\n";
    compareResultAsText(actual, expected);
}
示例3: testRegisterWithFields
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testRegisterWithFields() throws Exception {
    // Registering with an explicit field list renames f0/f1/f2 to a/b/c.
    final String tableName = "MyTable";
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    tEnv.registerDataSet(tableName, CollectionDataSets.get3TupleDataSet(execEnv), "a, b, c");

    // Selecting by the user-supplied names must return the full data set.
    final Table selected = tEnv.scan(tableName).select("a, b, c");

    final List<Row> actual = tEnv.toDataSet(selected, Row.class).collect();
    final String expected = "1,1,Hi\n" + "2,2,Hello\n" + "3,2,Hello world\n" +
        "4,3,Hello world, how are you?\n" + "5,3,I am fine.\n" + "6,3,Luke Skywalker\n" +
        "7,4,Comment#1\n" + "8,4,Comment#2\n" + "9,4,Comment#3\n" + "10,4,Comment#4\n" +
        "11,5,Comment#5\n" + "12,5,Comment#6\n" + "13,5,Comment#7\n" +
        "14,5,Comment#8\n" + "15,5,Comment#9\n" + "16,6,Comment#10\n" +
        "17,6,Comment#11\n" + "18,6,Comment#12\n" + "19,6,Comment#13\n" +
        "20,6,Comment#14\n" + "21,6,Comment#15\n";
    compareResultAsText(actual, expected);
}
示例4: testNumericAutocastInArithmetic
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testNumericAutocastInArithmetic() throws Exception {
    // Mixed numeric types in arithmetic must be auto-cast to the wider operand type.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSource<Tuple8<Byte, Short, Integer, Long, Float, Double, Long, Double>> source =
        execEnv.fromElements(new Tuple8<>((byte) 1, (short) 1, 1, 1L, 1.0f, 1.0d, 1L, 1001.1));

    final Table widened = tEnv.fromDataSet(source)
        .select("f0 + 1, f1 + 1, f2 + 1L, f3 + 1.0f, f4 + 1.0d, f5 + 1, f6 + 1.0d, f7 + f0");

    final List<Row> actual = tEnv.toDataSet(widened, Row.class).collect();
    compareResultAsText(actual, "2,2,2,2.0,2.0,2.0,2.0,1002.1");
}
示例5: testCastFromString
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testCastFromString() throws Exception {
    // String literals must be castable to every basic numeric type and to BOOL.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSource<Tuple3<String, String, String>> source =
        execEnv.fromElements(new Tuple3<>("1", "true", "2.0"));

    final Table casted = tEnv.fromDataSet(source).select(
        "f0.cast(BYTE), f0.cast(SHORT), f0.cast(INT), f0.cast(LONG), f2.cast(DOUBLE), f2.cast(FLOAT), f1.cast(BOOL)");

    final List<Row> actual = tEnv.toDataSet(casted, Row.class).collect();
    compareResultAsText(actual, "1,1,1,1,2.0,2.0,true\n");
}
示例6: testWorkingAggregationDataTypes
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testWorkingAggregationDataTypes() throws Exception {
    // avg must work for every numeric field type; count covers the String field.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSource<Tuple7<Byte, Short, Integer, Long, Float, Double, String>> source =
        execEnv.fromElements(
            new Tuple7<>((byte) 1, (short) 1, 1, 1L, 1.0f, 1.0d, "Hello"),
            new Tuple7<>((byte) 2, (short) 2, 2, 2L, 2.0f, 2.0d, "Ciao"));

    // Integer averages truncate to 1; floating-point averages keep 1.5.
    final Table aggregated = tEnv.fromDataSet(source)
        .select("f0.avg, f1.avg, f2.avg, f3.avg, f4.avg, f5.avg, f6.count");

    final List<Row> actual = tEnv.toDataSet(aggregated, Row.class).collect();
    compareResultAsText(actual, "1,1,1,1,1.5,1.5,2");
}
示例7: testAggregationWithArithmetic
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testAggregationWithArithmetic() throws Exception {
    // Arithmetic may wrap an aggregate both inside ((f0 + 2).avg) and outside (+ 2, + 5).
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSource<Tuple2<Float, String>> source =
        execEnv.fromElements(new Tuple2<>(1f, "Hello"), new Tuple2<>(2f, "Ciao"));

    // avg(1+2, 2+2) + 2 = 5.5 ; count(2 rows) + 5 = 7
    final Table computed = tEnv.fromDataSet(source)
        .select("(f0 + 2).avg + 2, f1.count + 5");

    final List<Row> actual = tEnv.toDataSet(computed, Row.class).collect();
    compareResultAsText(actual, "5.5,7");
}
示例8: testSimpleSelectAllWithAs
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testSimpleSelectAllWithAs() throws Exception {
    // Aliasing fields at conversion time ("a,b,c") must carry through a full projection.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSet<Tuple3<Integer, Long, String>> source = CollectionDataSets.get3TupleDataSet(execEnv);
    final Table selected = tEnv.fromDataSet(source, "a,b,c").select("a, b, c");

    final List<Row> actual = tEnv.toDataSet(selected, Row.class).collect();
    final String expected = "1,1,Hi\n" + "2,2,Hello\n" + "3,2,Hello world\n" +
        "4,3,Hello world, how are you?\n" + "5,3,I am fine.\n" + "6,3,Luke Skywalker\n" +
        "7,4,Comment#1\n" + "8,4,Comment#2\n" + "9,4,Comment#3\n" + "10,4,Comment#4\n" +
        "11,5,Comment#5\n" + "12,5,Comment#6\n" + "13,5,Comment#7\n" +
        "14,5,Comment#8\n" + "15,5,Comment#9\n" + "16,6,Comment#10\n" +
        "17,6,Comment#11\n" + "18,6,Comment#12\n" + "19,6,Comment#13\n" +
        "20,6,Comment#14\n" + "21,6,Comment#15\n";
    compareResultAsText(actual, expected);
}
示例9: testSelectStar
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testSelectStar() throws Exception {
    // select("*") must behave exactly like enumerating every field.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSet<Tuple3<Integer, Long, String>> source = CollectionDataSets.get3TupleDataSet(execEnv);
    final Table selected = tEnv.fromDataSet(source, "a,b,c").select("*");

    final List<Row> actual = tEnv.toDataSet(selected, Row.class).collect();
    final String expected = "1,1,Hi\n" + "2,2,Hello\n" + "3,2,Hello world\n" +
        "4,3,Hello world, how are you?\n" + "5,3,I am fine.\n" + "6,3,Luke Skywalker\n" +
        "7,4,Comment#1\n" + "8,4,Comment#2\n" + "9,4,Comment#3\n" + "10,4,Comment#4\n" +
        "11,5,Comment#5\n" + "12,5,Comment#6\n" + "13,5,Comment#7\n" +
        "14,5,Comment#8\n" + "15,5,Comment#9\n" + "16,6,Comment#10\n" +
        "17,6,Comment#11\n" + "18,6,Comment#12\n" + "19,6,Comment#13\n" +
        "20,6,Comment#14\n" + "21,6,Comment#15\n";
    compareResultAsText(actual, expected);
}
示例10: testUserDefinedScalarFunction
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testUserDefinedScalarFunction() throws Exception {
    // NOTE(review): "hashCode" is registered twice on purpose, presumably to verify
    // that a later registration replaces the earlier one — confirm against HashCode/OldHashCode.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    tEnv.registerFunction("hashCode", new OldHashCode());
    tEnv.registerFunction("hashCode", new HashCode());

    final DataSource<String> source = execEnv.fromElements("a", "b", "c");
    final Table hashed = tEnv.fromDataSet(source, "text").select("text.hashCode()");

    // 'a'..'c' hash to their ASCII codes 97..99.
    final List<Integer> actual = tEnv.toDataSet(hashed, Integer.class).collect();
    compareResultAsText(actual, "97\n98\n99");
}
示例11: main
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
/**
 * Reads a CSV file as a table source, projects the "name" column, and writes
 * the result to a pipe-delimited CSV sink.
 *
 * <p>Fix over the original: the empty {@code catch (Exception e) {}} silently
 * swallowed any job-execution failure; it now rethrows with the cause attached
 * so failures are visible. Dead commented-out alternatives were removed.
 */
public static void main(String[] args) {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final StreamTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env);

    // All four columns are read as strings; no parsing is attempted here.
    final CsvTableSource csvTableSource = new CsvTableSource(
        "/Users/will/Downloads/file.csv",
        new String[] { "name", "id", "score", "comments" },
        new TypeInformation<?>[] {
            Types.STRING(),
            Types.STRING(),
            Types.STRING(),
            Types.STRING()
        });
    tableEnv.registerTableSource("mycsv", csvTableSource);

    // Concrete sink type avoids the raw TableSink reference of the original.
    final CsvTableSink sink = new CsvTableSink("/Users/will/Downloads/out.csv", "|");

    final Table in = tableEnv.scan("mycsv");
    final Table result = in.select("name");
    result.writeToSink(sink);

    try {
        env.execute();
    } catch (Exception e) {
        // Do not swallow: surface the failure with its cause preserved.
        throw new RuntimeException("Flink job execution failed", e);
    }
    System.out.print("DONE");
}
示例12: testCasting
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testCasting() throws Exception {
    // Exercises the four cast families: to-String, numeric-to-boolean,
    // numeric-to-numeric, boolean-to-numeric, plus identity casts.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final DataSource<Tuple4<Integer, Double, Long, Boolean>> source =
        execEnv.fromElements(new Tuple4<>(1, 0.0, 1L, true));

    final String projection =
        // * -> String
        "f0.cast(STRING), f1.cast(STRING), f2.cast(STRING), f3.cast(STRING)," +
        // NUMERIC TYPE -> Boolean
        "f0.cast(BOOL), f1.cast(BOOL), f2.cast(BOOL)," +
        // NUMERIC TYPE -> NUMERIC TYPE
        "f0.cast(DOUBLE), f1.cast(INT), f2.cast(SHORT)," +
        // Boolean -> NUMERIC TYPE
        "f3.cast(DOUBLE)," +
        // identity casting
        "f0.cast(INT), f1.cast(DOUBLE), f2.cast(LONG), f3.cast(BOOL)";
    final Table casted = tEnv.fromDataSet(source).select(projection);

    final List<Row> actual = tEnv.toDataSet(casted, Row.class).collect();
    final String expected = "1,0.0,1,true," +
        "true,false,true," +
        "1.0,0,1," +
        "1.0," +
        "1,0.0,1,true\n";
    compareResultAsText(actual, expected);
}
示例13: testAggregationTypes
import org.apache.flink.table.api.Table; //导入方法依赖的package包/类
@Test
public void testAggregationTypes() throws Exception {
    // sum/min/max/count/avg over f0 of the standard 1..21 tuple data set.
    final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

    final Table aggregated = tEnv
        .fromDataSet(CollectionDataSets.get3TupleDataSet(execEnv))
        .select("f0.sum, f0.min, f0.max, f0.count, f0.avg");

    // sum(1..21)=231, min=1, max=21, count=21, integer avg truncates to 11.
    final List<Row> actual = tEnv.toDataSet(aggregated, Row.class).collect();
    compareResultAsText(actual, "231,1,21,21,11");
}