This article collects typical usage examples of the Java class org.apache.flink.api.java.typeutils.TupleTypeInfo. If you are wondering what TupleTypeInfo is for, how to use it, or are simply looking for concrete examples, the curated code samples below may help.
The TupleTypeInfo class belongs to the org.apache.flink.api.java.typeutils package. The sections below show 15 code examples of the class, sorted by popularity by default.
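Before the examples, a quick orientation: TupleTypeInfo describes a Flink tuple type field by field. The following is a minimal sketch of my own (not one of the 15 examples below; the class and variable names are made up) showing the two most common moves, building a TupleTypeInfo from per-field TypeInformation and passing it to returns() as a type hint when generics are erased by a lambda.
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class TupleTypeInfoIntro {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Field-by-field description of Tuple2<String, Integer>.
        TupleTypeInfo<Tuple2<String, Integer>> info = new TupleTypeInfo<>(
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.INT_TYPE_INFO);

        DataSet<Tuple2<String, Integer>> counts = env
                .fromElements(Tuple2.of("a", 1), Tuple2.of("b", 2))
                // The lambda erases the tuple's generic field types at compile time,
                // so the TupleTypeInfo is supplied explicitly as a type hint.
                .map(t -> Tuple2.of(t.f0, t.f1 + 1))
                .returns(info);

        counts.print();
    }
}
The examples below show the same ideas in context: building a TupleTypeInfo by hand, letting TypeExtractor derive one from a class, or passing one as a type hint.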
Example 1: getAnomalySteam
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public DataStream<Tuple2<K, AnomalyResult>> getAnomalySteam(DataStream<V> ds, KeySelector<V, K> keySelector, Time window) {
    KeyedStream<V, K> keyedInput = ds
            .keyBy(keySelector);

    // Explicit TypeInformation for the windowed result: the key type plus a nested Tuple4<Double, Double, Long, Long>.
    TypeInformation<Tuple2<K, Tuple4<Double, Double, Long, Long>>> resultType = (TypeInformation) new TupleTypeInfo<>(Tuple2.class,
            new TypeInformation[] {keyedInput.getKeyType(), new TupleTypeInfo(Tuple4.class,
                    BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)});

    Tuple2<K, Tuple4<Double, Double, Long, Long>> init = new Tuple2<>(null, new Tuple4<>(0d, 0d, 0L, 0L));

    // Fold each time window starting from the empty accumulator, then re-key by the tuple's first field.
    KeyedStream<Tuple2<K, Tuple4<Double, Double, Long, Long>>, Tuple> kPreStream = keyedInput
            .timeWindow(window)
            .apply(init, new CountWindFold<>(keySelector, window, resultType), new WindowTimeExtractor(resultType))
            .keyBy(0);

    return kPreStream.flatMap(afm);
}
Example 2: getAnomalySteam
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public DataStream<Tuple2<K, AnomalyResult>> getAnomalySteam(DataStream<V> ds, KeySelector<V, K> keySelector, KeySelector<V, Double> valueSelector, Time window) {
    KeyedStream<V, K> keyedInput = ds
            .keyBy(keySelector);

    TypeInformation<Tuple2<K, Tuple4<Double, Double, Long, Long>>> resultType = (TypeInformation) new TupleTypeInfo<>(Tuple2.class,
            new TypeInformation[] {keyedInput.getKeyType(), new TupleTypeInfo(Tuple4.class,
                    BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)});

    Tuple2<K, Tuple4<Double, Double, Long, Long>> init = new Tuple2<>(null, new Tuple4<>(0d, 0d, 0L, 0L));

    KeyedStream<Tuple2<K, Tuple4<Double, Double, Long, Long>>, Tuple> kPreStream = keyedInput
            .timeWindow(window)
            .apply(init, new CountSumFold<>(keySelector, valueSelector, resultType), new WindowTimeExtractor(resultType))
            .keyBy(0);

    return kPreStream.flatMap(afm);
}
Example 3: getAnomalySteam
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public DataStream<Tuple3<K, AnomalyResult, RV>> getAnomalySteam(DataStream<V> ds, KeySelector<V, K> keySelector, KeySelector<V, Double> valueSelector, PayloadFold<V, RV> valueFold, Time window) {
    KeyedStream<V, K> keyedInput = ds
            .keyBy(keySelector);

    TypeInformation<Object> foldResultType = TypeExtractor.getUnaryOperatorReturnType(valueFold,
            PayloadFold.class,
            false,
            false,
            ds.getType(),
            "PayloadFold",
            false);

    TypeInformation<Tuple3<K, Tuple4<Double, Double, Long, Long>, RV>> resultType = (TypeInformation) new TupleTypeInfo<>(Tuple3.class,
            new TypeInformation[] {keyedInput.getKeyType(), new TupleTypeInfo(Tuple4.class,
                    BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO), foldResultType});

    Tuple3<K, Tuple4<Double, Double, Long, Long>, RV> init = new Tuple3<>(null, new Tuple4<>(0d, 0d, 0L, 0L), valueFold.getInit());

    KeyedStream<Tuple3<K, Tuple4<Double, Double, Long, Long>, RV>, Tuple> kPreStream = keyedInput
            .timeWindow(window)
            .apply(init, new ExtCountSumFold<>(keySelector, valueSelector, valueFold, resultType), new ExtWindowTimeExtractor(resultType))
            .keyBy(0);

    return kPreStream.flatMap(afm);
}
Example 4: testStandardTupleKeys
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
@Test
public void testStandardTupleKeys() {
    TupleTypeInfo<Tuple7<String, String, String, String, String, String, String>> typeInfo = new TupleTypeInfo<>(
            BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    ExpressionKeys<Tuple7<String, String, String, String, String, String, String>> ek;

    for (int i = 1; i < 8; i++) {
        int[] ints = new int[i];
        for (int j = 0; j < i; j++) {
            ints[j] = j;
        }
        int[] inInts = Arrays.copyOf(ints, ints.length); // copy, just to make sure that the code is not cheating by changing the ints.
        ek = new ExpressionKeys<>(inInts, typeInfo);
        Assert.assertArrayEquals(ints, ek.computeLogicalKeyPositions());
        Assert.assertEquals(ints.length, ek.computeLogicalKeyPositions().length);

        ArrayUtils.reverse(ints);
        inInts = Arrays.copyOf(ints, ints.length);
        ek = new ExpressionKeys<>(inInts, typeInfo);
        Assert.assertArrayEquals(ints, ek.computeLogicalKeyPositions());
        Assert.assertEquals(ints.length, ek.computeLogicalKeyPositions().length);
    }
}
Example 5: getAnomalySteam
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public DataStream<Tuple3<K, AnomalyResult, RV>> getAnomalySteam(DataStream<V> ds, KeySelector<V, K> keySelector, PayloadFold<V, RV> valueFold, Time window) {
    KeyedStream<V, K> keyedInput = ds
            .keyBy(keySelector);

    TypeInformation<Object> foldResultType = TypeExtractor.getUnaryOperatorReturnType(valueFold,
            PayloadFold.class,
            false,
            false,
            ds.getType(),
            "PayloadFold",
            false);

    TypeInformation<Tuple3<K, Tuple4<Double, Double, Long, Long>, RV>> resultType = (TypeInformation) new TupleTypeInfo<>(Tuple3.class,
            new TypeInformation[] {keyedInput.getKeyType(), new TupleTypeInfo(Tuple4.class,
                    BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO), foldResultType});

    Tuple3<K, Tuple4<Double, Double, Long, Long>, RV> init = new Tuple3<>(null, new Tuple4<>(0d, 0d, 0L, 0L), valueFold.getInit());

    KeyedStream<Tuple3<K, Tuple4<Double, Double, Long, Long>, RV>, Tuple> kPreStream = keyedInput
            .timeWindow(window)
            .apply(init, new ExtCountWindFold<>(keySelector, valueFold, window, resultType), new ExtWindowTimeExtractor(resultType))
            .keyBy(0);

    return kPreStream.flatMap(afm);
}
Example 6: getAnomalySteam
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public DataStream<Tuple2<K, AnomalyResult>> getAnomalySteam(DataStream<V> ds, KeySelector<V, K> keySelector, Time window) {
    KeyedStream<V, K> keyedInput = ds
            .keyBy(keySelector);

    TypeInformation<Tuple2<K, Tuple4<Double, Double, Long, Long>>> resultType = (TypeInformation) new TupleTypeInfo<>(Tuple2.class,
            new TypeInformation[] {keyedInput.getKeyType(), new TupleTypeInfo(Tuple4.class,
                    BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)});

    Tuple2<K, Tuple4<Double, Double, Long, Long>> init = new Tuple2<>(null, new Tuple4<>(0d, 0d, 0L, 0L));

    KeyedStream<Tuple2<K, Tuple4<Double, Double, Long, Long>>, Tuple> kPreStream = keyedInput
            .timeWindow(window)
            .apply(init, new CountWindFold<>(keySelector, window, resultType), new WindowTimeExtractor(resultType))
            .keyBy(0);

    return kPreStream.flatMap(afm);
}
Example 7: getAnomalySteam
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public DataStream<Tuple3<K, AnomalyResult, RV>> getAnomalySteam(DataStream<V> ds, KeySelector<V, K> keySelector, KeySelector<V, Double> valueSelector, PayloadFold<V, RV> valueFold, Time window) {
    KeyedStream<V, K> keyedInput = ds
            .keyBy(keySelector);

    TypeInformation<Object> foldResultType = TypeExtractor.getUnaryOperatorReturnType(valueFold,
            PayloadFold.class,
            false,
            false,
            ds.getType(),
            "PayloadFold",
            false);

    TypeInformation<Tuple3<K, Tuple4<Double, Double, Long, Long>, RV>> resultType = (TypeInformation) new TupleTypeInfo<>(Tuple3.class,
            new TypeInformation[] {keyedInput.getKeyType(), new TupleTypeInfo(Tuple4.class,
                    BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO), foldResultType});

    Tuple3<K, Tuple4<Double, Double, Long, Long>, RV> init = new Tuple3<>(null, new Tuple4<>(0d, 0d, 0L, 0L), valueFold.getInit());

    KeyedStream<Tuple3<K, Tuple4<Double, Double, Long, Long>, RV>, Tuple> kPreStream = keyedInput
            .timeWindow(window)
            .apply(init, new ExtCountSumFold<>(keySelector, valueSelector, valueFold, resultType), new ExtWindowTimeExtractor<>(resultType))
            .keyBy(0);

    return kPreStream.flatMap(afm);
}
Example 8: tupleType
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a subclass of
 * {@link Tuple}. The type information for the fields is obtained from the type class. The type
 * consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 */
public <T extends Tuple> DataSource<T> tupleType(Class<T> targetType) {
    Preconditions.checkNotNull(targetType, "The target type class must not be null.");
    if (!Tuple.class.isAssignableFrom(targetType)) {
        throw new IllegalArgumentException("The target type must be a subclass of " + Tuple.class.getName());
    }

    @SuppressWarnings("unchecked")
    TupleTypeInfo<T> typeInfo = (TupleTypeInfo<T>) TypeExtractor.createTypeInfo(targetType);
    CsvInputFormat<T> inputFormat = new TupleCsvInputFormat<T>(path, this.lineDelimiter, this.fieldDelimiter, typeInfo, this.includedMask);

    // Raw field classes of the tuple (one entry per tuple position).
    Class<?>[] classes = new Class<?>[typeInfo.getArity()];
    for (int i = 0; i < typeInfo.getArity(); i++) {
        classes[i] = typeInfo.getTypeAt(i).getTypeClass();
    }

    configureInputFormat(inputFormat);
    return new DataSource<T>(executionContext, inputFormat, typeInfo, Utils.getCallLocationName());
}
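As a side note on how this reader method is typically called, here is a minimal usage sketch of my own (not part of the example above; the Line class, its field layout, and the file path are assumptions). Because the field types are read from the target class, you pass a concrete Tuple subclass that fixes all generic parameters, exactly as the Javadoc demands.
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;

public class CsvTupleTypeExample {

    // A concrete Tuple subclass that pins all generic field types
    // (the id/count/text record layout is hypothetical).
    public static class Line extends Tuple3<Integer, Long, String> {
        public Line() {}
    }

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Placeholder path; tupleType() lets TypeExtractor derive a TupleTypeInfo
        // for Line and parses each CSV record into one Line instance.
        DataSet<Line> lines = env.readCsvFile("file:///tmp/lines.csv")
                .tupleType(Line.class);

        lines.print();
    }
}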
Example 9: testAsFromAndToTuple
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
@Test
public void testAsFromAndToTuple() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

    Table table = tableEnv
            .fromDataSet(CollectionDataSets.get3TupleDataSet(env), "a, b, c")
            .select("a, b, c");

    TypeInformation<?> ti = new TupleTypeInfo<Tuple3<Integer, Long, String>>(
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.LONG_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    DataSet<?> ds = tableEnv.toDataSet(table, ti);
    List<?> results = ds.collect();
    String expected = "(1,1,Hi)\n" + "(2,2,Hello)\n" + "(3,2,Hello world)\n" +
            "(4,3,Hello world, how are you?)\n" + "(5,3,I am fine.)\n" + "(6,3,Luke Skywalker)\n" +
            "(7,4,Comment#1)\n" + "(8,4,Comment#2)\n" + "(9,4,Comment#3)\n" + "(10,4,Comment#4)\n" +
            "(11,5,Comment#5)\n" + "(12,5,Comment#6)\n" + "(13,5,Comment#7)\n" +
            "(14,5,Comment#8)\n" + "(15,5,Comment#9)\n" + "(16,6,Comment#10)\n" +
            "(17,6,Comment#11)\n" + "(18,6,Comment#12)\n" + "(19,6,Comment#13)\n" +
            "(20,6,Comment#14)\n" + "(21,6,Comment#15)\n";
    compareResultAsText(results, expected);
}
Example 10: getSmall5TupleDataSet
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public static DataSet<Tuple5<IntValue, LongValue, IntValue, StringValue, LongValue>> getSmall5TupleDataSet(ExecutionEnvironment env) {
    List<Tuple5<IntValue, LongValue, IntValue, StringValue, LongValue>> data = new ArrayList<>();
    data.add(new Tuple5<>(new IntValue(1), new LongValue(1L), new IntValue(0), new StringValue("Hallo"), new LongValue(1L)));
    data.add(new Tuple5<>(new IntValue(2), new LongValue(2L), new IntValue(1), new StringValue("Hallo Welt"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(2), new LongValue(3L), new IntValue(2), new StringValue("Hallo Welt wie"), new LongValue(1L)));
    Collections.shuffle(data);

    TupleTypeInfo<Tuple5<IntValue, LongValue, IntValue, StringValue, LongValue>> type = new TupleTypeInfo<>(
            ValueTypeInfo.INT_VALUE_TYPE_INFO,
            ValueTypeInfo.LONG_VALUE_TYPE_INFO,
            ValueTypeInfo.INT_VALUE_TYPE_INFO,
            ValueTypeInfo.STRING_VALUE_TYPE_INFO,
            ValueTypeInfo.LONG_VALUE_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example 11: getGroupSortedNestedTupleDataSet2
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public static DataSet<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> getGroupSortedNestedTupleDataSet2(ExecutionEnvironment env) {
    List<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> data = new ArrayList<>();
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(1), new IntValue(3)), new StringValue("a"), new IntValue(2)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(1), new IntValue(2)), new StringValue("a"), new IntValue(1)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(2), new IntValue(1)), new StringValue("a"), new IntValue(3)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(2), new IntValue(2)), new StringValue("b"), new IntValue(4)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(3), new IntValue(3)), new StringValue("c"), new IntValue(5)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(3), new IntValue(6)), new StringValue("c"), new IntValue(6)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(4), new IntValue(9)), new StringValue("c"), new IntValue(7)));

    TupleTypeInfo<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> type = new TupleTypeInfo<>(
            new TupleTypeInfo<Tuple2<IntValue, IntValue>>(ValueTypeInfo.INT_VALUE_TYPE_INFO, ValueTypeInfo.INT_VALUE_TYPE_INFO),
            ValueTypeInfo.STRING_VALUE_TYPE_INFO,
            ValueTypeInfo.INT_VALUE_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example 12: getSmall5TupleDataSet
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public static DataSet<Tuple5<Integer, Long, Integer, String, Long>> getSmall5TupleDataSet(ExecutionEnvironment env) {
    List<Tuple5<Integer, Long, Integer, String, Long>> data = new ArrayList<>();
    data.add(new Tuple5<>(1, 1L, 0, "Hallo", 1L));
    data.add(new Tuple5<>(2, 2L, 1, "Hallo Welt", 2L));
    data.add(new Tuple5<>(2, 3L, 2, "Hallo Welt wie", 1L));
    Collections.shuffle(data);

    TupleTypeInfo<Tuple5<Integer, Long, Integer, String, Long>> type = new TupleTypeInfo<>(
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.LONG_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.LONG_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example 13: getGroupSortedNestedTupleDataSet2
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public static DataSet<Tuple3<Tuple2<Integer, Integer>, String, Integer>> getGroupSortedNestedTupleDataSet2(ExecutionEnvironment env) {
    List<Tuple3<Tuple2<Integer, Integer>, String, Integer>> data = new ArrayList<>();
    data.add(new Tuple3<>(new Tuple2<>(1, 3), "a", 2));
    data.add(new Tuple3<>(new Tuple2<>(1, 2), "a", 1));
    data.add(new Tuple3<>(new Tuple2<>(2, 1), "a", 3));
    data.add(new Tuple3<>(new Tuple2<>(2, 2), "b", 4));
    data.add(new Tuple3<>(new Tuple2<>(3, 3), "c", 5));
    data.add(new Tuple3<>(new Tuple2<>(3, 6), "c", 6));
    data.add(new Tuple3<>(new Tuple2<>(4, 9), "c", 7));

    TupleTypeInfo<Tuple3<Tuple2<Integer, Integer>, String, Integer>> type = new TupleTypeInfo<>(
            new TupleTypeInfo<Tuple2<Integer, Integer>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example 14: getTuple2WithByteArrayDataSet
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
public static DataSet<Tuple2<byte[], Integer>> getTuple2WithByteArrayDataSet(ExecutionEnvironment env) {
    List<Tuple2<byte[], Integer>> data = new ArrayList<>();
    data.add(new Tuple2<>(new byte[]{0, 4}, 1));
    data.add(new Tuple2<>(new byte[]{2, 0}, 1));
    data.add(new Tuple2<>(new byte[]{2, 0, 4}, 4));
    data.add(new Tuple2<>(new byte[]{2, 1}, 3));
    data.add(new Tuple2<>(new byte[]{0}, 0));
    data.add(new Tuple2<>(new byte[]{2, 0}, 1));

    TupleTypeInfo<Tuple2<byte[], Integer>> type = new TupleTypeInfo<>(
            PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example 15: testIdentityMapWithMissingTypesAndTypeInformationTypeHint
import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the required package/class
@Test
public void testIdentityMapWithMissingTypesAndTypeInformationTypeHint() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> identityMapDs = ds
            // all following generics get erased during compilation
            .map(new Mapper<Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>>())
            .returns(new TupleTypeInfo<Tuple3<Integer, Long, String>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO));

    List<Tuple3<Integer, Long, String>> result = identityMapDs
            .collect();

    String expectedResult = "(2,2,Hello)\n" +
            "(3,2,Hello world)\n" +
            "(1,1,Hi)\n";
    compareResultAsText(result, expectedResult);
}