This article collects typical usage examples of the Java method org.apache.flink.types.Row.setField. If you are wondering what Row.setField does, how to call it, or what real uses of it look like, the curated examples below may help. They also illustrate the containing class, org.apache.flink.types.Row.
The following presents 15 code examples of the Row.setField method, sorted by popularity by default.
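Before diving into the collected examples, here is a minimal, self-contained sketch of the basic Row.setField pattern. A Row's arity is fixed at construction time and field positions are zero-based; the class and method names below are purely illustrative.

import org.apache.flink.types.Row;

public class RowSetFieldDemo {
    public static void main(String[] args) {
        // a Row's arity is fixed when it is constructed
        Row row = new Row(3);

        // setField(int pos, Object value) assigns a value to a zero-based position
        row.setField(0, 42);
        row.setField(1, "hello");
        row.setField(2, null); // null fields are allowed

        // getField(int pos) reads a field back as Object
        System.out.println(row.getField(1)); // prints: hello
        System.out.println(row.getArity());  // prints: 3
    }
}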
Example 1: mapResultToOutType
import org.apache.flink.types.Row; // import the package/class the method depends on
@Override
protected Row mapResultToOutType(Result res) {
    for (int f = 0; f < this.families.length; f++) {
        // get family key
        byte[] familyKey = families[f];
        Row familyRow = familyRows[f];
        for (int q = 0; q < this.qualifiers[f].length; q++) {
            // get qualifier key
            byte[] qualifier = qualifiers[f][q];
            // get qualifier type idx
            int typeIdx = types[f][q];
            // read value
            byte[] value = res.getValue(familyKey, qualifier);
            if (value != null) {
                familyRow.setField(q, deserialize(value, typeIdx));
            } else {
                familyRow.setField(q, null);
            }
        }
        resultRow.setField(f, familyRow);
    }
    return resultRow;
}
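Note that families, qualifiers, types, familyRows, and resultRow are fields of the enclosing input format and are not shown in the snippet. A plausible initialization of the reusable row buffers, as a sketch with hypothetical names mirroring the snippet, might look like this:

// hypothetical setup for the reusable row buffers used in Example 1
private void initRowBuffers() {
    familyRows = new Row[families.length];
    for (int f = 0; f < families.length; f++) {
        // one nested Row per column family, sized by that family's qualifier count
        familyRows[f] = new Row(qualifiers[f].length);
    }
    // the outer Row holds one nested Row per column family
    resultRow = new Row(families.length);
}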
Example 2: testRowSerializer
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test
public void testRowSerializer() {
    TypeInformation<Row> typeInfo = new RowTypeInfo(
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    Row row1 = new Row(2);
    row1.setField(0, 1);
    row1.setField(1, "a");

    Row row2 = new Row(2);
    row2.setField(0, 2);
    row2.setField(1, null);

    TypeSerializer<Row> serializer = typeInfo.createSerializer(new ExecutionConfig());
    RowSerializerTestInstance instance = new RowSerializerTestInstance(serializer, row1, row2);
    instance.testAll();
}
Example 3: testSerializationOfTwoRows
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test
public void testSerializationOfTwoRows() throws IOException {
    RowTypeInfo rowSchema = new RowTypeInfo(
        new TypeInformation[]{Types.INT(), Types.BOOLEAN(), Types.STRING()},
        new String[]{"f1", "f2", "f3"});

    Row row1 = new Row(3);
    row1.setField(0, 1);
    row1.setField(1, true);
    row1.setField(2, "str");

    JsonRowSerializationSchema serializationSchema = new JsonRowSerializationSchema(rowSchema);
    JsonRowDeserializationSchema deserializationSchema = new JsonRowDeserializationSchema(rowSchema);

    byte[] bytes = serializationSchema.serialize(row1);
    assertEqualRows(row1, deserializationSchema.deserialize(bytes));

    Row row2 = new Row(3);
    row2.setField(0, 10);
    row2.setField(1, false);
    row2.setField(2, "newStr");

    bytes = serializationSchema.serialize(row2);
    assertEqualRows(row2, deserializationSchema.deserialize(bytes));
}
Example 4: testRow
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test
public void testRow() {
    Row row = new Row(2);
    row.setField(0, "string");
    row.setField(1, 15);

    TypeInformation<Row> rowInfo = TypeExtractor.getForObject(row);
    Assert.assertEquals(rowInfo.getClass(), RowTypeInfo.class);
    Assert.assertEquals(2, rowInfo.getArity());
    Assert.assertEquals(
        new RowTypeInfo(
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO),
        rowInfo);

    Row nullRow = new Row(2);
    TypeInformation<Row> genericRowInfo = TypeExtractor.getForObject(nullRow);
    Assert.assertEquals(genericRowInfo, new GenericTypeInfo<>(Row.class));
}
Example 5: getSimpleTestData
import org.apache.flink.types.Row; // import the package/class the method depends on
/**
 * Test data with simple Avro data types, without nesting.
 */
public static Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> getSimpleTestData() {
    final Address addr = Address.newBuilder()
        .setNum(42)
        .setStreet("Main Street 42")
        .setCity("Test City")
        .setState("Test State")
        .setZip("12345")
        .build();

    final Row rowAddr = new Row(5);
    rowAddr.setField(0, 42);
    rowAddr.setField(1, "Main Street 42");
    rowAddr.setField(2, "Test City");
    rowAddr.setField(3, "Test State");
    rowAddr.setField(4, "12345");

    final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> t = new Tuple3<>();
    t.f0 = Address.class;
    t.f1 = addr;
    t.f2 = rowAddr;
    return t;
}
Example 6: testExceptionOnInvalidType
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test(expected = RuntimeException.class)
public void testExceptionOnInvalidType() throws IOException {
    jdbcOutputFormat = JDBCOutputFormat.buildJDBCOutputFormat()
        .setDrivername(DRIVER_CLASS)
        .setDBUrl(DB_URL)
        .setQuery(String.format(INSERT_TEMPLATE, OUTPUT_TABLE))
        .setSqlTypes(new int[]{
            Types.INTEGER,
            Types.VARCHAR,
            Types.VARCHAR,
            Types.DOUBLE,
            Types.INTEGER})
        .finish();
    jdbcOutputFormat.open(0, 1);

    JDBCTestBase.TestEntry entry = TEST_DATA[0];
    Row row = new Row(5);
    row.setField(0, entry.id);
    row.setField(1, entry.title);
    row.setField(2, entry.author);
    row.setField(3, 0L); // use incompatible type (Long instead of Double)
    row.setField(4, entry.qty);

    jdbcOutputFormat.writeRecord(row);
}
Example 7: deserialize
import org.apache.flink.types.Row; // import the package/class the method depends on
@Override
public Row deserialize(Row reuse, DataInputView source) throws IOException {
    int len = fieldSerializers.length;
    if (reuse.getArity() != len) {
        throw new RuntimeException("Row arity of from does not match serializers.");
    }

    // read null mask
    readIntoNullMask(len, source, nullMask);

    for (int i = 0; i < len; i++) {
        if (nullMask[i]) {
            reuse.setField(i, null);
        } else {
            Object reuseField = reuse.getField(i);
            if (reuseField != null) {
                reuse.setField(i, fieldSerializers[i].deserialize(reuseField, source));
            } else {
                reuse.setField(i, fieldSerializers[i].deserialize(source));
            }
        }
    }
    return reuse;
}
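To see the reuse branch of this deserialize method in action, here is a minimal round-trip sketch assembled for illustration (the stream wrappers come from org.apache.flink.core.memory; the surrounding class is hypothetical, not part of the original snippet):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.types.Row;

public class RowSerializerRoundTrip {
    public static void main(String[] args) throws IOException {
        TypeSerializer<Row> serializer = new RowTypeInfo(
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO).createSerializer(new ExecutionConfig());

        Row row = new Row(2);
        row.setField(0, 42);
        row.setField(1, "hello");

        // serialize into an in-memory buffer
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.serialize(row, new DataOutputViewStreamWrapper(out));

        // deserialize into a reused Row instance, exercising the reuse branch above
        Row reuse = new Row(2);
        Row result = serializer.deserialize(reuse,
            new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray())));
        System.out.println(result); // prints: 42,hello
    }
}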
Example 8: deserialize
import org.apache.flink.types.Row; // import the package/class the method depends on
@Override
public Row deserialize(byte[] message) throws IOException {
    try {
        JsonNode root = objectMapper.readTree(message);

        Row row = new Row(fieldNames.length);
        for (int i = 0; i < fieldNames.length; i++) {
            JsonNode node = root.get(fieldNames[i]);
            if (node == null) {
                if (failOnMissingField) {
                    throw new IllegalStateException("Failed to find field with name '"
                        + fieldNames[i] + "'.");
                } else {
                    row.setField(i, null);
                }
            } else {
                // read the value as the specified type
                Object value = objectMapper.treeToValue(node, fieldTypes[i].getTypeClass());
                row.setField(i, value);
            }
        }
        return row;
    } catch (Throwable t) {
        throw new IOException("Failed to deserialize JSON object.", t);
    }
}
Example 9: testCassandraTableSink
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test
public void testCassandraTableSink() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    StreamTableEnvironment tEnv = StreamTableEnvironment.getTableEnvironment(env);

    DataStreamSource<Row> source = env.fromCollection(rowCollection);
    tEnv.registerDataStreamInternal("testFlinkTable", source);
    tEnv.sql("select * from testFlinkTable").writeToSink(
        new CassandraAppendTableSink(builder, injectTableName(INSERT_DATA_QUERY)));
    env.execute();

    ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));

    // validate that all input was correctly written to Cassandra
    List<Row> input = new ArrayList<>(rowCollection);
    List<com.datastax.driver.core.Row> output = rs.all();
    for (com.datastax.driver.core.Row o : output) {
        Row cmp = new Row(3);
        cmp.setField(0, o.getString(0));
        cmp.setField(1, o.getInt(2));
        cmp.setField(2, o.getInt(1));
        Assert.assertTrue("Row " + cmp + " was written to Cassandra but not in input.", input.remove(cmp));
    }
    Assert.assertTrue("The input data was not completely written to Cassandra", input.isEmpty());
}
Example 10: convertToRow
import org.apache.flink.types.Row; // import the package/class the method depends on
/**
 * Converts a (nested) Avro {@link SpecificRecord} into Flink's Row type.
 * Avro's {@link Utf8} fields are converted into regular Java strings.
 */
private static Object convertToRow(Schema schema, Object recordObj) {
    if (recordObj instanceof GenericRecord) {
        // records can be wrapped in a union
        if (schema.getType() == Schema.Type.UNION) {
            final List<Schema> types = schema.getTypes();
            if (types.size() == 2 && types.get(0).getType() == Schema.Type.NULL && types.get(1).getType() == Schema.Type.RECORD) {
                schema = types.get(1);
            } else {
                throw new RuntimeException("Currently we only support schemas of the following form: UNION[null, RECORD]. Given: " + schema);
            }
        } else if (schema.getType() != Schema.Type.RECORD) {
            throw new RuntimeException("Record type for row type expected. But is: " + schema);
        }
        final List<Schema.Field> fields = schema.getFields();
        final Row row = new Row(fields.size());
        final GenericRecord record = (GenericRecord) recordObj;
        for (int i = 0; i < fields.size(); i++) {
            final Schema.Field field = fields.get(i);
            row.setField(i, convertToRow(field.schema(), record.get(field.pos())));
        }
        return row;
    } else if (recordObj instanceof Utf8) {
        return recordObj.toString();
    } else {
        return recordObj;
    }
}
Example 11: testRowSerialization
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test
public void testRowSerialization() throws IOException {
    RowTypeInfo rowSchema = new RowTypeInfo(
        new TypeInformation[]{Types.INT(), Types.BOOLEAN(), Types.STRING()},
        new String[]{"f1", "f2", "f3"});

    Row row = new Row(3);
    row.setField(0, 1);
    row.setField(1, true);
    row.setField(2, "str");

    Row resultRow = serializeAndDeserialize(rowSchema, row);
    assertEqualRows(row, resultRow);
}
Example 12: createRow
import org.apache.flink.types.Row; // import the package/class the method depends on
private static Row createRow(Object f0, Object f1, Object f2, Object f3, Object f4) {
    Row row = new Row(5);
    row.setField(0, f0);
    row.setField(1, f1);
    row.setField(2, f2);
    row.setField(3, f3);
    row.setField(4, f4);
    return row;
}
Example 13: createRow
import org.apache.flink.types.Row; // import the package/class the method depends on
private static Row createRow(Object... values) {
    checkNotNull(values);
    checkArgument(values.length == numberOfFields);

    Row row = new Row(numberOfFields);
    for (int i = 0; i < values.length; i++) {
        row.setField(i, values[i]);
    }
    return row;
}
Example 14: testExceptionOnClose
import org.apache.flink.types.Row; // import the package/class the method depends on
@Test(expected = RuntimeException.class)
public void testExceptionOnClose() throws IOException {
    jdbcOutputFormat = JDBCOutputFormat.buildJDBCOutputFormat()
        .setDrivername(DRIVER_CLASS)
        .setDBUrl(DB_URL)
        .setQuery(String.format(INSERT_TEMPLATE, OUTPUT_TABLE))
        .setSqlTypes(new int[]{
            Types.INTEGER,
            Types.VARCHAR,
            Types.VARCHAR,
            Types.DOUBLE,
            Types.INTEGER})
        .finish();
    jdbcOutputFormat.open(0, 1);

    JDBCTestBase.TestEntry entry = TEST_DATA[0];
    Row row = new Row(5);
    row.setField(0, entry.id);
    row.setField(1, entry.title);
    row.setField(2, entry.author);
    row.setField(3, entry.price);
    row.setField(4, entry.qty);

    jdbcOutputFormat.writeRecord(row);
    jdbcOutputFormat.writeRecord(row); // writing the same record twice must yield a unique key violation
    jdbcOutputFormat.close();
}
Example 15: toRow
import org.apache.flink.types.Row; // import the package/class the method depends on
private static Row toRow(TestEntry entry) {
    Row row = new Row(5);
    row.setField(0, entry.id);
    row.setField(1, entry.title);
    row.setField(2, entry.author);
    row.setField(3, entry.price);
    row.setField(4, entry.qty);
    return row;
}