本文整理汇总了Java中org.apache.tajo.catalog.Column.getDataType方法的典型用法代码示例。如果您正苦于以下问题:Java Column.getDataType方法的具体用法?Java Column.getDataType怎么用?Java Column.getDataType使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.tajo.catalog.Column
的用法示例。
在下文中一共展示了Column.getDataType方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: newConverter
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Builds the Parquet field converter that corresponds to the Tajo data type
 * of {@code column}.
 *
 * @param column the Tajo column whose data type selects the converter
 * @param type   the Parquet field type (currently unused by the dispatch)
 * @param parent container that receives the converted values
 * @return a converter for the column's Tajo type
 * @throws RuntimeException for NULL_TYPE and any unsupported type
 */
private Converter newConverter(Column column, Type type,
                               ParentValueContainer parent) {
  final DataType columnType = column.getDataType();
  final TajoDataTypes.Type tajoType = columnType.getType();
  switch (tajoType) {
    case BOOLEAN:
      return new FieldBooleanConverter(parent);
    case BIT:
      return new FieldBitConverter(parent);
    case CHAR:
      return new FieldCharConverter(parent);
    case INT2:
      return new FieldInt2Converter(parent);
    case INT4:
      return new FieldInt4Converter(parent);
    case INT8:
      return new FieldInt8Converter(parent);
    case FLOAT4:
      return new FieldFloat4Converter(parent);
    case FLOAT8:
      return new FieldFloat8Converter(parent);
    case DATE:
      return new FieldDateConverter(parent);
    case TEXT:
      return new FieldTextConverter(parent);
    case PROTOBUF:
      // PROTOBUF needs the full DataType to recover the message descriptor.
      return new FieldProtobufConverter(parent, columnType);
    case BLOB:
      return new FieldBlobConverter(parent);
    case NULL_TYPE:
      throw new RuntimeException("No converter for NULL_TYPE.");
    default:
      throw new RuntimeException("Unsupported data type");
  }
}
示例2: visitColumnReference
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Resolves a column reference expression against the current query block and
 * returns the Tajo data type of the resolved column.
 *
 * @param ctx   the planning context holding the plan and current query block
 * @param stack visitor expression stack; {@code expr} is pushed for the
 *              duration of the resolution, mirroring the other visit methods
 * @param expr  the column reference to resolve
 * @return the data type of the resolved column
 * @throws TajoException if the column cannot be resolved
 */
@Override
public DataType visitColumnReference(LogicalPlanner.PlanContext ctx, Stack<Expr> stack, ColumnReferenceExpr expr)
    throws TajoException {
  stack.push(expr);
  final Column resolved =
      NameResolver.resolve(ctx.plan, ctx.queryBlock, expr, NameResolvingMode.LEGACY, true);
  stack.pop();
  return resolved.getDataType();
}
示例3: newConverter
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Creates the field converter matching the Tajo data type of {@code column}.
 *
 * @param column the Tajo column whose data type selects the converter
 * @param type   the Parquet field type (not consulted by the dispatch)
 * @param parent container that receives the converted values
 * @return a converter for the column's Tajo type
 * @throws RuntimeException for INET6, NULL_TYPE, and any unsupported type
 */
private Converter newConverter(Column column, Type type,
                               ParentValueContainer parent) {
  final DataType columnType = column.getDataType();
  switch (columnType.getType()) {
    case BOOLEAN:
      return new FieldBooleanConverter(parent);
    case BIT:
      return new FieldBitConverter(parent);
    case CHAR:
      return new FieldCharConverter(parent);
    case INT2:
      return new FieldInt2Converter(parent);
    case INT4:
      return new FieldInt4Converter(parent);
    case INT8:
      return new FieldInt8Converter(parent);
    case FLOAT4:
      return new FieldFloat4Converter(parent);
    case FLOAT8:
      return new FieldFloat8Converter(parent);
    case INET4:
      return new FieldInet4Converter(parent);
    case INET6:
      throw new RuntimeException("No converter for INET6");
    case TEXT:
      return new FieldTextConverter(parent);
    case PROTOBUF:
      // PROTOBUF needs the full DataType to recover the message descriptor.
      return new FieldProtobufConverter(parent, columnType);
    case BLOB:
      return new FieldBlobConverter(parent);
    case NULL_TYPE:
      throw new RuntimeException("No converter for NULL_TYPE.");
    default:
      throw new RuntimeException("Unsupported data type");
  }
}
示例4: next
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Reads the next Tuple from the Avro file.
 *
 * @return The next Tuple from the Avro file or null if end of file is
 * reached.
 */
@Override
public Tuple next() throws IOException {
  if (!dataFileReader.hasNext()) {
    return null;
  }

  GenericRecord avroRecord = dataFileReader.next();
  for (int target = 0; target < projectionMap.length; ++target) {
    // projectionMap maps the dense output slot to the source column index.
    int source = projectionMap[target];
    Object rawValue = avroRecord.get(source);
    if (rawValue == null) {
      outTuple.put(target, NullDatum.get());
      continue;
    }

    // Resolve the Avro type, unwrapping a nullable union if present.
    Schema.Field avroField = avroFields.get(source);
    Schema.Type avroType = getNonNull(avroField.schema()).getType();
    // Resolve the corresponding Tajo column type.
    Column column = schema.getColumn(source);
    DataType dataType = column.getDataType();
    TajoDataTypes.Type tajoType = dataType.getType();

    switch (avroType) {
      case NULL:
        outTuple.put(target, NullDatum.get());
        break;
      case BOOLEAN:
        outTuple.put(target, DatumFactory.createBool((Boolean) rawValue));
        break;
      case INT:
        // INT may widen/narrow depending on the Tajo column type.
        outTuple.put(target, convertInt(rawValue, tajoType));
        break;
      case LONG:
        outTuple.put(target, DatumFactory.createInt8((Long) rawValue));
        break;
      case FLOAT:
        outTuple.put(target, DatumFactory.createFloat4((Float) rawValue));
        break;
      case DOUBLE:
        outTuple.put(target, DatumFactory.createFloat8((Double) rawValue));
        break;
      case BYTES:
        outTuple.put(target, convertBytes(rawValue, tajoType, dataType));
        break;
      case STRING:
        outTuple.put(target, convertString(rawValue, tajoType));
        break;
      case RECORD:
        throw new RuntimeException("Avro RECORD not supported.");
      case ENUM:
        throw new RuntimeException("Avro ENUM not supported.");
      case MAP:
        throw new RuntimeException("Avro MAP not supported.");
      case UNION:
        throw new RuntimeException("Avro UNION not supported.");
      case FIXED:
        outTuple.put(target, new BlobDatum(((GenericFixed) rawValue).bytes()));
        break;
      default:
        throw new RuntimeException("Unknown type.");
    }
  }
  return outTuple;
}
示例5: convertToString
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Converts the value at {@code columnIndex} of {@code tuple} into its text
 * representation for this serializer.
 *
 * @param columnIndex index of the column to convert
 * @param tuple       the tuple holding the value
 * @param nullChars   marker emitted for null/blank CHAR and TEXT values
 * @return the text form of the value; the empty string for null values of
 *         non-character types and for NULL_TYPE/unknown types
 * @throws IOException propagated from tuple access
 * @throws ValueTooLongForTypeCharactersException if a CHAR value exceeds the
 *         column's declared length
 */
private String convertToString(int columnIndex, Tuple tuple, String nullChars)
    throws IOException {
  Column col = schema.getColumn(columnIndex);
  TajoDataTypes.DataType dataType = col.getDataType();

  if (tuple.isBlankOrNull(columnIndex)) {
    switch (dataType.getType()) {
      case CHAR:
      case TEXT:
        // Only character types render an explicit null marker.
        return nullChars;
      default:
        return StringUtils.EMPTY;
    }
  }

  switch (dataType.getType()) {
    case BOOLEAN:
      return tuple.getBool(columnIndex) ? "true" : "false";
    case CHAR: {
      // CHAR(n) is fixed-width: the value is followed by enough trailing
      // spaces to reach the declared length.
      int padLength = dataType.getLength() - tuple.size(columnIndex);
      if (padLength < 0) {
        throw new ValueTooLongForTypeCharactersException(dataType.getLength());
      }
      // Fix: the previous code passed padLength as rightPad's *total* target
      // length, so the value was never actually padded. Append padLength
      // spaces instead, matching the byte-oriented serialize() path, which
      // writes the value followed by padLength pad bytes.
      String text = tuple.getText(columnIndex);
      return StringUtils.rightPad(text, text.length() + padLength, ' ');
    }
    case TEXT:
    case BIT:
    case INT2:
    case INT4:
    case INT8:
    case FLOAT4:
    case FLOAT8:
    case DATE:
    case INTERVAL:
    case TIME:
      return tuple.getText(columnIndex);
    case TIMESTAMP:
      // Timestamps are stored in UTC; render in the table's timezone.
      return TimestampDatum.asChars(tuple.getTimeDate(columnIndex), tableTimezone, false);
    case BLOB:
      return Base64.encodeBase64String(tuple.getBytes(columnIndex));
    case PROTOBUF:
      ProtobufDatum protobuf = (ProtobufDatum) tuple.getProtobufDatum(columnIndex);
      return protobufJsonFormat.printToString(protobuf.get());
    case NULL_TYPE:
    default:
      return StringUtils.EMPTY;
  }
}
示例6: serialize
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Serializes the value at {@code columnIndex} of {@code tuple} to the given
 * output stream as text bytes.
 *
 * @param columnIndex index of the column to serialize
 * @param tuple       the tuple holding the value
 * @param out         destination stream
 * @param nullChars   byte marker written for null/blank CHAR and TEXT values
 * @return number of bytes written (0 for null values of non-character types
 *         and for NULL_TYPE/unknown types)
 * @throws IOException if writing to {@code out} fails
 * @throws ValueTooLongForTypeCharactersException if a CHAR value exceeds the
 *         column's declared length
 */
@Override
public int serialize(int columnIndex, Tuple tuple, OutputStream out, byte[] nullChars)
    throws IOException {
  byte[] bytes;
  int length = 0;
  Column col = schema.getColumn(columnIndex);
  TajoDataTypes.DataType dataType = col.getDataType();

  if (tuple.isBlankOrNull(columnIndex)) {
    switch (dataType.getType()) {
      case CHAR:
      case TEXT:
        // Only character types emit an explicit null marker.
        length = nullChars.length;
        out.write(nullChars);
        break;
      default:
        break;
    }
    return length;
  }

  switch (dataType.getType()) {
    case BOOLEAN:
      out.write(tuple.getBool(columnIndex) ? trueBytes : falseBytes);
      length = trueBytes.length;
      break;
    case CHAR:
      // CHAR(n) is fixed-width: write the value followed by zero-byte padding
      // up to the declared length, or fail if the value is too long.
      int size = dataType.getLength() - tuple.size(columnIndex);
      if (size < 0) {
        throw new ValueTooLongForTypeCharactersException(dataType.getLength());
      }
      byte[] pad = new byte[size];
      bytes = tuple.getBytes(columnIndex);
      out.write(bytes);
      out.write(pad);
      length = bytes.length + pad.length;
      break;
    case TEXT:
    case BIT:
    case INT2:
    case INT4:
    case INT8:
    case FLOAT4:
    case FLOAT8:
    case DATE:
    case INTERVAL:
    case TIME:
      // TIME merged into this group: its branch was byte-identical.
      bytes = tuple.getTextBytes(columnIndex);
      length = bytes.length;
      out.write(bytes);
      break;
    case TIMESTAMP:
      // Timestamps are stored in UTC; render in the table's timezone.
      bytes = TimestampDatum.asChars(
          tuple.getTimeDate(columnIndex), tableTimezone, false).getBytes(Bytes.UTF8_CHARSET);
      length = bytes.length;
      out.write(bytes);
      break;
    case BLOB:
      bytes = Base64.encodeBase64(tuple.getBytes(columnIndex), false);
      length = bytes.length;
      out.write(bytes, 0, length);
      break;
    case PROTOBUF:
      ProtobufDatum protobuf = (ProtobufDatum) tuple.getProtobufDatum(columnIndex);
      byte[] protoBytes = protobufJsonFormat.printToString(protobuf.get()).getBytes(Bytes.UTF8_CHARSET);
      length = protoBytes.length;
      out.write(protoBytes, 0, protoBytes.length);
      break;
    case NULL_TYPE:
    default:
      break;
  }
  return length;
}
示例7: serialize
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Serializes a single datum of the given column to the output stream as
 * text bytes.
 *
 * @param col            column whose data type drives the encoding
 * @param datum          value to serialize; may be null
 * @param out            destination stream
 * @param nullCharacters byte marker written for null CHAR and TEXT values
 * @return number of bytes written (0 for null values of non-character types
 *         and for NULL_TYPE/unknown types)
 * @throws IOException if writing to {@code out} fails
 * @throws ValueTooLongForTypeCharactersException if a CHAR value exceeds the
 *         column's declared length
 */
@Override
public int serialize(Column col, Datum datum, OutputStream out, byte[] nullCharacters) throws IOException {
  byte[] bytes;
  int length = 0;
  TajoDataTypes.DataType dataType = col.getDataType();

  if (datum == null || datum instanceof NullDatum) {
    switch (dataType.getType()) {
      case CHAR:
      case TEXT:
        // Only character types emit an explicit null marker.
        length = nullCharacters.length;
        out.write(nullCharacters);
        break;
      default:
        break;
    }
    return length;
  }

  switch (dataType.getType()) {
    case BOOLEAN:
      out.write(datum.asBool() ? trueBytes : falseBytes);
      length = trueBytes.length;
      break;
    case CHAR:
      // CHAR(n) is fixed-width: write the value followed by zero-byte padding
      // up to the declared length.
      int padSize = dataType.getLength() - datum.size();
      if (padSize < 0) {
        // Fix: an oversized value previously crashed with
        // NegativeArraySizeException; throw the same typed error as the
        // tuple-based serialize() path instead.
        throw new ValueTooLongForTypeCharactersException(dataType.getLength());
      }
      byte[] pad = new byte[padSize];
      bytes = datum.asTextBytes();
      out.write(bytes);
      out.write(pad);
      length = bytes.length + pad.length;
      break;
    case TEXT:
    case BIT:
    case INT2:
    case INT4:
    case INT8:
    case FLOAT4:
    case FLOAT8:
    case INET4:
    case DATE:
    case TIME:
    case TIMESTAMP:
      bytes = datum.asTextBytes();
      length = bytes.length;
      out.write(bytes);
      break;
    case INET6:
    case BLOB:
      // Binary types are emitted as Base64 text.
      bytes = Base64.encodeBase64(datum.asByteArray(), false);
      length = bytes.length;
      out.write(bytes, 0, length);
      break;
    case PROTOBUF:
      ProtobufDatum protobuf = (ProtobufDatum) datum;
      byte[] protoBytes = protobufJsonFormat.printToString(protobuf.get()).getBytes();
      length = protoBytes.length;
      out.write(protoBytes, 0, protoBytes.length);
      break;
    case NULL_TYPE:
    default:
      break;
  }
  return length;
}
示例8: next
import org.apache.tajo.catalog.Column; //导入方法依赖的package包/类
/**
 * Reads the next Tuple from the Avro file.
 *
 * @return The next Tuple from the Avro file or null if end of file is
 * reached.
 */
@Override
public Tuple next() throws IOException {
  if (!dataFileReader.hasNext()) {
    return null;
  }

  // A fresh tuple is allocated per record; values land at their source
  // column index, unprojected slots stay unset.
  Tuple result = new VTuple(schema.size());
  GenericRecord avroRecord = dataFileReader.next();

  for (int p = 0; p < projectionMap.length; ++p) {
    int columnIndex = projectionMap[p];
    Object rawValue = avroRecord.get(columnIndex);
    if (rawValue == null) {
      result.put(columnIndex, NullDatum.get());
      continue;
    }

    // Resolve the Avro type, unwrapping a nullable union if present.
    Schema.Field avroField = avroFields.get(columnIndex);
    Schema.Type avroType = getNonNull(avroField.schema()).getType();
    // Resolve the corresponding Tajo column type.
    Column column = schema.getColumn(columnIndex);
    DataType dataType = column.getDataType();
    TajoDataTypes.Type tajoType = dataType.getType();

    switch (avroType) {
      case NULL:
        result.put(columnIndex, NullDatum.get());
        break;
      case BOOLEAN:
        result.put(columnIndex, DatumFactory.createBool((Boolean) rawValue));
        break;
      case INT:
        // INT may widen/narrow depending on the Tajo column type.
        result.put(columnIndex, convertInt(rawValue, tajoType));
        break;
      case LONG:
        result.put(columnIndex, DatumFactory.createInt8((Long) rawValue));
        break;
      case FLOAT:
        result.put(columnIndex, DatumFactory.createFloat4((Float) rawValue));
        break;
      case DOUBLE:
        result.put(columnIndex, DatumFactory.createFloat8((Double) rawValue));
        break;
      case BYTES:
        result.put(columnIndex, convertBytes(rawValue, tajoType, dataType));
        break;
      case STRING:
        result.put(columnIndex, convertString(rawValue, tajoType));
        break;
      case RECORD:
        throw new RuntimeException("Avro RECORD not supported.");
      case ENUM:
        throw new RuntimeException("Avro ENUM not supported.");
      case MAP:
        throw new RuntimeException("Avro MAP not supported.");
      case UNION:
        throw new RuntimeException("Avro UNION not supported.");
      case FIXED:
        result.put(columnIndex, new BlobDatum(((GenericFixed) rawValue).bytes()));
        break;
      default:
        throw new RuntimeException("Unknown type.");
    }
  }
  return result;
}