This page collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo. If you have been wondering what DecimalTypeInfo is for, how to use it, or where to find usage examples, the class code examples selected below may help.
The DecimalTypeInfo class belongs to the org.apache.hadoop.hive.serde2.typeinfo package. Fifteen code examples of the class are shown below, sorted by popularity by default.
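As a quick orientation before the examples, here is a minimal sketch of constructing a DecimalTypeInfo and reading back its precision and scale. The factory call is Hive's standard serde2 entry point; the concrete values 10 and 2 are illustrative:

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalTypeInfoSketch {
    public static void main(String[] args) {
        // decimal(10, 2): at most 10 significant digits, 2 of them after the decimal point
        DecimalTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        System.out.println(decimalTypeInfo.getTypeName()); // decimal(10,2)
        System.out.println(decimalTypeInfo.precision());   // 10
        System.out.println(decimalTypeInfo.scale());       // 2
    }
}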
Example 1: getPrimitiveType
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
final PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) fieldInspector)
.getPrimitiveCategory();
if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(primitiveCategory.name())) {
return HiveTypeMapping.getHIVE_TO_CANONICAL().get(primitiveCategory.name());
}
switch (primitiveCategory) {
case DECIMAL:
final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
.getTypeInfo();
return DecimalType.createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.getScale());
case CHAR:
final int cLength = ((CharTypeInfo) ((PrimitiveObjectInspector)
fieldInspector).getTypeInfo()).getLength();
return CharType.createCharType(cLength);
case VARCHAR:
final int vLength = ((VarcharTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
.getTypeInfo()).getLength();
return VarcharType.createVarcharType(vLength);
default:
return null;
}
}
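A hedged usage sketch for Example 1. Note that getPrimitiveType is private, so this would run inside the same class; building the inspector from a TypeInfo via Hive's TypeInfoUtils is an assumption about how callers obtain one:

// Build a Java object inspector for decimal(10, 2) and map it to the canonical type.
ObjectInspector fieldInspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
    TypeInfoFactory.getDecimalTypeInfo(10, 2));
// Expected result: DecimalType.createDecimalType(10, 2)
Type canonical = getPrimitiveType(fieldInspector);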
Example 2: getQualifiedTypeName
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
/**
* Gets the qualified type name.
*
* @param typeDesc the type desc
* @return the qualified type name
*/
public static String getQualifiedTypeName(TypeDescriptor typeDesc) {
if (typeDesc.getType().isQualifiedType()) {
switch (typeDesc.getType()) {
case VARCHAR_TYPE:
return VarcharTypeInfo.getQualifiedName(typeDesc.getTypeName(),
typeDesc.getTypeQualifiers().getCharacterMaximumLength()).toLowerCase();
case CHAR_TYPE:
return CharTypeInfo.getQualifiedName(typeDesc.getTypeName(),
typeDesc.getTypeQualifiers().getCharacterMaximumLength()).toLowerCase();
case DECIMAL_TYPE:
return DecimalTypeInfo.getQualifiedName(typeDesc.getTypeQualifiers().getPrecision(),
typeDesc.getTypeQualifiers().getScale()).toLowerCase();
}
} else if (typeDesc.getType().isComplexType()) {
switch (typeDesc.getType()) {
case ARRAY_TYPE:
case MAP_TYPE:
case STRUCT_TYPE:
return "string";
}
}
return typeDesc.getTypeName().toLowerCase();
}
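For the DECIMAL_TYPE branch above, DecimalTypeInfo.getQualifiedName is a static helper that renders the parameterized type name; a one-line illustration with assumed values:

String qualified = DecimalTypeInfo.getQualifiedName(10, 2).toLowerCase(); // "decimal(10,2)"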
Example 3: getMajorTypeFromHiveTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
switch (typeInfo.getCategory()) {
case PRIMITIVE: {
PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
MinorType minorType = HiveUtilities.getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo, options);
MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(minorType)
.setMode(DataMode.OPTIONAL); // Hive columns (both regular and partition) could have null values
if (primitiveTypeInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
typeBuilder.setPrecision(decimalTypeInfo.precision())
.setScale(decimalTypeInfo.scale()).build();
}
return typeBuilder.build();
}
case LIST:
case MAP:
case STRUCT:
case UNION:
default:
throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
}
return null;
}
Example 4: create
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static HiveFieldConverter create(TypeInfo typeInfo, FragmentContext fragmentContext)
throws IllegalAccessException, InstantiationException {
switch (typeInfo.getCategory()) {
case PRIMITIVE:
final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
if (pCat != PrimitiveCategory.DECIMAL) {
Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
if (clazz != null) {
return clazz.newInstance();
}
} else {
// For decimal, based on precision return appropriate converter.
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
int precision = decimalTypeInfo.precision();
int scale = decimalTypeInfo.scale();
if (precision <= 9) {
return new Decimal9(precision, scale);
} else if (precision <= 18) {
return new Decimal18(precision, scale);
} else if (precision <= 28) {
return new Decimal28(precision, scale, fragmentContext);
} else {
return new Decimal38(precision, scale, fragmentContext);
}
}
throwUnsupportedHiveDataTypeError(pCat.toString());
break;
case LIST:
case MAP:
case STRUCT:
case UNION:
default:
throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
}
return null;
}
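The precision tiers in Example 4 track Drill's fixed-width decimal vectors: up to 9 digits fits a 4-byte representation and up to 18 digits an 8-byte one, while the wider 28- and 38-digit formats are stored across multiple words and therefore need buffer allocation, which is why only Decimal28 and Decimal38 receive the FragmentContext.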
Example 5: getMajorTypeFromHiveTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
switch (typeInfo.getCategory()) {
case PRIMITIVE: {
PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
MinorType minorType = getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo, options);
MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(getMinorTypeFromArrowMinorType(minorType))
.setMode(DataMode.OPTIONAL); // Hive columns (both regular and partition) could have null values
if (primitiveTypeInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
typeBuilder.setPrecision(decimalTypeInfo.precision())
.setScale(decimalTypeInfo.scale()).build();
}
return typeBuilder.build();
}
case LIST:
case MAP:
case STRUCT:
case UNION:
default:
HiveUtilities.throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
}
return null;
}
Example 6: create
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext context)
throws IllegalAccessException, InstantiationException {
switch (typeInfo.getCategory()) {
case PRIMITIVE:
final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
if (pCat != PrimitiveCategory.DECIMAL) {
Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
if (clazz != null) {
return clazz.newInstance();
}
} else {
// For decimal, based on precision return appropriate converter.
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
int precision = decimalTypeInfo.precision();
int scale = decimalTypeInfo.scale();
return new Decimal(precision, scale, context);
}
throwUnsupportedHiveDataTypeError(pCat.toString());
break;
case LIST:
case MAP:
case STRUCT:
case UNION:
default:
throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
}
return null;
}
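Unlike Example 4, this variant routes every precision to a single Decimal converter; presumably the underlying value vectors here handle all precisions up to 38 uniformly, making the tiered Decimal9/18/28/38 converters unnecessary.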
Example 7: getMajorTypeFromHiveTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
switch (typeInfo.getCategory()) {
case PRIMITIVE: {
PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
MinorType minorType = HiveUtilities.getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo, options);
MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(minorType)
.setMode(DataMode.OPTIONAL); // Hive columns (both regular and partition) could have null values
switch (primitiveTypeInfo.getPrimitiveCategory()) {
case CHAR:
case VARCHAR:
BaseCharTypeInfo baseCharTypeInfo = (BaseCharTypeInfo) primitiveTypeInfo;
typeBuilder.setPrecision(baseCharTypeInfo.getLength());
break;
case DECIMAL:
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
typeBuilder.setPrecision(decimalTypeInfo.getPrecision()).setScale(decimalTypeInfo.getScale());
break;
default:
// do nothing, other primitive categories do not have precision or scale
}
return typeBuilder.build();
}
case LIST:
case MAP:
case STRUCT:
case UNION:
default:
throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
}
return null;
}
Example 8: getFieldObjectInspector
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
/**
* Given a Hive column type, returns the ObjectInspector that will be used to
* get data from the field. Currently using the standard Writable object
* inspectors.
* TODO: Support all types
*/
private ObjectInspector getFieldObjectInspector(final TypeInfo typeInfo) {
if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
} else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
} else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
} else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
} else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
} else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
} else if (typeInfo instanceof DecimalTypeInfo) {
return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
(DecimalTypeInfo) typeInfo);
} else if (typeInfo instanceof VarcharTypeInfo) {
return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
(VarcharTypeInfo) typeInfo);
} else if (typeInfo instanceof CharTypeInfo) {
return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
(CharTypeInfo) typeInfo);
} else {
throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
}
}
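A hedged sketch of the DecimalTypeInfo branch above, showing the inspector that comes back for a concrete decimal type (the precision and scale are illustrative):

DecimalTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 10);
ObjectInspector oi =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decimalTypeInfo);
// For decimal this yields a WritableHiveDecimalObjectInspector bound to decimal(38,10).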
Example 9: convertPartitionType
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
/** Partition value is received in string format. Convert it into appropriate object based on the type. */
public static Object convertPartitionType(TypeInfo typeInfo, String value, final String defaultPartitionValue) {
if (typeInfo.getCategory() != Category.PRIMITIVE) {
// In Hive only primitive types are allowed as partition column types.
throw new DrillRuntimeException("Non-Primitive types are not allowed as partition column type in Hive, " +
"but received one: " + typeInfo.getCategory());
}
if (defaultPartitionValue.equals(value)) {
return null;
}
final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
try {
switch (pCat) {
case BINARY:
return value.getBytes();
case BOOLEAN:
return Boolean.parseBoolean(value);
case DECIMAL: {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(value),
decimalTypeInfo.precision(), decimalTypeInfo.scale());
}
case DOUBLE:
return Double.parseDouble(value);
case FLOAT:
return Float.parseFloat(value);
case BYTE:
case SHORT:
case INT:
return Integer.parseInt(value);
case LONG:
return Long.parseLong(value);
case STRING:
case VARCHAR:
return value.getBytes();
case TIMESTAMP:
return Timestamp.valueOf(value);
case DATE:
return Date.valueOf(value);
}
} catch(final Exception e) {
// In Hive, partition values that can't be converted from string are considered to be NULL.
logger.trace("Failed to interpret '{}' value from partition value string '{}'", pCat, value);
return null;
}
throwUnsupportedHiveDataTypeError(pCat.toString());
return null;
}
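One detail of the DECIMAL branch worth calling out: HiveDecimalUtils.enforcePrecisionScale rounds the value to the declared scale and returns null when the integer part no longer fits the precision, which dovetails with Hive treating unconvertible partition values as NULL. A small illustration, with assumed values not taken from the source:

HiveDecimal ok = HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create("123.456"), 5, 2);
// ok == 123.46: rounded to scale 2; 3 integer digits fit within precision 5, scale 2
HiveDecimal bad = HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create("123.456"), 4, 2);
// bad == null: 123 needs 3 integer digits, but precision 4 with scale 2 allows only 2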
Example 10: getMinorTypeFromHivePrimitiveTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static TypeProtos.MinorType getMinorTypeFromHivePrimitiveTypeInfo(PrimitiveTypeInfo primitiveTypeInfo,
OptionManager options) {
switch(primitiveTypeInfo.getPrimitiveCategory()) {
case BINARY:
return TypeProtos.MinorType.VARBINARY;
case BOOLEAN:
return TypeProtos.MinorType.BIT;
case DECIMAL: {
if (!options.getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val) {
throw UserException.unsupportedError()
.message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
.build(logger);
}
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
return DecimalUtility.getDecimalDataType(decimalTypeInfo.precision());
}
case DOUBLE:
return TypeProtos.MinorType.FLOAT8;
case FLOAT:
return TypeProtos.MinorType.FLOAT4;
// TODO (DRILL-2470)
// Byte and short (tinyint and smallint in SQL types) are currently read as integers
// as these smaller integer types are not fully supported in Drill today.
case SHORT:
case BYTE:
case INT:
return TypeProtos.MinorType.INT;
case LONG:
return TypeProtos.MinorType.BIGINT;
case STRING:
case VARCHAR:
return TypeProtos.MinorType.VARCHAR;
case TIMESTAMP:
return TypeProtos.MinorType.TIMESTAMP;
case DATE:
return TypeProtos.MinorType.DATE;
}
throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
return null;
}
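DecimalUtility.getDecimalDataType presumably applies the same precision tiering seen in Example 4, selecting a fixed-width decimal minor type (DECIMAL9, DECIMAL18, or one of the wider sparse formats) from the declared precision alone; the scale is carried separately on the MajorType.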
Example 11: getRelDataTypeFromHivePrimitiveType
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
private RelDataType getRelDataTypeFromHivePrimitiveType(RelDataTypeFactory typeFactory, PrimitiveTypeInfo pTypeInfo) {
switch(pTypeInfo.getPrimitiveCategory()) {
case BOOLEAN:
return typeFactory.createSqlType(SqlTypeName.BOOLEAN);
case BYTE:
case SHORT:
return typeFactory.createSqlType(SqlTypeName.INTEGER);
case INT:
return typeFactory.createSqlType(SqlTypeName.INTEGER);
case LONG:
return typeFactory.createSqlType(SqlTypeName.BIGINT);
case FLOAT:
return typeFactory.createSqlType(SqlTypeName.FLOAT);
case DOUBLE:
return typeFactory.createSqlType(SqlTypeName.DOUBLE);
case DATE:
return typeFactory.createSqlType(SqlTypeName.DATE);
case TIMESTAMP:
return typeFactory.createSqlType(SqlTypeName.TIMESTAMP);
case BINARY:
return typeFactory.createSqlType(SqlTypeName.VARBINARY);
case DECIMAL: {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo)pTypeInfo;
return typeFactory.createSqlType(SqlTypeName.DECIMAL, decimalTypeInfo.precision(), decimalTypeInfo.scale());
}
case STRING:
case VARCHAR: {
int maxLen = TypeInfoUtils.getCharacterLengthForType(pTypeInfo);
return typeFactory.createTypeWithCharsetAndCollation(
typeFactory.createSqlType(SqlTypeName.VARCHAR, maxLen), /*input type*/
Charset.forName("ISO-8859-1"), /*unicode char set*/
SqlCollation.IMPLICIT /* TODO: need to decide if implicit is the correct one */
);
}
case UNKNOWN:
case VOID:
default:
throwUnsupportedHiveDataTypeError(pTypeInfo.getPrimitiveCategory().toString());
}
return null;
}
Example 12: getArrowFieldFromHivePrimitiveType
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static Field getArrowFieldFromHivePrimitiveType(String name, TypeInfo typeInfo) {
switch (typeInfo.getCategory()) {
case PRIMITIVE:
PrimitiveTypeInfo pTypeInfo = (PrimitiveTypeInfo) typeInfo;
switch (pTypeInfo.getPrimitiveCategory()) {
case BOOLEAN:
return new Field(name, true, new Bool(), null);
case BYTE:
case SHORT:
case INT:
return new Field(name, true, new Int(32, true), null);
case LONG:
return new Field(name, true, new Int(64, true), null);
case FLOAT:
return new Field(name, true, new FloatingPoint(FloatingPointPrecision.SINGLE), null);
case DOUBLE:
return new Field(name, true, new FloatingPoint(FloatingPointPrecision.DOUBLE), null);
case DATE:
return new Field(name, true, new Date(DateUnit.MILLISECOND), null);
case TIMESTAMP:
return new Field(name, true, new Timestamp(TimeUnit.MILLISECOND, null), null);
case BINARY:
return new Field(name, true, new Binary(), null);
case DECIMAL: {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) pTypeInfo;
return new Field(name, true, new Decimal(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()), null);
}
case STRING:
case VARCHAR:
case CHAR: {
return new Field(name, true, new Utf8(), null);
}
case UNKNOWN:
case VOID:
default:
// fall through.
}
default:
}
return null;
}
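Two notes on Example 12: the two-argument Arrow Decimal(precision, scale) constructor reflects the older Arrow API (recent Arrow releases also take an explicit bit width), and BYTE/SHORT are widened to 32-bit Arrow integers, mirroring the convention in Example 10 where tinyint and smallint are read as INT.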
Example 13: getPartitionValue
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
private static PartitionValue getPartitionValue(FieldSchema partitionCol, String value) {
final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(partitionCol.getType());
PartitionValue out = new PartitionValue();
out.setColumn(partitionCol.getName());
if("__HIVE_DEFAULT_PARTITION__".equals(value)){
return out;
}
switch (typeInfo.getCategory()) {
case PRIMITIVE:
final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
switch (primitiveTypeInfo.getPrimitiveCategory()) {
case BINARY:
return out.setBinaryValue(ByteString.copyFrom(value.getBytes()));
case BOOLEAN:
return out.setBitValue(Boolean.parseBoolean(value));
case DOUBLE:
return out.setDoubleValue(Double.parseDouble(value));
case FLOAT:
return out.setFloatValue(Float.parseFloat(value));
case BYTE:
case SHORT:
case INT:
return out.setIntValue(Integer.parseInt(value));
case LONG:
return out.setLongValue(Long.parseLong(value));
case STRING:
case VARCHAR:
return out.setStringValue(value);
case CHAR:
return out.setStringValue(value.trim());
case TIMESTAMP:
return out.setLongValue(DateTimes.toMillisFromJdbcTimestamp(value));
case DATE:
return out.setLongValue(DateTimes.toMillisFromJdbcDate(value));
case DECIMAL:
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
if (decimalTypeInfo.getPrecision() > 38) {
throw UserException.unsupportedError()
.message("Dremio only supports decimals up to 38 digits in precision. This Hive table has a partition value with precision of %d digits.", decimalTypeInfo.getPrecision())
.build(logger);
}
HiveDecimal decimal = HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(value), decimalTypeInfo.precision(), decimalTypeInfo.scale());
final BigDecimal original = decimal.bigDecimalValue();
// we can't just use unscaledValue() since BigDecimal doesn't store trailing zeroes and we need to ensure decoding includes the correct scale.
final BigInteger unscaled = original.movePointRight(decimalTypeInfo.scale()).unscaledValue();
return out.setBinaryValue(ByteString.copyFrom(DecimalTools.signExtend16(unscaled.toByteArray())));
default:
HiveUtilities.throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
}
default:
HiveUtilities.throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
}
return null; // unreachable
}
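The decimal tail of Example 13 encodes the partition value as a fixed-width binary integer: movePointRight(scale) turns the value into its unscaled integer while preserving the declared scale (BigDecimal alone would drop trailing zeroes), and DecimalTools.signExtend16, a Dremio helper, pads the big-endian two's-complement bytes to 16 bytes with the sign byte so the reader can decode at full 38-digit width.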
Example 14: convertPartitionType
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
/** Partition value is received in string format. Convert it into appropriate object based on the type. */
public static Object convertPartitionType(TypeInfo typeInfo, String value, final String defaultPartitionValue) {
if (typeInfo.getCategory() != Category.PRIMITIVE) {
// In Hive only primitive types are allowed as partition column types.
throw new DrillRuntimeException("Non-Primitive types are not allowed as partition column type in Hive, " +
"but received one: " + typeInfo.getCategory());
}
if (defaultPartitionValue.equals(value)) {
return null;
}
final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
try {
switch (pCat) {
case BINARY:
return value.getBytes();
case BOOLEAN:
return Boolean.parseBoolean(value);
case DECIMAL: {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(value),
decimalTypeInfo.precision(), decimalTypeInfo.scale());
}
case DOUBLE:
return Double.parseDouble(value);
case FLOAT:
return Float.parseFloat(value);
case BYTE:
case SHORT:
case INT:
return Integer.parseInt(value);
case LONG:
return Long.parseLong(value);
case STRING:
case VARCHAR:
return value.getBytes();
case CHAR:
return value.trim().getBytes();
case TIMESTAMP:
return Timestamp.valueOf(value);
case DATE:
return Date.valueOf(value);
}
} catch(final Exception e) {
// In Hive, partition values that can't be converted from string are considered to be NULL.
logger.trace("Failed to interpret '{}' value from partition value string '{}'", pCat, value);
return null;
}
throwUnsupportedHiveDataTypeError(pCat.toString());
return null;
}
Example 15: getMinorTypeFromHivePrimitiveTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the required package/class
public static TypeProtos.MinorType getMinorTypeFromHivePrimitiveTypeInfo(PrimitiveTypeInfo primitiveTypeInfo,
OptionManager options) {
switch(primitiveTypeInfo.getPrimitiveCategory()) {
case BINARY:
return TypeProtos.MinorType.VARBINARY;
case BOOLEAN:
return TypeProtos.MinorType.BIT;
case DECIMAL: {
if (!options.getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val) {
throw UserException.unsupportedError()
.message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
.build(logger);
}
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
return DecimalUtility.getDecimalDataType(decimalTypeInfo.precision());
}
case DOUBLE:
return TypeProtos.MinorType.FLOAT8;
case FLOAT:
return TypeProtos.MinorType.FLOAT4;
// TODO (DRILL-2470)
// Byte and short (tinyint and smallint in SQL types) are currently read as integers
// as these smaller integer types are not fully supported in Drill today.
case SHORT:
case BYTE:
case INT:
return TypeProtos.MinorType.INT;
case LONG:
return TypeProtos.MinorType.BIGINT;
case STRING:
case VARCHAR:
case CHAR:
return TypeProtos.MinorType.VARCHAR;
case TIMESTAMP:
return TypeProtos.MinorType.TIMESTAMP;
case DATE:
return TypeProtos.MinorType.DATE;
}
throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
return null;
}