This page collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo.scale. If you are wondering what DecimalTypeInfo.scale does, how to call it, or where it is used in practice, the curated examples below should help; you can also explore the enclosing class, org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo, for more context.
The 2 code examples of the DecimalTypeInfo.scale method shown below are ordered by popularity.
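Before the examples, here is a minimal, self-contained sketch of the method itself: scale() returns the scale component of a Hive decimal(precision, scale) type, and precision() returns the precision. The sketch assumes the standard Hive serde2 TypeInfoFactory.getDecimalTypeInfo factory; the class name DecimalScaleDemo and the chosen decimal(10, 2) type are illustrative only.

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalScaleDemo {
  public static void main(String[] args) {
    // Build a Hive decimal(10, 2) type: precision 10, scale 2.
    DecimalTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
    System.out.println(decimalTypeInfo.precision()); // prints 10
    System.out.println(decimalTypeInfo.scale());     // prints 2
  }
}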
Example 1: create
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the package/class the method depends on

public static HiveFieldConverter create(TypeInfo typeInfo, FragmentContext fragmentContext)
    throws IllegalAccessException, InstantiationException {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE:
      final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
      if (pCat != PrimitiveCategory.DECIMAL) {
        Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
        if (clazz != null) {
          return clazz.newInstance();
        }
      } else {
        // For decimal, return the appropriate converter based on precision.
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
        int precision = decimalTypeInfo.precision();
        int scale = decimalTypeInfo.scale();
        if (precision <= 9) {
          return new Decimal9(precision, scale);
        } else if (precision <= 18) {
          return new Decimal18(precision, scale);
        } else if (precision <= 28) {
          return new Decimal28(precision, scale, fragmentContext);
        } else {
          return new Decimal38(precision, scale, fragmentContext);
        }
      }
      throwUnsupportedHiveDataTypeError(pCat.toString());
      break;
    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }
  return null;
}
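The only DecimalTypeInfo-specific work in Example 1 is reading precision() and scale() and then bucketing on precision. The sketch below isolates that bucketing so it can be read without Drill's Decimal9/18/28/38 converter classes or FragmentContext; the string labels merely stand in for those converters, and the helper name pickDecimalConverter is hypothetical.

// Hypothetical helper that mirrors Example 1's precision buckets.
static String pickDecimalConverter(int precision) {
  if (precision <= 9) {
    return "Decimal9";   // up to 9 decimal digits fits in a 32-bit int
  } else if (precision <= 18) {
    return "Decimal18";  // up to 18 decimal digits fits in a 64-bit long
  } else if (precision <= 28) {
    return "Decimal28";
  } else {
    return "Decimal38";
  }
}

For instance, a Hive column declared decimal(12, 4) has precision 12 and scale 4, so it falls into the Decimal18 bucket.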
Example 2: create
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; // import the package/class the method depends on

public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext context)
    throws IllegalAccessException, InstantiationException {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE:
      final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
      if (pCat != PrimitiveCategory.DECIMAL) {
        Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
        if (clazz != null) {
          return clazz.newInstance();
        }
      } else {
        // For decimal, return the appropriate converter based on precision and scale.
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
        int precision = decimalTypeInfo.precision();
        int scale = decimalTypeInfo.scale();
        return new Decimal(precision, scale, context);
      }
      throwUnsupportedHiveDataTypeError(pCat.toString());
      break;
    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }
  return null;
}
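Example 2 is nearly identical to Example 1: it takes an OperatorContext instead of a FragmentContext, and every decimal, regardless of precision, is handed to a single Decimal(precision, scale, context) converter rather than the precision-bucketed Decimal9/18/28/38 classes of Example 1. The precision() and scale() calls on DecimalTypeInfo are used the same way in both examples.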