本文整理汇总了Java中org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.floatTypeInfo方法的典型用法代码示例。如果您正苦于以下问题：Java TypeInfoFactory.floatTypeInfo方法的具体用法？Java TypeInfoFactory.floatTypeInfo怎么用？Java TypeInfoFactory.floatTypeInfo使用的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory的用法示例。
在下文中一共展示了TypeInfoFactory.floatTypeInfo方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Resolves a field type name to the matching Hive {@link TypeInfo}.
 *
 * <p>Textual types map to string, numeric/date types map to their primitive
 * counterparts, and geo point types become a two-field float struct
 * (latitude, longitude). Any unrecognized type falls back to string.
 */
private TypeInfo getTypeInfo(String fieldType) {
  // All textual representations share the plain Hive string type.
  if (fieldType.equals(TEXT) || fieldType.equals(STRING) || fieldType.equals(STORED)) {
    return TypeInfoFactory.stringTypeInfo;
  }
  if (fieldType.equals(LONG)) {
    return TypeInfoFactory.longTypeInfo;
  }
  if (fieldType.equals(INT)) {
    return TypeInfoFactory.intTypeInfo;
  }
  if (fieldType.equals(FLOAT)) {
    return TypeInfoFactory.floatTypeInfo;
  }
  if (fieldType.equals(DOUBLE)) {
    return TypeInfoFactory.doubleTypeInfo;
  }
  if (fieldType.equals(DATE)) {
    return TypeInfoFactory.dateTypeInfo;
  }
  boolean isGeoPoint = fieldType.equals(GEO_POINTVECTOR)
      || fieldType.equals(GEO_RECURSIVEPREFIX)
      || fieldType.equals(GEO_TERMPREFIX);
  if (isGeoPoint) {
    // Geo points are modeled as a struct of two floats: (latitude, longitude).
    List<TypeInfo> coordinateTypes = Arrays.<TypeInfo>asList(
        TypeInfoFactory.floatTypeInfo, TypeInfoFactory.floatTypeInfo);
    return TypeInfoFactory.getStructTypeInfo(Arrays.asList(LATITUDE, LONGITUDE), coordinateTypes);
  }
  // Return string for anything that is not a built in type.
  return TypeInfoFactory.stringTypeInfo;
}
示例2: getTypeSupportData
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Data provider for the type-support tests.
 *
 * @return the required arguments for the test
 * {@link MonarchPredicateHandlerTest#testIsMonarchTypeSupported(TypeInfo, boolean)}
 */
@DataProvider
public static Object[][] getTypeSupportData() {
  // Composite types reused across several rows below.
  final TypeInfo shortListType =
      TypeInfoFactory.getListTypeInfo(TypeInfoFactory.shortTypeInfo);
  return new Object[][]{
      // Simple primitives.
      {TypeInfoFactory.intTypeInfo, true},
      {TypeInfoFactory.binaryTypeInfo, true},
      {TypeInfoFactory.longTypeInfo, true},
      {TypeInfoFactory.floatTypeInfo, true},
      // {TypeInfoFactory.unknownTypeInfo, false},
      // Parameterized primitives.
      {TypeInfoFactory.getDecimalTypeInfo(20, 10), true},
      {TypeInfoFactory.getCharTypeInfo(200), true},
      // Structs, including one wrapping a union.
      {TypeInfoFactory.getStructTypeInfo(Arrays.asList("c1", "c2"),
          Arrays.asList(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.getUnionTypeInfo(
              Collections.singletonList(TypeInfoFactory.longTypeInfo)))), true},
      {TypeInfoFactory.getStructTypeInfo(Arrays.asList("c1", "c2"),
          Arrays.asList(TypeInfoFactory.dateTypeInfo, TypeInfoFactory.decimalTypeInfo)), true},
      // Maps with assorted key/value types.
      {TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo,
          TypeInfoFactory.timestampTypeInfo), true},
      {TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.doubleTypeInfo,
          TypeInfoFactory.getCharTypeInfo(100)), true},
      {TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.doubleTypeInfo,
          TypeInfoFactory.getVarcharTypeInfo(100)), true},
      // Nested list and a struct with a multi-member union.
      {TypeInfoFactory.getListTypeInfo(shortListType), true},
      {TypeInfoFactory.getStructTypeInfo(Arrays.asList("c1", "c2"),
          Arrays.asList(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.getUnionTypeInfo(
              Arrays.asList(TypeInfoFactory.decimalTypeInfo, shortListType)))), true},
      {TypeInfoFactory.getVarcharTypeInfo(200), true},
  };
}
示例3: convertFilter
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Converts an Ampool {@link SingleColumnValueFilter} to the respective ORC predicate.
 *
 * @param f the Ampool filter
 * @param td the table descriptor
 * @param udf the generic UDF corresponding to the Ampool filter
 * @return the ORC predicate
 */
private static ExprNodeDesc convertFilter(final SingleColumnValueFilter f,
    final TableDescriptor td, final GenericUDF udf) {
  final String columnName = f.getColumnNameString();
  final String ampoolType = td.getColumnByName(columnName).getColumnType().toString();
  final TypeInfo hiveType = OrcTypeMap.get(ampoolType);

  // For float columns the constant is widened to double before building the
  // literal node; all other types pass the filter value through unchanged.
  Object literal = f.getValue();
  if (hiveType == TypeInfoFactory.floatTypeInfo) {
    literal = ((Number) literal).doubleValue();
  }

  final ExprNodeDesc columnNode = new ExprNodeColumnDesc(hiveType, columnName, columnName, false);
  final ExprNodeDesc constantNode = new ExprNodeConstantDesc(hiveType, literal);

  // Predicate is a boolean function node with (column, constant) as children.
  final ExprNodeDesc predicate =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, new ArrayList<>(2));
  predicate.getChildren().add(columnNode);
  predicate.getChildren().add(constantNode);
  return predicate;
}
示例4: PhoenixFloatObjectInspector
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Creates an object inspector for Hive {@code float} columns by registering
 * {@link TypeInfoFactory#floatTypeInfo} with the superclass.
 */
public PhoenixFloatObjectInspector() {
super(TypeInfoFactory.floatTypeInfo);
}
示例5: getTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
@SuppressWarnings({"override", "UnusedDeclaration", "RedundantCast"}) // FB Hive
public PrimitiveTypeInfo getTypeInfo() {
  // Cast kept (and RedundantCast suppressed) — presumably the field's declared
  // type differs across Hive versions; confirm before removing.
  final PrimitiveTypeInfo floatInfo = (PrimitiveTypeInfo) TypeInfoFactory.floatTypeInfo;
  return floatInfo;
}