本文整理汇总了Java中org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo方法的典型用法代码示例。如果您正苦于以下问题：Java TypeInfoFactory.intTypeInfo方法的具体用法？Java TypeInfoFactory.intTypeInfo怎么用？Java TypeInfoFactory.intTypeInfo使用的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory的用法示例。
在下文中一共展示了TypeInfoFactory.intTypeInfo方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Resolves the Hive {@link TypeInfo} for a named field type.
 *
 * <p>Text-like types map to string, numeric and date types map to their
 * primitive counterparts, and geo types map to a (latitude, longitude)
 * struct of floats. Anything unrecognized falls back to string.
 *
 * @param fieldType the field-type name to resolve
 * @return the corresponding Hive type info, never {@code null}
 */
private TypeInfo getTypeInfo(String fieldType) {
  final TypeInfo resolved;
  if (fieldType.equals(TEXT) || fieldType.equals(STRING) || fieldType.equals(STORED)) {
    resolved = TypeInfoFactory.stringTypeInfo;
  } else if (fieldType.equals(LONG)) {
    resolved = TypeInfoFactory.longTypeInfo;
  } else if (fieldType.equals(INT)) {
    resolved = TypeInfoFactory.intTypeInfo;
  } else if (fieldType.equals(FLOAT)) {
    resolved = TypeInfoFactory.floatTypeInfo;
  } else if (fieldType.equals(DOUBLE)) {
    resolved = TypeInfoFactory.doubleTypeInfo;
  } else if (fieldType.equals(DATE)) {
    resolved = TypeInfoFactory.dateTypeInfo;
  } else if (fieldType.equals(GEO_POINTVECTOR) || fieldType.equals(GEO_RECURSIVEPREFIX)
      || fieldType.equals(GEO_TERMPREFIX)) {
    // Geo point types are modeled as a struct of two floats: {latitude, longitude}.
    List<TypeInfo> memberTypes = Arrays.asList((TypeInfo) TypeInfoFactory.floatTypeInfo,
        (TypeInfo) TypeInfoFactory.floatTypeInfo);
    resolved = TypeInfoFactory.getStructTypeInfo(Arrays.asList(LATITUDE, LONGITUDE), memberTypes);
  } else {
    // Anything that is not a built-in type is represented as a string.
    resolved = TypeInfoFactory.stringTypeInfo;
  }
  return resolved;
}
示例2: pushdownTuple
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Verifies that an int-equality filter ({@code field1 = 5}) is translated into
 * exactly one search condition whose pushdown tuple carries the 4-byte encoded
 * constant, the {@code Equal} comparison op, and the int comparator.
 */
@Test
public void pushdownTuple() {
  setup();
  // Build the expression tree for: field1 = 5
  ExprNodeDesc field = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
  ExprNodeDesc five = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  List<ExprNodeDesc> args = Lists.newArrayList(field, five);
  ExprNodeDesc equalsNode =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), args);
  assertNotNull(equalsNode);
  // Install the serialized filter expression on the job configuration.
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(equalsNode));
  try {
    List<IndexSearchCondition> conditions = handler.getSearchConditions(conf);
    assertEquals(conditions.size(), 1);
    AccumuloPredicateHandler.PushdownTuple tuple =
        new AccumuloPredicateHandler.PushdownTuple(conditions.get(0));
    // The constant 5 should have been serialized as a 4-byte big-endian int.
    byte[] expected = new byte[4];
    ByteBuffer.wrap(expected).putInt(5);
    assertEquals(tuple.getConstVal(), expected);
    assertEquals(tuple.getcOpt().getClass(), Equal.class);
    assertEquals(tuple.getpCompare().getClass(), IntCompare.class);
  } catch (Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
示例3: getTypeSupportData
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Data provider for the type-support tests.
 *
 * <p>Each row pairs a {@link TypeInfo} with the boolean result expected from
 * the type-support check, covering primitives, parameterized types, and
 * nested struct/map/list/union combinations.
 *
 * @return the required arguments for the test
 * {@link MonarchPredicateHandlerTest#testIsMonarchTypeSupported(TypeInfo, boolean)}
 */
@DataProvider
public static Object[][] getTypeSupportData() {
return new Object[][]{
// Primitive types.
{TypeInfoFactory.intTypeInfo, true},
{TypeInfoFactory.binaryTypeInfo, true},
{TypeInfoFactory.longTypeInfo, true},
{TypeInfoFactory.floatTypeInfo, true},
// NOTE(review): this row is disabled; presumably unknownTypeInfo support is
// undecided or changed between Hive versions — confirm before re-enabling.
// {TypeInfoFactory.unknownTypeInfo, false},
// Parameterized primitive types.
{TypeInfoFactory.getDecimalTypeInfo(20, 10), true},
{TypeInfoFactory.getCharTypeInfo(200), true},
// Struct containing a single-member union.
{TypeInfoFactory.getStructTypeInfo(Arrays.asList("c1", "c2"),
Arrays.asList(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.getUnionTypeInfo(
Collections.singletonList(TypeInfoFactory.longTypeInfo)))), true},
// Struct of date and decimal members.
{TypeInfoFactory.getStructTypeInfo(Arrays.asList("c1", "c2"),
Arrays.asList(TypeInfoFactory.dateTypeInfo, TypeInfoFactory.decimalTypeInfo)), true},
// Maps with assorted key/value types.
{TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo,
TypeInfoFactory.timestampTypeInfo), true},
{TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.doubleTypeInfo,
TypeInfoFactory.getCharTypeInfo(100)), true},
{TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.doubleTypeInfo,
TypeInfoFactory.getVarcharTypeInfo(100)), true},
// List of lists (nested collection).
{TypeInfoFactory.getListTypeInfo(
TypeInfoFactory.getListTypeInfo(TypeInfoFactory.shortTypeInfo)), true},
// Struct containing a multi-member union with a nested list.
{TypeInfoFactory.getStructTypeInfo(Arrays.asList("c1", "c2"),
Arrays.asList(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.getUnionTypeInfo(
Arrays.asList(TypeInfoFactory.decimalTypeInfo,
TypeInfoFactory.getListTypeInfo(TypeInfoFactory.shortTypeInfo))))), true},
{TypeInfoFactory.getVarcharTypeInfo(200), true},
};
}
示例4: ignoreIteratorPushdown
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Verifies that no Accumulo iterators are produced when iterator pushdown is
 * explicitly disabled, even though a valid AND-ed filter expression is set.
 */
@Test
public void ignoreIteratorPushdown() {
  setup();
  // Declare the table layout: three columns mapped onto Accumulo locations.
  conf.set(serdeConstants.LIST_COLUMNS, "field1,field2,rid");
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string");
  conf.set(AccumuloSerde.COLUMN_MAPPINGS, "cf|f1,cf|f2,rowID");
  // Predicate 1: field1 <= "aaa"
  ExprNodeDesc stringCol =
      new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
  ExprNodeDesc stringConst = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  ExprNodeDesc lessOrEqual = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPEqualOrLessThan(), Lists.newArrayList(stringCol, stringConst));
  assertNotNull(lessOrEqual);
  // Predicate 2: field2 > 5
  ExprNodeDesc intCol =
      new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false);
  ExprNodeDesc intConst = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  ExprNodeDesc greaterThan = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPGreaterThan(), Lists.newArrayList(intCol, intConst));
  assertNotNull(greaterThan);
  // AND the two predicates together and install them as the filter expression.
  ExprNodeDesc conjunction = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPAnd(), Lists.newArrayList(lessOrEqual, greaterThan));
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(conjunction));
  // Disable iterator pushdown; the handler must then emit zero iterators.
  conf.setBoolean(AccumuloSerde.NO_ITERATOR_PUSHDOWN, true);
  try {
    List<IteratorSetting> iterators = handler.getIterators(conf);
    assertEquals(iterators.size(), 0);
  } catch (Exception e) {
    fail(StringUtils.stringifyException(e));
  }
}
示例5: PhoenixIntObjectInspector
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Creates an object inspector backed by Hive's primitive int type info.
 */
public PhoenixIntObjectInspector() {
super(TypeInfoFactory.intTypeInfo);
}
示例6: getTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; //导入方法依赖的package包/类
/**
 * Returns Hive's int type info for this value.
 *
 * <p>NOTE(review): the cast appears redundant where {@code intTypeInfo} is
 * already declared as {@link PrimitiveTypeInfo} — hence the "RedundantCast"
 * suppression; presumably it is kept for source compatibility with Hive
 * versions that declare the field as a plain {@code TypeInfo}. Confirm before
 * removing.
 */
@SuppressWarnings({"override", "UnusedDeclaration", "RedundantCast"}) // FB Hive
public PrimitiveTypeInfo getTypeInfo() {
return (PrimitiveTypeInfo) TypeInfoFactory.intTypeInfo;
}