本文整理汇总了Java中org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo类的典型用法代码示例。如果您正苦于以下问题:Java PrimitiveTypeInfo类的具体用法?Java PrimitiveTypeInfo怎么用?Java PrimitiveTypeInfo使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
PrimitiveTypeInfo类属于org.apache.hadoop.hive.serde2.typeinfo包,在下文中一共展示了PrimitiveTypeInfo类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: MDSMapObjectInspector
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
else{
throw new RuntimeException( "Map key type is string only." );
}
valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() );
if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
}
else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
}
else{
getField = new NestedGetField();
}
}
示例2: checkArgumentTypes
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Validates UDF arguments: exactly two parameters, each a primitive of an
 * accepted numeric category.
 *
 * @param parameters resolved argument types, in call order
 * @throws UDFArgumentTypeException if the count, category, or numeric check fails
 */
private static void checkArgumentTypes(TypeInfo[] parameters) throws UDFArgumentTypeException {
  if (parameters.length != 2) {
    throw new UDFArgumentTypeException(parameters.length - 1,
        "Exactly two arguments are expected.");
  }
  // Both arguments must be primitives before the categories can be read.
  for (int i = 0; i < 2; i++) {
    if (parameters[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "Only primitive type arguments are accepted but "
          + parameters[i].getTypeName() + " is passed.");
    }
  }
  // Both primitives must additionally be of an accepted numeric category.
  for (int i = 0; i < 2; i++) {
    if (!acceptedPrimitiveCategory(((PrimitiveTypeInfo) parameters[i]).getPrimitiveCategory())) {
      throw new UDFArgumentTypeException(i, "Only numeric type arguments are accepted but "
          + parameters[i].getTypeName() + " is passed.");
    }
  }
}
示例3: isSupportedPrimitive
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
* Determines if the given primitive is supported by this deserializer. At this time the only exclusions are
* BINARY, DECIMAL, VARCHAR, CHAR, and UNKNOWN.
*/
/**
 * Determines if the given primitive is supported by this deserializer. At this time the only
 * exclusions are BINARY, DECIMAL, VARCHAR, CHAR, and UNKNOWN (all fall through to the default).
 */
private boolean isSupportedPrimitive(PrimitiveTypeInfo type) {
  final boolean supported;
  switch (type.getPrimitiveCategory()) {
    case VOID: case STRING: case BOOLEAN:
    case BYTE: case SHORT: case INT: case LONG:
    case FLOAT: case DOUBLE:
    case DATE: case TIMESTAMP:
      supported = true;
      break;
    default:
      supported = false;
      break;
  }
  return supported;
}
示例4: deserializePrimitive
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
* Deserializes a primitive to its corresponding Java type, doing a best-effort conversion when necessary.
*/
/**
 * Deserializes a primitive to its corresponding Java type, doing a best-effort conversion
 * when necessary.
 *
 * @param type  target Hive primitive type
 * @param value raw value to convert
 * @throws SerDeException for categories this deserializer does not handle
 */
private Object deserializePrimitive(PrimitiveTypeInfo type, Object value)
    throws SerDeException {
  switch (type.getPrimitiveCategory()) {
    case VOID:
      return null;
    case BOOLEAN:
      return deserializeBoolean(value);
    case STRING:
      return deserializeString(value);
    case DATE:
    case TIMESTAMP:
      return deserializeDate(value, type);
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case FLOAT:
    case DOUBLE:
      return deserializeNumber(value, type);
    default:
      throw new SerDeException("Unsupported type: " + type.getPrimitiveCategory());
  }
}
示例5: deserializeNumber
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Converts a raw value to the numeric Java type matching {@code type}.
 * Only {@link Number} and {@link Boolean} inputs are convertible; in
 * particular, String representations of numbers are rejected.
 */
private Object deserializeNumber(Object value, PrimitiveTypeInfo type)
    throws SerDeException {
  final Number number;
  if (value instanceof Number) {
    number = (Number) value;
  } else if (value instanceof Boolean) {
    // Booleans map to 1/0.
    number = ((Boolean) value).booleanValue() ? 1 : 0;
  } else {
    throw new SerDeException("Value is not a " + type + ": " + value);
  }
  switch (type.getPrimitiveCategory()) {
    case BYTE:
      return number.byteValue();
    case SHORT:
      return number.shortValue();
    case INT:
      return number.intValue();
    case LONG:
      return number.longValue();
    case FLOAT:
      return number.floatValue();
    case DOUBLE:
      return number.doubleValue();
  }
  // Callers only pass numeric categories, so this should never be reached.
  throw new SerDeException("Primitive number did not match any expected categories");
}
示例6: deserializeDate
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Converts a raw value to a Date or Timestamp. Accepted inputs are ISO8601
 * strings, numeric epoch-millisecond timestamps, and java.util.Date
 * instances; anything else fails.
 */
private Object deserializeDate(Object value, PrimitiveTypeInfo type)
    throws SerDeException {
  final long millis;
  if (value instanceof Number) {
    millis = ((Number) value).longValue();
  } else if (value instanceof java.util.Date) {
    millis = ((java.util.Date) value).getTime();
  } else if (value instanceof String) {
    try {
      millis = JsonHelper.parseTimestamp((String) value).getTime();
    } catch (Exception e) {
      // NOTE(review): the original drops the parse exception cause; consider
      // chaining `e` if SerDeException supports it — confirm before changing.
      throw new SerDeException("Invalid time string: " + value);
    }
  } else {
    throw new SerDeException("Invalid time value: " + value);
  }
  return type.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE
      ? new Date(millis)
      : new Timestamp(millis);
}
示例7: testThreeArgument
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/** Initialization must succeed with the three-argument form: line, mode, stopWords. */
@Test
public void testThreeArgument() throws UDFArgumentException, IOException {
  final GenericUDF udf = new KuromojiUDF();

  final PrimitiveTypeInfo stringTypeInfo = new PrimitiveTypeInfo();
  stringTypeInfo.setTypeName("string");

  final ObjectInspector[] argOIs = new ObjectInspector[] {
      // line
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      // mode
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          stringTypeInfo, null),
      // stopWords
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null)
  };

  udf.initialize(argOIs);
  udf.close();
}
示例8: testFourArgument
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/** Initialization must succeed with the four-argument form: line, mode, stopWords, stopTags. */
@Test
public void testFourArgument() throws UDFArgumentException, IOException {
  final GenericUDF udf = new KuromojiUDF();

  final PrimitiveTypeInfo stringTypeInfo = new PrimitiveTypeInfo();
  stringTypeInfo.setTypeName("string");

  final ObjectInspector[] argOIs = new ObjectInspector[] {
      // line
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      // mode
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          stringTypeInfo, null),
      // stopWords
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null),
      // stopTags
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null)
  };

  udf.initialize(argOIs);
  udf.close();
}
示例9: testFiveArgumentArray
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Initialization must succeed with five arguments where userDictUrl is given
 * as a constant list: line, mode, stopWords, stopTags, userDictUrl.
 */
@Test
public void testFiveArgumentArray() throws UDFArgumentException, IOException {
  final GenericUDF udf = new KuromojiUDF();

  final PrimitiveTypeInfo stringTypeInfo = new PrimitiveTypeInfo();
  stringTypeInfo.setTypeName("string");

  final ObjectInspector[] argOIs = new ObjectInspector[] {
      // line
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      // mode
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          stringTypeInfo, null),
      // stopWords
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null),
      // stopTags
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null),
      // userDictUrl
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null)
  };

  udf.initialize(argOIs);
  udf.close();
}
示例10: testFiveArgumenString
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Initialization must succeed with five arguments where userDictUrl is given
 * as a constant string: line, mode, stopWords, stopTags, userDictUrl.
 * NOTE(review): method name typo ("Argumen") is kept — it is the public test name.
 */
@Test
public void testFiveArgumenString() throws UDFArgumentException, IOException {
  final GenericUDF udf = new KuromojiUDF();

  final PrimitiveTypeInfo stringTypeInfo = new PrimitiveTypeInfo();
  stringTypeInfo.setTypeName("string");

  final ObjectInspector[] argOIs = new ObjectInspector[] {
      // line
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      // mode
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          stringTypeInfo, null),
      // stopWords
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null),
      // stopTags
      ObjectInspectorFactory.getStandardConstantListObjectInspector(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, null),
      // userDictUrl
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          stringTypeInfo, null)
  };

  udf.initialize(argOIs);
  udf.close();
}
示例11: isNumberTypeInfo
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Returns true when {@code typeInfo} is a numeric primitive
 * (BYTE/SHORT/INT/LONG/FLOAT/DOUBLE/DECIMAL); false for every other
 * primitive category and for all non-primitive categories.
 */
public static boolean isNumberTypeInfo(@Nonnull TypeInfo typeInfo) {
  if (typeInfo.getCategory() != ObjectInspector.Category.PRIMITIVE) {
    return false;
  }
  final boolean numeric;
  switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
    case BYTE: case SHORT: case INT: case LONG:
    case FLOAT: case DOUBLE: case DECIMAL:
      numeric = true;
      break;
    default:
      numeric = false;
      break;
  }
  return numeric;
}
示例12: isSupportedType
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Recursively checks whether a Hive type is representable here: primitives
 * must map to a known primitive type, and container types (map/list/struct)
 * are supported only when all of their component types are.
 */
public static boolean isSupportedType(TypeInfo typeInfo)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE: {
            PrimitiveObjectInspector.PrimitiveCategory category =
                    ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            return getPrimitiveType(category) != null;
        }
        case MAP: {
            MapTypeInfo mapType = checkType(typeInfo, MapTypeInfo.class, "typeInfo");
            return isSupportedType(mapType.getMapKeyTypeInfo())
                    && isSupportedType(mapType.getMapValueTypeInfo());
        }
        case LIST: {
            ListTypeInfo listType = checkType(typeInfo, ListTypeInfo.class, "typeInfo");
            return isSupportedType(listType.getListElementTypeInfo());
        }
        case STRUCT: {
            StructTypeInfo structType = checkType(typeInfo, StructTypeInfo.class, "typeInfo");
            for (TypeInfo fieldType : structType.getAllStructFieldTypeInfos()) {
                if (!isSupportedType(fieldType)) {
                    return false;
                }
            }
            return true;
        }
    }
    // UNION and any future categories are unsupported.
    return false;
}
示例13: worker
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Dispatches deserialization of one column to the handler for its Hive
 * category. MAP is rejected because the cobol layout cannot express it.
 */
private Object worker(String columnName, TypeInfo columnType){
  switch(columnType.getCategory()) {
    case PRIMITIVE:
      return deserializePrimitive(columnName, (PrimitiveTypeInfo) columnType);
    case LIST:
      return deserializeList(columnName, (ListTypeInfo) columnType);
    case STRUCT:
      return deserializeStruct(columnName, (StructTypeInfo) columnType);
    case UNION:
      return deserializeUnion(columnName, (UnionTypeInfo) columnType);
    case MAP:
      // Maps have no cobol-layout representation.
      throw new RuntimeException("map type is not possible for cobol layout" + columnType.getCategory());
    default:
      throw new RuntimeException("Unknown TypeInfo: " + columnType.getCategory());
  }
}
示例14: createObjectInspectorWorker
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Recursively builds a Java object inspector for the given type. Supports
 * primitives, structs, and lists; every other category fails.
 *
 * @throws SerDeException for categories with no inspector mapping
 */
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
  switch (ti.getCategory()) {
    case PRIMITIVE:
      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector((PrimitiveTypeInfo) ti);
    case STRUCT: {
      StructTypeInfo structInfo = (StructTypeInfo) ti;
      List<TypeInfo> fieldTypes = structInfo.getAllStructFieldTypeInfos();
      List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>(fieldTypes.size());
      for (TypeInfo fieldType : fieldTypes) {
        fieldInspectors.add(createObjectInspectorWorker(fieldType));
      }
      return ObjectInspectorFactory.getStandardStructObjectInspector(
          structInfo.getAllStructFieldNames(), fieldInspectors);
    }
    case LIST:
      return ObjectInspectorFactory.getStandardListObjectInspector(
          createObjectInspectorWorker(((ListTypeInfo) ti).getListElementTypeInfo()));
    default:
      throw new SerDeException("No Hive categories matched for [" + ti + "]");
  }
}
示例15: create
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入依赖的package包/类
/**
 * Chooses the column-vector assignor for a primitive Hive type: integral and
 * boolean types go to long vectors, floating types to double vectors, and
 * string/binary to bytes vectors. Everything else is unsupported.
 *
 * @throws UnsupportedOperationException for non-primitive categories and for
 *         primitives without a vectorized representation
 */
public static IColumnVectorAssignor create( final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case PRIMITIVE:
      final PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo)typeInfo;
      switch( primitiveType.getPrimitiveCategory() ){
        case BOOLEAN:
          return new LongColumnVectorAssignor( LongPrimitiveSetter.getInstance() );
        case BYTE:
          return new LongColumnVectorAssignor( BytePrimitiveSetter.getInstance() );
        case SHORT:
          return new LongColumnVectorAssignor( ShortPrimitiveSetter.getInstance() );
        case INT:
          return new LongColumnVectorAssignor( IntegerPrimitiveSetter.getInstance() );
        case LONG:
          return new LongColumnVectorAssignor( LongPrimitiveSetter.getInstance() );
        case FLOAT:
          return new DoubleColumnVectorAssignor( FloatPrimitiveSetter.getInstance() );
        case DOUBLE:
          return new DoubleColumnVectorAssignor( DoublePrimitiveSetter.getInstance() );
        case STRING:
        case BINARY:
          return new BytesColumnVectorAssignor();
        default:
          // DATE, DECIMAL, TIMESTAMP, VOID, and anything new.
          throw new UnsupportedOperationException( "Unsupport vectorize column " + primitiveType.getPrimitiveCategory() );
      }
    default:
      // STRUCT, MAP, LIST, UNION, and anything new.
      throw new UnsupportedOperationException( "Unsupport vectorize column " + typeInfo.getCategory() );
  }
}