

Java TypeInfo.getCategory Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.getCategory. If you are wondering what TypeInfo.getCategory does, how to call it, or where to find real-world examples, the curated code samples below should help. You can also explore further usage examples of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo, the class that declares this method.


The sections below present 15 code examples of the TypeInfo.getCategory method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
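Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: getCategory() returns an ObjectInspector.Category enum value (PRIMITIVE, LIST, MAP, STRUCT, or UNION) that is typically used to dispatch on a Hive type. The class and method names GetCategoryDemo and describe are hypothetical; only the Hive serde2 calls (TypeInfoUtils.getTypeInfoFromTypeString, TypeInfo.getCategory, TypeInfo.getTypeName) are the library's own API.

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class GetCategoryDemo {
  public static void main(String[] args) {
    // Parse a Hive type string into a TypeInfo, then dispatch on its category.
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("map<string,array<int>>");
    describe(typeInfo); // prints: complex type: MAP
  }

  private static void describe(TypeInfo typeInfo) {
    switch (typeInfo.getCategory()) {
      case PRIMITIVE:
        System.out.println("primitive type: " + typeInfo.getTypeName());
        break;
      case LIST:
      case MAP:
      case STRUCT:
      case UNION:
        System.out.println("complex type: " + typeInfo.getCategory());
        break;
      default:
        System.out.println("unknown category: " + typeInfo.getCategory());
    }
  }
}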

Example 1: MDSMapObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
  TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
  if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
    keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }
  else{
    throw new RuntimeException( "Map key type is string only." );
  }

  valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() ); // ("craete" [sic] is the method's actual name in this project)

  if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
    getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
  }
  else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
    getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
  }
  else{
    getField = new NestedGetField();
  }
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines: 22, Source: MDSMapObjectInspector.java

Example 2: isNumberTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static boolean isNumberTypeInfo(@Nonnull TypeInfo typeInfo) {
    if (typeInfo.getCategory() != ObjectInspector.Category.PRIMITIVE) {
        return false;
    }
    switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case DECIMAL:
            return true;
        default:
            return false;
    }
}
 
Developer ID: apache, Project: incubator-hivemall, Lines: 18, Source: HiveUtils.java

Example 3: getJavaObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
private static ObjectInspector getJavaObjectInspector(TypeInfo typeInfo)
{
    Category category = typeInfo.getCategory();
    if (category == PRIMITIVE) {
        return getPrimitiveJavaObjectInspector(getPrimitiveTypeInfo(typeInfo.getTypeName()));
    }
    if (category == LIST) {
        ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
        return getStandardListObjectInspector(getJavaObjectInspector(listTypeInfo.getListElementTypeInfo()));
    }
    if (category == MAP) {
        MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
        return getStandardMapObjectInspector(
                getJavaObjectInspector(mapTypeInfo.getMapKeyTypeInfo()),
                getJavaObjectInspector(mapTypeInfo.getMapValueTypeInfo()));
    }
    throw new PrestoException(INTERNAL_ERROR, "Unhandled storage type: " + category);
}
 
Developer ID: y-lan, Project: presto, Lines: 19, Source: OrcFileWriter.java

Example 4: isSupportedType

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static boolean isSupportedType(TypeInfo typeInfo)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            return getPrimitiveType(primitiveCategory) != null;
        case MAP:
            MapTypeInfo mapTypeInfo = checkType(typeInfo, MapTypeInfo.class, "typeInfo");
            return isSupportedType(mapTypeInfo.getMapKeyTypeInfo()) && isSupportedType(mapTypeInfo.getMapValueTypeInfo());
        case LIST:
            ListTypeInfo listTypeInfo = checkType(typeInfo, ListTypeInfo.class, "typeInfo");
            return isSupportedType(listTypeInfo.getListElementTypeInfo());
        case STRUCT:
            StructTypeInfo structTypeInfo = checkType(typeInfo, StructTypeInfo.class, "typeInfo");
            return structTypeInfo.getAllStructFieldTypeInfos().stream()
                    .allMatch(HiveType::isSupportedType);
    }
    return false;
}
 
Developer ID: y-lan, Project: presto, Lines: 20, Source: HiveType.java

Example 5: parseField

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Parses a JSON object according to the Hive column's type.
 *
 * @param field - The JSON object to parse
 * @param fieldTypeInfo - Metadata about the Hive column
 * @return - The parsed value of the field
 */
private Object parseField(Object field, TypeInfo fieldTypeInfo) {
    switch (fieldTypeInfo.getCategory()) {
        case PRIMITIVE:
            // Jackson will return the right thing in this case, so just return
            // the object
            if (field instanceof String) {
                field = field.toString().replaceAll("\n", "\\\\n");
            }
            return field;
        case LIST:
            return parseList(field, (ListTypeInfo) fieldTypeInfo);
        case MAP:
            return parseMap(field, (MapTypeInfo) fieldTypeInfo);
        case STRUCT:
            return parseStruct(field, (StructTypeInfo) fieldTypeInfo);
        case UNION:
            // Unsupported by JSON
        default:
            return null;
    }
}
 
Developer ID: scaleoutsoftware, Project: hServer, Lines: 29, Source: JsonSerDe.java

Example 6: worker

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
private Object worker(String columnName, TypeInfo columnType) {
	switch (columnType.getCategory()) {
		case STRUCT:
			return deserializeStruct(columnName, (StructTypeInfo) columnType);
		case UNION:
			return deserializeUnion(columnName, (UnionTypeInfo) columnType);
		case LIST:
			return deserializeList(columnName, (ListTypeInfo) columnType);
		case MAP:
			throw new RuntimeException("Map type is not supported for a COBOL layout: " + columnType.getCategory());
		case PRIMITIVE:
			return deserializePrimitive(columnName, (PrimitiveTypeInfo) columnType);
		default:
			throw new RuntimeException("Unknown TypeInfo: " + columnType.getCategory());
	}
}
 
Developer ID: rbheemana, Project: Cobol-to-Hive, Lines: 19, Source: CobolDeserializer.java

Example 7: parseField

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Parses a JSON object according to the Hive column's type.
 *
 * @param field         - The JSON object to parse
 * @param fieldTypeInfo - Metadata about the Hive column
 * @return - The parsed value of the field
 */
private Object parseField(Object field, TypeInfo fieldTypeInfo) {
	switch (fieldTypeInfo.getCategory()) {
		case PRIMITIVE:
			// Jackson will return the right thing in this case, so just return
			// the object
			if (field instanceof String) {
				field = field.toString().replaceAll("\n", "\\\\n");
			}
			return field;
		case LIST:
			return parseList(field, (ListTypeInfo) fieldTypeInfo);
		case MAP:
			return parseMap(field, (MapTypeInfo) fieldTypeInfo);
		case STRUCT:
			return parseStruct(field, (StructTypeInfo) fieldTypeInfo);
		case UNION:
			// Unsupported by JSON
		default:
			return null;
	}
}
 
Developer ID: micmiu, Project: bigdata-tutorial, Lines: 29, Source: JSONCDHSerDe.java

Example 8: createObjectInspectorWorker

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
  switch (ti.getCategory()) {
  case PRIMITIVE:
    PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
  case STRUCT:
    StructTypeInfo sti = (StructTypeInfo) ti;
    List<ObjectInspector> ois = new ArrayList<ObjectInspector>(sti.getAllStructFieldTypeInfos().size());
    for (TypeInfo typeInfo : sti.getAllStructFieldTypeInfos()) {
      ois.add(createObjectInspectorWorker(typeInfo));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(sti.getAllStructFieldNames(), ois);
  case LIST:
    ListTypeInfo lti = (ListTypeInfo) ti;
    TypeInfo listElementTypeInfo = lti.getListElementTypeInfo();
    return ObjectInspectorFactory.getStandardListObjectInspector(createObjectInspectorWorker(listElementTypeInfo));
  default:
    throw new SerDeException("No Hive categories matched for [" + ti + "]");
  }
}
 
Developer ID: apache, Project: incubator-blur, Lines: 21, Source: BlurObjectInspectorGenerator.java

Example 9: create

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static IColumnVectorAssignor create( final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case PRIMITIVE:
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo)typeInfo;
      switch( primitiveTypeInfo.getPrimitiveCategory() ){
        case STRING:
        case BINARY:
          return new BytesColumnVectorAssignor();
        case BYTE:
          return new LongColumnVectorAssignor( BytePrimitiveSetter.getInstance() );
        case SHORT:
          return new LongColumnVectorAssignor( ShortPrimitiveSetter.getInstance() );
        case INT:
          return new LongColumnVectorAssignor( IntegerPrimitiveSetter.getInstance() );
        case BOOLEAN:
        case LONG:
          return new LongColumnVectorAssignor( LongPrimitiveSetter.getInstance() );
        case FLOAT:
          return new DoubleColumnVectorAssignor( FloatPrimitiveSetter.getInstance() );
        case DOUBLE:
          return new DoubleColumnVectorAssignor( DoublePrimitiveSetter.getInstance() );
        case DATE:
        case DECIMAL:
        case TIMESTAMP:
        case VOID:
        default:
          throw new UnsupportedOperationException( "Unsupport vectorize column " + primitiveTypeInfo.getPrimitiveCategory() );
      }
    case STRUCT:
    case MAP:
    case LIST:
    case UNION:
    default:
      throw new UnsupportedOperationException( "Unsupport vectorize column " + typeInfo.getCategory() );
  }
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines: 37, Source: ColumnVectorAssignorFactory.java

Example 10: getMajorTypeFromHiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
      MinorType minorType = HiveUtilities.getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo, options);
      MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(minorType)
          .setMode(DataMode.OPTIONAL); // Hive columns (both regular and partition) could have null values

      if (primitiveTypeInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
        typeBuilder.setPrecision(decimalTypeInfo.precision())
            .setScale(decimalTypeInfo.scale()).build();
      }

      return typeBuilder.build();
    }

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 28, Source: HiveUtilities.java

Example 11: create

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static HiveFieldConverter create(TypeInfo typeInfo, FragmentContext fragmentContext)
    throws IllegalAccessException, InstantiationException {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE:
      final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
      if (pCat != PrimitiveCategory.DECIMAL) {
        Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
        if (clazz != null) {
          return clazz.newInstance();
        }
      } else {
        // For decimal, based on precision return appropriate converter.
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
        int precision = decimalTypeInfo.precision();
        int scale = decimalTypeInfo.scale();
        if (precision <= 9) {
          return new Decimal9(precision, scale);
        } else if (precision <= 18) {
          return new Decimal18(precision, scale);
        } else if (precision <= 28) {
          return new Decimal28(precision, scale, fragmentContext);
        } else {
          return new Decimal38(precision, scale, fragmentContext);
        }
      }

      throwUnsupportedHiveDataTypeError(pCat.toString());
      break;

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 40, Source: HiveFieldConverter.java

Example 12: getRelDataTypeFromHiveType

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
private RelDataType getRelDataTypeFromHiveType(RelDataTypeFactory typeFactory, TypeInfo typeInfo) {
  switch(typeInfo.getCategory()) {
    case PRIMITIVE:
      return getRelDataTypeFromHivePrimitiveType(typeFactory, ((PrimitiveTypeInfo) typeInfo));

    case LIST: {
      ListTypeInfo listTypeInfo = (ListTypeInfo)typeInfo;
      RelDataType listElemTypeInfo = getRelDataTypeFromHiveType(typeFactory, listTypeInfo.getListElementTypeInfo());
      return typeFactory.createArrayType(listElemTypeInfo, -1);
    }

    case MAP: {
      MapTypeInfo mapTypeInfo = (MapTypeInfo)typeInfo;
      RelDataType keyType = getRelDataTypeFromHiveType(typeFactory, mapTypeInfo.getMapKeyTypeInfo());
      RelDataType valueType = getRelDataTypeFromHiveType(typeFactory, mapTypeInfo.getMapValueTypeInfo());
      return typeFactory.createMapType(keyType, valueType);
    }

    case STRUCT: {
      StructTypeInfo structTypeInfo = (StructTypeInfo)typeInfo;
      ArrayList<String> fieldNames = structTypeInfo.getAllStructFieldNames();
      ArrayList<TypeInfo> fieldHiveTypeInfoList = structTypeInfo.getAllStructFieldTypeInfos();
      List<RelDataType> fieldRelDataTypeList = Lists.newArrayList();
      for(TypeInfo fieldHiveType : fieldHiveTypeInfoList) {
        fieldRelDataTypeList.add(getRelDataTypeFromHiveType(typeFactory, fieldHiveType));
      }
      return typeFactory.createStructType(fieldRelDataTypeList, fieldNames);
    }

    case UNION:
      logger.warn("There is no UNION data type in SQL. Converting it to Sql type OTHER to avoid " +
          "breaking INFORMATION_SCHEMA queries");
      return typeFactory.createSqlType(SqlTypeName.OTHER);
  }

  throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  return null;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 39, Source: DrillHiveTable.java

Example 13: getMajorTypeFromHiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
      MinorType minorType = getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo, options);
      MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(getMinorTypeFromArrowMinorType(minorType))
              .setMode(DataMode.OPTIONAL); // Hive columns (both regular and partition) could have null values

      if (primitiveTypeInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
        typeBuilder.setPrecision(decimalTypeInfo.precision())
                .setScale(decimalTypeInfo.scale()).build();
      }

      return typeBuilder.build();
    }

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      HiveUtilities.throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null;
}
 
Developer ID: dremio, Project: dremio-oss, Lines: 28, Source: HiveAbstractReader.java

Example 14: create

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext context)
    throws IllegalAccessException, InstantiationException {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE:
      final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
      if (pCat != PrimitiveCategory.DECIMAL) {
        Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
        if (clazz != null) {
          return clazz.newInstance();
        }
      } else {
        // For decimal, based on precision return appropriate converter.
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
        int precision = decimalTypeInfo.precision();
        int scale = decimalTypeInfo.scale();
        return new Decimal(precision, scale, context);
      }

      throwUnsupportedHiveDataTypeError(pCat.toString());
      break;

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null;
}
 
Developer ID: dremio, Project: dremio-oss, Lines: 32, Source: HiveFieldConverter.java

Example 15: deserialize

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class this method depends on
/**
 * Deserializes a raw value to the provided type.
 */
private Object deserialize(TypeInfo type, Object rawValue)
        throws SerDeException {
    Object value = null;

    if (rawValue != null) {
        switch (type.getCategory()) {
            case PRIMITIVE:
                value = deserializePrimitive((PrimitiveTypeInfo) type, rawValue);
                break;
            case STRUCT:
                value = deserializeStruct((StructTypeInfo) type, rawValue);
                break;
            case MAP:
                value = deserializeMap((MapTypeInfo) type, rawValue);
                break;
            case LIST:
                value = deserializeList((ListTypeInfo) type, rawValue);
                break;
            case UNION:
                value = deserializeUnion((UnionTypeInfo) type, rawValue);
                break;
        }
    }

    return value;
}
 
Developer ID: bazaarvoice, Project: emodb, Lines: 30, Source: EmoSerDe.java


Note: The org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.getCategory method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers, and copyright remains with the original authors. Refer to each project's license before using or redistributing the code; please do not reproduce this article without permission.