当前位置: 首页>>代码示例>>Java>>正文


Java PrimitiveTypeInfo.getPrimitiveCategory方法代码示例

本文整理汇总了Java中org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo.getPrimitiveCategory方法的典型用法代码示例。如果您正苦于以下问题:Java PrimitiveTypeInfo.getPrimitiveCategory方法的具体用法?Java PrimitiveTypeInfo.getPrimitiveCategory怎么用?Java PrimitiveTypeInfo.getPrimitiveCategory使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo的用法示例。


在下文中一共展示了PrimitiveTypeInfo.getPrimitiveCategory方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: isSupportedPrimitive

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Determines whether this deserializer supports the given primitive type.  Everything except
 * BINARY, DECIMAL, VARCHAR, CHAR, and UNKNOWN is supported.
 */
private boolean isSupportedPrimitive(PrimitiveTypeInfo type) {
    final boolean supported;
    switch (type.getPrimitiveCategory()) {
        case VOID:
        case STRING:
        case BOOLEAN:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case DATE:
        case TIMESTAMP:
            supported = true;
            break;
        default:
            // BINARY, DECIMAL, VARCHAR, CHAR, UNKNOWN, and anything newer land here.
            supported = false;
            break;
    }
    return supported;
}
 
开发者ID:bazaarvoice,项目名称:emodb,代码行数:23,代码来源:EmoSerDe.java

示例2: deserializePrimitive

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Converts a raw value into the Java object matching the given primitive type, performing a
 * best-effort conversion where the underlying representation differs.
 *
 * @throws SerDeException if the primitive category cannot be deserialized
 */
private Object deserializePrimitive(PrimitiveTypeInfo type, Object value)
        throws SerDeException {
    // Fetch the category once and dispatch on it.
    PrimitiveObjectInspector.PrimitiveCategory category = type.getPrimitiveCategory();
    switch (category) {
        case VOID:
            return null;
        case STRING:
            return deserializeString(value);
        case BOOLEAN:
            return deserializeBoolean(value);
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            // All numeric categories share one conversion path.
            return deserializeNumber(value, type);
        case DATE:
        case TIMESTAMP:
            // Temporal categories share one conversion path.
            return deserializeDate(value, type);
        default:
            throw new SerDeException("Unsupported type: " + category);
    }
}
 
开发者ID:bazaarvoice,项目名称:emodb,代码行数:27,代码来源:EmoSerDe.java

示例3: deserializeNumber

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
private Object deserializeNumber(Object value, PrimitiveTypeInfo type)
        throws SerDeException {
    // Only Number and Boolean inputs are convertible; in particular, string
    // representations of numbers are deliberately NOT parsed.
    final Number num;
    if (value instanceof Number) {
        num = (Number) value;
    } else if (value instanceof Boolean) {
        // Booleans are coerced to 1/0.
        num = ((Boolean) value) ? (byte) 1 : 0;
    } else {
        throw new SerDeException("Value is not a " + type + ": " + value);
    }

    // Narrow (or widen) to exactly the numeric category the schema asks for.
    switch (type.getPrimitiveCategory()) {
        case BYTE:
            return num.byteValue();
        case SHORT:
            return num.shortValue();
        case INT:
            return num.intValue();
        case LONG:
            return num.longValue();
        case FLOAT:
            return num.floatValue();
        case DOUBLE:
            return num.doubleValue();
    }

    throw new SerDeException("Primitive number did not match any expected categories"); // Unreachable
}
 
开发者ID:bazaarvoice,项目名称:emodb,代码行数:25,代码来源:EmoSerDe.java

示例4: deserializeDate

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
private Object deserializeDate(Object value, PrimitiveTypeInfo type)
        throws SerDeException {
    // Accepted encodings: ISO8601 strings, numeric epoch millis, or java.util.Date
    // instances.  Anything else is rejected.
    final long millis;
    if (value instanceof String) {
        try {
            millis = JsonHelper.parseTimestamp((String) value).getTime();
        } catch (Exception e) {
            throw new SerDeException("Invalid time string: " + value);
        }
    } else if (value instanceof Number) {
        millis = ((Number) value).longValue();
    } else if (value instanceof java.util.Date) {
        millis = ((java.util.Date) value).getTime();
    } else {
        throw new SerDeException("Invalid time value: " + value);
    }

    // DATE maps to a Date, every other (i.e. TIMESTAMP) to a Timestamp.
    return type.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE
            ? new Date(millis)
            : new Timestamp(millis);
}
 
开发者ID:bazaarvoice,项目名称:emodb,代码行数:26,代码来源:EmoSerDe.java

示例5: create

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Creates the column vector assignor matching the given Hive type.
 * Only primitive STRING/BINARY, integral, BOOLEAN, and floating-point categories are
 * vectorizable; all other categories (including DATE, DECIMAL, TIMESTAMP, VOID and every
 * complex category) are rejected.
 *
 * @param typeInfo Hive type of the column to vectorize
 * @return an assignor writing values of that type into a column vector
 * @throws UnsupportedOperationException if the type cannot be vectorized
 */
public static IColumnVectorAssignor create( final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case PRIMITIVE:
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo)typeInfo;
      switch( primitiveTypeInfo.getPrimitiveCategory() ){
        case STRING:
        case BINARY:
          return new BytesColumnVectorAssignor();
        case BYTE:
          return new LongColumnVectorAssignor( BytePrimitiveSetter.getInstance() );
        case SHORT:
          return new LongColumnVectorAssignor( ShortPrimitiveSetter.getInstance() );
        case INT:
          return new LongColumnVectorAssignor( IntegerPrimitiveSetter.getInstance() );
        case BOOLEAN:
        case LONG:
          // Booleans share the long vector representation.
          return new LongColumnVectorAssignor( LongPrimitiveSetter.getInstance() );
        case FLOAT:
          return new DoubleColumnVectorAssignor( FloatPrimitiveSetter.getInstance() );
        case DOUBLE:
          return new DoubleColumnVectorAssignor( DoublePrimitiveSetter.getInstance() );
        case DATE:
        case DECIMAL:
        case TIMESTAMP:
        case VOID:
        default:
          // Message typo fixed: "Unsupport" -> "Unsupported".
          throw new UnsupportedOperationException( "Unsupported vectorize column " + primitiveTypeInfo.getPrimitiveCategory() );
      }
    case STRUCT:
    case MAP:
    case LIST:
    case UNION:
    default:
      // Message typo fixed: "Unsupport" -> "Unsupported".
      throw new UnsupportedOperationException( "Unsupported vectorize column " + typeInfo.getCategory() );
  }
}
 
开发者ID:yahoojapan,项目名称:multiple-dimension-spread,代码行数:37,代码来源:ColumnVectorAssignorFactory.java

示例6: getMajorTypeFromHiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Derives the Drill MajorType for a Hive TypeInfo.  Only primitive categories are
 * supported; complex categories (LIST/MAP/STRUCT/UNION) raise an unsupported-type error.
 */
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      final PrimitiveTypeInfo primitiveInfo = (PrimitiveTypeInfo) typeInfo;
      final MinorType minor = HiveUtilities.getMinorTypeFromHivePrimitiveTypeInfo(primitiveInfo, options);
      // Hive columns (both regular and partition) may hold nulls, hence OPTIONAL mode.
      final MajorType.Builder builder = MajorType.newBuilder()
          .setMinorType(minor)
          .setMode(DataMode.OPTIONAL);

      if (primitiveInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
        // Decimal additionally carries precision and scale.
        final DecimalTypeInfo decimalInfo = (DecimalTypeInfo) primitiveInfo;
        builder.setPrecision(decimalInfo.precision()).setScale(decimalInfo.scale());
      }

      return builder.build();
    }

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null; // presumably unreachable — the error helper is expected to throw
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:28,代码来源:HiveUtilities.java

示例7: getMajorTypeFromHiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Derives the MajorType for a Hive TypeInfo, translating through the Arrow minor type.
 * Only primitive categories are supported; complex categories raise an unsupported-type error.
 */
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      final PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) typeInfo;
      final MinorType arrowMinor = getMinorTypeFromHivePrimitiveTypeInfo(primitiveType, options);
      // Hive columns (both regular and partition) may hold nulls, hence OPTIONAL mode.
      final MajorType.Builder builder = MajorType.newBuilder()
              .setMinorType(getMinorTypeFromArrowMinorType(arrowMinor))
              .setMode(DataMode.OPTIONAL);

      if (primitiveType.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
        // Decimal additionally carries precision and scale.
        final DecimalTypeInfo decimalType = (DecimalTypeInfo) primitiveType;
        builder.setPrecision(decimalType.precision()).setScale(decimalType.scale());
      }

      return builder.build();
    }

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      HiveUtilities.throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null; // presumably unreachable — the error helper is expected to throw
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:28,代码来源:HiveAbstractReader.java

示例8: getMinorTypeFromHivePrimitiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Maps a Hive primitive category to the corresponding MinorType.
 * DECIMAL is gated behind a planner option and rejected when disabled; unmatched
 * categories fall through to an unsupported-type error.
 *
 * @param primitiveTypeInfo Hive primitive type to map
 * @param options planner options (consulted for the decimal enablement flag)
 * @return the matching MinorType
 */
public static MinorType getMinorTypeFromHivePrimitiveTypeInfo(PrimitiveTypeInfo primitiveTypeInfo, OptionManager options) {
  switch(primitiveTypeInfo.getPrimitiveCategory()) {
    case BINARY:
      return MinorType.VARBINARY;
    case BOOLEAN:
      return MinorType.BIT;
    case DECIMAL: {
      // Idiom fix: logical negation instead of "== false" (identical semantics,
      // including unboxing behavior of bool_val).
      if (!options.getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val) {
        throw UserException.unsupportedError()
            .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
            .build(abstractLogger);
      }
      return MinorType.DECIMAL;
    }
    case DOUBLE:
      return MinorType.FLOAT8;
    case FLOAT:
      return MinorType.FLOAT4;
    // TODO (DRILL-2470)
    // Byte and short (tinyint and smallint in SQL types) are currently read as integers
    // as these smaller integer types are not fully supported in Dremio today.
    case SHORT:
    case BYTE:
    case INT:
      return MinorType.INT;
    case LONG:
      return MinorType.BIGINT;
    case STRING:
    case VARCHAR:
    case CHAR:
      return MinorType.VARCHAR;
    case TIMESTAMP:
      return MinorType.TIMESTAMPMILLI;
    case DATE:
      return MinorType.DATEMILLI;
  }
  HiveUtilities.throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
  return null; // presumably unreachable — the error helper is expected to throw
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:41,代码来源:HiveAbstractReader.java

示例9: getMajorTypeFromHiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Derives the Drill MajorType for a Hive TypeInfo, including precision/scale metadata for
 * char-like and decimal primitives.  Complex categories raise an unsupported-type error.
 */
public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      final PrimitiveTypeInfo primitiveInfo = (PrimitiveTypeInfo) typeInfo;
      // Hive columns (both regular and partition) may hold nulls, hence OPTIONAL mode.
      final MajorType.Builder builder = MajorType.newBuilder()
          .setMinorType(HiveUtilities.getMinorTypeFromHivePrimitiveTypeInfo(primitiveInfo, options))
          .setMode(DataMode.OPTIONAL);

      switch (primitiveInfo.getPrimitiveCategory()) {
        case CHAR:
        case VARCHAR:
          // Char-like types carry a maximum length, exposed as precision.
          builder.setPrecision(((BaseCharTypeInfo) primitiveInfo).getLength());
          break;
        case DECIMAL:
          final DecimalTypeInfo decimalInfo = (DecimalTypeInfo) primitiveInfo;
          builder.setPrecision(decimalInfo.getPrecision()).setScale(decimalInfo.getScale());
          break;
        default:
          // Other primitive categories have neither precision nor scale.
          break;
      }

      return builder.build();
    }

    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
    default:
      throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  }

  return null; // presumably unreachable — the error helper is expected to throw
}
 
开发者ID:axbaretto,项目名称:drill,代码行数:36,代码来源:HiveUtilities.java

示例10: typeInfoToColumnType

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Maps a Hive TypeInfo onto the internal ColumnType enumeration.
 * STRUCT and MAP both become SPREAD; unsupported primitives and categories become UNKNOWN.
 */
public static ColumnType typeInfoToColumnType( final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case STRUCT:
    case MAP:
      // Both record-like nested types share the SPREAD representation.
      return ColumnType.SPREAD;
    case LIST:
      return ColumnType.ARRAY;
    case UNION:
      return ColumnType.UNION;
    case PRIMITIVE:
      switch( ( (PrimitiveTypeInfo)typeInfo ).getPrimitiveCategory() ){
        case STRING:
          return ColumnType.STRING;
        case BINARY:
          return ColumnType.BYTES;
        case BOOLEAN:
          return ColumnType.BOOLEAN;
        case BYTE:
          return ColumnType.BYTE;
        case DOUBLE:
          return ColumnType.DOUBLE;
        case FLOAT:
          return ColumnType.FLOAT;
        case INT:
          return ColumnType.INTEGER;
        case LONG:
          return ColumnType.LONG;
        case SHORT:
          return ColumnType.SHORT;
        default:
          // DATE, DECIMAL, TIMESTAMP, VOID and anything newer are not mapped.
          return ColumnType.UNKNOWN;
      }
    default:
      return ColumnType.UNKNOWN;
  }
}
 
开发者ID:yahoojapan,项目名称:multiple-dimension-spread,代码行数:44,代码来源:MDSColumnTypeUtil.java

示例11: craeteGetUnionObject

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Builds the union-branch accessor for the given tag and branch type.
 * Nested categories map to nested accessors; supported primitives map to a primitive
 * accessor; UNION branches and unsupported primitives fall back to a null accessor.
 */
private IGetUnionObject craeteGetUnionObject( final byte tag , final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case STRUCT:
    case MAP:
      // Both record-like nested types share the SPREAD representation.
      return new NestedGetUnionObject( tag , ColumnType.SPREAD );
    case LIST:
      return new NestedGetUnionObject( tag , ColumnType.ARRAY );
    case UNION:
      return new NullGetUnionObject( tag );
    case PRIMITIVE:
      // NOTE: the inspector is created before the category is examined, matching the
      // original evaluation order (the factory may itself reject unsupported types).
      PrimitiveObjectInspector inspector = (PrimitiveObjectInspector)( MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo ) );
      ColumnType columnType;
      switch( ( (PrimitiveTypeInfo)typeInfo ).getPrimitiveCategory() ){
        case STRING:  columnType = ColumnType.STRING;  break;
        case BINARY:  columnType = ColumnType.BYTES;   break;
        case BOOLEAN: columnType = ColumnType.BOOLEAN; break;
        case BYTE:    columnType = ColumnType.BYTE;    break;
        case DOUBLE:  columnType = ColumnType.DOUBLE;  break;
        case FLOAT:   columnType = ColumnType.FLOAT;   break;
        case INT:     columnType = ColumnType.INTEGER; break;
        case LONG:    columnType = ColumnType.LONG;    break;
        case SHORT:   columnType = ColumnType.SHORT;   break;
        default:
          // DATE, DECIMAL, TIMESTAMP, VOID and anything newer are not supported.
          return new NullGetUnionObject( tag );
      }
      return new PrimitiveGetUnionObject( tag , inspector , columnType );
    default:
      return new NullGetUnionObject( tag );
  }
}
 
开发者ID:yahoojapan,项目名称:multiple-dimension-spread,代码行数:44,代码来源:UnionField.java

示例12: craeteObjectInspectorFromTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Builds an ObjectInspector tree for the given Hive TypeInfo.
 * Union members are resolved recursively; unsupported primitive categories and unknown
 * categories raise an UnsupportedOperationException.
 */
public static ObjectInspector craeteObjectInspectorFromTypeInfo( final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case STRUCT:
      return new MDSStructObjectInspector( (StructTypeInfo)typeInfo );
    case MAP:
      return new MDSMapObjectInspector( (MapTypeInfo)typeInfo );
    case LIST:
      return new MDSListObjectInspector( (ListTypeInfo)typeInfo );
    case UNION:
      // Recursively build one inspector per union branch.
      List<ObjectInspector> childInspectorList = new ArrayList<ObjectInspector>();
      for( TypeInfo childTypeInfo : ( (UnionTypeInfo)typeInfo ).getAllUnionObjectTypeInfos() ){
        childInspectorList.add( craeteObjectInspectorFromTypeInfo( childTypeInfo ) );
      }
      return ObjectInspectorFactory.getStandardUnionObjectInspector( childInspectorList );
    case PRIMITIVE:
      PrimitiveTypeInfo primitiveInfo = (PrimitiveTypeInfo)typeInfo;
      switch( primitiveInfo.getPrimitiveCategory() ){
        case STRING:  return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        case BINARY:  return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
        case BOOLEAN: return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
        case BYTE:    return PrimitiveObjectInspectorFactory.writableByteObjectInspector;
        case DOUBLE:  return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        case FLOAT:   return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
        case INT:     return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        case LONG:    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        case SHORT:   return PrimitiveObjectInspectorFactory.writableShortObjectInspector;
        default:
          // DATE, DECIMAL, TIMESTAMP, VOID and anything newer are not supported.
          throw new UnsupportedOperationException( "Unknown primitive category " + primitiveInfo.getPrimitiveCategory() );
      }
    default:
      throw new UnsupportedOperationException( "Unknown category " + typeInfo.getCategory() );
  }
}
 
开发者ID:yahoojapan,项目名称:multiple-dimension-spread,代码行数:49,代码来源:MDSObjectInspectorFactory.java

示例13: getMinorTypeFromHivePrimitiveTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Maps a Hive primitive category to the corresponding Drill TypeProtos.MinorType.
 * DECIMAL is gated behind a planner option (rejected when disabled) and its MinorType
 * depends on the declared precision; unmatched categories fall through to an
 * unsupported-type error.
 *
 * @param primitiveTypeInfo Hive primitive type to map
 * @param options planner options (consulted for the decimal enablement flag)
 * @return the matching MinorType
 */
public static TypeProtos.MinorType getMinorTypeFromHivePrimitiveTypeInfo(PrimitiveTypeInfo primitiveTypeInfo,
                                                                         OptionManager options) {
  switch(primitiveTypeInfo.getPrimitiveCategory()) {
    case BINARY:
      return TypeProtos.MinorType.VARBINARY;
    case BOOLEAN:
      return TypeProtos.MinorType.BIT;
    case DECIMAL: {
      // Idiom fix: logical negation instead of "== false" (identical semantics,
      // including unboxing behavior of bool_val).
      if (!options.getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val) {
        throw UserException.unsupportedError()
            .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
            .build(logger);
      }
      // The decimal MinorType is precision-dependent.
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
      return DecimalUtility.getDecimalDataType(decimalTypeInfo.precision());
    }
    case DOUBLE:
      return TypeProtos.MinorType.FLOAT8;
    case FLOAT:
      return TypeProtos.MinorType.FLOAT4;
    // TODO (DRILL-2470)
    // Byte and short (tinyint and smallint in SQL types) are currently read as integers
    // as these smaller integer types are not fully supported in Drill today.
    case SHORT:
    case BYTE:
    case INT:
      return TypeProtos.MinorType.INT;
    case LONG:
      return TypeProtos.MinorType.BIGINT;
    case STRING:
    case VARCHAR:
      return TypeProtos.MinorType.VARCHAR;
    case TIMESTAMP:
      return TypeProtos.MinorType.TIMESTAMP;
    case DATE:
      return TypeProtos.MinorType.DATE;
  }
  throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
  return null; // presumably unreachable — the error helper is expected to throw
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:42,代码来源:HiveUtilities.java

示例14: getRelDataTypeFromHivePrimitiveType

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Translates a Hive primitive type into the equivalent Calcite {@code RelDataType}.
 * BYTE and SHORT are widened to INTEGER; STRING and VARCHAR become VARCHAR with the
 * Hive-declared max length; UNKNOWN, VOID, and any unlisted category raise an
 * unsupported-type error via {@code throwUnsupportedHiveDataTypeError}.
 */
private RelDataType getRelDataTypeFromHivePrimitiveType(RelDataTypeFactory typeFactory, PrimitiveTypeInfo pTypeInfo) {
  switch(pTypeInfo.getPrimitiveCategory()) {
    case BOOLEAN:
      return typeFactory.createSqlType(SqlTypeName.BOOLEAN);

    // tinyint/smallint are widened to INTEGER rather than mapped to smaller SQL types.
    case BYTE:
    case SHORT:
      return typeFactory.createSqlType(SqlTypeName.INTEGER);

    case INT:
      return typeFactory.createSqlType(SqlTypeName.INTEGER);

    case LONG:
      return typeFactory.createSqlType(SqlTypeName.BIGINT);

    case FLOAT:
      return typeFactory.createSqlType(SqlTypeName.FLOAT);

    case DOUBLE:
      return typeFactory.createSqlType(SqlTypeName.DOUBLE);

    case DATE:
      return typeFactory.createSqlType(SqlTypeName.DATE);

    case TIMESTAMP:
      return typeFactory.createSqlType(SqlTypeName.TIMESTAMP);

    case BINARY:
      return typeFactory.createSqlType(SqlTypeName.VARBINARY);

    case DECIMAL: {
      // Decimal keeps the Hive-declared precision and scale.
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo)pTypeInfo;
      return typeFactory.createSqlType(SqlTypeName.DECIMAL, decimalTypeInfo.precision(), decimalTypeInfo.scale());
    }

    case STRING:
    case VARCHAR: {
      int maxLen = TypeInfoUtils.getCharacterLengthForType(pTypeInfo);
      return typeFactory.createTypeWithCharsetAndCollation(
        typeFactory.createSqlType(SqlTypeName.VARCHAR, maxLen), /* base VARCHAR type */
        Charset.forName("ISO-8859-1"), /* NOTE(review): ISO-8859-1 is Latin-1, not a Unicode charset — confirm intended charset */
        SqlCollation.IMPLICIT /* TODO: need to decide if implicit is the correct one */
      );
    }

    case UNKNOWN:
    case VOID:
    default:
      throwUnsupportedHiveDataTypeError(pTypeInfo.getPrimitiveCategory().toString());
  }

  // Presumably unreachable: the error helper above is expected to throw.
  return null;
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:54,代码来源:DrillHiveTable.java

示例15: get

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; //导入方法依赖的package包/类
/**
 * Selects the ORC formatter implementation for the given Hive type.
 * Nested categories dispatch to container formatters; supported primitives map one-to-one;
 * VOID and UNKNOWN are treated as void; any other primitive (e.g. DATE, DECIMAL) and any
 * unrecognized category fall back to the null formatter.
 *
 * @param typeInfo Hive type of the field to format
 * @return a formatter matching the type (never null)
 */
public static IOrcFormatter get( final TypeInfo typeInfo ){
  switch( typeInfo.getCategory() ){
    case LIST:
      return new OrcListFormatter( (ListTypeInfo)typeInfo );
    case MAP:
      return new OrcMapFormatter( (MapTypeInfo)typeInfo );
    case STRUCT:
      return new OrcStructFormatter( (StructTypeInfo)typeInfo );
    case UNION:
      return new OrcUnionFormatter( (UnionTypeInfo)typeInfo );
    case PRIMITIVE:
      // Hoisted into one switch: the category was previously re-fetched by every
      // branch of a long if/else chain.
      switch( ( (PrimitiveTypeInfo)typeInfo ).getPrimitiveCategory() ){
        case BINARY:
          return new OrcBytesFormatter();
        case BOOLEAN:
          return new OrcBooleanFormatter();
        case BYTE:
          return new OrcByteFormatter();
        case DOUBLE:
          return new OrcDoubleFormatter();
        case FLOAT:
          return new OrcFloatFormatter();
        case INT:
          return new OrcIntegerFormatter();
        case LONG:
          return new OrcLongFormatter();
        case SHORT:
          return new OrcShortFormatter();
        case STRING:
          return new OrcStringFormatter();
        case TIMESTAMP:
          return new OrcTimestampFormatter();
        case VOID:
        case UNKNOWN:
          return new OrcVoidFormatter();
        default:
          // DATE, DECIMAL, CHAR, VARCHAR, etc. have no dedicated formatter.
          return new OrcNullFormatter();
      }
    default:
      return new OrcNullFormatter();
  }
}
 
开发者ID:yahoojapan,项目名称:dataplatform-schema-lib,代码行数:60,代码来源:OrcFormatterFactory.java


注:本文中的org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo.getPrimitiveCategory方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。