

Java ObjectInspector.getCategory Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.getCategory. If you are wondering how ObjectInspector.getCategory is used in practice, or what real-world examples of it look like, the curated code samples below should help. You can also explore further usage examples of the declaring class, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.


The following presents 14 code examples of the ObjectInspector.getCategory method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
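Before looking at the individual examples, note that they all share the same basic pattern: call getCategory() on an ObjectInspector, switch on the returned ObjectInspector.Category value (PRIMITIVE, LIST, MAP, STRUCT, UNION), and cast to the matching inspector subtype. The following minimal sketch illustrates that pattern; the describe helper is illustrative only and does not appear in any of the projects quoted below.

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class CategoryDispatchSketch {
  // Hypothetical helper: describe any ObjectInspector by dispatching on
  // getCategory() and casting to the corresponding inspector subtype.
  public static String describe(final ObjectInspector oi) {
    switch (oi.getCategory()) {
      case PRIMITIVE:
        // Primitive inspectors expose a finer-grained PrimitiveCategory.
        return "primitive:" + ((PrimitiveObjectInspector) oi).getPrimitiveCategory();
      case LIST:
        return "list<" + describe(((ListObjectInspector) oi).getListElementObjectInspector()) + ">";
      case MAP:
        MapObjectInspector moi = (MapObjectInspector) oi;
        return "map<" + describe(moi.getMapKeyObjectInspector()) + ","
            + describe(moi.getMapValueObjectInspector()) + ">";
      case STRUCT:
        return "struct(" + ((StructObjectInspector) oi).getAllStructFieldRefs().size() + " fields)";
      case UNION:
      default:
        return "other:" + oi.getCategory();
    }
  }
}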

Example 1: getDrillType

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static MinorType getDrillType(ObjectInspector oi) {
  switch(oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
      if (TYPE_HIVE2DRILL.containsKey(poi.getPrimitiveCategory())) {
        return TYPE_HIVE2DRILL.get(poi.getPrimitiveCategory());
      }
      throw new UnsupportedOperationException();
    }

    case MAP:
    case LIST:
    case STRUCT:
    default:
      throw new UnsupportedOperationException();
  }
}
 
Developer ID: skhalifa, Project: QDrill, Lines of code: 18, Source: ObjectInspectorHelper.java

Example 2: getMinorType

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static MinorType getMinorType(ObjectInspector oi) {
  switch(oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
      if (TYPE_HIVE2MINOR.containsKey(poi.getPrimitiveCategory())) {
        return TYPE_HIVE2MINOR.get(poi.getPrimitiveCategory());
      }
      throw new UnsupportedOperationException();
    }

    case MAP:
    case LIST:
    case STRUCT:
    default:
      throw new UnsupportedOperationException();
  }
}
 
Developer ID: dremio, Project: dremio-oss, Lines of code: 18, Source: ObjectInspectorHelper.java

Example 3: getRegexpExecuter

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static IExpressionNode getRegexpExecuter(final ExprNodeConstantDesc constDesc , final IExtractNode targetColumn ){
  ObjectInspector objectInspector = constDesc.getWritableObjectInspector();
  if( objectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector)objectInspector;
  IFilter filter = null;
  switch( primitiveObjectInspector.getPrimitiveCategory() ){
    case STRING:
      String regexp = UDFLike.likePatternToRegExp( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
      filter = new RegexpMatchStringFilter( "^" + regexp + "$" );
      break;
    case BINARY:
    case BOOLEAN:
    case BYTE:
    case DOUBLE:
    case FLOAT:
    case INT:
    case LONG:
    case SHORT:
    case DATE:
    case DECIMAL:
    case TIMESTAMP:
    case VOID:
    default:
      filter = null;
    break;
  }
  if( filter == null ){
    return null;
  }
  return new ExecuterNode( targetColumn , filter );
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines of code: 34, Source: RegexpHiveExpr.java

Example 4: getPrimitiveObjectInspector

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static PrimitiveObjectInspector getPrimitiveObjectInspector( final ExprNodeDesc exprNode ){
  if( ! ( exprNode instanceof ExprNodeConstantDesc ) ){
    return null;
  }
  ExprNodeConstantDesc constDesc = (ExprNodeConstantDesc)exprNode;
  ObjectInspector objectInspector = constDesc.getWritableObjectInspector();
  if( objectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  return (PrimitiveObjectInspector)objectInspector;
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines of code: 12, Source: InHiveExpr.java

Example 5: get

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){

    switch( objectInspector.getCategory() ){
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
        switch( primitiveInspector.getPrimitiveCategory() ){
          case BINARY:
            return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
          case BOOLEAN:
            return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
          case BYTE:
            return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
          case DOUBLE:
            return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
          case FLOAT:
            return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
          case INT:
            return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
          case LONG:
            return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
          case SHORT:
            return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
          case STRING:
            return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
          case DATE:
          case TIMESTAMP:
          case VOID:
          case UNKNOWN:
          default:
            return new HiveDefaultPrimitiveConverter();
        }
      default :
        return new HiveDefaultPrimitiveConverter();
    }
  }
 
Developer ID: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 36, Source: HivePrimitiveConverterFactory.java

Example 6: get

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static IParser get( final ObjectInspector objectInspector , final Object row ){

    switch( objectInspector.getCategory() ){
      case LIST:
        return new HiveListParser( row , (ListObjectInspector)objectInspector );
      case MAP:
        return new HiveMapParser( row , (MapObjectInspector)objectInspector );
      case STRUCT:
        return new HiveStructParser( row , (StructObjectInspector)objectInspector );
      case UNION:
      default:
        return new HiveNullParser();
    }
  }
 
Developer ID: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 15, Source: HiveParserFactory.java

Example 7: hasParser

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static boolean hasParser( final ObjectInspector objectInspector ){
  switch( objectInspector.getCategory() ){
    case LIST:
    case MAP:
    case STRUCT:
      return true;
    case UNION:
    default:
      return false;
  }
}
 
Developer ID: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 12, Source: HiveParserFactory.java
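A hedged usage sketch combining the two HiveParserFactory methods shown above (hasParser and get): skip fields for which the factory would only return a HiveNullParser. The parserOrNull helper is illustrative and is not part of dataplatform-schema-lib; it assumes the same imports as the examples above.

// Illustrative only: ask the factory for a parser only when the inspector
// actually has one, i.e. its category is LIST, MAP or STRUCT.
public static IParser parserOrNull(final ObjectInspector objectInspector, final Object row) {
  if (!HiveParserFactory.hasParser(objectInspector)) {
    return null;
  }
  return HiveParserFactory.get(objectInspector, row);
}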

Example 8: get

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static IParser get( final ObjectInspector objectInspector , final Object row ){

    switch( objectInspector.getCategory() ){
      case LIST:
        return new OrcListParser( row , (ListObjectInspector)objectInspector );
      case MAP:
        return new OrcMapParser( row , (MapObjectInspector)objectInspector );
      case STRUCT:
        return new OrcStructParser( row , (StructObjectInspector)objectInspector );
      case UNION:
      default:
        return new OrcNullParser();
    }
  }
 
Developer ID: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 15, Source: OrcParserFactory.java

Example 9: convert

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
/**
 * Convert the value using the ObjectInspector. The Writable values are converted to their
 * respective Java objects using the provided inspector.
 *
 * @param oi the field object inspector
 * @param value the value
 * @return the corresponding Java object value
 */
public static Object convert(final ObjectInspector oi, final Object value) {
  if (value == null) {
    return null;
  }
  Object outValue = null;
  switch (oi.getCategory()) {
    case PRIMITIVE:
      outValue = OrcReadFunctionMap.get(oi.getTypeName()).apply(value);
      break;
    case LIST:
      final ListObjectInspector loi = (ListObjectInspector) oi;
      final ObjectInspector eoi = loi.getListElementObjectInspector();
      outValue =
          loi.getList(value).stream().map(e -> convert(eoi, e)).collect(Collectors.toList());
      break;
    case MAP:
      final MapObjectInspector moi = (MapObjectInspector) oi;
      final ObjectInspector koi = moi.getMapKeyObjectInspector();
      final ObjectInspector voi = moi.getMapValueObjectInspector();
      outValue = moi.getMap(value).entrySet().stream()
          .collect(Collectors.toMap(e -> convert(koi, e.getKey()),
              e -> convert(voi, e.getValue()), throwingMerger(), LinkedHashMap::new));
      break;
    case STRUCT:
      final StructObjectInspector soi = (StructObjectInspector) oi;
      outValue = soi.getAllStructFieldRefs().stream()
          .map(e -> convert(e.getFieldObjectInspector(), soi.getStructFieldData(value, e)))
          .toArray();
      break;
    case UNION:
      final UnionObjectInspector uoi = (UnionObjectInspector) oi;
      final List<? extends ObjectInspector> ois = uoi.getObjectInspectors();
      final byte tag = uoi.getTag(value);
      outValue = new Object[] {tag, convert(ois.get(tag), uoi.getField(value))};
      break;
  }
  return outValue;
}
 
Developer ID: ampool, Project: monarch, Lines of code: 47, Source: OrcUtils.java
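A hypothetical usage sketch for the convert method above (not part of the monarch project): given a StructObjectInspector and a deserialized row, convert every field to a plain Java object. It assumes the same class and imports as the example above, plus org.apache.hadoop.hive.serde2.objectinspector.StructField.

// Illustrative only: walk a struct row and convert each field via convert().
public static void printRow(final StructObjectInspector soi, final Object row) {
  for (StructField field : soi.getAllStructFieldRefs()) {
    Object raw = soi.getStructFieldData(row, field);
    Object javaValue = convert(field.getFieldObjectInspector(), raw);
    System.out.println(field.getFieldName() + " = " + javaValue);
  }
}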

Example 10: getStructOI

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static StructObjectInspector getStructOI(final SerDe serDe) throws Exception {
  ObjectInspector oi = serDe.getObjectInspector();
  if (oi.getCategory() != Category.STRUCT) {
    throw new UnsupportedOperationException(String.format("%s category not supported", oi.getCategory()));
  }
  return (StructObjectInspector) oi;
}
 
Developer ID: dremio, Project: dremio-oss, Lines of code: 8, Source: HiveAbstractReader.java
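A hedged usage sketch (not Dremio's actual reader code): a reader would typically call getStructOI once when opening a table and then use the returned StructObjectInspector to enumerate the columns. It assumes the same class and imports as the example above, plus org.apache.hadoop.hive.serde2.objectinspector.StructField.

// Illustrative only: print each column name and its Hive type name.
public static void printColumns(final SerDe serDe) throws Exception {
  StructObjectInspector structOI = getStructOI(serDe);
  for (StructField field : structOI.getAllStructFieldRefs()) {
    System.out.println(field.getFieldName() + " : "
        + field.getFieldObjectInspector().getTypeName());
  }
}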

Example 11: getCompareExecuter

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static IExpressionNode getCompareExecuter(final ExprNodeConstantDesc constDesc , final IExtractNode targetColumn , final StringCompareFilterType stringCompareType , final NumberFilterType numberCompareType ){
  ObjectInspector objectInspector = constDesc.getWritableObjectInspector();
  if( objectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector)objectInspector;
  IFilter filter = null;
  switch( primitiveObjectInspector.getPrimitiveCategory() ){
    case STRING:
      switch( stringCompareType ){
        case LT:
          filter = new LtStringCompareFilter( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
          break;
        case LE:
          filter = new LeStringCompareFilter( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
          break;
        case GT:
          filter = new GtStringCompareFilter( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
          break;
        case GE:
          filter = new GeStringCompareFilter( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
          break;
        default:
          filter = null;
          break;
      }
      break;
    case BYTE:
      byte byteObj = ( (WritableConstantByteObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( numberCompareType , new ByteObj( byteObj ) );
      break;
    case SHORT:
      short shortObj = ( (WritableConstantShortObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( numberCompareType , new ShortObj( shortObj ) );
      break;
    case INT:
      int intObj = ( (WritableConstantIntObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( numberCompareType , new IntegerObj( intObj ) );
      break;
    case LONG:
      long longObj = ( (WritableConstantLongObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( numberCompareType , new LongObj( longObj ) );
      break;
    case FLOAT:
      float floatObj = ( (WritableConstantFloatObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( numberCompareType , new FloatObj( floatObj ) );
      break;
    case DOUBLE:
      double doubleObj = ( (WritableConstantDoubleObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( numberCompareType , new DoubleObj( doubleObj ) );
      break;
    case DATE:
    case DECIMAL:
    case TIMESTAMP:
      filter = null;
      break;
    default:
      filter = null;
      break;
  }
  if( filter == null ){
    return null;
  }
  return new ExecuterNode( targetColumn , filter );
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines of code: 66, Source: CompareHiveExpr.java

Example 12: getPushDownFilterNode

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() != 4 ){
    return null;
  }
  ExprNodeDesc constNode1 = nodeDescList.get( 0 );
  ExprNodeDesc columnNode = nodeDescList.get( 1 );
  ExprNodeDesc constNode2 = nodeDescList.get( 2 );
  ExprNodeDesc constNode3 = nodeDescList.get( 3 );

  if( ! ( constNode1 instanceof ExprNodeConstantDesc ) || ! ( constNode2 instanceof ExprNodeConstantDesc ) || ! ( constNode3 instanceof ExprNodeConstantDesc ) ){
    return null;
  } 
  ExprNodeConstantDesc booleanNode = (ExprNodeConstantDesc)constNode1;
  ObjectInspector booleanOjectInspector = booleanNode.getWritableObjectInspector();
  if( booleanOjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector booleanPrimitiveObjectInspector = (PrimitiveObjectInspector)booleanOjectInspector;
  if( booleanPrimitiveObjectInspector.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN ){
    return null;
  }
  boolean invert = ( (WritableConstantBooleanObjectInspector)booleanPrimitiveObjectInspector ).getWritableConstantValue().get();

  ExprNodeConstantDesc minNode = (ExprNodeConstantDesc)constNode2;
  ExprNodeConstantDesc maxNode = (ExprNodeConstantDesc)constNode3;

  ObjectInspector minOjectInspector = minNode.getWritableObjectInspector();
  ObjectInspector maxOjectInspector = maxNode.getWritableObjectInspector();
  if( minOjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE || maxOjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector minPrimitiveObjectInspector = (PrimitiveObjectInspector)minOjectInspector;
  PrimitiveObjectInspector maxPrimitiveObjectInspector = (PrimitiveObjectInspector)maxOjectInspector;
  if( minPrimitiveObjectInspector.getPrimitiveCategory() != maxPrimitiveObjectInspector.getPrimitiveCategory() ){
    return null;
  }

  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnNode );
  if( extractNode == null ){
    return null;
  }

  return getRangeExecuter( invert , minPrimitiveObjectInspector , maxPrimitiveObjectInspector , extractNode );
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines of code: 46, Source: BetweenHiveExpr.java

Example 13: getEqualsExecuter

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
public static IExpressionNode getEqualsExecuter( final ExprNodeConstantDesc constDesc , final IExtractNode targetColumn , final ColumnType targetColumnType ){
  ObjectInspector objectInspector = constDesc.getWritableObjectInspector();
  if( objectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector)objectInspector;
  IFilter filter = null;
  switch( primitiveObjectInspector.getPrimitiveCategory() ){
    case STRING:
      filter = new PerfectMatchStringFilter( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
      break;
    case BINARY:
      filter = null;
      break;
    case BOOLEAN:
      boolean booleanObj = ( (WritableConstantBooleanObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new BooleanFilter( booleanObj );
      break;
    case BYTE:
      byte byteObj = ( (WritableConstantByteObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( NumberFilterType.EQUAL , new ByteObj( byteObj ) );
      break;
    case SHORT:
      short shortObj = ( (WritableConstantShortObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( NumberFilterType.EQUAL , new ShortObj( shortObj ) );
      break;
    case INT:
      int intObj = ( (WritableConstantIntObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( NumberFilterType.EQUAL , new IntegerObj( intObj ) );
      break;
    case LONG:
      long longObj = ( (WritableConstantLongObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( NumberFilterType.EQUAL , new LongObj( longObj ) );
      break;
    case FLOAT:
      float floatObj = ( (WritableConstantFloatObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( NumberFilterType.EQUAL , new FloatObj( floatObj ) );
      break;
    case DOUBLE:
      double doubleObj = ( (WritableConstantDoubleObjectInspector)primitiveObjectInspector ).getWritableConstantValue().get();
      filter = new NumberFilter( NumberFilterType.EQUAL , new DoubleObj( doubleObj ) );
      break;
    case DATE:
    case DECIMAL:
    case TIMESTAMP:
      filter = null;
      break;
    case VOID:
      Object voidObj = ( (WritableVoidObjectInspector)primitiveObjectInspector ).getWritableConstantValue();
      if( voidObj == null ){
        filter = new NullFilter( targetColumnType );
      }
      else{
        filter = null;
      }
      break;
    default:
      filter = null;
      break;
  }
  if( filter == null ){
    return null;
  }
  return new ExecuterNode( targetColumn , filter );
}
 
Developer ID: yahoojapan, Project: multiple-dimension-spread, Lines of code: 66, Source: EqualsHiveExpr.java

Example 14: serialize

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the package/class the method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
    throw new SerDeException(getClass().toString()
      + " can only serialize struct types, but we got: "
      + objInspector.getTypeName());
  }


  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> list = soi.getStructFieldsDataAsList(obj);

  LazyBinarySerDe.BooleanRef warnedOnceNullMapKey = new LazyBinarySerDe.BooleanRef(false);
  serializeStream.reset();
  serializedSize = 0;
  int streamOffset = 0;
  // Serialize each field
  for (int i = 0; i < fields.size(); i++) {
    // Get the field objectInspector and the field object.
    ObjectInspector foi = fields.get(i).getFieldObjectInspector();
    Object f = (list == null ? null : list.get(i));
    //empty strings are marked by an invalid utf single byte sequence. A valid utf stream cannot
    //produce this sequence
    if ((f != null) && (foi.getCategory().equals(ObjectInspector.Category.PRIMITIVE))
      && ((PrimitiveObjectInspector) foi).getPrimitiveCategory().equals(
      PrimitiveObjectInspector.PrimitiveCategory.STRING)
      && ((StringObjectInspector) foi).getPrimitiveJavaObject(f).length() == 0) {
      serializeStream.write(INVALID_UTF__SINGLE_BYTE, 0, 1);
    } else {
      if (MonarchPredicateHandler.isMonarchTypeSupported(foi)) {
        /** wherever possible use our serialization **/
        try {
          serializeStream.write(objectTypeList.get(i).serialize(convertToJavaObject(foi, f)));
        } catch (IOException e) {
          logger.error("Failed to serialize Field= {}, Type= {}",
            fields.get(i).getFieldName(), foi.getTypeName(), e);
        }
      } else {
        /** for the rest continue to use LazyBinarySerDe as binary/bytes **/
        LazyBinarySerDe.serialize(serializeStream, f, foi, true, warnedOnceNullMapKey);
      }
    }
    field[i].set(serializeStream.getData(), streamOffset, serializeStream.getLength() - streamOffset);
    streamOffset = serializeStream.getLength();
  }
  serializedSize = serializeStream.getLength();
  lastOperationSerialize = true;
  lastOperationDeserialize = false;
  return serializeCache;
}
 
Developer ID: ampool, Project: monarch, Lines of code: 52, Source: MonarchSerDe.java


Note: The org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.getCategory examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers, and copyright of the source code belongs to the original authors. Please refer to the License of the corresponding project before distributing or using the code; do not reproduce this article without permission.