

Java UnionObjectInspector Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector. If you are wondering what UnionObjectInspector is for, how to use it, or want to see it in real code, the examples selected below should help.


The UnionObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector package. Six code examples of the class are shown below, ordered by popularity.
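Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class name is invented for illustration) of the contract this inspector exposes: a union value carries a byte tag that selects one of the branch inspectors returned by getObjectInspectors(), and getField() returns the value stored in that branch. Only standard Hive ObjectInspector factory APIs are used.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class UnionObjectInspectorDemo {
  public static void main(String[] args) {
    // A union over int and string: each value carries a tag selecting the active branch.
    List<ObjectInspector> branches = Arrays.asList(
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    UnionObjectInspector uoi =
        ObjectInspectorFactory.getStandardUnionObjectInspector(branches);

    // Tag 1 selects the second branch (string).
    Object value = new StandardUnion((byte) 1, "hello");
    byte tag = uoi.getTag(value);
    ObjectInspector branchOi = uoi.getObjectInspectors().get(tag);
    System.out.println(branchOi.getTypeName() + " -> " + uoi.getField(value)); // prints: string -> hello
  }
}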

Example 1: isMonarchTypeSupported

import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; // import the required package/class
public static boolean isMonarchTypeSupported(final ObjectInspector oi) {
  if (ObjectInspector.Category.PRIMITIVE.equals(oi.getCategory())) {
    /** handle primitive type definitions like decimal(20,20) or varchar(100) **/
    String typeStr = oi.getTypeName();
    final int argPos = typeStr.indexOf('(');
    if (argPos > 0) {
      typeStr = typeStr.substring(0, argPos);
    }
    return TYPE_HIVE_TO_MONARCH_MAP.containsKey(typeStr);
  } else if (oi instanceof ListObjectInspector) {
    ListObjectInspector loi = (ListObjectInspector)oi;
    return isMonarchTypeSupported(loi.getListElementObjectInspector());
  } else if (oi instanceof MapObjectInspector) {
    MapObjectInspector moi = (MapObjectInspector)oi;
    return isMonarchTypeSupported(moi.getMapKeyObjectInspector()) &&
      isMonarchTypeSupported(moi.getMapValueObjectInspector());
  } else if (oi instanceof StructObjectInspector) {
    return ((StructObjectInspector) oi).getAllStructFieldRefs().stream()
      .map(StructField::getFieldObjectInspector)
      .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
  } else if (oi instanceof UnionObjectInspector) {
    return ((UnionObjectInspector) oi).getObjectInspectors().stream()
      .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
  }
  return false;
}
 
Developer: ampool, Project: monarch, Lines of code: 27, Source: MonarchPredicateHandler.java
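As a hedged usage sketch (not from the monarch sources), the check above could be driven with Hive's standard inspector factories. Only the Hive factory calls here are standard API; the surrounding class is illustrative, and the import of MonarchPredicateHandler is omitted because its package is project-specific.

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

class MonarchTypeCheckSketch {
  static void demo() {
    ObjectInspector intOi = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector strOi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;

    // A union is supported only if every branch is supported, which is exactly
    // what the allMatch(...) call in Example 1 verifies recursively.
    ObjectInspector unionOi = ObjectInspectorFactory
        .getStandardUnionObjectInspector(Arrays.asList(intOi, strOi));
    boolean supported = MonarchPredicateHandler.isMonarchTypeSupported(unionOi);
    System.out.println("union<int,string> supported: " + supported);
  }
}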

Example 2: UnionTreeWriter

import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; // import the required package/class
UnionTreeWriter(int columnId,
              ObjectInspector inspector,
              StreamFactory writer,
              boolean nullable, Configuration conf,
              boolean useVInts, boolean lowMemoryMode,
              MemoryEstimate memoryEstimate) throws IOException {
  super(columnId, inspector, writer, nullable, conf, useVInts, memoryEstimate);
  UnionObjectInspector insp = (UnionObjectInspector) inspector;
  List<ObjectInspector> choices = insp.getObjectInspectors();
  childrenWriters = new TreeWriter[choices.size()];
  for(int i=0; i < childrenWriters.length; ++i) {
    childrenWriters[i] = createTreeWriter(choices.get(i), writer, true, conf, useVInts,
        lowMemoryMode, memoryEstimate);
  }
  tags =
    new RunLengthByteWriter(writer.createStream(columnId,
        OrcProto.Stream.Kind.DATA));
  recordPosition(rowIndexPosition);
}
 
Developer: facebookarchive, Project: hive-dwrf, Lines of code: 20, Source: WriterImpl.java

Example 3: convert

import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; // import the required package/class
/**
 * Convert the value using the ObjectInspector. The Writable values are converted to their
 * respective Java objects using the provided inspector.
 *
 * @param oi the field object inspector
 * @param value the value
 * @return the corresponding Java object value
 */
public static Object convert(final ObjectInspector oi, final Object value) {
  if (value == null) {
    return null;
  }
  Object outValue = null;
  switch (oi.getCategory()) {
    case PRIMITIVE:
      outValue = OrcReadFunctionMap.get(oi.getTypeName()).apply(value);
      break;
    case LIST:
      final ListObjectInspector loi = (ListObjectInspector) oi;
      final ObjectInspector eoi = loi.getListElementObjectInspector();
      outValue =
          loi.getList(value).stream().map(e -> convert(eoi, e)).collect(Collectors.toList());
      break;
    case MAP:
      final MapObjectInspector moi = (MapObjectInspector) oi;
      final ObjectInspector koi = moi.getMapKeyObjectInspector();
      final ObjectInspector voi = moi.getMapValueObjectInspector();
      outValue = moi.getMap(value).entrySet().stream()
          .collect(Collectors.toMap(e -> convert(koi, e.getKey()),
              e -> convert(voi, e.getValue()), throwingMerger(), LinkedHashMap::new));
      break;
    case STRUCT:
      final StructObjectInspector soi = (StructObjectInspector) oi;
      outValue = soi.getAllStructFieldRefs().stream()
          .map(e -> convert(e.getFieldObjectInspector(), soi.getStructFieldData(value, e)))
          .toArray();
      break;
    case UNION:
      final UnionObjectInspector uoi = (UnionObjectInspector) oi;
      final List<? extends ObjectInspector> ois = uoi.getObjectInspectors();
      final byte tag = uoi.getTag(value);
      outValue = new Object[] {tag, convert(ois.get(tag), uoi.getField(value))};
      break;
  }
  return outValue;
}
 
Developer: ampool, Project: monarch, Lines of code: 47, Source: OrcUtils.java
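For the UNION branch in particular, the result is a two-element array pairing the tag byte with the converted value of the selected branch. Below is a simplified standalone sketch of just that shape, using plain Java values instead of the Writable values the real OrcUtils.convert receives from ORC; the class name and the simplification are mine.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

class UnionConvertShapeSketch {
  public static void main(String[] args) {
    List<ObjectInspector> branches = Arrays.asList(
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaLongObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    UnionObjectInspector uoi =
        ObjectInspectorFactory.getStandardUnionObjectInspector(branches);

    Object unionValue = new StandardUnion((byte) 0, 42L);
    byte tag = uoi.getTag(unionValue);
    // Mirrors the UNION case of convert(): {tag, converted field value}.
    Object[] out = new Object[] {tag, uoi.getField(unionValue)};
    System.out.println(Arrays.toString(out)); // prints: [0, 42]
  }
}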

Example 4: write

import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; // import the required package/class
@Override
void write(Object obj) throws IOException {
  long rawDataSize = 0;
  if (obj != null) {
    UnionObjectInspector insp = (UnionObjectInspector) inspector;
    byte tag = insp.getTag(obj);
    tags.write(tag);
    childrenWriters[tag].write(insp.getField(obj));
    // raw data size is size of tag (1) + size of value
    rawDataSize = childrenWriters[tag].getRowRawDataSize() + RawDatasizeConst.UNION_TAG_SIZE;
  }
  super.write(obj, rawDataSize);
}
 
Developer: facebookarchive, Project: hive-dwrf, Lines of code: 14, Source: WriterImpl.java

Example 5: newConverter

import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; // import the required package/class
@Override
public Converter newConverter(ObjectInspector inspector) {
  switch (inspector.getCategory()) {
  case PRIMITIVE:
    switch (((PrimitiveObjectInspector) inspector).getPrimitiveCategory()) {
    case STRING:
      return new StringConverter();
    case BOOLEAN:
      return new BooleanConverter();
    case BYTE:
      return new ByteConverter();
    case SHORT:
      return new ShortConverter();
    case INT:
      return new IntegerConverter();
    case LONG:
      return new LongConverter();
    case FLOAT:
      return new FloatConverter();
    case DOUBLE:
      return new DoubleConverter();
    case TIMESTAMP:
      return new TimestampConverter();
    case DATE:
      return new DateConverter();
    case BINARY:
      return new BinaryConverter();
    case CHAR:
      return new CharConverter();
    case VARCHAR:
      return new VarcharConverter();
    case DECIMAL:
      return new DecimalConverter();
    default:
      throw new IllegalArgumentException(
          "Unknown Primitive Category: " + ((PrimitiveObjectInspector) inspector).getPrimitiveCategory());
    }
  case STRUCT:
    return new StructConverter(this, (SettableStructObjectInspector) inspector);
  case LIST:
    return new ListConverter(this, (ListObjectInspector) inspector);
  case MAP:
    return new MapConverter(this, (MapObjectInspector) inspector);
  case UNION:
    return new UnionConverter(this, (UnionObjectInspector) inspector);
  default:
    throw new IllegalArgumentException("Unknown Category: " + inspector.getCategory());
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines of code: 50, Source: DefaultConverterFactory.java

Example 6: UnionConverter

import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; // import the required package/class
public UnionConverter(ConverterFactory factory, UnionObjectInspector inspector) {
  for (ObjectInspector child : inspector.getObjectInspectors()) {
    converters.add(factory.newConverter(child));
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines of code: 6, Source: DefaultConverterFactory.java


Note: The org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce this article without permission.