

Java StructObjectInspector.getAllStructFieldRefs Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.getAllStructFieldRefs. If you have been wondering what StructObjectInspector.getAllStructFieldRefs does or how to use it in practice, the curated code examples below should help. You can also explore further usage examples of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector itself.


The sections below show 9 code examples of the StructObjectInspector.getAllStructFieldRefs method, sorted by popularity by default.
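
Before diving into the project code, here is a minimal, self-contained sketch of the method's basic contract: getAllStructFieldRefs returns one StructField per declared struct field, in declaration order, each carrying its field name and its own ObjectInspector. The demo class below is ours, not taken from any of the projects that follow.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FieldRefsDemo {
    public static void main(String[] args) {
        // Build an inspector for struct<name:string, age:int>.
        StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("name", "age"),
                Arrays.<ObjectInspector>asList(
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                        PrimitiveObjectInspectorFactory.javaIntObjectInspector));

        // One StructField per declared field, in declaration order.
        List<? extends StructField> fields = soi.getAllStructFieldRefs();
        for (StructField field : fields) {
            System.out.println(field.getFieldName() + " : "
                    + field.getFieldObjectInspector().getTypeName());
        }
        // Prints:
        // name : string
        // age : int
    }
}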

Example 1: initReduceSide

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
private ObjectInspector initReduceSide(StructObjectInspector inputStructOI)
        throws HiveException {
    List<? extends StructField> fields = inputStructOI.getAllStructFieldRefs();
    int length = fields.size();
    this.inputStructOI = inputStructOI;
    this.inputOIs = new ObjectInspector[length];
    this.outputOIs = new ObjectInspector[length];

    for (int i = 0; i < length; i++) {
        StructField field = fields.get(i);
        ObjectInspector oi = field.getFieldObjectInspector();
        inputOIs[i] = oi;
        outputOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(oi);
    }

    return ObjectInspectorUtils.getStandardObjectInspector(inputStructOI);
}
 
Developer ID: apache, Project: incubator-hivemall, Lines of code: 18, Source file: MaxRowUDAF.java
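
The conversion to standard object inspectors matters because rows handed to a UDAF are often lazily deserialized and backed by buffers the serde reuses between calls; keeping data across rows requires a materialized copy. A minimal sketch of that copy step using ObjectInspectorUtils.copyToStandardObject (the helper class name is ours, not part of MaxRowUDAF):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public final class RowCopier {
    private RowCopier() {
    }

    // Deep-copies a (possibly lazy) row into plain Java objects described by
    // the standard inspector that initReduceSide() above returns.
    public static Object materialize(Object row, StructObjectInspector inputStructOI) {
        return ObjectInspectorUtils.copyToStandardObject(row, inputStructOI);
    }
}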

Example 2: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objectInspector) throws SerDeException {
    if (!objectInspector.getCategory().equals(ObjectInspector.Category.STRUCT)) {
        throw new SerDeException("Cannot serialize " + objectInspector.getCategory() + ". Can only serialize a struct");
    }

    StructObjectInspector inspector = (StructObjectInspector) objectInspector;
    List<? extends StructField> fields = inspector.getAllStructFieldRefs();
    Writable[] arr = new Writable[fields.size()];
    for (int i = 0; i < fields.size(); i++) {
        StructField field = fields.get(i);
        Object subObj = inspector.getStructFieldData(obj, field);
        ObjectInspector subInspector = field.getFieldObjectInspector();
        arr[i] = createPrimitive(subObj, (PrimitiveObjectInspector) subInspector);
    }
    serdeSize = arr.length;
    return new ArrayWritable(Writable.class, arr);
}
 
Developer ID: shunfei, Project: indexr, Lines of code: 19, Source file: IndexRSerde.java
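
Note that this serializer casts every field inspector straight to PrimitiveObjectInspector, so it assumes a flat schema; a nested column (struct, map, list) would fail with a ClassCastException. A defensive variant of the cast (hypothetical, not part of IndexRSerde) would check the category first:

import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;

final class PrimitiveGuard {
    // Fails with a clear message instead of a ClassCastException on nested columns.
    static PrimitiveObjectInspector asPrimitive(ObjectInspector oi) throws SerDeException {
        if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
            throw new SerDeException("Only primitive fields are supported, got " + oi.getTypeName());
        }
        return (PrimitiveObjectInspector) oi;
    }
}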

Example 3: HiveStructParser

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
public HiveStructParser( final Object row , final StructObjectInspector structObjectInspector ){
  this.row = row;
  this.inspector = structObjectInspector;
  fieldIndexMap = new HashMap<String,Integer>();

  fieldList = structObjectInspector.getAllStructFieldRefs();
  for( int i = 0 ; i < fieldList.size() ; i++ ){
    StructField field = fieldList.get(i);
    fieldIndexMap.put( field.getFieldName() , Integer.valueOf( i ) );
  }
}
 
Developer ID: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 12, Source file: HiveStructParser.java
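
Example 4 below is the ORC variant of the same constructor. The point of building fieldIndexMap up front is O(1) lookup of a column by name on every row; Hive's StructObjectInspector.getStructFieldRef(String) covers one-off lookups, but a cached map avoids repeated scans when many columns are fetched per row. A self-contained sketch of how such a parser is typically read (class and method names are ours):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

class StructFieldLookup {
    private final Object row;
    private final StructObjectInspector inspector;
    private final List<? extends StructField> fieldList;
    private final Map<String, Integer> fieldIndexMap = new HashMap<>();

    StructFieldLookup(Object row, StructObjectInspector inspector) {
        this.row = row;
        this.inspector = inspector;
        this.fieldList = inspector.getAllStructFieldRefs();
        for (int i = 0; i < fieldList.size(); i++) {
            fieldIndexMap.put(fieldList.get(i).getFieldName(), i);
        }
    }

    // Returns the raw field value, or null when the column does not exist.
    Object get(String fieldName) {
        Integer index = fieldIndexMap.get(fieldName);
        if (index == null) {
            return null;
        }
        return inspector.getStructFieldData(row, fieldList.get(index));
    }
}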

Example 4: OrcStructParser

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
public OrcStructParser( final Object row , final StructObjectInspector structObjectInspector ){
  this.row = row;
  this.inspector = structObjectInspector;
  fieldIndexMap = new HashMap<String,Integer>();

  fieldList = structObjectInspector.getAllStructFieldRefs();
  for( int i = 0 ; i < fieldList.size() ; i++ ){
    StructField field = fieldList.get(i);
    fieldIndexMap.put( field.getFieldName() , Integer.valueOf( i ) );
  }
}
 
Developer ID: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 12, Source file: OrcStructParser.java

Example 5: convert

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
@Override
public void convert(Object from, BytesArray to) {

    Assert.isTrue(from instanceof HiveType,
            String.format("Unexpected object type, expecting [%s], given [%s]", HiveType.class, from.getClass()));

    HiveType ht = (HiveType) from;
    ObjectInspector oi = ht.getObjectInspector();

    Assert.isTrue(Category.STRUCT == oi.getCategory(),
            String.format("Unexpected object category, expecting [%s], given [%s]", Category.STRUCT, oi.getTypeName()));

    StructObjectInspector soi = (StructObjectInspector) oi;
    List<? extends StructField> refs = soi.getAllStructFieldRefs();
    Assert.isTrue(refs.size() == 1, "When using JSON input, only one field is expected");

    StructField structField = refs.get(0);
    ObjectInspector foi = structField.getFieldObjectInspector();

    Assert.isTrue(Category.PRIMITIVE == foi.getCategory(),
            String.format("Unexpected object category, expecting [%s], given [%s]", Category.PRIMITIVE, oi.getTypeName()));

    Object writable = ((PrimitiveObjectInspector) foi).getPrimitiveWritableObject(soi.getStructFieldData(ht.getObject(), structField));

    // HiveVarcharWritable - Hive 0.12+
    if (writable != null && HiveConstants.VARCHAR_WRITABLE.equals(writable.getClass().getName())) {
        // TODO: add dedicated optimization
        to.bytes(writable.toString());
        return;
    }

    super.convert(writable, to);
}
 
Developer ID: xushjie1987, Project: es-hadoop-v2.2.0, Lines of code: 34, Source file: HiveBytesConverter.java

Example 6: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
@Override
public HiveKuduWritable serialize(Object row, ObjectInspector inspector)
    throws SerDeException {

    final StructObjectInspector structInspector = (StructObjectInspector) inspector;
    final List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
    if (fields.size() != fieldCount) {
        throw new SerDeException(String.format(
                "Required %d columns, received %d.", fieldCount,
                fields.size()));
    }

    cachedWritable.clear();

    for (int i = 0; i < fieldCount; i++) {
        StructField structField = fields.get(i);
        if (structField != null) {
            Object field = structInspector.getStructFieldData(row,
                    structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();

            Object javaObject = HiveKuduBridgeUtils.deparseObject(field,
                    fieldOI);
            LOG.warn("Column value of " + i + " is " + javaObject); // string concatenation handles null safely
            cachedWritable.set(i, javaObject);
        }
    }
    return cachedWritable;
}
 
Developer ID: BimalTandel, Project: HiveKudu-Handler, Lines of code: 30, Source file: HiveKuduSerDe.java

Example 7: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
    throw new SerDeException(getClass().toString()
      + " can only serialize struct types, but we got: "
      + objInspector.getTypeName());
  }


  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> list = soi.getStructFieldsDataAsList(obj);

  LazyBinarySerDe.BooleanRef warnedOnceNullMapKey = new LazyBinarySerDe.BooleanRef(false);
  serializeStream.reset();
  serializedSize = 0;
  int streamOffset = 0;
  // Serialize each field
  for (int i = 0; i < fields.size(); i++) {
    // Get the field objectInspector and the field object.
    ObjectInspector foi = fields.get(i).getFieldObjectInspector();
    Object f = (list == null ? null : list.get(i));
    //empty strings are marked by an invalid utf single byte sequence. A valid utf stream cannot
    //produce this sequence
    if ((f != null) && (foi.getCategory().equals(ObjectInspector.Category.PRIMITIVE))
      && ((PrimitiveObjectInspector) foi).getPrimitiveCategory().equals(
      PrimitiveObjectInspector.PrimitiveCategory.STRING)
      && ((StringObjectInspector) foi).getPrimitiveJavaObject(f).length() == 0) {
      serializeStream.write(INVALID_UTF__SINGLE_BYTE, 0, 1);
    } else {
      if (MonarchPredicateHandler.isMonarchTypeSupported(foi)) {
        /** wherever possible use our serialization **/
        try {
          serializeStream.write(objectTypeList.get(i).serialize(convertToJavaObject(foi, f)));
        } catch (IOException e) {
          logger.error("Failed to serialize Field= {}, Type= {}",
            fields.get(i).getFieldName(), foi.getTypeName(), e);
        }
      } else {
        /** for the rest continue to use LazyBinarySerDe as binary/bytes **/
        LazyBinarySerDe.serialize(serializeStream, f, foi, true, warnedOnceNullMapKey);
      }
    }
    field[i].set(serializeStream.getData(), streamOffset, serializeStream.getLength() - streamOffset);
    streamOffset = serializeStream.getLength();
  }
  serializedSize = serializeStream.getLength();
  lastOperationSerialize = true;
  lastOperationDeserialize = false;
  return serializeCache;
}
 
Developer ID: ampool, Project: monarch, Lines of code: 52, Source file: MonarchSerDe.java
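
The empty-string branch above relies on a sentinel byte that can never begin well-formed UTF-8. Hive's INVALID_UTF__SINGLE_BYTE constant is, to our knowledge, the single byte 0xBF (binary 10111111), a UTF-8 continuation byte; verify the value against your Hive version. A small sketch of the idea (our own class):

import java.nio.charset.StandardCharsets;

final class EmptyStringSentinel {
    // 0b10111111: a continuation byte, so no valid UTF-8 string starts with it.
    static final byte SENTINEL = (byte) 0xBF;

    static boolean isEmptyStringMarker(byte[] data, int offset, int length) {
        return length == 1 && data[offset] == SENTINEL;
    }

    public static void main(String[] args) {
        byte[] real = "x".getBytes(StandardCharsets.UTF_8);
        System.out.println(isEmptyStringMarker(real, 0, real.length));       // false
        System.out.println(isEmptyStringMarker(new byte[]{SENTINEL}, 0, 1)); // true
    }
}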

Example 8: getCanonicalType

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
/**
 * Returns the canonical type.
 *
 * @param fieldInspector inspector
 * @return type
 */
Type getCanonicalType(final ObjectInspector fieldInspector) {
    switch (fieldInspector.getCategory()) {
        case PRIMITIVE:
            return getPrimitiveType(fieldInspector);
        case MAP:
            final MapObjectInspector mapObjectInspector =
                TypeUtils.checkType(fieldInspector, MapObjectInspector.class,
                    "fieldInspector");
            final Type keyType = getCanonicalType(mapObjectInspector.getMapKeyObjectInspector());
            final Type valueType = getCanonicalType(mapObjectInspector.getMapValueObjectInspector());
            if (keyType == null || valueType == null) {
                return null;
            }
            return TypeRegistry.getTypeRegistry().getParameterizedType(TypeEnum.MAP,
                ImmutableList.of(keyType.getTypeSignature(), valueType.getTypeSignature()), ImmutableList.of());
        case LIST:
            final ListObjectInspector listObjectInspector =
                TypeUtils.checkType(fieldInspector, ListObjectInspector.class,
                    "fieldInspector");
            final Type elementType =
                getCanonicalType(listObjectInspector.getListElementObjectInspector());
            if (elementType == null) {
                return null;
            }
            return TypeRegistry.getTypeRegistry().getParameterizedType(TypeEnum.ARRAY,
                ImmutableList.of(elementType.getTypeSignature()), ImmutableList.of());
        case STRUCT:
            final StructObjectInspector structObjectInspector =
                TypeUtils.checkType(fieldInspector, StructObjectInspector.class, "fieldInspector");
            final List<TypeSignature> fieldTypes = new ArrayList<>();
            final List<Object> fieldNames = new ArrayList<>();
            for (StructField field : structObjectInspector.getAllStructFieldRefs()) {
                fieldNames.add(field.getFieldName());
                final Type fieldType = getCanonicalType(field.getFieldObjectInspector());
                if (fieldType == null) {
                    return null;
                }
                fieldTypes.add(fieldType.getTypeSignature());
            }
            return TypeRegistry.getTypeRegistry()
                .getParameterizedType(TypeEnum.ROW, fieldTypes, fieldNames);
        default:
            log.info("Currently unsupported type {}, returning Unknown type", fieldInspector.getTypeName());
            return BaseType.UNKNOWN;
    }
}
 
Developer ID: Netflix, Project: metacat, Lines of code: 53, Source file: HiveTypeConverter.java
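
The STRUCT branch is where getAllStructFieldRefs drives the recursion: each field contributes its name plus a recursively converted type. A stripped-down sketch of the same traversal that just prints a type tree instead of building metacat Type objects (names are illustrative):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

final class TypeTreePrinter {
    static void print(ObjectInspector oi, String indent) {
        System.out.println(indent + oi.getCategory() + " : " + oi.getTypeName());
        if (oi instanceof StructObjectInspector) {
            for (StructField field : ((StructObjectInspector) oi).getAllStructFieldRefs()) {
                System.out.println(indent + "  " + field.getFieldName() + ":");
                print(field.getFieldObjectInspector(), indent + "    ");
            }
        }
        // MAP and LIST categories would recurse the same way through their
        // key/value and element inspectors.
    }
}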

Example 9: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the class this method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  // Prepare the field ObjectInspectors
  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> rowData = soi.getStructFieldsDataAsList(obj);
  Map<String, AttributeValue> item = new HashMap<>();

  validateData(fields, rowData);

  for (int i = 0; i < fields.size(); i++) {
    StructField field = fields.get(i);
    Object data = rowData.get(i);
    ObjectInspector fieldObjectInspector = field.getFieldObjectInspector();

    // Get the Hive to DynamoDB mapper
    HiveDynamoDBType ddType =
        HiveDynamoDBTypeFactory.getTypeObjectFromHiveType(fieldObjectInspector.getTypeName());
    if (ddType == null) {
      throw new RuntimeException("Unsupported hive type " + fieldObjectInspector.getTypeName()
          + " Object inspector: " + fieldObjectInspector);
    }

    // Check if this column maps a DynamoDB item.
    if (ddType instanceof HiveDynamoDBItemType) {
      HiveDynamoDBItemType ddItemType = (HiveDynamoDBItemType) ddType;
      Map<String, AttributeValue> backupItem = ddItemType.parseDynamoDBData(data,
          fieldObjectInspector);

      // We give higher priority to attributes directly mapped to
      // columns. So we do not update the value of an attribute if
      // it already exists. This can happen in case of partial schemas
      // when there is a full backup column and attribute mapped
      // columns.
      for (Map.Entry<String, AttributeValue> entry : backupItem.entrySet()) {
        if (!item.containsKey(entry.getKey())) {
          item.put(entry.getKey(), entry.getValue());
        }
      }
    } else {
      // User has mapped individual attribute in DynamoDB to
      // corresponding Hive columns.
      AttributeValue attributeValue = null;
      if (data != null) {
        attributeValue = ddType.getDynamoDBData(data, fieldObjectInspector);
      }

      if (attributeValue != null) {
        item.put(columnMappings.get(columnNames.get(i)), attributeValue);
      }
    }
  }

  DynamoDBItemWritable itemWritable = new DynamoDBItemWritable();
  itemWritable.setItem(item);
  return itemWritable;
}
 
Developer ID: awslabs, Project: emr-dynamodb-connector, Lines of code: 60, Source file: DynamoDBSerDe.java
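
The backup-item merge above deliberately skips keys that already exist, so attributes mapped directly to Hive columns keep priority over entries from the full-backup column. On Java 8+ that skip-if-present copy is exactly Map.putIfAbsent; a generic sketch (hypothetical helper, not part of DynamoDBSerDe):

import java.util.Map;

final class ItemMerge {
    // Copies entries from source into target without overwriting existing
    // keys, preserving the priority of values written earlier.
    static <K, V> void mergePreservingExisting(Map<K, V> target, Map<K, V> source) {
        for (Map.Entry<K, V> entry : source.entrySet()) {
            target.putIfAbsent(entry.getKey(), entry.getValue());
        }
    }
}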


Note: The org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.getAllStructFieldRefs method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors, so consult each project's License before distributing or using it. Do not reproduce without permission.