

Java StructObjectInspector.getStructFieldsDataAsList Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.getStructFieldsDataAsList. If you are asking yourself what exactly this method does, how to call it, or what real-world uses look like, the curated code examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.


The sections below present 4 code examples of the StructObjectInspector.getStructFieldsDataAsList method, sorted by popularity by default.
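
Before turning to the examples, the general pattern is worth stating: getStructFieldsDataAsList returns all field values of a struct object, in the same order as getAllStructFieldRefs returns the field references. The following is a minimal sketch of that pattern, assuming row and rowOI come from a SerDe; the names dumpStruct, row, and rowOI are illustrative, not taken from any of the projects below:

import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

// Print every field of a deserialized struct row, e.g. one produced by
// serde.deserialize(writable) with rowOI = (StructObjectInspector) serde.getObjectInspector()
static void dumpStruct(Object row, StructObjectInspector rowOI) {
    List<? extends StructField> refs = rowOI.getAllStructFieldRefs();
    // field data comes back in the same order as getAllStructFieldRefs()
    List<Object> data = rowOI.getStructFieldsDataAsList(row);
    for (int i = 0; i < refs.size(); i++) {
        ObjectInspector fieldOI = refs.get(i).getFieldObjectInspector();
        System.out.println(refs.get(i).getFieldName() + " ("
                + fieldOI.getTypeName() + ") = " + data.get(i));
    }
}

Note that with lazy SerDes the returned values are often lazily-deserialized wrappers rather than plain Java objects, which is why the examples below pass each value through its field's PrimitiveObjectInspector (and copy it where the wrapper is reused) before storing it.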

Example 1: loadValues

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class this method depends on
private static void loadValues(Object2ObjectMap<Object, Object> map, File file,
        PrimitiveObjectInspector keyOI, PrimitiveObjectInspector valueOI) throws IOException,
        SerDeException {
    if (!file.exists()) {
        return;
    }
    // ignore Hadoop checksum files
    if (!file.getName().endsWith(".crc")) {
        if (file.isDirectory()) {
            // recurse into subdirectories
            for (File f : file.listFiles()) {
                loadValues(map, f, keyOI, valueOI);
            }
        } else {
            // SerDe that parses each line as a (key, value) struct
            LazySimpleSerDe serde = HiveUtils.getKeyValueLineSerde(keyOI, valueOI);
            StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
            StructField keyRef = lineOI.getStructFieldRef("key");
            StructField valueRef = lineOI.getStructFieldRef("value");
            PrimitiveObjectInspector keyRefOI = (PrimitiveObjectInspector) keyRef.getFieldObjectInspector();
            PrimitiveObjectInspector valueRefOI = (PrimitiveObjectInspector) valueRef.getFieldObjectInspector();

            BufferedReader reader = null;
            try {
                reader = HadoopUtils.getBufferedReader(file);
                String line;
                while ((line = reader.readLine()) != null) {
                    Text lineText = new Text(line);
                    Object lineObj = serde.deserialize(lineText);
                    // field data is returned in struct-field order: [key, value]
                    List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
                    Object f0 = fields.get(0);
                    Object f1 = fields.get(1);
                    Object k = keyRefOI.getPrimitiveJavaObject(f0);
                    // copy before converting: the lazy object is reused for every line
                    Object v = valueRefOI.getPrimitiveWritableObject(valueRefOI.copyObject(f1));
                    map.put(k, v);
                }
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
    }
}
 
Developer ID: apache, Project: incubator-hivemall, Lines: 40, Source: DistributedCacheLookupUDF.java

Example 2: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class this method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
    throw new SerDeException(getClass().toString()
      + " can only serialize struct types, but we got: "
      + objInspector.getTypeName());
  }


  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> list = soi.getStructFieldsDataAsList(obj);

  LazyBinarySerDe.BooleanRef warnedOnceNullMapKey = new LazyBinarySerDe.BooleanRef(false);
  serializeStream.reset();
  serializedSize = 0;
  int streamOffset = 0;
  // Serialize each field
  for (int i = 0; i < fields.size(); i++) {
    // Get the field objectInspector and the field object.
    ObjectInspector foi = fields.get(i).getFieldObjectInspector();
    Object f = (list == null ? null : list.get(i));
    // empty strings are marked by an invalid UTF-8 single-byte sequence;
    // a valid UTF-8 stream cannot produce this sequence
    if ((f != null) && (foi.getCategory().equals(ObjectInspector.Category.PRIMITIVE))
      && ((PrimitiveObjectInspector) foi).getPrimitiveCategory().equals(
      PrimitiveObjectInspector.PrimitiveCategory.STRING)
      && ((StringObjectInspector) foi).getPrimitiveJavaObject(f).length() == 0) {
      serializeStream.write(INVALID_UTF__SINGLE_BYTE, 0, 1);
    } else {
      if (MonarchPredicateHandler.isMonarchTypeSupported(foi)) {
          // wherever possible, use our own serialization
        try {
          serializeStream.write(objectTypeList.get(i).serialize(convertToJavaObject(foi, f)));
        } catch (IOException e) {
          logger.error("Failed to serialize Field= {}, Type= {}",
            fields.get(i).getFieldName(), foi.getTypeName(), e);
        }
      } else {
          // for the rest, continue to use LazyBinarySerDe (binary/bytes)
        LazyBinarySerDe.serialize(serializeStream, f, foi, true, warnedOnceNullMapKey);
      }
    }
    field[i].set(serializeStream.getData(), streamOffset, serializeStream.getLength() - streamOffset);
    streamOffset = serializeStream.getLength();
  }
  serializedSize = serializeStream.getLength();
  lastOperationSerialize = true;
  lastOperationDeserialize = false;
  return serializeCache;
}
 
Developer ID: ampool, Project: monarch, Lines: 52, Source: MonarchSerDe.java

Example 3: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class this method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  // Prepare the field ObjectInspectors
  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> rowData = soi.getStructFieldsDataAsList(obj);
  Map<String, AttributeValue> item = new HashMap<>();

  validateData(fields, rowData);

  for (int i = 0; i < fields.size(); i++) {
    StructField field = fields.get(i);
    Object data = rowData.get(i);
    ObjectInspector fieldObjectInspector = field.getFieldObjectInspector();

    // Get the Hive to DynamoDB mapper
    HiveDynamoDBType ddType =
        HiveDynamoDBTypeFactory.getTypeObjectFromHiveType(fieldObjectInspector.getTypeName());
    if (ddType == null) {
      throw new RuntimeException("Unsupported hive type " + fieldObjectInspector.getTypeName()
          + " Object inspector: " + fieldObjectInspector);
    }

    // Check if this column maps a DynamoDB item.
    if (ddType instanceof HiveDynamoDBItemType) {
      HiveDynamoDBItemType ddItemType = (HiveDynamoDBItemType) ddType;
      Map<String, AttributeValue> backupItem = ddItemType.parseDynamoDBData(data,
          fieldObjectInspector);

      // We give higher priority to attributes directly mapped to
      // columns. So we do not update the value of an attribute if
      // it already exists. This can happen in case of partial schemas
      // when there is a full backup column and attribute mapped
      // columns.
      for (Map.Entry<String, AttributeValue> entry : backupItem.entrySet()) {
        if (!item.containsKey(entry.getKey())) {
          item.put(entry.getKey(), entry.getValue());
        }
      }
    } else {
      // User has mapped individual attribute in DynamoDB to
      // corresponding Hive columns.
      AttributeValue attributeValue = null;
      if (data != null) {
        attributeValue = ddType.getDynamoDBData(data, fieldObjectInspector);
      }

      if (attributeValue != null) {
        item.put(columnMappings.get(columnNames.get(i)), attributeValue);
      }
    }
  }

  DynamoDBItemWritable itemWritable = new DynamoDBItemWritable();
  itemWritable.setItem(item);
  return itemWritable;
}
 
Developer ID: awslabs, Project: emr-dynamodb-connector, Lines: 60, Source: DynamoDBSerDe.java

Example 4: loadPredictionModel

import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class this method depends on
private long loadPredictionModel(Map<Object, PredictionModel> label2model, File file,
        PrimitiveObjectInspector labelOI, PrimitiveObjectInspector featureOI,
        WritableFloatObjectInspector weightOI) throws IOException, SerDeException {
    long count = 0L;
    if (!file.exists()) {
        return count;
    }
    // ignore Hadoop checksum files
    if (!file.getName().endsWith(".crc")) {
        if (file.isDirectory()) {
            // recurse into subdirectories
            for (File f : file.listFiles()) {
                count += loadPredictionModel(label2model, f, labelOI, featureOI, weightOI);
            }
        } else {
            LazySimpleSerDe serde = HiveUtils.getLineSerde(labelOI, featureOI, weightOI);
            StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
            StructField c1ref = lineOI.getStructFieldRef("c1");
            StructField c2ref = lineOI.getStructFieldRef("c2");
            StructField c3ref = lineOI.getStructFieldRef("c3");
            PrimitiveObjectInspector c1refOI = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
            PrimitiveObjectInspector c2refOI = (PrimitiveObjectInspector) c2ref.getFieldObjectInspector();
            FloatObjectInspector c3refOI = (FloatObjectInspector) c3ref.getFieldObjectInspector();

            BufferedReader reader = null;
            try {
                reader = HadoopUtils.getBufferedReader(file);
                String line;
                while ((line = reader.readLine()) != null) {
                    count++;
                    Text lineText = new Text(line);
                    Object lineObj = serde.deserialize(lineText);
                    // field data in struct-field order: label (c1), feature (c2), weight (c3)
                    List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
                    Object f0 = fields.get(0);
                    Object f1 = fields.get(1);
                    Object f2 = fields.get(2);
                    if (f0 == null || f1 == null || f2 == null) {
                        continue; // skip lines where any field is null
                    }
                    // copy the lazily-materialized field before keeping it as a map key
                    Object label = c1refOI.getPrimitiveWritableObject(c1refOI.copyObject(f0));
                    PredictionModel model = label2model.get(label);
                    if (model == null) {
                        model = createModel();
                        label2model.put(label, model);
                    }
                    Object k = c2refOI.getPrimitiveWritableObject(c2refOI.copyObject(f1));
                    float v = c3refOI.get(f2);
                    model.set(k, new WeightValue(v, false));
                }
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
    }
    return count;
}
 
Developer ID: apache, Project: incubator-hivemall, Lines: 55, Source: MulticlassOnlineClassifierUDTF.java


Note: The org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.getStructFieldsDataAsList examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors. See each project's License for the terms of distribution and use. Do not republish without permission.