

Java MapObjectInspector.getMapKeyObjectInspector Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector.getMapKeyObjectInspector. If you are wondering what MapObjectInspector.getMapKeyObjectInspector does and how to use it, the curated code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector.


The following presents 5 code examples of the MapObjectInspector.getMapKeyObjectInspector method, ordered by popularity by default.
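Before the collected examples, here is a minimal, self-contained sketch of how getMapKeyObjectInspector is typically used. The class name MapKeyInspectorDemo and the map<string,int> schema are illustrative assumptions and do not come from any of the projects below; the factory and inspector calls are standard Hive serde2 API.

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class MapKeyInspectorDemo {
  public static void main(String[] args) {
    // Build an inspector for a map<string,int> backed by plain Java objects.
    MapObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);

    // getMapKeyObjectInspector() describes the type of every key in the map.
    ObjectInspector keyOI = mapOI.getMapKeyObjectInspector();
    System.out.println("key type: " + keyOI.getTypeName()); // string
    System.out.println("map type: " + mapOI.getTypeName()); // map<string,int>

    // Together with getMap(), the key inspector lets generic code walk any Hive map.
    Map<String, Integer> data = new LinkedHashMap<>();
    data.put("a", 1);
    data.put("b", 2);
    for (Map.Entry<?, ?> e : mapOI.getMap(data).entrySet()) {
      System.out.println(e.getKey() + " -> " + e.getValue());
    }
  }
}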

Example 1: initialize

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
    throws UDFArgumentException {

  mapOI = (MapObjectInspector)arguments[0];
  arrayOI = (ListObjectInspector)arguments[1];

  // Check that the map key type matches the list element type.
  ObjectInspector mapItemOI = mapOI.getMapKeyObjectInspector();

  ObjectInspector listItemOI = arrayOI.getListElementObjectInspector();

  if (!ObjectInspectorUtils.compareTypes(mapItemOI, listItemOI)) {
    throw new UDFArgumentException("Map key type (" + mapItemOI + ") must match " + 
                                   "list element type (" + listItemOI + ").");
  }

  return ObjectInspectorUtils.getStandardObjectInspector(mapOI,
            ObjectInspectorCopyOption.WRITABLE);
}
 
Developer: brndnmtthws, Project: facebook-hive-udfs, Lines: 21, Source: UDFMapExclude.java
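The snippet above only shows initialize(). For context, here is a hypothetical evaluate() sketch that could pair with it, showing how the key inspector obtained from getMapKeyObjectInspector() is used at run time to compare and copy keys. It is not the actual facebook-hive-udfs implementation; it assumes the mapOI and arrayOI fields set in initialize() and the usual GenericUDF imports (DeferredObject, HiveException, ObjectInspectorUtils, LinkedHashMap).

// Hypothetical companion to the initialize() above; not the project's real code.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object mapObj = arguments[0].get();
  Object listObj = arguments[1].get();
  if (mapObj == null || listObj == null) {
    return null;
  }

  ObjectInspector keyOI = mapOI.getMapKeyObjectInspector();
  ObjectInspector valueOI = mapOI.getMapValueObjectInspector();
  ObjectInspector elemOI = arrayOI.getListElementObjectInspector();

  Map<Object, Object> result = new LinkedHashMap<Object, Object>();
  for (Map.Entry<?, ?> entry : mapOI.getMap(mapObj).entrySet()) {
    // Exclude the entry if its key equals any element of the list argument.
    boolean excluded = false;
    for (int i = 0; i < arrayOI.getListLength(listObj); i++) {
      Object candidate = arrayOI.getListElement(listObj, i);
      if (ObjectInspectorUtils.compare(entry.getKey(), keyOI, candidate, elemOI) == 0) {
        excluded = true;
        break;
      }
    }
    if (!excluded) {
      // Copy with the WRITABLE option to match the ObjectInspector returned by initialize().
      result.put(
          ObjectInspectorUtils.copyToStandardObject(entry.getKey(), keyOI,
              ObjectInspectorCopyOption.WRITABLE),
          ObjectInspectorUtils.copyToStandardObject(entry.getValue(), valueOI,
              ObjectInspectorCopyOption.WRITABLE));
    }
  }
  return result;
}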

Example 2: convert

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class this method depends on
/**
 * Convert the value using the ObjectInspector. The Writable values are converted to their
 * respective Java objects using the provided inspector.
 *
 * @param oi the field object inspector
 * @param value the value
 * @return the corresponding Java object value
 */
public static Object convert(final ObjectInspector oi, final Object value) {
  if (value == null) {
    return null;
  }
  Object outValue = null;
  switch (oi.getCategory()) {
    case PRIMITIVE:
      outValue = OrcReadFunctionMap.get(oi.getTypeName()).apply(value);
      break;
    case LIST:
      final ListObjectInspector loi = (ListObjectInspector) oi;
      final ObjectInspector eoi = loi.getListElementObjectInspector();
      outValue =
          loi.getList(value).stream().map(e -> convert(eoi, e)).collect(Collectors.toList());
      break;
    case MAP:
      final MapObjectInspector moi = (MapObjectInspector) oi;
      final ObjectInspector koi = moi.getMapKeyObjectInspector();
      final ObjectInspector voi = moi.getMapValueObjectInspector();
      outValue = moi.getMap(value).entrySet().stream()
          .collect(Collectors.toMap(e -> convert(koi, e.getKey()),
              e -> convert(voi, e.getValue()), throwingMerger(), LinkedHashMap::new));
      break;
    case STRUCT:
      final StructObjectInspector soi = (StructObjectInspector) oi;
      outValue = soi.getAllStructFieldRefs().stream()
          .map(e -> convert(e.getFieldObjectInspector(), soi.getStructFieldData(value, e)))
          .toArray();
      break;
    case UNION:
      final UnionObjectInspector uoi = (UnionObjectInspector) oi;
      final List<? extends ObjectInspector> ois = uoi.getObjectInspectors();
      final byte tag = uoi.getTag(value);
      outValue = new Object[] {tag, convert(ois.get(tag), uoi.getField(value))};
      break;
  }
  return outValue;
}
 
Developer: ampool, Project: monarch, Lines: 47, Source: OrcUtils.java
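One detail worth noting in the MAP branch: throwingMerger() is a project-specific helper that is not shown in this snippet. A plausible minimal equivalent (an assumption, not the actual monarch code) is a merge function that rejects duplicate keys, mirroring what Collectors.toMap does when no merge function is supplied:

// Assumed stand-in for the throwingMerger() helper referenced above (not the
// actual monarch code): a java.util.function.BinaryOperator that rejects
// duplicate keys produced during conversion.
private static <T> java.util.function.BinaryOperator<T> throwingMerger() {
  return (a, b) -> {
    throw new IllegalStateException("Duplicate map key for value: " + a);
  };
}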

Example 3: serializeMap

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class this method depends on
private static Block serializeMap(Type type, BlockBuilder builder, Object object, MapObjectInspector inspector)
{
    Map<?, ?> map = inspector.getMap(object);
    if (map == null) {
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }

    List<Type> typeParameters = type.getTypeParameters();
    checkArgument(typeParameters.size() == 2, "map must have exactly 2 type parameters");
    Type keyType = typeParameters.get(0);
    Type valueType = typeParameters.get(1);
    ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
    ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
    BlockBuilder currentBuilder;
    if (builder != null) {
        currentBuilder = builder.beginBlockEntry();
    }
    else {
        currentBuilder = new InterleavedBlockBuilder(typeParameters, new BlockBuilderStatus(), map.size());
    }

    for (Map.Entry<?, ?> entry : map.entrySet()) {
        // Hive skips map entries with null keys
        if (entry.getKey() != null) {
            serializeObject(keyType, currentBuilder, entry.getKey(), keyInspector);
            serializeObject(valueType, currentBuilder, entry.getValue(), valueInspector);
        }
    }

    if (builder != null) {
        builder.closeEntry();
        return null;
    }
    else {
        Block resultBlock = currentBuilder.build();
        return resultBlock;
    }
}
 
Developer: y-lan, Project: presto, Lines: 40, Source: SerDeUtils.java

Example 4: createMap

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class this method depends on
private Writable createMap(final Object obj, final MapObjectInspector inspector)
    throws SerDeException {
  final Map<?, ?> sourceMap = inspector.getMap(obj);
  final ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
  final ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
  final List<ArrayWritable> array = new ArrayList<ArrayWritable>();

  if (sourceMap != null) {
    for (final Entry<?, ?> keyValue : sourceMap.entrySet()) {
      final Writable key = createObject(keyValue.getKey(), keyInspector);
      final Writable value = createObject(keyValue.getValue(), valueInspector);
      if (key != null) {
        Writable[] arr = new Writable[2];
        arr[0] = key;
        arr[1] = value;
        array.add(new ArrayWritable(Writable.class, arr));
      }
    }
  }
  if (array.size() > 0) {
    final ArrayWritable subArray = new ArrayWritable(ArrayWritable.class,
        array.toArray(new ArrayWritable[array.size()]));
    return new ArrayWritable(Writable.class, new Writable[] {subArray});
  } else {
    return null;
  }
}
 
Developer: apache, Project: parquet-mr, Lines: 28, Source: ParquetHiveSerDe.java

Example 5: parseDynamoDBData

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class this method depends on
/**
 * Converts a Hive column of type {@code Map&lt;String,String&gt;} into a DynamoDB item.
 *
 * It is expected that the Hive data is a map of type &lt;String, String&gt;. The key in Hive data
 * map is converted to a DynamoDB attribute name. The corresponding value in Hive data map is
 * converted into DynamoDB AttributeValue. This attribute value is expected to be a JSON
 * serialized AttributeValue.
 *
 * @param data                 Data from Hive
 * @param fieldObjectInspector The object inspector for the Hive data. Must have TypeName
 *                             Map&lt;String,String&gt;.
 *
 * @return DynamoDB item representation of provided data from Hive as a
 *         Map&lt;String,AttributeValue&gt;.
 *
 * @throws SerDeException
 */
public Map<String, AttributeValue> parseDynamoDBData(Object data, ObjectInspector
    fieldObjectInspector) throws SerDeException {

  if (fieldObjectInspector.getCategory() != Category.MAP || !DerivedHiveTypeConstants
      .ITEM_MAP_TYPE_NAME.equals(fieldObjectInspector.getTypeName())) {
    throw new SerDeException(getClass().toString() + " Expecting a MapObjectInspector of type "
        + "map<string,string> for a column which maps DynamoDB item. But we got: "
        + fieldObjectInspector.getTypeName() + " Object inspector: " + fieldObjectInspector);
  }

  Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();

  /* map is of type <String, String> */
  MapObjectInspector mapOI = (MapObjectInspector) fieldObjectInspector;
  StringObjectInspector mapKeyObjectInspector = (StringObjectInspector) mapOI
      .getMapKeyObjectInspector();
  StringObjectInspector mapValueObjectInspector = (StringObjectInspector) mapOI
      .getMapValueObjectInspector();

  /*
   * Get the underlying map object. This is expected to be of type
   * <String,String>
   */
  Map<?, ?> map = mapOI.getMap(data);

  if (map == null || map.isEmpty()) {
    throw new SerDeException("Hive data cannot be null.");
  }

  /* Reconstruct the item */
  for (Entry<?, ?> entry : map.entrySet()) {

    /* Get the string key, value pair */
    String dynamoDBAttributeName = mapKeyObjectInspector.getPrimitiveJavaObject(entry.getKey());
    String dynamoDBAttributeValue = mapValueObjectInspector.getPrimitiveJavaObject(entry
        .getValue());

    /* Deserialize the AttributeValue string */
    AttributeValue deserializedAttributeValue = deserializeAttributeValue(dynamoDBAttributeValue);

    item.put(dynamoDBAttributeName, deserializedAttributeValue);
  }
  return item;
}
 
Developer: awslabs, Project: emr-dynamodb-connector, Lines: 62, Source: HiveDynamoDBItemType.java
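As a small, hedged illustration of the guard at the top of this method: a standard map<string,string> ObjectInspector built with Hive's factory reports Category.MAP and the type name "map<string,string>", which is the shape the check against DerivedHiveTypeConstants.ITEM_MAP_TYPE_NAME expects. The class name TypeNameGuardDemo below is illustrative; the factory calls are standard Hive serde2 API.

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class TypeNameGuardDemo {
  public static void main(String[] args) {
    // A standard inspector for map<string,string>, the shape parseDynamoDBData expects.
    MapObjectInspector itemOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);

    System.out.println(itemOI.getCategory());  // MAP
    System.out.println(itemOI.getTypeName());  // map<string,string>
  }
}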


Note: The org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector.getMapKeyObjectInspector method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. For distribution and use, refer to the corresponding project's license; do not republish without permission.