

Java MapTypeInfo Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo. If you are wondering what the MapTypeInfo class is for, how to use it, or what real-world code that uses it looks like, the curated class code examples below may help.


The MapTypeInfo class belongs to the org.apache.hadoop.hive.serde2.typeinfo package. Fifteen code examples of the class are shown below, sorted by popularity by default.
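As a quick orientation before the examples, here is a minimal, self-contained sketch (not taken from any of the projects listed below; the type string map<string,int> and the class name MapTypeInfoSketch are only illustrations). It shows how a MapTypeInfo is typically obtained, either by parsing a Hive type string or via TypeInfoFactory, and how its key and value types are read back through getMapKeyTypeInfo()/getMapValueTypeInfo(), the two accessors used throughout the examples.

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class MapTypeInfoSketch {
  public static void main(String[] args) {
    // Parse a Hive type string; for a map type this yields a MapTypeInfo instance.
    TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    MapTypeInfo mapType = (MapTypeInfo) parsed;

    // Read back the key and value types.
    TypeInfo keyType = mapType.getMapKeyTypeInfo();     // string
    TypeInfo valueType = mapType.getMapValueTypeInfo(); // int
    System.out.println(keyType.getTypeName() + " -> " + valueType.getTypeName());

    // Equivalent construction without parsing a type string.
    TypeInfo same = TypeInfoFactory.getMapTypeInfo(
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo);
    System.out.println(same.equals(mapType)); // true
  }
}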

Example 1: MDSMapObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
  TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
  if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
    keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }
  else{
    throw new RuntimeException( "Map key type is string only." );
  }

  valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() ); 

  if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
    getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
  }
  else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
    getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
  }
  else{
    getField = new NestedGetField();
  }
}
 
Developer: yahoojapan, Project: multiple-dimension-spread, Lines: 22, Source: MDSMapObjectInspector.java

Example 2: deserializeMap

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
private Object deserializeMap(MapTypeInfo type, Object data)
        throws SerDeException {
    if (!(data instanceof Map)) {
        throw new SerDeException("Value not of type map");
    }
    //noinspection unchecked
    Map<String, Object> map = (Map<String, Object>) data;
    Map<Object, Object> values = Maps.newHashMap();

    for (Map.Entry<String, Object> entry : map.entrySet()) {
        Object key = deserialize(type.getMapKeyTypeInfo(), entry.getKey());
        Object value = deserialize(type.getMapValueTypeInfo(), entry.getValue());
        values.put(key, value);
    }

    return values;
}
 
Developer: bazaarvoice, Project: emodb, Lines: 18, Source: EmoSerDe.java

Example 3: getJavaObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
private static ObjectInspector getJavaObjectInspector(TypeInfo typeInfo)
{
    Category category = typeInfo.getCategory();
    if (category == PRIMITIVE) {
        return getPrimitiveJavaObjectInspector(getPrimitiveTypeInfo(typeInfo.getTypeName()));
    }
    if (category == LIST) {
        ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
        return getStandardListObjectInspector(getJavaObjectInspector(listTypeInfo.getListElementTypeInfo()));
    }
    if (category == MAP) {
        MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
        return getStandardMapObjectInspector(
                getJavaObjectInspector(mapTypeInfo.getMapKeyTypeInfo()),
                getJavaObjectInspector(mapTypeInfo.getMapValueTypeInfo()));
    }
    throw new PrestoException(INTERNAL_ERROR, "Unhandled storage type: " + category);
}
 
Developer: y-lan, Project: presto, Lines: 19, Source: OrcFileWriter.java

Example 4: isSupportedType

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
public static boolean isSupportedType(TypeInfo typeInfo)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            return getPrimitiveType(primitiveCategory) != null;
        case MAP:
            MapTypeInfo mapTypeInfo = checkType(typeInfo, MapTypeInfo.class, "typeInfo");
            return isSupportedType(mapTypeInfo.getMapKeyTypeInfo()) && isSupportedType(mapTypeInfo.getMapValueTypeInfo());
        case LIST:
            ListTypeInfo listTypeInfo = checkType(typeInfo, ListTypeInfo.class, "typeInfo");
            return isSupportedType(listTypeInfo.getListElementTypeInfo());
        case STRUCT:
            StructTypeInfo structTypeInfo = checkType(typeInfo, StructTypeInfo.class, "typeInfo");
            return structTypeInfo.getAllStructFieldTypeInfos().stream()
                    .allMatch(HiveType::isSupportedType);
    }
    return false;
}
 
Developer: y-lan, Project: presto, Lines: 20, Source: HiveType.java

Example 5: parseField

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
/**
 * Parses a JSON object according to the Hive column's type.
 *
 * @param field - The JSON object to parse
 * @param fieldTypeInfo - Metadata about the Hive column
 * @return - The parsed value of the field
 */
private Object parseField(Object field, TypeInfo fieldTypeInfo) {
    switch (fieldTypeInfo.getCategory()) {
        case PRIMITIVE:
            // Jackson will return the right thing in this case, so just return
            // the object
            if (field instanceof String) {
                field = field.toString().replaceAll("\n", "\\\\n");
            }
            return field;
        case LIST:
            return parseList(field, (ListTypeInfo) fieldTypeInfo);
        case MAP:
            return parseMap(field, (MapTypeInfo) fieldTypeInfo);
        case STRUCT:
            return parseStruct(field, (StructTypeInfo) fieldTypeInfo);
        case UNION:
            // Unsupported by JSON
        default:
            return null;
    }
}
 
Developer: scaleoutsoftware, Project: hServer, Lines: 29, Source: JsonSerDe.java

Example 6: parseField

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
/**
 * Parses a JSON object according to the Hive column's type.
 *
 * @param field         - The JSON object to parse
 * @param fieldTypeInfo - Metadata about the Hive column
 * @return - The parsed value of the field
 */
private Object parseField(Object field, TypeInfo fieldTypeInfo) {
	switch (fieldTypeInfo.getCategory()) {
		case PRIMITIVE:
			// Jackson will return the right thing in this case, so just return
			// the object
			if (field instanceof String) {
				field = field.toString().replaceAll("\n", "\\\\n");
			}
			return field;
		case LIST:
			return parseList(field, (ListTypeInfo) fieldTypeInfo);
		case MAP:
			return parseMap(field, (MapTypeInfo) fieldTypeInfo);
		case STRUCT:
			return parseStruct(field, (StructTypeInfo) fieldTypeInfo);
		case UNION:
			// Unsupported by JSON
		default:
			return null;
	}
}
 
Developer: micmiu, Project: bigdata-tutorial, Lines: 29, Source: JSONCDHSerDe.java

Example 7: getRelDataTypeFromHiveType

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
private RelDataType getRelDataTypeFromHiveType(RelDataTypeFactory typeFactory, TypeInfo typeInfo) {
  switch(typeInfo.getCategory()) {
    case PRIMITIVE:
      return getRelDataTypeFromHivePrimitiveType(typeFactory, ((PrimitiveTypeInfo) typeInfo));

    case LIST: {
      ListTypeInfo listTypeInfo = (ListTypeInfo)typeInfo;
      RelDataType listElemTypeInfo = getRelDataTypeFromHiveType(typeFactory, listTypeInfo.getListElementTypeInfo());
      return typeFactory.createArrayType(listElemTypeInfo, -1);
    }

    case MAP: {
      MapTypeInfo mapTypeInfo = (MapTypeInfo)typeInfo;
      RelDataType keyType = getRelDataTypeFromHiveType(typeFactory, mapTypeInfo.getMapKeyTypeInfo());
      RelDataType valueType = getRelDataTypeFromHiveType(typeFactory, mapTypeInfo.getMapValueTypeInfo());
      return typeFactory.createMapType(keyType, valueType);
    }

    case STRUCT: {
      StructTypeInfo structTypeInfo = (StructTypeInfo)typeInfo;
      ArrayList<String> fieldNames = structTypeInfo.getAllStructFieldNames();
      ArrayList<TypeInfo> fieldHiveTypeInfoList = structTypeInfo.getAllStructFieldTypeInfos();
      List<RelDataType> fieldRelDataTypeList = Lists.newArrayList();
      for(TypeInfo fieldHiveType : fieldHiveTypeInfoList) {
        fieldRelDataTypeList.add(getRelDataTypeFromHiveType(typeFactory, fieldHiveType));
      }
      return typeFactory.createStructType(fieldRelDataTypeList, fieldNames);
    }

    case UNION:
      logger.warn("There is no UNION data type in SQL. Converting it to Sql type OTHER to avoid " +
          "breaking INFORMATION_SCHEMA queries");
      return typeFactory.createSqlType(SqlTypeName.OTHER);
  }

  throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
  return null;
}
 
Developer: skhalifa, Project: QDrill, Lines: 39, Source: DrillHiveTable.java

Example 8: HiveMapSchema

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
public HiveMapSchema( final MapContainerField schema ) throws IOException{
  this.schema = schema;

  MapTypeInfo mapSchema = new MapTypeInfo();
  mapSchema.setMapKeyTypeInfo( TypeInfoFactory.getPrimitiveTypeInfo( TypeInfoFactory.stringTypeInfo.getTypeName() ) );
  mapSchema.setMapValueTypeInfo( HiveSchemaFactory.getHiveSchema( schema.getField() ) );
  hiveSchema = mapSchema;
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines: 9, Source: HiveMapSchema.java

Example 9: deserialize

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
/**
 * Deserializes a raw value to the provided type.
 */
private Object deserialize(TypeInfo type, Object rawValue)
        throws SerDeException {
    Object value = null;

    if (rawValue != null) {
        switch (type.getCategory()) {
            case PRIMITIVE:
                value = deserializePrimitive((PrimitiveTypeInfo) type, rawValue);
                break;
            case STRUCT:
                value = deserializeStruct((StructTypeInfo) type, rawValue);
                break;
            case MAP:
                value = deserializeMap((MapTypeInfo) type, rawValue);
                break;
            case LIST:
                value = deserializeList((ListTypeInfo) type, rawValue);
                break;
            case UNION:
                value = deserializeUnion((UnionTypeInfo) type, rawValue);
                break;
        }
    }

    return value;
}
 
Developer: bazaarvoice, Project: emodb, Lines: 30, Source: EmoSerDe.java

Example 10: getTypeSignature

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
@Nonnull
private static TypeSignature getTypeSignature(TypeInfo typeInfo)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            Type primitiveType = getPrimitiveType(primitiveCategory);
            if (primitiveType == null) {
                break;
            }
            return primitiveType.getTypeSignature();
        case MAP:
            MapTypeInfo mapTypeInfo = checkType(typeInfo, MapTypeInfo.class, "fieldInspector");
            TypeSignature keyType = getTypeSignature(mapTypeInfo.getMapKeyTypeInfo());
            TypeSignature valueType = getTypeSignature(mapTypeInfo.getMapValueTypeInfo());
            return new TypeSignature(
                    StandardTypes.MAP,
                    ImmutableList.of(TypeSignatureParameter.of(keyType), TypeSignatureParameter.of(valueType)));
        case LIST:
            ListTypeInfo listTypeInfo = checkType(typeInfo, ListTypeInfo.class, "fieldInspector");
            TypeSignature elementType = getTypeSignature(listTypeInfo.getListElementTypeInfo());
            return new TypeSignature(
                    StandardTypes.ARRAY,
                    ImmutableList.of(TypeSignatureParameter.of(elementType)));
        case STRUCT:
            StructTypeInfo structTypeInfo = checkType(typeInfo, StructTypeInfo.class, "fieldInspector");
            List<TypeSignature> fieldTypes = structTypeInfo.getAllStructFieldTypeInfos()
                    .stream()
                    .map(HiveType::getTypeSignature)
                    .collect(toList());
            return new TypeSignature(StandardTypes.ROW, fieldTypes, structTypeInfo.getAllStructFieldNames());
    }
    throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", typeInfo));
}
 
Developer: y-lan, Project: presto, Lines: 35, Source: HiveType.java

Example 11: parseMap

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
/**
 * Parse a JSON object as a map. This uses the Hive metadata for the map
 * values to determine how to parse the values. The map is assumed to have
 * a string for a key.
 *
 * @param field - The JSON list to parse
 * @param fieldTypeInfo - Metadata about the Hive column
 * @return
 */
private Object parseMap(Object field, MapTypeInfo fieldTypeInfo) {
    Map<Object,Object> map = (Map<Object,Object>) field;
    TypeInfo valueTypeInfo = fieldTypeInfo.getMapValueTypeInfo();

    for (Map.Entry<Object,Object> entry : map.entrySet()) {
        map.put(entry.getKey(), parseField(entry.getValue(), valueTypeInfo));
    }
    return map;
}
 
Developer: scaleoutsoftware, Project: hServer, Lines: 19, Source: JsonSerDe.java

Example 12: parseMap

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
/**
 * Parse a JSON object as a map. This uses the Hive metadata for the map
 * values to determine how to parse the values. The map is assumed to have
 * a string for a key.
 *
 * @param field         - The JSON list to parse
 * @param fieldTypeInfo - Metadata about the Hive column
 * @return
 */
private Object parseMap(Object field, MapTypeInfo fieldTypeInfo) {
	Map<Object, Object> map = (Map<Object, Object>) field;
	TypeInfo valueTypeInfo = fieldTypeInfo.getMapValueTypeInfo();
	if (map != null) {
		for (Map.Entry<Object, Object> entry : map.entrySet()) {
			map.put(entry.getKey(), parseField(entry.getValue(), valueTypeInfo));
		}
	}
	return map;
}
 
Developer: micmiu, Project: bigdata-tutorial, Lines: 20, Source: JSONCDHSerDe.java

Example 13: convertType

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
private static Type convertType(final String name, final TypeInfo typeInfo, final Repetition repetition) {
  if (typeInfo.getCategory().equals(Category.PRIMITIVE)) {
    if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.BINARY, name);
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo) ||
        typeInfo.equals(TypeInfoFactory.shortTypeInfo) ||
        typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.INT32, name);
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.INT64, name);
    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.DOUBLE, name);
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.FLOAT, name);
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.BOOLEAN, name);
    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
      // TODO : binaryTypeInfo is a byte array. Need to map it
      throw new UnsupportedOperationException("Binary type not implemented");
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
      throw new UnsupportedOperationException("Timestamp type not implemented");
    } else if (typeInfo.equals(TypeInfoFactory.voidTypeInfo)) {
      throw new UnsupportedOperationException("Void type not implemented");
    } else if (typeInfo.equals(TypeInfoFactory.unknownTypeInfo)) {
      throw new UnsupportedOperationException("Unknown type not implemented");
    } else {
      throw new IllegalArgumentException("Unknown type: " + typeInfo);
    }
  } else if (typeInfo.getCategory().equals(Category.LIST)) {
    return convertArrayType(name, (ListTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
    return convertStructType(name, (StructTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.MAP)) {
    return convertMapType(name, (MapTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.UNION)) {
    throw new UnsupportedOperationException("Union type not implemented");
  } else {
    throw new IllegalArgumentException("Unknown type: " + typeInfo);
  }
}
 
Developer: apache, Project: parquet-mr, Lines: 41, Source: HiveSchemaConverter.java

Example 14: convertMapType

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
private static GroupType convertMapType(final String name, final MapTypeInfo typeInfo) {
  final Type keyType = convertType(ParquetHiveSerDe.MAP_KEY.toString(),
      typeInfo.getMapKeyTypeInfo(), Repetition.REQUIRED);
  final Type valueType = convertType(ParquetHiveSerDe.MAP_VALUE.toString(),
      typeInfo.getMapValueTypeInfo());
  return ConversionPatterns.mapType(Repetition.OPTIONAL, name, keyType, valueType);
}
 
Developer: apache, Project: parquet-mr, Lines: 8, Source: HiveSchemaConverter.java

Example 15: getObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; // import the required package/class
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
  if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetStringInspector;
  } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
    return new ArrayWritableObjectInspector((StructTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.LIST)) {
    final TypeInfo subTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
    return new ParquetHiveArrayInspector(getObjectInspector(subTypeInfo));
  } else if (typeInfo.getCategory().equals(Category.MAP)) {
    final TypeInfo keyTypeInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
    final TypeInfo valueTypeInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
    if (keyTypeInfo.equals(TypeInfoFactory.stringTypeInfo) || keyTypeInfo.equals(TypeInfoFactory.byteTypeInfo)
            || keyTypeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
      return new DeepParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
    } else {
      return new StandardParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
    }
  } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
    throw new UnsupportedOperationException("timestamp not implemented yet");
  } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetByteInspector;
  } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetShortInspector;
  } else {
    throw new IllegalArgumentException("Unknown field info: " + typeInfo);
  }

}
 
Developer: apache, Project: parquet-mr, Lines: 39, Source: ArrayWritableObjectInspector.java


Note: The org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their authors; copyright of the source code remains with the original authors, and distribution or use should follow the corresponding project's license. Do not republish without permission.