This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector.getMapValueObjectInspector. If you are wondering what MapObjectInspector.getMapValueObjectInspector does and how to use it, the curated code samples below should help. You can also explore further usages of its enclosing class, org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector.
Below are 10 code examples of MapObjectInspector.getMapValueObjectInspector, sorted by popularity by default.
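Before diving into the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing what getMapValueObjectInspector returns for a standard map<string,int> inspector:

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class MapValueOIDemo {
    public static void main(String[] args) {
        // Build a standard inspector for map<string,int>.
        MapObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        // The value inspector describes every value in the map; there is one
        // shared inspector per map, not one per entry.
        ObjectInspector valueOI = mapOI.getMapValueObjectInspector();
        System.out.println(valueOI.getTypeName()); // prints "int"
    }
}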
Example 1: HiveMapParser
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
public HiveMapParser( final Object row , final MapObjectInspector mapObjectInspector ){
    this.row = row;
    this.mapObjectInspector = mapObjectInspector;
    childObjectInspector = mapObjectInspector.getMapValueObjectInspector();
    childConverter = HivePrimitiveConverterFactory.get( childObjectInspector );
    childHasParser = HiveParserFactory.hasParser( childObjectInspector );
}
Example 2: OrcMapParser
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
public OrcMapParser( final Object row , final MapObjectInspector mapObjectInspector ){
    this.row = row;
    this.mapObjectInspector = mapObjectInspector;
    childObjectInspector = mapObjectInspector.getMapValueObjectInspector();
    childConverter = OrcPrimitiveConverterFactory.get( childObjectInspector );
    childHasParser = OrcParserFactory.hasParser( childObjectInspector );
}
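Note that both parsers fetch the value inspector once in the constructor: unlike a struct, every entry of a Hive map shares a single value ObjectInspector, so one converter/parser lookup per map is enough.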
Example 3: convert
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
/**
 * Convert the value using the ObjectInspector. The Writable values are converted to their
 * respective Java objects using the provided inspector.
 *
 * @param oi the field object inspector
 * @param value the value
 * @return the corresponding Java object value
 */
public static Object convert(final ObjectInspector oi, final Object value) {
    if (value == null) {
        return null;
    }
    Object outValue = null;
    switch (oi.getCategory()) {
        case PRIMITIVE:
            outValue = OrcReadFunctionMap.get(oi.getTypeName()).apply(value);
            break;
        case LIST:
            final ListObjectInspector loi = (ListObjectInspector) oi;
            final ObjectInspector eoi = loi.getListElementObjectInspector();
            outValue =
                loi.getList(value).stream().map(e -> convert(eoi, e)).collect(Collectors.toList());
            break;
        case MAP:
            final MapObjectInspector moi = (MapObjectInspector) oi;
            final ObjectInspector koi = moi.getMapKeyObjectInspector();
            final ObjectInspector voi = moi.getMapValueObjectInspector();
            outValue = moi.getMap(value).entrySet().stream()
                .collect(Collectors.toMap(e -> convert(koi, e.getKey()),
                    e -> convert(voi, e.getValue()), throwingMerger(), LinkedHashMap::new));
            break;
        case STRUCT:
            final StructObjectInspector soi = (StructObjectInspector) oi;
            outValue = soi.getAllStructFieldRefs().stream()
                .map(e -> convert(e.getFieldObjectInspector(), soi.getStructFieldData(value, e)))
                .toArray();
            break;
        case UNION:
            final UnionObjectInspector uoi = (UnionObjectInspector) oi;
            final List<? extends ObjectInspector> ois = uoi.getObjectInspectors();
            final byte tag = uoi.getTag(value);
            outValue = new Object[] {tag, convert(ois.get(tag), uoi.getField(value))};
            break;
    }
    return outValue;
}
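A hedged usage sketch for this utility (assuming convert and its project-specific OrcReadFunctionMap lookup table are on the classpath, that the table maps Writable primitives to their Java counterparts, and that Text/IntWritable from org.apache.hadoop.io plus java.util are imported):

// Inspect a map<string,int> of Writables and convert it to plain Java objects.
MapObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
Map<Object, Object> raw = new LinkedHashMap<>();
raw.put(new Text("a"), new IntWritable(1));
Object javaMap = convert(mapOI, raw); // expected: a LinkedHashMap such as {a=1}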
Example 4: init
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] argOIs) throws HiveException {
    super.init(mode, argOIs);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) { // from original data
        this.inputKeyOI = HiveUtils.asPrimitiveObjectInspector(argOIs[0]);
        this.inputValueOI = argOIs[1];
        this.sizeOI = HiveUtils.asIntegerOI(argOIs[2]);
    } else { // from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) argOIs[0];
        this.internalMergeOI = soi;
        this.partialMapField = soi.getStructFieldRef("partialMap");
        // re-extract input key/value OIs
        MapObjectInspector partialMapOI = (MapObjectInspector) partialMapField.getFieldObjectInspector();
        this.inputKeyOI = HiveUtils.asPrimitiveObjectInspector(partialMapOI.getMapKeyObjectInspector());
        this.inputValueOI = partialMapOI.getMapValueObjectInspector();
        this.partialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(inputKeyOI),
            ObjectInspectorUtils.getStandardObjectInspector(inputValueOI));
        this.sizeField = soi.getStructFieldRef("size");
        this.sizeOI = (PrimitiveObjectInspector) sizeField.getFieldObjectInspector();
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) { // terminatePartial
        outputOI = internalMergeOI(inputKeyOI, inputValueOI);
    } else { // terminate
        outputOI = ObjectInspectorFactory.getStandardMapObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(inputKeyOI),
            ObjectInspectorUtils.getStandardObjectInspector(inputValueOI));
    }
    return outputOI;
}
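The internalMergeOI(...) helper called in the terminatePartial branch is not shown on this page. A hypothetical sketch of what it might look like, inferred from the "partialMap" and "size" struct fields read back in the merge branch (names and types are assumptions, not the original code):

private static StructObjectInspector internalMergeOI(PrimitiveObjectInspector keyOI,
        ObjectInspector valueOI) {
    // Struct with the two fields the merge branch expects: the partial map and a size.
    List<String> fieldNames = Arrays.asList("partialMap", "size");
    List<ObjectInspector> fieldOIs = Arrays.asList(
        ObjectInspectorFactory.getStandardMapObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(keyOI),
            ObjectInspectorUtils.getStandardObjectInspector(valueOI)),
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}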
Example 5: serializeMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
private static Block serializeMap(Type type, BlockBuilder builder, Object object, MapObjectInspector inspector)
{
    Map<?, ?> map = inspector.getMap(object);
    if (map == null) {
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }
    List<Type> typeParameters = type.getTypeParameters();
    checkArgument(typeParameters.size() == 2, "map must have exactly 2 type parameters");
    Type keyType = typeParameters.get(0);
    Type valueType = typeParameters.get(1);
    ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
    ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
    BlockBuilder currentBuilder;
    if (builder != null) {
        currentBuilder = builder.beginBlockEntry();
    }
    else {
        currentBuilder = new InterleavedBlockBuilder(typeParameters, new BlockBuilderStatus(), map.size());
    }
    for (Map.Entry<?, ?> entry : map.entrySet()) {
        // Hive skips map entries with null keys
        if (entry.getKey() != null) {
            serializeObject(keyType, currentBuilder, entry.getKey(), keyInspector);
            serializeObject(valueType, currentBuilder, entry.getValue(), valueInspector);
        }
    }
    if (builder != null) {
        builder.closeEntry();
        return null;
    }
    else {
        return currentBuilder.build();
    }
}
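Two design points are worth noting here: passing builder == null selects a standalone InterleavedBlockBuilder and returns the finished Block, while a non-null parent builder gets the entries appended in place and null returned; and entries with null keys are silently dropped to match Hive's own map semantics.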
Example 6: deparseMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
private Object deparseMap(Object obj, MapObjectInspector mapOI) {
    Map<Object, Object> map = new HashMap<Object, Object>();
    ObjectInspector mapValOI = mapOI.getMapValueObjectInspector();
    Map<?, ?> fields = mapOI.getMap(obj);
    for (Map.Entry<?, ?> field : fields.entrySet()) {
        Object fieldName = field.getKey();
        Object fieldObj = field.getValue();
        map.put(fieldName, deparseObject(fieldObj, mapValOI));
    }
    return map;
}
Example 7: deparseMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
private Object deparseMap(Object obj, MapObjectInspector mapOI) {
    Map<Object, Object> map = new HashMap<Object, Object>();
    ObjectInspector mapValOI = mapOI.getMapValueObjectInspector();
    Map<?, ?> fields = mapOI.getMap(obj);
    for (Map.Entry<?, ?> field : fields.entrySet()) {
        Object fieldName = field.getKey();
        Object fieldObj = field.getValue();
        map.put(fieldName, deparseObject(fieldObj, mapValOI));
    }
    return map;
}
Example 8: createMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
private Writable createMap(final Object obj, final MapObjectInspector inspector)
        throws SerDeException {
    final Map<?, ?> sourceMap = inspector.getMap(obj);
    final ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
    final ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
    final List<ArrayWritable> array = new ArrayList<ArrayWritable>();
    if (sourceMap != null) {
        for (final Entry<?, ?> keyValue : sourceMap.entrySet()) {
            final Writable key = createObject(keyValue.getKey(), keyInspector);
            final Writable value = createObject(keyValue.getValue(), valueInspector);
            if (key != null) {
                Writable[] arr = new Writable[2];
                arr[0] = key;
                arr[1] = value;
                array.add(new ArrayWritable(Writable.class, arr));
            }
        }
    }
    if (array.size() > 0) {
        final ArrayWritable subArray = new ArrayWritable(ArrayWritable.class,
            array.toArray(new ArrayWritable[array.size()]));
        return new ArrayWritable(Writable.class, new Writable[] {subArray});
    } else {
        return null;
    }
}
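The double ArrayWritable nesting is deliberate: each key/value pair becomes a two-element group, the pairs are collected into one group, and that group is wrapped once more, which appears to mirror how the parquet-hive bindings lay out a map as a single repeated key/value group.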
Example 9: deparseMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
private Object deparseMap(final Object obj, final MapObjectInspector mapOI) {
    final Map<Object, Object> map = new HashMap<Object, Object>();
    final ObjectInspector mapValOI = mapOI.getMapValueObjectInspector();
    final Map<?, ?> fields = mapOI.getMap(obj);
    for (final Map.Entry<?, ?> field : fields.entrySet()) {
        final Object fieldName = field.getKey();
        final Object fieldObj = field.getValue();
        map.put(fieldName, deparseObject(fieldObj, mapValOI));
    }
    return map;
}
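Examples 6, 7, and 9 all delegate per-value conversion to a deparseObject(...) dispatcher that this page does not show. A hypothetical sketch of such a dispatcher (the name, signature, and handled categories are assumptions, not the original code):

private Object deparseObject(Object obj, ObjectInspector oi) {
    switch (oi.getCategory()) {
        case PRIMITIVE:
            // Unwrap Writables (Text, IntWritable, ...) into plain Java objects.
            return ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(obj);
        case MAP:
            return deparseMap(obj, (MapObjectInspector) oi);
        // A real implementation would also handle LIST and STRUCT here.
        default:
            throw new IllegalArgumentException("Unsupported category: " + oi.getCategory());
    }
}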
Example 10: parseDynamoDBData
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; // import the package/class the method depends on
/**
 * Converts a Hive column of type {@code Map<String,String>} into a DynamoDB item.
 *
 * The Hive data is expected to be a map of type <String, String>. Each key in the Hive map
 * is converted to a DynamoDB attribute name; the corresponding value is converted into a
 * DynamoDB AttributeValue and is expected to be a JSON-serialized AttributeValue.
 *
 * @param data Data from Hive
 * @param fieldObjectInspector The object inspector for the Hive data. Must have TypeName
 *          Map<String,String>.
 *
 * @return DynamoDB item representation of the provided Hive data as a
 *         Map<String,AttributeValue>.
 *
 * @throws SerDeException
 */
public Map<String, AttributeValue> parseDynamoDBData(Object data,
        ObjectInspector fieldObjectInspector) throws SerDeException {
    if (fieldObjectInspector.getCategory() != Category.MAP
        || !DerivedHiveTypeConstants.ITEM_MAP_TYPE_NAME.equals(fieldObjectInspector.getTypeName())) {
        throw new SerDeException(getClass().toString() + " Expecting a MapObjectInspector of type "
            + "map<string,string> for a column which maps DynamoDB item. But we got: "
            + fieldObjectInspector.getTypeName() + " Object inspector: " + fieldObjectInspector);
    }
    Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
    /* map is of type <String, String> */
    MapObjectInspector mapOI = (MapObjectInspector) fieldObjectInspector;
    StringObjectInspector mapKeyObjectInspector =
        (StringObjectInspector) mapOI.getMapKeyObjectInspector();
    StringObjectInspector mapValueObjectInspector =
        (StringObjectInspector) mapOI.getMapValueObjectInspector();
    /* Get the underlying map object. This is expected to be of type <String, String>. */
    Map<?, ?> map = mapOI.getMap(data);
    if (map == null || map.isEmpty()) {
        throw new SerDeException("Hive data cannot be null or empty.");
    }
    /* Reconstruct the item */
    for (Entry<?, ?> entry : map.entrySet()) {
        /* Get the string key, value pair */
        String dynamoDBAttributeName = mapKeyObjectInspector.getPrimitiveJavaObject(entry.getKey());
        String dynamoDBAttributeValue =
            mapValueObjectInspector.getPrimitiveJavaObject(entry.getValue());
        /* Deserialize the AttributeValue string */
        AttributeValue deserializedAttributeValue = deserializeAttributeValue(dynamoDBAttributeValue);
        item.put(dynamoDBAttributeName, deserializedAttributeValue);
    }
    return item;
}
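A hedged usage sketch for this method (serde stands for an instance of the enclosing SerDe; the exact JSON accepted for each value depends on the project's deserializeAttributeValue, so the payload below is illustrative only):

// Build an inspector whose type name is exactly "map<string,string>".
ObjectInspector itemOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
Map<String, String> hiveRow = new HashMap<String, String>();
hiveRow.put("id", "{\"s\":\"user-42\"}"); // hypothetical JSON-serialized AttributeValue
Map<String, AttributeValue> item = serde.parseDynamoDBData(hiveRow, itemOI);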