本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector.getMap方法的典型用法代码示例。如果您正苦于以下问题：Java MapObjectInspector.getMap方法的具体用法？Java MapObjectInspector.getMap怎么用？Java MapObjectInspector.getMap使用的例子？那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector的用法示例。
在下文中一共展示了MapObjectInspector.getMap方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: write
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
@Override
void write(Object obj) throws IOException {
  // Running total of the raw (pre-encoding) size of everything written for this row.
  long rawDataSize = 0;
  if (obj != null) {
    final MapObjectInspector mapInspector = (MapObjectInspector) inspector;
    // this sucks, but it will have to do until we can get a better
    // accessor in the MapObjectInspector.
    final Map<?, ?> entries = mapInspector.getMap(obj);
    // Don't use getMapSize(), it's inconsistent for some object inspectors
    lengths.write(entries.size());
    // Child writer 0 handles keys, child writer 1 handles values.
    for (final Map.Entry<?, ?> pair : entries.entrySet()) {
      childrenWriters[0].write(pair.getKey());
      childrenWriters[1].write(pair.getValue());
      rawDataSize += childrenWriters[0].getRowRawDataSize();
      rawDataSize += childrenWriters[1].getRowRawDataSize();
    }
  }
  // Null rows contribute a raw size of 0.
  super.write(obj, rawDataSize);
}
示例2: serializeMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
/**
 * Serializes a Hive map (read through {@code inspector}) into a Presto {@code Block}.
 *
 * <p>If {@code builder} is non-null the map is appended to it as a nested entry and
 * {@code null} is returned; otherwise a standalone block is built and returned.
 * A null Hive map is recorded as a null entry on the parent builder (which must then
 * be non-null). Entries with null keys are skipped, matching Hive's behavior.
 *
 * @param type Presto map type; must carry exactly two type parameters (key, value)
 * @param builder parent block builder, or null to build a standalone block
 * @param object the Hive-native map object
 * @param inspector inspector used to decode {@code object}
 * @return the standalone block when {@code builder} is null, otherwise null
 */
private static Block serializeMap(Type type, BlockBuilder builder, Object object, MapObjectInspector inspector)
{
    Map<?, ?> map = inspector.getMap(object);
    if (map == null) {
        // A null map can only be represented as a null entry in a parent block.
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }
    List<Type> typeParameters = type.getTypeParameters();
    // Fixed grammar in the precondition message ("parameter" -> "parameters").
    checkArgument(typeParameters.size() == 2, "map must have exactly 2 type parameters");
    Type keyType = typeParameters.get(0);
    Type valueType = typeParameters.get(1);
    ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
    ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
    BlockBuilder currentBuilder;
    if (builder != null) {
        currentBuilder = builder.beginBlockEntry();
    }
    else {
        currentBuilder = new InterleavedBlockBuilder(typeParameters, new BlockBuilderStatus(), map.size());
    }
    for (Map.Entry<?, ?> entry : map.entrySet()) {
        // Hive skips map entries with null keys
        if (entry.getKey() != null) {
            serializeObject(keyType, currentBuilder, entry.getKey(), keyInspector);
            serializeObject(valueType, currentBuilder, entry.getValue(), valueInspector);
        }
    }
    if (builder != null) {
        builder.closeEntry();
        return null;
    }
    else {
        return currentBuilder.build();
    }
}
示例3: deparseMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
/**
 * Converts a Hive map object into a plain {@link HashMap}, recursively deparsing
 * each value with the map's value inspector. Keys are copied through unchanged.
 *
 * @param obj the Hive-native map object (may represent a null map)
 * @param mapOI inspector for {@code obj}
 * @return a mutable HashMap of deparsed entries, or null when the Hive map is null
 */
private Object deparseMap(Object obj, MapObjectInspector mapOI) {
  ObjectInspector mapValOI = mapOI.getMapValueObjectInspector();
  Map<?, ?> fields = mapOI.getMap(obj);
  // getMap() returns null for a null Hive value; propagate null instead of NPE-ing
  // on entrySet() below (mirrors the null handling used elsewhere in this file).
  if (fields == null) {
    return null;
  }
  Map<Object, Object> map = new HashMap<Object, Object>();
  for (Map.Entry<?, ?> field : fields.entrySet()) {
    map.put(field.getKey(), deparseObject(field.getValue(), mapValOI));
  }
  return map;
}
示例4: deparseMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
/**
 * Rebuilds a Hive map as a plain HashMap, deparsing every value through the
 * map's value inspector. Keys are passed through untouched.
 */
private Object deparseMap(Object obj, MapObjectInspector mapOI) {
  final ObjectInspector valueInspector = mapOI.getMapValueObjectInspector();
  final Map<Object, Object> result = new HashMap<Object, Object>();
  for (final Map.Entry<?, ?> entry : mapOI.getMap(obj).entrySet()) {
    result.put(entry.getKey(), deparseObject(entry.getValue(), valueInspector));
  }
  return result;
}
示例5: createMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
/**
 * Serializes a Hive map into the nested ArrayWritable layout expected by Parquet:
 * an outer single-element array wrapping an array of [key, value] pairs.
 * Entries whose key serializes to null are dropped; an empty result yields null.
 */
private Writable createMap(final Object obj, final MapObjectInspector inspector)
    throws SerDeException {
  final Map<?, ?> sourceMap = inspector.getMap(obj);
  final ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
  final ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
  final List<ArrayWritable> pairs = new ArrayList<ArrayWritable>();
  if (sourceMap != null) {
    for (final Entry<?, ?> entry : sourceMap.entrySet()) {
      // Convert both sides first (matching original evaluation order), then
      // keep the pair only when the key converted to a non-null Writable.
      final Writable key = createObject(entry.getKey(), keyInspector);
      final Writable value = createObject(entry.getValue(), valueInspector);
      if (key != null) {
        pairs.add(new ArrayWritable(Writable.class, new Writable[] {key, value}));
      }
    }
  }
  if (pairs.isEmpty()) {
    return null;
  }
  final ArrayWritable pairArray = new ArrayWritable(ArrayWritable.class,
      pairs.toArray(new ArrayWritable[pairs.size()]));
  return new ArrayWritable(Writable.class, new Writable[] {pairArray});
}
示例6: deparseMap
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
/**
 * Translates a Hive map into a plain HashMap, running each value through
 * deparseObject with the map's value inspector; keys are kept as-is.
 */
private Object deparseMap(final Object obj, final MapObjectInspector mapOI) {
  final ObjectInspector valOI = mapOI.getMapValueObjectInspector();
  final Map<?, ?> source = mapOI.getMap(obj);
  final Map<Object, Object> deparsed = new HashMap<Object, Object>();
  final Iterator<?> entries = source.entrySet().iterator();
  while (entries.hasNext()) {
    final Map.Entry<?, ?> entry = (Map.Entry<?, ?>) entries.next();
    deparsed.put(entry.getKey(), deparseObject(entry.getValue(), valOI));
  }
  return deparsed;
}
示例7: parseDynamoDBData
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; //导入方法依赖的package包/类
/**
* Converts a Hive column of type {@code Map<String,String>} into a DynamoDB item.
*
* It is expected that the Hive data is a map of type <String, String>. The key in Hive data
* map is converted to a DynamoDB attribute name. The corresponding value in Hive data map is
* converted into DynamoDB AttributeValue. This attribute value is expected to be a JSON
* serialized AttributeValue.
*
* @param data Data from Hive
* @param fieldObjectInspector The object inspector for the Hive data. Must have TypeName
* Map<String,String>.
*
* @return DynamoDB item representation of provided data from Hive as a
* Map<String,AttributeValue>.
*
* @throws SerDeException
*/
public Map<String, AttributeValue> parseDynamoDBData(Object data, ObjectInspector
    fieldObjectInspector) throws SerDeException {
  // The column must be exactly map<string,string>; anything else cannot carry
  // JSON-serialized AttributeValues.
  if (fieldObjectInspector.getCategory() != Category.MAP || !DerivedHiveTypeConstants
      .ITEM_MAP_TYPE_NAME.equals(fieldObjectInspector.getTypeName())) {
    throw new SerDeException(getClass().toString() + " Expecting a MapObjectInspector of type "
        + "map<string,string> for a column which maps DynamoDB item. But we got: "
        + fieldObjectInspector.getTypeName() + " Object inspector: " + fieldObjectInspector);
  }
  Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
  /* map is of type <String, String> */
  MapObjectInspector mapOI = (MapObjectInspector) fieldObjectInspector;
  StringObjectInspector mapKeyObjectInspector = (StringObjectInspector) mapOI
      .getMapKeyObjectInspector();
  StringObjectInspector mapValueObjectInspector = (StringObjectInspector) mapOI
      .getMapValueObjectInspector();
  /*
   * Get the underlying map object. This is expected to be of type
   * <String,String>
   */
  Map<?, ?> map = mapOI.getMap(data);
  // An empty map is rejected alongside null, so the message must mention both
  // (the previous message claimed only "null", which was misleading for {}).
  if (map == null || map.isEmpty()) {
    throw new SerDeException("Hive data cannot be null or empty.");
  }
  /* Reconstruct the item: attribute name from the key, AttributeValue from the
   * JSON-serialized value string. */
  for (Entry<?, ?> entry : map.entrySet()) {
    /* Get the string key, value pair */
    String dynamoDBAttributeName = mapKeyObjectInspector.getPrimitiveJavaObject(entry.getKey());
    String dynamoDBAttributeValue = mapValueObjectInspector.getPrimitiveJavaObject(entry
        .getValue());
    /* Deserialize the AttributeValue string */
    AttributeValue deserializedAttributeValue = deserializeAttributeValue(dynamoDBAttributeValue);
    item.put(dynamoDBAttributeName, deserializedAttributeValue);
  }
  return item;
}