This page collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.StructField.getFieldName. If you have been wondering what StructField.getFieldName does, how to call it, or where to find examples of it, the curated snippets below should help. You can also explore further usage examples of its declaring class, org.apache.hadoop.hive.serde2.objectinspector.StructField.
The following shows code examples of the StructField.getFieldName method, collected from open-source projects and sorted by popularity by default.
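Before the project examples, here is a minimal self-contained sketch of the API (the struct layout is made up for illustration): build a standard struct ObjectInspector and read each field's name back through StructField.getFieldName().

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class GetFieldNameDemo {
  public static void main(String[] args) {
    // Build a struct<id:int, name:string> inspector; this layout is an
    // assumption for the demo, not taken from any of the projects below.
    List<String> names = Arrays.asList("id", "name");
    List<ObjectInspector> inspectors = Arrays.asList(
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    StructObjectInspector structOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, inspectors);

    // getAllStructFieldRefs() is the usual way to reach StructField instances.
    for (StructField field : structOI.getAllStructFieldRefs()) {
      // Hive normalizes struct field names to lower case.
      System.out.println(field.getFieldName());
    }
  }
}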
Example 1: deparseStruct
import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the package/class this method depends on
/**
* Deparses struct data into a serializable JSON object.
*
* @param obj - Hive struct data
* @param structOI - ObjectInspector for the struct
* @param isRow - Whether or not this struct represents a top-level row
* @return - A deparsed struct
*/
private Object deparseStruct(Object obj,
                             StructObjectInspector structOI,
                             boolean isRow) {
  Map<Object, Object> struct = new HashMap<Object, Object>();
  List<? extends StructField> fields = structOI.getAllStructFieldRefs();
  for (int i = 0; i < fields.size(); i++) {
    StructField field = fields.get(i);
    // The top-level row object is treated slightly differently from other
    // structs, because the field names for the row do not correctly reflect
    // the Hive column names. For lower-level structs, we can get the field
    // name from the associated StructField object.
    String fieldName = isRow ? colNames.get(i) : field.getFieldName();
    ObjectInspector fieldOI = field.getFieldObjectInspector();
    Object fieldObj = structOI.getStructFieldData(obj, field);
    struct.put(fieldName, deparseObject(fieldObj, fieldOI));
  }
  return struct;
}
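This example (and the identically-structured one that follows) delegates to a deparseObject helper that is not shown on this page. For orientation, here is a minimal, hypothetical sketch of such a recursive dispatcher, keyed off the ObjectInspector category; the names and structure are assumptions, not the original project's code:

// Hypothetical sketch of a recursive dispatcher such as deparseObject.
private Object deparseObject(Object obj, ObjectInspector oi) {
  if (obj == null) {
    return null;
  }
  switch (oi.getCategory()) {
    case PRIMITIVE:
      // Unwrap Hive's internal representation into a plain Java object.
      return ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(obj);
    case LIST: {
      ListObjectInspector loi = (ListObjectInspector) oi;
      List<Object> list = new ArrayList<Object>();
      for (Object element : loi.getList(obj)) {
        list.add(deparseObject(element, loi.getListElementObjectInspector()));
      }
      return list;
    }
    case MAP: {
      MapObjectInspector moi = (MapObjectInspector) oi;
      Map<Object, Object> map = new HashMap<Object, Object>();
      for (Map.Entry<?, ?> entry : moi.getMap(obj).entrySet()) {
        map.put(deparseObject(entry.getKey(), moi.getMapKeyObjectInspector()),
                deparseObject(entry.getValue(), moi.getMapValueObjectInspector()));
      }
      return map;
    }
    case STRUCT:
      // Nested structs re-enter deparseStruct with isRow = false, so their
      // keys come from StructField.getFieldName().
      return deparseStruct(obj, (StructObjectInspector) oi, false);
    default:
      throw new IllegalArgumentException("Unsupported category: " + oi.getCategory());
  }
}

The STRUCT branch is what makes the isRow flag matter: only the top-level row falls back to the column-name list, while every nested struct is keyed by its StructField names.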
Example 2: deparseStruct
import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the package/class this method depends on
/**
 * Deparses struct data into a serializable JSON object.
 *
 * @param obj - Hive struct data
 * @param structOI - ObjectInspector for the struct
 * @param isRow - Whether or not this struct represents a top-level row
 * @return - A deparsed struct
 */
private Object deparseStruct(final Object obj,
                             final StructObjectInspector structOI,
                             final boolean isRow) {
  final Map<Object, Object> struct = new HashMap<Object, Object>();
  final List<? extends StructField> fields = structOI.getAllStructFieldRefs();
  for (int i = 0; i < fields.size(); i++) {
    final StructField field = fields.get(i);
    // The top-level row object is treated slightly differently from other
    // structs, because the field names for the row do not correctly reflect
    // the Hive column names. For lower-level structs, we can get the field
    // name from the associated StructField object.
    final String fieldName = isRow ? colNames.get(i) : field.getFieldName();
    final ObjectInspector fieldOI = field.getFieldObjectInspector();
    final Object fieldObj = structOI.getStructFieldData(obj, field);
    struct.put(fieldName, deparseObject(fieldObj, fieldOI));
  }
  return struct;
}
Example 3: getAllKey
import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the package/class this method depends on
@Override
public String[] getAllKey() throws IOException {
  String[] keys = new String[fieldList.size()];
  int i = 0;
  for (StructField field : fieldList) {
    keys[i] = field.getFieldName();
    i++;
  }
  return keys;
}
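The fieldList member iterated here is defined elsewhere in the enclosing class. A plausible, hypothetical initialization, assuming the keys come from a struct ObjectInspector:

// Hypothetical setup for the fieldList member used by getAllKey():
// populate it once from the struct's ObjectInspector so getAllKey()
// can return the field names in declaration order.
private List<? extends StructField> fieldList;

public void init(StructObjectInspector structOI) {
  this.fieldList = structOI.getAllStructFieldRefs();
}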
Example 4: toTableInfo
import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the package/class this method depends on
/**
 * Converts a Hive table to a Metacat TableInfo.
 *
 * @param name  qualified name of the table
 * @param table connector table
 * @return Metacat table info
 */
@Override
public TableInfo toTableInfo(final QualifiedName name, final Table table) {
  // Use a mutable list as the fallback so the data fields can be added below;
  // Collections.emptyList() would throw UnsupportedOperationException on add().
  final List<FieldSchema> nonPartitionColumns =
      (table.getSd() != null) ? table.getSd().getCols() : new ArrayList<FieldSchema>();
  // Add the data fields to nonPartitionColumns, ignoring all exceptions.
  try {
    if (nonPartitionColumns.isEmpty()) {
      for (StructField field : HiveTableUtil.getTableStructFields(table)) {
        final FieldSchema fieldSchema = new FieldSchema(field.getFieldName(),
            field.getFieldObjectInspector().getTypeName(),
            field.getFieldComment());
        nonPartitionColumns.add(fieldSchema);
      }
    }
  } catch (final Exception e) {
    log.error(e.getMessage(), e);
  }
  final List<FieldSchema> partitionColumns = table.getPartitionKeys();
  final Date creationDate = table.isSetCreateTime() ? epochSecondsToDate(table.getCreateTime()) : null;
  final List<FieldInfo> allFields =
      Lists.newArrayListWithCapacity(nonPartitionColumns.size() + partitionColumns.size());
  nonPartitionColumns.stream()
      .map(field -> hiveToMetacatField(field, false))
      .forEachOrdered(allFields::add);
  partitionColumns.stream()
      .map(field -> hiveToMetacatField(field, true))
      .forEachOrdered(allFields::add);
  final AuditInfo auditInfo = AuditInfo.builder().createdDate(creationDate).build();
  return TableInfo.builder()
      .serde(toStorageInfo(table.getSd(), table.getOwner())).fields(allFields)
      .metadata(table.getParameters()).name(name).auditInfo(auditInfo)
      .build();
}
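The interesting branch for this page is the fallback: when the storage descriptor carries no columns, the schema is recovered from the table's StructFields. A minimal, hypothetical helper performing just that StructField-to-FieldSchema conversion (the helper name, and the idea that structOI would come from the table's deserializer, are assumptions, not Metacat's actual code):

// Hypothetical helper: rebuild Hive FieldSchema entries from a struct
// ObjectInspector, mirroring the fallback loop in toTableInfo above.
private static List<FieldSchema> toFieldSchemas(StructObjectInspector structOI) {
  List<FieldSchema> schemas = new ArrayList<>();
  for (StructField field : structOI.getAllStructFieldRefs()) {
    schemas.add(new FieldSchema(
        field.getFieldName(),                           // column name
        field.getFieldObjectInspector().getTypeName(),  // Hive type string, e.g. "string"
        field.getFieldComment()));                      // may be null
  }
  return schemas;
}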