

Java StructField.getFieldObjectInspector Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.StructField.getFieldObjectInspector. If you have been wondering what exactly StructField.getFieldObjectInspector does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hive.serde2.objectinspector.StructField.


Below are 15 code examples of the StructField.getFieldObjectInspector method, sorted by popularity.
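All fifteen examples share one core pattern: a StructObjectInspector describes a struct (or a whole row), each StructField identifies one member, and getFieldObjectInspector() returns the inspector needed to interpret that member's data. The following minimal sketch illustrates the pattern in isolation; the class and method names are illustrative and not taken from any of the projects below:

import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class StructFieldDump {

    /** Prints every field of a struct, dispatching on each field's ObjectInspector. */
    public static void dump(Object struct, StructObjectInspector structOI) {
        List<? extends StructField> fields = structOI.getAllStructFieldRefs();
        for (StructField field : fields) {
            ObjectInspector fieldOI = field.getFieldObjectInspector();
            Object fieldData = structOI.getStructFieldData(struct, field);
            switch (fieldOI.getCategory()) {
                case PRIMITIVE: {
                    // Primitive fields can be unwrapped into plain Java objects.
                    Object javaValue = ((PrimitiveObjectInspector) fieldOI)
                            .getPrimitiveJavaObject(fieldData);
                    System.out.println(field.getFieldName() + " = " + javaValue);
                    break;
                }
                default:
                    // LIST, MAP, STRUCT and UNION fields need their own
                    // inspectors; the examples below show how to handle them.
                    System.out.println(field.getFieldName() + " : " + fieldOI.getTypeName());
            }
        }
    }
}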

Example 1: initReduceSide

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
private ObjectInspector initReduceSide(StructObjectInspector inputStructOI)
        throws HiveException {
    List<? extends StructField> fields = inputStructOI.getAllStructFieldRefs();
    int length = fields.size();
    this.inputStructOI = inputStructOI;
    this.inputOIs = new ObjectInspector[length];
    this.outputOIs = new ObjectInspector[length];

    for (int i = 0; i < length; i++) {
        StructField field = fields.get(i);
        ObjectInspector oi = field.getFieldObjectInspector();
        inputOIs[i] = oi;
        outputOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(oi);
    }

    return ObjectInspectorUtils.getStandardObjectInspector(inputStructOI);
}
 
Developer: apache, Project: incubator-hivemall, Lines: 18, Source: MaxRowUDAF.java
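Example 1 swaps each input inspector for its "standard" counterpart because the state of a UDAF can outlive the lazily deserialized objects that the input inspectors describe. When a value actually has to be buffered, the usual companion call is ObjectInspectorUtils.copyToStandardObject, sketched below (fieldValue stands in for a value read on the reduce side and is not part of the example above):

// Copy a possibly lazy value into the standard representation matching
// the standard inspector derived above; the copy is then safe to buffer.
Object standardCopy = ObjectInspectorUtils.copyToStandardObject(fieldValue, inputOIs[i]);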

Example 2: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@Override
public Writable serialize(Object obj, ObjectInspector objectInspector) throws SerDeException {
    if (!objectInspector.getCategory().equals(ObjectInspector.Category.STRUCT)) {
        throw new SerDeException("Cannot serialize " + objectInspector.getCategory() + ". Can only serialize a struct");
    }

    StructObjectInspector inspector = (StructObjectInspector) objectInspector;
    List<? extends StructField> fields = inspector.getAllStructFieldRefs();
    Writable[] arr = new Writable[fields.size()];
    for (int i = 0; i < fields.size(); i++) {
        StructField field = fields.get(i);
        Object subObj = inspector.getStructFieldData(obj, field);
        ObjectInspector subInspector = field.getFieldObjectInspector();
        arr[i] = createPrimitive(subObj, (PrimitiveObjectInspector) subInspector);
    }
    serdeSize = arr.length;
    return new ArrayWritable(Writable.class, arr);
}
 
Developer: shunfei, Project: indexr, Lines: 19, Source: IndexRSerde.java
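The createPrimitive helper invoked above is not reproduced on this page. Below is a hypothetical sketch of what such a helper typically looks like, assuming the usual Writable classes from org.apache.hadoop.io and the typed inspectors from org.apache.hadoop.hive.serde2.objectinspector.primitive; the real IndexRSerde implementation may differ:

private static Writable createPrimitive(Object obj, PrimitiveObjectInspector oi)
        throws SerDeException {
    if (obj == null) {
        return null; // null column values stay null in the ArrayWritable
    }
    switch (oi.getPrimitiveCategory()) {
        case INT:
            return new IntWritable(((IntObjectInspector) oi).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) oi).get(obj));
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) oi).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) oi).getPrimitiveJavaObject(obj));
        default:
            throw new SerDeException("Unsupported primitive category: " + oi.getPrimitiveCategory());
    }
}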

Example 3: deparseStruct

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
/**
 * Deparses struct data into a serializable JSON object.
 *
 * @param obj - Hive struct data
 * @param structOI - ObjectInspector for the struct
 * @param isRow - Whether or not this struct represents a top-level row
 * @return - A deparsed struct
 */
private Object deparseStruct(Object obj,
                             StructObjectInspector structOI,
                             boolean isRow) {
    Map<Object,Object> struct = new HashMap<Object,Object>();
    List<? extends StructField> fields = structOI.getAllStructFieldRefs();
    for (int i = 0; i < fields.size(); i++) {
        StructField field = fields.get(i);
        // The top-level row object is treated slightly differently from other
        // structs, because the field names for the row do not correctly reflect
        // the Hive column names. For lower-level structs, we can get the field
        // name from the associated StructField object.
        String fieldName = isRow ? colNames.get(i) : field.getFieldName();
        ObjectInspector fieldOI = field.getFieldObjectInspector();
        Object fieldObj = structOI.getStructFieldData(obj, field);
        struct.put(fieldName, deparseObject(fieldObj, fieldOI));
    }
    return struct;
}
 
Developer: scaleoutsoftware, Project: hServer, Lines: 27, Source: JsonSerDe.java

Example 4: deparseStruct

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
/**
 * Deparses struct data into a serializable JSON object.
 *
 * @param obj      - Hive struct data
 * @param structOI - ObjectInspector for the struct
 * @param isRow    - Whether or not this struct represents a top-level row
 * @return - A deparsed struct
 */
private Object deparseStruct(Object obj,
							 StructObjectInspector structOI,
							 boolean isRow) {
	Map<Object, Object> struct = new HashMap<Object, Object>();
	List<? extends StructField> fields = structOI.getAllStructFieldRefs();
	for (int i = 0; i < fields.size(); i++) {
		StructField field = fields.get(i);
		// The top-level row object is treated slightly differently from other
		// structs, because the field names for the row do not correctly reflect
		// the Hive column names. For lower-level structs, we can get the field
		// name from the associated StructField object.
		String fieldName = isRow ? colNames.get(i) : field.getFieldName();
		ObjectInspector fieldOI = field.getFieldObjectInspector();
		Object fieldObj = structOI.getStructFieldData(obj, field);
		struct.put(fieldName, deparseObject(fieldObj, fieldOI));
	}
	return struct;
}
 
Developer: micmiu, Project: bigdata-tutorial, Lines: 27, Source: JSONCDHSerDe.java

Example 5: deparseStruct

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
/**
 * Deparses struct data into a serializable JSON object.
 *
 * @param obj
 *            - Hive struct data
 * @param structOI
 *            - ObjectInspector for the struct
 * @param isRow
 *            - Whether or not this struct represents a top-level row
 * @return - A deparsed struct
 */
private Object deparseStruct(final Object obj,
		final StructObjectInspector structOI, final boolean isRow) {
	final Map<Object, Object> struct = new HashMap<Object, Object>();
	final List<? extends StructField> fields = structOI
			.getAllStructFieldRefs();
	for (int i = 0; i < fields.size(); i++) {
		final StructField field = fields.get(i);
		// The top-level row object is treated slightly differently from
		// other
		// structs, because the field names for the row do not correctly
		// reflect
		// the Hive column names. For lower-level structs, we can get the
		// field
		// name from the associated StructField object.
		final String fieldName = isRow ? colNames.get(i) : field
				.getFieldName();
		final ObjectInspector fieldOI = field.getFieldObjectInspector();
		final Object fieldObj = structOI.getStructFieldData(obj, field);
		struct.put(fieldName, deparseObject(fieldObj, fieldOI));
	}
	return struct;
}
 
Developer: jaibeermalik, Project: searchanalytics-bigdata, Lines: 34, Source: JSONSerDe.java
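Examples 3 through 5 all delegate to a deparseObject method that is not shown on this page. Below is a hypothetical sketch of such a dispatcher; it recurses on the ObjectInspector category so that nested structs and lists are deparsed consistently (map handling is omitted for brevity, and the actual implementations in the three projects may differ):

private Object deparseObject(Object obj, ObjectInspector oi) {
    switch (oi.getCategory()) {
        case PRIMITIVE:
            // Leaf values become plain Java objects.
            return ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(obj);
        case LIST: {
            ListObjectInspector loi = (ListObjectInspector) oi;
            List<Object> list = new ArrayList<Object>();
            for (int i = 0; i < loi.getListLength(obj); i++) {
                list.add(deparseObject(loi.getListElement(obj, i),
                        loi.getListElementObjectInspector()));
            }
            return list;
        }
        case STRUCT:
            // Nested structs reuse the deparseStruct method above.
            return deparseStruct(obj, (StructObjectInspector) oi, false);
        default:
            throw new IllegalArgumentException("Unhandled category: " + oi.getCategory());
    }
}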

Example 6: getStructFieldData

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
/**
 * @see org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector#getStructFieldData(java.lang.Object,
 *      org.apache.hadoop.hive.serde2.objectinspector.StructField)
 */
@SuppressWarnings("unchecked")
@Override
public Object getStructFieldData(Object data, StructField structField) {
    if ((data instanceof List) && !(data instanceof SerDeArray)) {
        MyField f = (MyField) structField;
        int fieldID = f.getFieldID();
        return ((List<Object>) data).get(fieldID);
    } else {
        ObjectInspector fieldObjectInspector = structField.getFieldObjectInspector();
        Category category = fieldObjectInspector.getCategory();
        Object fieldData = this.xmlProcessor.getObjectValue(data, structField.getFieldName());
        switch (category) {
            case PRIMITIVE: {
                PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) fieldObjectInspector;
                PrimitiveCategory primitiveCategory = primitiveObjectInspector.getPrimitiveCategory();
                return this.xmlProcessor.getPrimitiveObjectValue(fieldData, primitiveCategory);
            }
            default:
                return fieldData;
        }
    }
}
 
Developer: dvasilen, Project: Hive-XML-SerDe, Lines: 27, Source: XmlStructObjectInspector.java

Example 7: testSimpleXmlMap

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@SuppressWarnings("rawtypes")
public void testSimpleXmlMap() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "map<string,string>");
    properties.setProperty("column.xpath.test", "//*[contains(name(),'test')]");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<root><test1>string1</test1><test2>string2</test2></root>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    XmlMapObjectInspector fieldInspector = (XmlMapObjectInspector) structField.getFieldObjectInspector();
    Map map = fieldInspector.getMap(data);
    PrimitiveObjectInspector valueObjectInspector = (PrimitiveObjectInspector) fieldInspector.getMapValueObjectInspector();
    String test = (String) valueObjectInspector.getPrimitiveJavaObject(map.get("test1"));
    assertEquals("string1", test);
}
 
Developer: dvasilen, Project: Hive-XML-SerDe, Lines: 22, Source: ObjectInspectorTest.java

Example 8: testSimpleXmlNotMap

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@SuppressWarnings("rawtypes")
public void testSimpleXmlNotMap() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "map<string,string>");
    properties.setProperty("column.xpath.test", "//*[contains(name(),'test')]/text()");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<root><test1>string1</test1><test2>string2</test2></root>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    XmlMapObjectInspector fieldInspector = (XmlMapObjectInspector) structField.getFieldObjectInspector();
    Map map = fieldInspector.getMap(data);
    assertEquals(0, map.size());
}
 
Developer: dvasilen, Project: Hive-XML-SerDe, Lines: 20, Source: ObjectInspectorTest.java
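Taken together, the two tests above show how the XPath expression determines what the field's ObjectInspector yields: in testSimpleXmlMap the expression selects the <test1> and <test2> elements themselves, so the map inspector produces entries keyed by element name, while in testSimpleXmlNotMap the trailing /text() selects bare text nodes, which cannot form map entries, so the resulting map is empty.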

Example 9: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
/**
 * This method takes an object representing a row of data from Hive, and uses
 * the ObjectInspector to get the data for each column and serialize.
 */
@Override
public DbRecordWritable serialize(Object row, ObjectInspector inspector) throws SerDeException {
    final StructObjectInspector structInspector = (StructObjectInspector) inspector;
    final List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
    if(fields.size() != fieldCount) {
        throw new SerDeException(String.format("Required %d columns, received %d.", fieldCount, fields.size()));
    }

    cachedWritable.clear();

    for(int i = 0; i < fieldCount; i++) {
        StructField structField = fields.get(i);
        if(structField != null) {
            Object field = structInspector.getStructFieldData(row, structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();
            Object javaObject = HiveJdbcBridgeUtils.deparseObject(field, fieldOI);
            cachedWritable.set(i, javaObject);
        }
    }

    return cachedWritable;
}
 
Developer: myui, Project: HiveJdbcStorageHandler, Lines: 27, Source: JdbcSerDe.java

Example 10: extract

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@Override
public HivePath.Value extract(final ObjectInspector oi, final Object obj) {
	final StructField fieldRef = ((StructObjectInspector) oi).getStructFieldRef(field);
	final ObjectInspector retOI = fieldRef.getFieldObjectInspector();
	final Object retObj = ((StructObjectInspector) oi).getStructFieldData(obj, fieldRef);
	return new Value(retOI, retObj);
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 8, Source: HivePath.java

Example 11: readListOfBitcoinScriptWitnessFromTable

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
/**
 * Read a list of Bitcoin ScriptWitness items from a table in Hive in any format (e.g. ORC, Parquet).
 *
 * @param loi ObjectInspector for processing the Object containing a list
 * @param listOfScriptWitnessItemObject object containing the list of scriptwitnessitems of a Bitcoin Transaction
 *
 * @return a list of BitcoinScriptWitnessItem
 */
private List<BitcoinScriptWitnessItem> readListOfBitcoinScriptWitnessFromTable(ListObjectInspector loi, Object listOfScriptWitnessItemObject) {
    int listLength = loi.getListLength(listOfScriptWitnessItemObject);
    List<BitcoinScriptWitnessItem> result = new ArrayList<>(listLength);
    StructObjectInspector listOfScriptwitnessItemElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentlistofscriptwitnessitemObject = loi.getListElement(listOfScriptWitnessItemObject, i);
        StructField stackitemcounterSF = listOfScriptwitnessItemElementObjectInspector.getStructFieldRef("stackitemcounter");
        StructField scriptwitnesslistSF = listOfScriptwitnessItemElementObjectInspector.getStructFieldRef("scriptwitnesslist");
        boolean scriptwitnessitemNull = (stackitemcounterSF == null) || (scriptwitnesslistSF == null);
        if (scriptwitnessitemNull) {
            LOG.warn("Invalid BitcoinScriptWitnessItem detected at position " + i);
            return new ArrayList<>();
        }
        byte[] stackItemCounter = wboi.getPrimitiveJavaObject(listOfScriptwitnessItemElementObjectInspector.getStructFieldData(currentlistofscriptwitnessitemObject, stackitemcounterSF));
        Object listofscriptwitnessObject = soi.getStructFieldData(currentlistofscriptwitnessitemObject, scriptwitnesslistSF);
        ListObjectInspector loiScriptWitness = (ListObjectInspector) scriptwitnesslistSF.getFieldObjectInspector();
        StructObjectInspector listOfScriptwitnessElementObjectInspector = (StructObjectInspector) loiScriptWitness.getListElementObjectInspector();
        int listWitnessLength = loiScriptWitness.getListLength(listofscriptwitnessObject);
        List<BitcoinScriptWitness> currentScriptWitnessList = new ArrayList<>(listWitnessLength);
        for (int j = 0; j < listWitnessLength; j++) {
            // fetch inner-list elements with the inner list's own inspector
            Object currentlistofscriptwitnessObject = loiScriptWitness.getListElement(listofscriptwitnessObject, j);
            StructField witnessscriptlengthSF = listOfScriptwitnessElementObjectInspector.getStructFieldRef("witnessscriptlength");
            StructField witnessscriptSF = listOfScriptwitnessElementObjectInspector.getStructFieldRef("witnessscript");
            boolean scriptwitnessNull = (witnessscriptlengthSF == null) || (witnessscriptSF == null);
            if (scriptwitnessNull) {
                LOG.warn("Invalid BitcoinScriptWitness detected at position " + j + " for BitcoinScriptWitnessItem " + i);
                return new ArrayList<>();
            }
            byte[] scriptWitnessLength = wboi.getPrimitiveJavaObject(listOfScriptwitnessElementObjectInspector.getStructFieldData(currentlistofscriptwitnessObject, witnessscriptlengthSF));
            byte[] scriptWitness = wboi.getPrimitiveJavaObject(listOfScriptwitnessElementObjectInspector.getStructFieldData(currentlistofscriptwitnessObject, witnessscriptSF));
            currentScriptWitnessList.add(new BitcoinScriptWitness(scriptWitnessLength, scriptWitness));
        }
        BitcoinScriptWitnessItem currentBitcoinScriptWitnessItem = new BitcoinScriptWitnessItem(stackItemCounter, currentScriptWitnessList);
        result.add(currentBitcoinScriptWitnessItem);
    }
    return result;
}
 
Developer: ZuInnoTe, Project: hadoopcryptoledger, Lines: 49, Source: BitcoinTransactionHashSegwitUDF.java
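Stripped of the Bitcoin specifics, Example 11 follows a reusable shape: obtain the element StructObjectInspector from the ListObjectInspector, resolve the StructField references, then apply each field's own inspector to the extracted data. Here is a minimal illustrative sketch for a list<struct<name:string>> column (the method and field names are hypothetical, and StringObjectInspector is assumed imported from org.apache.hadoop.hive.serde2.objectinspector.primitive):

private static List<String> readNames(Object listData, ListObjectInspector listOI) {
    StructObjectInspector elemOI = (StructObjectInspector) listOI.getListElementObjectInspector();
    // Resolve the field reference once, outside the loop.
    StructField nameField = elemOI.getStructFieldRef("name");
    StringObjectInspector nameOI = (StringObjectInspector) nameField.getFieldObjectInspector();
    int n = listOI.getListLength(listData);
    List<String> result = new ArrayList<>(n);
    for (int i = 0; i < n; i++) {
        Object elem = listOI.getListElement(listData, i);
        result.add(nameOI.getPrimitiveJavaObject(elemOI.getStructFieldData(elem, nameField)));
    }
    return result;
}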

Example 12: loadValues

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
private static void loadValues(Object2ObjectMap<Object, Object> map, File file,
        PrimitiveObjectInspector keyOI, PrimitiveObjectInspector valueOI) throws IOException,
        SerDeException {
    if (!file.exists()) {
        return;
    }
    if (!file.getName().endsWith(".crc")) {
        if (file.isDirectory()) {
            for (File f : file.listFiles()) {
                loadValues(map, f, keyOI, valueOI);
            }
        } else {
            LazySimpleSerDe serde = HiveUtils.getKeyValueLineSerde(keyOI, valueOI);
            StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
            StructField keyRef = lineOI.getStructFieldRef("key");
            StructField valueRef = lineOI.getStructFieldRef("value");
            PrimitiveObjectInspector keyRefOI = (PrimitiveObjectInspector) keyRef.getFieldObjectInspector();
            PrimitiveObjectInspector valueRefOI = (PrimitiveObjectInspector) valueRef.getFieldObjectInspector();

            BufferedReader reader = null;
            try {
                reader = HadoopUtils.getBufferedReader(file);
                String line;
                while ((line = reader.readLine()) != null) {
                    Text lineText = new Text(line);
                    Object lineObj = serde.deserialize(lineText);
                    List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
                    Object f0 = fields.get(0);
                    Object f1 = fields.get(1);
                    Object k = keyRefOI.getPrimitiveJavaObject(f0);
                    Object v = valueRefOI.getPrimitiveWritableObject(valueRefOI.copyObject(f1));
                    map.put(k, v);
                }
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
    }
}
 
Developer: apache, Project: incubator-hivemall, Lines: 40, Source: DistributedCacheLookupUDF.java
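A detail worth noting in this example: the key is unwrapped with getPrimitiveJavaObject, which yields a fresh Java object, while the value is first duplicated with copyObject and only then wrapped via getPrimitiveWritableObject. The copy matters because LazySimpleSerDe reuses its deserialized row object from line to line, so storing an uncopied value would leave every map entry aliasing the same underlying buffer.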

Example 13: extractField

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@Override
protected Object extractField(Object target) {
    List<String> flNames = fieldNames;

    for (int i = 0; i < flNames.size(); i++) {
        String fl = flNames.get(i);
        if (target instanceof HiveType) {
            HiveType type = (HiveType) target;
            ObjectInspector inspector = type.getObjectInspector();
            if (inspector instanceof StructObjectInspector) {
                StructObjectInspector soi = (StructObjectInspector) inspector;
                StructField field = soi.getStructFieldRef(fl);
                ObjectInspector foi = field.getFieldObjectInspector();
                Assert.isTrue(foi.getCategory() == ObjectInspector.Category.PRIMITIVE,
                        String.format("Field [%s] needs to be a primitive; found [%s]", fl, foi.getTypeName()));

                // expecting a writeable - simply do a toString
                target = soi.getStructFieldData(type.getObject(), field);
            }
            else {
                return FieldExtractor.NOT_FOUND;
            }
        }
        else {
            return FieldExtractor.NOT_FOUND;
        }
    }

    if (target == null || target instanceof NullWritable) {
        return StringUtils.EMPTY;
    }
    return target.toString();
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 34, Source: HiveFieldExtractor.java

Example 14: convert

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@Override
public void convert(Object from, BytesArray to) {

    Assert.isTrue(from instanceof HiveType,
            String.format("Unexpected object type, expecting [%s], given [%s]", HiveType.class, from.getClass()));

    HiveType ht = (HiveType) from;
    ObjectInspector oi = ht.getObjectInspector();

    Assert.isTrue(Category.STRUCT == oi.getCategory(),
            String.format("Unexpected object category, expecting [%s], given [%s]", Category.STRUCT, oi.getTypeName()));

    StructObjectInspector soi = (StructObjectInspector) oi;
    List<? extends StructField> refs = soi.getAllStructFieldRefs();
    Assert.isTrue(refs.size() == 1, "When using JSON input, only one field is expected");

    StructField structField = refs.get(0);
    ObjectInspector foi = structField.getFieldObjectInspector();

    Assert.isTrue(Category.PRIMITIVE == foi.getCategory(),
            String.format("Unexpected object category, expecting [%s], given [%s]", Category.PRIMITIVE, oi.getTypeName()));

    Object writable = ((PrimitiveObjectInspector) foi).getPrimitiveWritableObject(soi.getStructFieldData(ht.getObject(), structField));

    // HiveVarcharWritable - Hive 0.12+
    if (writable != null && HiveConstants.VARCHAR_WRITABLE.equals(writable.getClass().getName())) {
        // TODO: add dedicated optimization
        to.bytes(writable.toString());
        return;
    }

    super.convert(writable, to);
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 34, Source: HiveBytesConverter.java
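The comparison against the class name via HiveConstants.VARCHAR_WRITABLE, rather than an instanceof check, keeps the converter free of a compile-time dependency on HiveVarcharWritable, which exists only in Hive 0.12 and later; the converter can therefore still load and run against older Hive versions.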

Example 15: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import of the package/class this method depends on
@Override
public HiveKuduWritable serialize(Object row, ObjectInspector inspector)
    throws SerDeException {

    final StructObjectInspector structInspector = (StructObjectInspector) inspector;
    final List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
    if (fields.size() != fieldCount) {
        throw new SerDeException(String.format(
                "Required %d columns, received %d.", fieldCount,
                fields.size()));
    }

    cachedWritable.clear();

    for (int i = 0; i < fieldCount; i++) {
        StructField structField = fields.get(i);
        if (structField != null) {
            Object field = structInspector.getStructFieldData(row,
                    structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();

            Object javaObject = HiveKuduBridgeUtils.deparseObject(field,
                    fieldOI);
            LOG.warn("Column value of " + i + " is " + javaObject.toString());
            cachedWritable.set(i, javaObject);
        }
    }
    return cachedWritable;
}
 
Developer: BimalTandel, Project: HiveKudu-Handler, Lines: 30, Source: HiveKuduSerDe.java


Note: the org.apache.hadoop.hive.serde2.objectinspector.StructField.getFieldObjectInspector examples in this article were collected by 纯净天空 from open-source code hosting and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from community-contributed open-source projects, and copyright of the source code remains with the original authors; for distribution and use, please consult the License of the corresponding project. Do not reproduce without permission.