

Java StructField Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.StructField. If you are wondering what the StructField class is for, how to use it, or where to find concrete examples, the curated snippets below may help.


The StructField class belongs to the org.apache.hadoop.hive.serde2.objectinspector package. Fifteen code examples of the class are shown below, sorted by popularity by default.
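
Before the examples, here is a minimal sketch of the core pattern they all share: a StructField is a handle to a single field of a struct, obtained from a StructObjectInspector, and it exposes the field's name and its own ObjectInspector. The column names and values below are made up purely for illustration; the inspector factories are Hive's standard ones.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructFieldBasics {
  public static void main(String[] args) {
    // Inspector for a hypothetical struct<id:bigint,name:string>.
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("id", "name"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));

    // Standard struct inspectors accept a List (or Object[]) as the row object.
    List<Object> row = Arrays.<Object>asList(42L, "alice");

    // Each StructField carries the field name, its inspector, and a handle to its data.
    for (StructField field : soi.getAllStructFieldRefs()) {
      Object data = soi.getStructFieldData(row, field);
      System.out.println(field.getFieldName() + " ("
          + field.getFieldObjectInspector().getTypeName() + ") = " + data);
    }
  }
}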

Example 1: getFromTypeInfo

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
public static OrcSerde getFromTypeInfo( final Configuration config , final TypeInfo typeInfo )throws IOException{
  ObjectInspector objectInspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo( typeInfo );
  if( !( typeInfo instanceof StructTypeInfo ) ){
    throw new IOException( "Input type info is not StructTypeInfo : " + typeInfo.toString() );
  }
  String columnsName = "";
  String columnsType = "";
  List<TypeInfo> typeInfoList = ( (StructTypeInfo)typeInfo ).getAllStructFieldTypeInfos();
  List<? extends StructField> structField = ( (StructObjectInspector)objectInspector ).getAllStructFieldRefs();
  for( int i = 0 ; i < structField.size() ; i++ ){
    if( ! columnsName.isEmpty() ){
      columnsName = columnsName.concat( "," );
      columnsType = columnsType.concat( "," );
    }
    columnsName = columnsName.concat( structField.get(i).getFieldName() );
    columnsType = columnsType.concat( typeInfoList.get(i).toString() );
  }

  OrcSerde serde = new OrcSerde();
  Properties table = new Properties();
  table.setProperty( serdeConstants.LIST_COLUMNS , columnsName );
  table.setProperty( serdeConstants.LIST_COLUMN_TYPES , columnsType );
  serde.initialize( config , table );

  return serde;
}
 
Author: yahoojapan, Project: dataplatform-schema-lib, Lines: 27, Source: OrcSerdeFactory.java
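
A possible caller of this factory might look like the following sketch; the type string is invented for illustration, and error handling is omitted.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

// Parse a struct type from its Hive type string, then build an ORC serde for it.
TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<num:bigint,str:string>");
OrcSerde serde = OrcSerdeFactory.getFromTypeInfo(new Configuration(), typeInfo);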

Example 2: createMultiStripeFile

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
private static void createMultiStripeFile(File file)
        throws IOException, ReflectiveOperationException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);

    @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < 300; i += 3) {
        if ((i > 0) && (i % 60 == 0)) {
            flushWriter(writer);
        }

        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Author: y-lan, Project: presto, Lines: 23, Source: TestOrcReaderPositions.java

Example 3: getTypeName

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
@Override
public String getTypeName() {
  StringBuilder buffer = new StringBuilder();
  buffer.append("struct<");
  for( int i = 0 ; i < fields.size() ; ++i ){
    StructField field = fields.get(i);
    if (i != 0) {
      buffer.append(",");
    }
    buffer.append( field.getFieldName() );
    buffer.append(":");
    buffer.append(field.getFieldObjectInspector().getTypeName());
  }
  buffer.append(">");
  return buffer.toString();
}
 
Author: yahoojapan, Project: multiple-dimension-spread, Lines: 17, Source: MDSStructObjectInspector.java
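
For instance, a struct whose fields are a:int and b:string yields the type name struct<a:int,b:string>.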

Example 4: equals

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
@Override
public boolean equals( final Object o ) {
  if ( o == null || o.getClass() != getClass() ) {
    return false;
  } else if ( o == this ){
    return true;
  } else {
    List<StructField> other = ( (MDSStructObjectInspector) o ).fields;
    if ( other.size() != fields.size() ){
      return false;
    }
    for( int i = 0; i < fields.size(); ++i ){
      StructField left = other.get(i);
      StructField right = fields.get(i);
      if (!(left.getFieldName().equalsIgnoreCase(right.getFieldName()) &&
            left.getFieldObjectInspector().equals
                (right.getFieldObjectInspector()))) {
        return false;
      }
    }
    return true;
  }
}
 
Author: yahoojapan, Project: multiple-dimension-spread, Lines: 24, Source: MDSStructObjectInspector.java

Example 5: isMonarchTypeSupported

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
public static boolean isMonarchTypeSupported(final ObjectInspector oi) {
  if (ObjectInspector.Category.PRIMITIVE.equals(oi.getCategory())) {
    /* handle primitive type definitions like decimal(20,20) or varchar(100) */
    String typeStr = oi.getTypeName();
    final int argPos = typeStr.indexOf('(');
    if (argPos > 0) {
      typeStr = typeStr.substring(0, argPos);
    }
    return TYPE_HIVE_TO_MONARCH_MAP.containsKey(typeStr);
  } else if (oi instanceof ListObjectInspector) {
    ListObjectInspector loi = (ListObjectInspector)oi;
    return isMonarchTypeSupported(loi.getListElementObjectInspector());
  } else if (oi instanceof MapObjectInspector) {
    MapObjectInspector moi = (MapObjectInspector)oi;
    return isMonarchTypeSupported(moi.getMapKeyObjectInspector()) &&
      isMonarchTypeSupported(moi.getMapValueObjectInspector());
  } else if (oi instanceof StructObjectInspector) {
    return ((StructObjectInspector) oi).getAllStructFieldRefs().stream()
      .map(StructField::getFieldObjectInspector)
      .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
  } else if (oi instanceof UnionObjectInspector) {
    return ((UnionObjectInspector) oi).getObjectInspectors().stream()
      .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
  }
  return false;
}
 
Author: ampool, Project: monarch, Lines: 27, Source: MonarchPredicateHandler.java
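
As a usage sketch (assuming the MonarchPredicateHandler class and its TYPE_HIVE_TO_MONARCH_MAP from the snippet above), the check recurses into nested types, so a list column is supported exactly when its element type is:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// A list<int> column: supported iff the element type "int" is in TYPE_HIVE_TO_MONARCH_MAP.
ObjectInspector listOfInt = ObjectInspectorFactory.getStandardListObjectInspector(
    PrimitiveObjectInspectorFactory.javaIntObjectInspector);
boolean supported = MonarchPredicateHandler.isMonarchTypeSupported(listOfInt);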

Example 6: next

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
@Override
public Object next() {
  try {
    /* convert ORC row to Ampool row */
    this.row = this.reader.next(this.row);
    int i = 0;
    this.desRow = new StoreRecord(this.fields.size());
    for (final StructField field : fields) {
      this.desRow.setValue(i++,
          convert(field.getFieldObjectInspector(), rowOI.getStructFieldData(row, field)));
    }
    return this.desRow;
  } catch (Exception e) {
    logger.error("Failed to retrieve/convert row via ORC reader. row= {}", row, e);
    return null;
  }
}
 
Author: ampool, Project: monarch, Lines: 18, Source: OrcUtils.java

Example 7: readListOfOutputsFromTable

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
/**
* Read list of Bitcoin transaction outputs from a table in Hive in any format (e.g. ORC, Parquet)
*
* @param loi ObjectInspector for processing the Object containing a list
* @param listOfOutputsObject object containing the list of outputs to a Bitcoin Transaction
*
* @return a list of BitcoinTransactionOutputs 
*
*/

private List<BitcoinTransactionOutput> readListOfOutputsFromTable(ListObjectInspector loi, Object listOfOutputsObject) {
    int listLength = loi.getListLength(listOfOutputsObject);
    List<BitcoinTransactionOutput> result = new ArrayList<>(listLength);
    StructObjectInspector listOfOutputsElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentListOfOutputsObject = loi.getListElement(listOfOutputsObject, i);
        StructField valueSF = listOfOutputsElementObjectInspector.getStructFieldRef("value");
        StructField txoutscriptlengthSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscriptlength");
        StructField txoutscriptSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscript");
        if ((valueSF == null) || (txoutscriptlengthSF == null) || (txoutscriptSF == null)) {
            LOG.warn("Invalid BitcoinTransactionOutput detected at position " + i);
            return new ArrayList<>();
        }
        // wloi/wboi are long and binary primitive inspectors initialized elsewhere in the UDF.
        long currentValue = wloi.get(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, valueSF));
        byte[] currentTxOutScriptLength = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptlengthSF));
        byte[] currentTxOutScript = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptSF));
        BitcoinTransactionOutput currentBitcoinTransactionOutput = new BitcoinTransactionOutput(currentValue, currentTxOutScriptLength, currentTxOutScript);
        result.add(currentBitcoinTransactionOutput);
    }
    return result;
}
 
Author: ZuInnoTe, Project: hadoopcryptoledger, Lines: 32, Source: BitcoinTransactionHashUDF.java

Example 8: initReduceSide

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
private ObjectInspector initReduceSide(StructObjectInspector inputStructOI)
        throws HiveException {
    List<? extends StructField> fields = inputStructOI.getAllStructFieldRefs();
    int length = fields.size();
    this.inputStructOI = inputStructOI;
    this.inputOIs = new ObjectInspector[length];
    this.outputOIs = new ObjectInspector[length];

    for (int i = 0; i < length; i++) {
        StructField field = fields.get(i);
        ObjectInspector oi = field.getFieldObjectInspector();
        inputOIs[i] = oi;
        outputOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(oi);
    }

    return ObjectInspectorUtils.getStandardObjectInspector(inputStructOI);
}
 
Author: apache, Project: incubator-hivemall, Lines: 18, Source: MaxRowUDAF.java
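
Design note: ObjectInspectorUtils.getStandardObjectInspector wraps each input inspector in a standard, serde-independent one, so the values the UDAF buffers and returns are in the standard object representation rather than whatever lazy format the input rows used.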

Example 9: merge

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
@SuppressWarnings("unchecked")
void merge(@Nonnull final Object partial, @Nonnull final StructObjectInspector mergeOI,
        @Nonnull final StructField[] fields, @Nonnull final ListObjectInspector[] fieldOIs) {
    Preconditions.checkArgument(fields.length == fieldOIs.length);

    final int numFields = fieldOIs.length;
    if (identifiers == null) {
        this.identifiers = new Identifier[numFields];
    }
    Preconditions.checkArgument(fields.length == identifiers.length);

    for (int i = 0; i < numFields; i++) {
        Identifier<Writable> id = identifiers[i];
        if (id == null) {
            id = new Identifier<>(1);
            identifiers[i] = id;
        }
        final Object fieldData = mergeOI.getStructFieldData(partial, fields[i]);
        final ListObjectInspector fieldOI = fieldOIs[i];
        for (int j = 0, size = fieldOI.getListLength(fieldData); j < size; j++) {
            Object o = fieldOI.getListElement(fieldData, j);
            Preconditions.checkNotNull(o);
            id.valueOf((Writable) o);
        }
    }
}
 
Author: apache, Project: incubator-hivemall, Lines: 27, Source: OnehotEncodingUDAF.java

Example 10: serialize

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
@Override
public Writable serialize(Object obj, ObjectInspector objectInspector) throws SerDeException {
    if (!objectInspector.getCategory().equals(ObjectInspector.Category.STRUCT)) {
        throw new SerDeException("Cannot serialize " + objectInspector.getCategory() + ". Can only serialize a struct");
    }

    StructObjectInspector inspector = (StructObjectInspector) objectInspector;
    List<? extends StructField> fields = inspector.getAllStructFieldRefs();
    Writable[] arr = new Writable[fields.size()];
    for (int i = 0; i < fields.size(); i++) {
        StructField field = fields.get(i);
        Object subObj = inspector.getStructFieldData(obj, field);
        ObjectInspector subInspector = field.getFieldObjectInspector();
        arr[i] = createPrimitive(subObj, (PrimitiveObjectInspector) subInspector);
    }
    serdeSize = arr.length;
    return new ArrayWritable(Writable.class, arr);
}
 
Author: shunfei, Project: indexr, Lines: 19, Source: IndexRSerde.java

Example 11: ArrayWritableObjectInspector

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
public ArrayWritableObjectInspector(final StructTypeInfo rowTypeInfo) {
    typeInfo = rowTypeInfo;
    fieldNames = rowTypeInfo.getAllStructFieldNames();
    fieldInfos = rowTypeInfo.getAllStructFieldTypeInfos();
    fields = new ArrayList<StructField>(fieldNames.size());
    fieldsByName = new HashMap<String, StructFieldImpl>();

    for (int i = 0; i < fieldNames.size(); ++i) {
        final String name = fieldNames.get(i);
        final TypeInfo fieldInfo = fieldInfos.get(i);

        final StructFieldImpl field = new StructFieldImpl(name, getObjectInspector(fieldInfo), i);
        fields.add(field);
        fieldsByName.put(name, field);
    }
}
 
Author: shunfei, Project: indexr, Lines: 18, Source: ArrayWritableObjectInspector.java

Example 12: getStructFieldData

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
@Override
public Object getStructFieldData(final Object data, final StructField fieldRef) {
    if (data == null) {
        return null;
    }

    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        return arr.get()[((StructFieldImpl) fieldRef).getIndex()];
    }

    //since setStructFieldData and create return a list, getStructFieldData should be able to
    //handle list data. This is required when table serde is ParquetHiveSerDe and partition serde
    //is something else.
    if (data instanceof List) {
        return ((List) data).get(((StructFieldImpl) fieldRef).getIndex());
    }

    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
 
Author: shunfei, Project: indexr, Lines: 21, Source: ArrayWritableObjectInspector.java

Example 13: createSequentialFile

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
private static void createSequentialFile(File file, int count)
        throws IOException, ReflectiveOperationException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);

    @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Author: y-lan, Project: presto, Lines: 19, Source: TestOrcReaderPositions.java

Example 14: hasDateType

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
static boolean hasDateType(ObjectInspector objectInspector)
{
    if (objectInspector instanceof PrimitiveObjectInspector) {
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) objectInspector;
        return primitiveInspector.getPrimitiveCategory() == PrimitiveCategory.DATE;
    }
    if (objectInspector instanceof ListObjectInspector) {
        ListObjectInspector listInspector = (ListObjectInspector) objectInspector;
        return hasDateType(listInspector.getListElementObjectInspector());
    }
    if (objectInspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) objectInspector;
        return hasDateType(mapInspector.getMapKeyObjectInspector()) ||
                hasDateType(mapInspector.getMapValueObjectInspector());
    }
    if (objectInspector instanceof StructObjectInspector) {
        for (StructField field : ((StructObjectInspector) objectInspector).getAllStructFieldRefs()) {
            if (hasDateType(field.getFieldObjectInspector())) {
                return true;
            }
        }
        return false;
    }
    throw new IllegalArgumentException("Unknown object inspector type " + objectInspector);
}
 
Author: y-lan, Project: presto, Lines: 26, Source: DwrfRecordCursorProvider.java
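
For example, an inspector for map<string,date> reports true through the map-value branch, while one for struct<a:int,b:string> ends up returning false.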

Example 15: checkIntermediateResultInspector

import org.apache.hadoop.hive.serde2.objectinspector.StructField; // import the required package/class
static void checkIntermediateResultInspector(ObjectInspector resultInspector) {
  Assert.assertNotNull(resultInspector);
  Assert.assertEquals(resultInspector.getCategory(), ObjectInspector.Category.STRUCT);
  StructObjectInspector structResultInspector = (StructObjectInspector) resultInspector;
  List<?> fields = structResultInspector.getAllStructFieldRefs();
  Assert.assertEquals(fields.size(), 3);

  ObjectInspector inspector1 = ((StructField) fields.get(0)).getFieldObjectInspector();
  Assert.assertEquals(inspector1.getCategory(), ObjectInspector.Category.PRIMITIVE);
  PrimitiveObjectInspector primitiveInspector1 = (PrimitiveObjectInspector) inspector1;
  Assert.assertEquals(primitiveInspector1.getPrimitiveCategory(), PrimitiveCategory.INT);

  ObjectInspector inspector2 = ((StructField) fields.get(1)).getFieldObjectInspector();
  Assert.assertEquals(inspector2.getCategory(), ObjectInspector.Category.PRIMITIVE);
  PrimitiveObjectInspector primitiveInspector2 = (PrimitiveObjectInspector) inspector2;
  Assert.assertEquals(primitiveInspector2.getPrimitiveCategory(), PrimitiveCategory.STRING);

  ObjectInspector inspector3 = ((StructField) fields.get(2)).getFieldObjectInspector();
  Assert.assertEquals(inspector3.getCategory(), ObjectInspector.Category.PRIMITIVE);
  PrimitiveObjectInspector primitiveInspector3 = (PrimitiveObjectInspector) inspector3;
  Assert.assertEquals(primitiveInspector3.getPrimitiveCategory(), PrimitiveCategory.BINARY);
}
 
Author: DataSketches, Project: sketches-hive, Lines: 23, Source: DataToSketchUDAFTest.java


Note: The org.apache.hadoop.hive.serde2.objectinspector.StructField examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, who retain copyright of the source code; consult each project's License before redistributing or using the code. Do not reproduce without permission.