

Java PrimitiveCategory Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory. If you have been wondering what PrimitiveCategory is for, how to use it, or where to find working examples, the code samples selected below should help.


PrimitiveCategory is an enum nested in org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector. Fifteen code examples using the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Java examples.
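
Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing where PrimitiveCategory fits: an ObjectInspector whose category is PRIMITIVE can be cast to PrimitiveObjectInspector, and getPrimitiveCategory() then reports which concrete primitive type it describes.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class PrimitiveCategoryDemo {
    public static void main(String[] args) {
        // A standard inspector for Java String values.
        ObjectInspector oi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        if (oi.getCategory() == Category.PRIMITIVE) {
            PrimitiveCategory pc = ((PrimitiveObjectInspector) oi).getPrimitiveCategory();
            System.out.println(pc); // prints STRING
        }
    }
}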

Example 1: MDSMapObjectInspector

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
  TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
  if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
    keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }
  else{
    throw new RuntimeException( "Map key type is string only." );
  }

  valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() ); 

  if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
    getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
  }
  else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
    getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
  }
  else{
    getField = new NestedGetField();
  }
}
 
Developer: yahoojapan, Project: multiple-dimension-spread, Lines: 22, Source: MDSMapObjectInspector.java
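
A hypothetical call sketch for this constructor, assuming the multiple-dimension-spread classes are on the classpath; the MapTypeInfo is built with the standard Hive type-string parser, and any key type other than STRING would trigger the RuntimeException above.

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

MapTypeInfo mapType =
    (MapTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("map<string,double>");
MDSMapObjectInspector inspector = new MDSMapObjectInspector(mapType); // STRING key, so construction succeeds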

Example 2: checkArgGroups

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
public static void checkArgGroups(ObjectInspector[] arguments, int i,
        PrimitiveCategory[] inputTypes, PrimitiveGrouping... grps)
        throws UDFArgumentTypeException {
    PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
    for (PrimitiveGrouping grp : grps) {
        if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType) == grp) {
            inputTypes[i] = inputType;
            return;
        }
    }
    // build error message
    StringBuilder sb = new StringBuilder();
    sb.append("_FUNC_ only takes ");
    sb.append(grps[0]);
    for (int j = 1; j < grps.length; j++) {
        sb.append(", ");
        sb.append(grps[j]);
    }
    sb.append(" types as ");
    sb.append(getArgOrder(i));
    sb.append(" argument, got ");
    sb.append(inputType);
    throw new UDFArgumentTypeException(i, sb.toString());
}
 
Developer: myui, Project: hive-udf-backports, Lines: 25, Source: BackportUtils.java
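
A sketch of how checkArgGroups might be called from a GenericUDF.initialize() that accepts only string-like or null first arguments; the variable names are illustrative, and getArgOrder is assumed to be another helper in the same BackportUtils class.

import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;

PrimitiveCategory[] inputTypes = new PrimitiveCategory[arguments.length];
// Throws UDFArgumentTypeException unless argument 0 is in STRING_GROUP or VOID_GROUP.
checkArgGroups(arguments, 0, inputTypes, PrimitiveGrouping.STRING_GROUP, PrimitiveGrouping.VOID_GROUP);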

Example 3: obtainIntConverter

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
public static void obtainIntConverter(ObjectInspector[] arguments, int i,
        PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
    switch (inputType) {
        case BYTE:
        case SHORT:
        case INT:
        case VOID:
            break;
        default:
            throw new UDFArgumentTypeException(i, "_FUNC_ only takes INT/SHORT/BYTE types as "
                    + getArgOrder(i) + " argument, got " + inputType);
    }

    Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    converters[i] = converter;
    inputTypes[i] = inputType;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 21, Source: BackportUtils.java
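
On the evaluate() side, the stored converter normalizes whatever BYTE/SHORT/INT writable arrived into an IntWritable. The lines below are an illustrative fragment (not part of the backport itself), where arguments is the DeferredObject[] passed to evaluate(); a VOID/null input converts to null.

import org.apache.hadoop.io.IntWritable;

IntWritable arg = (IntWritable) converters[0].convert(arguments[0].get());
int value = (arg == null) ? 0 : arg.get(); // pick your own null handling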

Example 4: obtainLongConverter

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
public static void obtainLongConverter(ObjectInspector[] arguments, int i,
        PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
    switch (inputType) {
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            break;
        default:
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes LONG/INT/SHORT/BYTE types as " + getArgOrder(i)
                        + " argument, got " + inputType);
    }

    Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    converters[i] = converter;
    inputTypes[i] = inputType;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 22, Source: BackportUtils.java

Example 5: obtainDateConverter

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
public static void obtainDateConverter(ObjectInspector[] arguments, int i,
        PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
    ObjectInspector outOi;
    switch (inputType) {
        case STRING:
        case VARCHAR:
        case CHAR:
            outOi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
            break;
        case TIMESTAMP:
        case DATE:
        case VOID:
            outOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
            break;
        default:
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
                        + " argument, got " + inputType);
    }
    converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
    inputTypes[i] = inputType;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 25, Source: BackportUtils.java

Example 6: obtainTimestampConverter

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
public static void obtainTimestampConverter(ObjectInspector[] arguments, int i,
        PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
    ObjectInspector outOi;
    switch (inputType) {
        case STRING:
        case VARCHAR:
        case CHAR:
        case TIMESTAMP:
        case DATE:
            break;
        default:
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
                        + " argument, got " + inputType);
    }
    outOi = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
    inputTypes[i] = inputType;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 22, Source: BackportUtils.java

Example 7: isNumericObjectInspector

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
private static boolean isNumericObjectInspector(ObjectInspector oi, boolean constant) {
  Category c = oi.getCategory();
  if (c != Category.PRIMITIVE) {
    return false;
  }
  PrimitiveCategory pc = ((PrimitiveObjectInspector) oi)
      .getPrimitiveCategory();

  if (pc != PrimitiveCategory.INT  && pc != PrimitiveCategory.LONG) {
    return false;
  }
  
  if (constant && !(oi instanceof ConstantObjectInspector)) {
    return false;
  }

  return true;
}
 
Developer: t3rmin4t0r, Project: hive-faker, Lines: 19, Source: GenerateSeriesUDTF.java
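
A few illustrative calls using the standard factory inspectors (this helper is private to GenerateSeriesUDTF, so in practice these checks run inside its initialize()):

import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

ObjectInspector intOi = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
ObjectInspector strOi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;

isNumericObjectInspector(intOi, false); // true: INT primitive, constant not required
isNumericObjectInspector(strOi, false); // false: STRING is neither INT nor LONG
isNumericObjectInspector(intOi, true);  // false: not a ConstantObjectInspector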

Example 8: getPrimitiveType

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
    final PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) fieldInspector)
        .getPrimitiveCategory();
    if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(primitiveCategory.name())) {
        return HiveTypeMapping.getHIVE_TO_CANONICAL().get(primitiveCategory.name());
    }
    switch (primitiveCategory) {
        case DECIMAL:
            final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
                .getTypeInfo();
            return DecimalType.createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.getScale());
        case CHAR:
            final int cLength = ((CharTypeInfo) ((PrimitiveObjectInspector)
                fieldInspector).getTypeInfo()).getLength();
            return CharType.createCharType(cLength);
        case VARCHAR:
            final int vLength = ((VarcharTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
                .getTypeInfo()).getLength();
            return VarcharType.createVarcharType(vLength);
        default:
            return null;
    }
}
 
Developer: Netflix, Project: metacat, Lines: 24, Source: HiveTypeConverter.java
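
DecimalType, CharType, VarcharType, and HiveTypeMapping are metacat's own canonical-type classes, but the parameter extraction itself is plain Hive API. A sketch of the DECIMAL branch using only Hive classes (the precision 10 and scale 2 are arbitrary example values):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;

PrimitiveObjectInspector decimalOi =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(new DecimalTypeInfo(10, 2));
DecimalTypeInfo info = (DecimalTypeInfo) decimalOi.getTypeInfo();
int precision = info.precision(); // 10
int scale = info.getScale();      // 2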

Example 9: initialize

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException(
            "add_feature_index() has an single arguments: array<double> features");
    }

    switch (arguments[0].getCategory()) {
        case LIST:
            argumentOI = (ListObjectInspector) arguments[0];
            ObjectInspector elmOI = argumentOI.getListElementObjectInspector();
            if (elmOI.getCategory().equals(Category.PRIMITIVE)) {
                if (((PrimitiveObjectInspector) elmOI).getPrimitiveCategory() == PrimitiveCategory.DOUBLE) {
                    break;
                }
            }
        default:
            throw new UDFArgumentTypeException(0, "Type mismatch: features");
    }

    return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
}
 
Developer: apache, Project: incubator-hivemall, Lines: 23, Source: AddFeatureIndexUDFWrapper.java
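
A fragment sketching an argument inspector that satisfies this check: a standard list inspector whose element inspector is the java double one. Any other element category or primitive category falls through to the UDFArgumentTypeException.

ObjectInspector featuresOi = ObjectInspectorFactory.getStandardListObjectInspector(
    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
// initialize(new ObjectInspector[]{ featuresOi }) then returns a standard list-of-string inspector.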

Example 10: initialize

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException(
            "add_bias() has an single arguments: array<string> features");
    }

    switch (arguments[0].getCategory()) {
        case LIST:
            argumentOI = (ListObjectInspector) arguments[0];
            ObjectInspector elmOI = argumentOI.getListElementObjectInspector();
            if (elmOI.getCategory().equals(Category.PRIMITIVE)) {
                if (((PrimitiveObjectInspector) elmOI).getPrimitiveCategory() == PrimitiveCategory.STRING) {
                    break;
                }
            }
        default:
            throw new UDFArgumentTypeException(0, "Type mismatch: features");
    }

    return ObjectInspectorFactory.getStandardListObjectInspector(argumentOI.getListElementObjectInspector());
}
 
Developer: apache, Project: incubator-hivemall, Lines: 23, Source: AddBiasUDFWrapper.java

Example 11: hasDateType

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
static boolean hasDateType(ObjectInspector objectInspector)
{
    if (objectInspector instanceof PrimitiveObjectInspector) {
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) objectInspector;
        return primitiveInspector.getPrimitiveCategory() == PrimitiveCategory.DATE;
    }
    if (objectInspector instanceof ListObjectInspector) {
        ListObjectInspector listInspector = (ListObjectInspector) objectInspector;
        return hasDateType(listInspector.getListElementObjectInspector());
    }
    if (objectInspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) objectInspector;
        return hasDateType(mapInspector.getMapKeyObjectInspector()) ||
                hasDateType(mapInspector.getMapValueObjectInspector());
    }
    if (objectInspector instanceof StructObjectInspector) {
        for (StructField field : ((StructObjectInspector) objectInspector).getAllStructFieldRefs()) {
            if (hasDateType(field.getFieldObjectInspector())) {
                return true;
            }
        }
        return false;
    }
    throw new IllegalArgumentException("Unknown object inspector type " + objectInspector);
}
 
Developer: y-lan, Project: presto, Lines: 26, Source: DwrfRecordCursorProvider.java
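
An illustrative call with a map<string,date> inspector assembled from the standard factories; the recursion finds the DATE primitive on the value side.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

ObjectInspector mapOi = ObjectInspectorFactory.getStandardMapObjectInspector(
    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
    PrimitiveObjectInspectorFactory.javaDateObjectInspector);
boolean result = hasDateType(mapOi); // true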

Example 12: testDwrf

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
@Test
public void testDwrf()
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        ObjectInspector objectInspector = testColumn.getObjectInspector();
        return !hasType(objectInspector, PrimitiveCategory.DATE);
    }));

    HiveOutputFormat<?, ?> outputFormat = new com.facebook.hive.orc.OrcOutputFormat();
    InputFormat<?, ?> inputFormat = new com.facebook.hive.orc.OrcInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new com.facebook.hive.orc.OrcSerde();
    File file = File.createTempFile("presto_test", "dwrf");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        testCursorProvider(new DwrfRecordCursorProvider(), split, inputFormat, serde, testColumns, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 13: testDwrfDataStream

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
@Test
public void testDwrfDataStream()
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        ObjectInspector objectInspector = testColumn.getObjectInspector();
        return !hasType(objectInspector, PrimitiveCategory.DATE);
    }));

    HiveOutputFormat<?, ?> outputFormat = new com.facebook.hive.orc.OrcOutputFormat();
    InputFormat<?, ?> inputFormat = new com.facebook.hive.orc.OrcInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new com.facebook.hive.orc.OrcSerde();
    File file = File.createTempFile("presto_test", "dwrf");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        testPageSourceFactory(new DwrfPageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, testColumns);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 14: init

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
@Override
public ObjectInspector init(final Mode mode, final ObjectInspector[] parameters) throws HiveException {
  super.init(mode, parameters);
  inputObjectInspector = (PrimitiveObjectInspector) parameters[0];

  // Parameters:
  // In PARTIAL1 and COMPLETE mode, the parameters are original data.
  // In PARTIAL2 and FINAL mode, the parameters are partial aggregations.
  if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
    if (parameters.length > 1) {
      kObjectInspector = (PrimitiveObjectInspector) parameters[1];
    }
  }

  return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY);
}
 
Developer: DataSketches, Project: sketches-hive, Lines: 17, Source: ItemsEvaluator.java
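
For reference, the BINARY category resolves to the standard writable binary inspector, so the aggregation result is handed to Hive as a BytesWritable. A quick fragment, independent of the sketches-hive code:

ObjectInspector binaryOi =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY);
// binaryOi should be the same instance as PrimitiveObjectInspectorFactory.writableBinaryObjectInspector,
// since the factory caches one inspector per primitive category.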

Example 15: init

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; // import the required package/class
@Override
public ObjectInspector init(final Mode mode, final ObjectInspector[] inspectors) throws HiveException {
  final ObjectInspector resultInspector = super.init(mode, inspectors);
  if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
    // input is original data
    if (inspectors.length > 2) {
      summaryModeInspector_ = (PrimitiveObjectInspector) inspectors[2];
    }
  }
  if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
    // intermediate results need to include the nominal number of entries and the summary mode
    return ObjectInspectorFactory.getStandardStructObjectInspector(
      Arrays.asList(NOMINAL_NUM_ENTRIES_FIELD, SUMMARY_MODE_FIELD, SKETCH_FIELD),
      Arrays.asList(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.INT),
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.STRING),
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY)
      )
    );
  }
  return resultInspector;
}
 
Developer: DataSketches, Project: sketches-hive, Lines: 23, Source: UnionDoubleSummaryWithModeSketchUDAF.java


Note: The org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory examples in this article were compiled by 纯净天空 from open-source code hosted on GitHub, MSDocs, and similar platforms. The snippets were selected from open-source projects contributed by their respective authors, who retain copyright; consult the corresponding project's License before distributing or reusing the code, and please do not republish without permission.